config.name,config.backend.name,config.backend.version,config.backend._target_,config.backend.task,config.backend.library,config.backend.model_type,config.backend.model,config.backend.processor,config.backend.device,config.backend.device_ids,config.backend.seed,config.backend.inter_op_num_threads,config.backend.intra_op_num_threads,config.backend.model_kwargs.trust_remote_code,config.backend.no_weights,config.backend.device_map,config.backend.torch_dtype,config.backend.eval_mode,config.backend.to_bettertransformer,config.backend.low_cpu_mem_usage,config.backend.attn_implementation,config.backend.cache_implementation,config.backend.autocast_enabled,config.backend.autocast_dtype,config.backend.torch_compile,config.backend.torch_compile_target,config.backend.quantization_scheme,config.backend.deepspeed_inference,config.backend.peft_type,config.scenario.name,config.scenario._target_,config.scenario.iterations,config.scenario.duration,config.scenario.warmup_runs,config.scenario.input_shapes.batch_size,config.scenario.input_shapes.num_choices,config.scenario.input_shapes.sequence_length,config.scenario.new_tokens,config.scenario.memory,config.scenario.latency,config.scenario.energy,config.scenario.generate_kwargs.max_new_tokens,config.scenario.generate_kwargs.min_new_tokens,config.launcher.name,config.launcher._target_,config.launcher.device_isolation,config.launcher.device_isolation_action,config.launcher.numactl,config.launcher.start_method,config.environment.cpu,config.environment.cpu_count,config.environment.cpu_ram_mb,config.environment.system,config.environment.machine,config.environment.platform,config.environment.processor,config.environment.python_version,config.environment.gpu,config.environment.gpu_count,config.environment.gpu_vram_mb,config.environment.optimum_benchmark_version,config.environment.optimum_benchmark_commit,config.environment.transformers_version,config.environment.transformers_commit,config.environment.accelerate_version,config.environment.accelerate_commit,config.environment.diffusers_version,config.environment.diffusers_commit,config.environment.optimum_version,config.environment.optimum_commit,config.environment.timm_version,config.environment.timm_commit,config.environment.peft_version,config.environment.peft_commit,report.traceback,report.load.memory.unit,report.load.memory.max_ram,report.load.memory.max_global_vram,report.load.memory.max_process_vram,report.load.memory.max_reserved,report.load.memory.max_allocated,report.load.latency.unit,report.load.latency.count,report.load.latency.total,report.load.latency.mean,report.load.latency.stdev,report.load.latency.p50,report.load.latency.p90,report.load.latency.p95,report.load.latency.p99,report.load.latency.values,report.load.throughput,report.load.energy.unit,report.load.energy.cpu,report.load.energy.ram,report.load.energy.gpu,report.load.energy.total,report.load.efficiency,report.prefill.memory.unit,report.prefill.memory.max_ram,report.prefill.memory.max_global_vram,report.prefill.memory.max_process_vram,report.prefill.memory.max_reserved,report.prefill.memory.max_allocated,report.prefill.latency.unit,report.prefill.latency.count,report.prefill.latency.total,report.prefill.latency.mean,report.prefill.latency.stdev,report.prefill.latency.p50,report.prefill.latency.p90,report.prefill.latency.p95,report.prefill.latency.p99,report.prefill.latency.values,report.prefill.throughput.unit,report.prefill.throughput.value,report.prefill.energy.unit,report.prefill.energy.cpu,report.prefill.energy.ram,report.prefill.energy.gpu,report.prefill.energy.total,report.prefill.efficiency.unit,report.prefill.efficiency.value,report.decode.memory.unit,report.decode.memory.max_ram,report.decode.memory.max_global_vram,report.decode.memory.max_process_vram,report.decode.memory.max_reserved,report.decode.memory.max_allocated,report.decode.latency.unit,report.decode.latency.count,report.decode.latency.total,report.decode.latency.mean,report.decode.latency.stdev,report.decode.latency.p50,report.decode.latency.p90,report.decode.latency.p95,report.decode.latency.p99,report.decode.latency.values,report.decode.throughput.unit,report.decode.throughput.value,report.decode.energy.unit,report.decode.energy.cpu,report.decode.energy.ram,report.decode.energy.gpu,report.decode.energy.total,report.decode.efficiency.unit,report.decode.efficiency.value,report.per_token.memory,report.per_token.latency.unit,report.per_token.latency.count,report.per_token.latency.total,report.per_token.latency.mean,report.per_token.latency.stdev,report.per_token.latency.p50,report.per_token.latency.p90,report.per_token.latency.p95,report.per_token.latency.p99,report.per_token.latency.values,report.per_token.throughput.unit,report.per_token.throughput.value,report.per_token.energy,report.per_token.efficiency
float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1068, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 634, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 230, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1054, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 856, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 596, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 426, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1522, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1613, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpri6l0x8s/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1013, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 839, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 199, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1153, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 918, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 918, in <listcomp> [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 691, in __init__ self.self_attn = QWEN2MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 391, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 294, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in <listcomp> self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 634, in __init__ self.attention = GPT_NEOX_ATTENTION_CLASSES[config._attn_implementation](config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 306, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 102, in __init__ self.query_key_value = nn.Linear(config.hidden_size, 3 * config.hidden_size, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in <listcomp> [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,810.217472,14639.104,0.0,14243.856384,14221.3376,s,1,7.73187548828125,7.73187548828125,0.0,7.73187548828125,7.73187548828125,7.73187548828125,7.73187548828125,[7.73187548828125],,kWh,1.5289258250001583e-05,1.583704214532396e-06,5.663893419999809e-06,2.2536855884533787e-05,,MB,1176.8832,14737.670144,0.0,14329.839616,14290.688,s,10,2.162870071411133,0.2162870071411133,0.004696034653436737,0.2166063461303711,0.21995816955566405,0.22030833129882813,0.22058846069335938,"[0.20358029174804687, 0.21567996215820312, 0.21597410583496093, 0.21516217041015626, 0.21971200561523438, 0.2152815399169922, 0.21988035583496093, 0.21723858642578125, 0.21970256042480468, 0.2206584930419922]",tokens/s,1183.6124757738062,kWh,6.414262803894934e-06,7.073772449779575e-07,4.258982875782576e-06,1.1380622924655468e-05,tokens/kWh,22494375.017503712,MB,1197.019136,14752.350208,0.0,14344.51968,14290.69056,s,10,38.786872070312505,3.8786872070312506,0.006007349949207754,3.878911254882812,3.8850998779296875,3.8858206909179684,3.8863973413085935,"[3.86822705078125, 3.869866455078125, 3.8762998046875, 3.877141845703125, 3.875823974609375, 3.8806806640625, 3.884939697265625, 3.8824814453125, 3.88486962890625, 3.88654150390625]",tokens/s,16.24260906777792,kWh,0.0001135623148202716,1.2526194901041918e-05,7.550227537761758e-05,0.00020159078509893112,tokens/kWh,312514.2846637688,,s,630,38.78313149261476,0.06156052617875356,0.0005346627059583197,0.06147313499450684,0.06187085227966308,0.06209518413543701,0.0648411915588379,"[0.06455510711669922, 0.06257535934448243, 0.06161743927001953, 0.06133129501342773, 0.061139488220214845, 0.061219169616699216, 0.06158134460449219, 0.06133699035644531, 0.061130943298339846, 0.061174144744873045, 0.06128025436401367, 0.06136217498779297, 0.0612044792175293, 0.06106524658203125, 0.06133139038085938, 0.06137859344482422, 0.06139206314086914, 0.06144492721557617, 0.061488929748535155, 0.06133782577514649, 0.06118403244018555, 0.06113894271850586, 0.06117375946044922, 0.06125088119506836, 0.06095276641845703, 0.060787166595458984, 0.06112774276733399, 0.06120284652709961, 0.06129103851318359, 0.06122003173828125, 0.06128067016601563, 0.06118431854248047, 0.061415519714355465, 0.061571071624755856, 0.06145539093017578, 0.061434879302978515, 0.06140860748291015, 0.06142851257324219, 0.061468513488769534, 0.06149478530883789, 0.06123344039916992, 0.06148284912109375, 0.0613359375, 0.06105878448486328, 0.061403423309326174, 0.061265918731689455, 0.06129375839233398, 0.061341663360595704, 0.06132003021240234, 0.06149529647827148, 0.06137855911254883, 0.06113075256347656, 0.06128844833374023, 0.06150656127929687, 0.06157619094848633, 0.061556190490722654, 0.06131974411010742, 0.06125129699707031, 0.06128873443603516, 0.06154441452026367,
0.06125721740722656, 0.061409248352050784, 0.061698497772216795, 0.06434092712402344, 0.062304031372070315, 0.06152624130249024, 0.061203712463378905, 0.06103731155395508, 0.06134912109375, 0.06124364852905274, 0.06118761444091797, 0.061062175750732424, 0.06103238296508789, 0.061246910095214845, 0.0609285774230957, 0.06118915176391602, 0.06125609588623047, 0.06131974411010742, 0.06176358413696289, 0.06177785491943359, 0.061806655883789065, 0.06161324691772461, 0.06151046371459961, 0.061158462524414064, 0.061487041473388675, 0.0612567024230957, 0.06126572799682617, 0.061120704650878904, 0.06131916809082031, 0.06121273422241211, 0.061240863800048825, 0.06153667068481445, 0.06138675308227539, 0.06132534408569336, 0.06155875015258789, 0.061679615020751956, 0.061949600219726564, 0.06196012878417969, 0.061442462921142575, 0.061431774139404295, 0.061354015350341795, 0.061317119598388675, 0.06128844833374023, 0.06114508819580078, 0.06104403305053711, 0.060979774475097656, 0.061040000915527345, 0.06113766479492187, 0.06111983871459961, 0.06120515060424805, 0.061298561096191403, 0.06165843200683594, 0.06163497543334961, 0.06179471969604492, 0.0616357421875, 0.06159600067138672, 0.06145280075073242, 0.06136217498779297, 0.061341697692871094, 0.06113455963134766, 0.06125187301635742, 0.06138880157470703, 0.06128787231445312, 0.06129439926147461, 0.06137420654296875, 0.06135903930664063, 0.06497280120849609, 0.06270124816894532, 0.0614475212097168, 0.06150243377685547, 0.06108918380737305, 0.06117161560058594, 0.06119625473022461, 0.06116835021972656, 0.06128752136230469, 0.061094814300537106, 0.06107046508789062, 0.06114355087280274, 0.061059455871582034, 0.06107340621948242, 0.0612086067199707, 0.06176716613769531, 0.062091136932373045, 0.06186044692993164, 0.062132225036621094, 0.06180835342407227, 0.06179459381103516, 0.06145347213745117, 0.06143881607055664, 0.06113894271850586, 0.06113203048706055, 0.061105983734130856, 0.06141753768920898, 0.061752193450927736, 0.06111641693115234, 0.06166732788085937, 0.061087745666503906, 0.06137247848510742, 0.06146451187133789, 0.06138265609741211, 0.061337406158447266, 0.06145248031616211, 0.06176358413696289, 0.061572128295898435, 0.061512321472167966, 0.06148745727539062, 0.06116556930541992, 0.06127734375, 0.06176387023925781, 0.0615810546875, 0.0616640625, 0.06151504135131836, 0.06169055938720703, 0.061642784118652344, 0.06146047973632812, 0.06162163162231445, 0.06157171249389649, 0.061626399993896484, 0.061961822509765625, 0.061585182189941405, 0.061354591369628904, 0.06123519897460938, 0.06117382431030274, 0.06122284698486328, 0.061427391052246094, 0.06141513442993164, 0.06147126388549805, 0.061599807739257814, 0.061499393463134766, 0.06485810852050782, 0.06280556869506836, 0.06165139389038086, 0.06140447998046875, 0.06123180770874023, 0.061483009338378906, 0.06133481597900391, 0.06139344024658203, 0.06123097610473633, 0.06165126419067383, 0.061233150482177735, 0.06119619369506836, 0.061102176666259764, 0.06113663864135742, 0.061502784729003904, 0.06193657684326172, 0.06183939361572265, 0.061894622802734375, 0.061911041259765626, 0.06155820846557617, 0.06128662490844727, 0.06136441421508789, 0.06156508636474609, 0.061443775177001954, 0.06119456100463867, 0.06146662521362305, 0.061387966156005856, 0.06130771255493164, 0.06130207824707031, 0.06130505752563477, 0.06170057678222656, 0.061906017303466794, 0.061792415618896486, 0.061750049591064456, 0.06167958450317383, 0.0618535041809082, 0.061475006103515625, 0.06122905731201172, 0.061325183868408205, 
0.06130476760864258, 0.06113299179077149, 0.06110620880126953, 0.06113481521606445, 0.06113894271850586, 0.061334945678710937, 0.06106534576416016, 0.06141331100463867, 0.06172079849243164, 0.06158367919921875, 0.06189056015014648, 0.06192079925537109, 0.061860321044921875, 0.06176540756225586, 0.06155699157714844, 0.06146214294433594, 0.061406623840332034, 0.06121696090698242, 0.06122918319702148, 0.06133414459228516, 0.061308895111083984, 0.06128236770629883, 0.06133695983886719, 0.061571678161621096, 0.06479977416992187, 0.0625940170288086, 0.06150147247314453, 0.061335521697998045, 0.06113689422607422, 0.06141929626464844, 0.061219039916992186, 0.061222942352294925, 0.0612856330871582, 0.06125641632080078, 0.06145769500732422, 0.061313758850097655, 0.06142761611938476, 0.06130239868164063, 0.0614354248046875, 0.06183417510986328, 0.06192127990722656, 0.06180454254150391, 0.06169715118408203, 0.06140607833862305, 0.06144316864013672, 0.06119247817993164, 0.06118390274047852, 0.06116985702514648, 0.061034271240234375, 0.061115135192871095, 0.061128097534179686, 0.06116364669799805, 0.061321727752685545, 0.06137200164794922, 0.061380992889404296, 0.06159769439697266, 0.06174720001220703, 0.06175539016723633, 0.06166678237915039, 0.06157561492919922, 0.06173295974731445, 0.061521953582763675, 0.06157104110717773, 0.061321247100830076, 0.061273887634277345, 0.061319103240966795, 0.061093441009521486, 0.06113542556762695, 0.061314849853515624, 0.061303009033203126, 0.06138044738769531, 0.06152582550048828, 0.06143600082397461, 0.06169843292236328, 0.061555774688720706, 0.0614901123046875, 0.06170217514038086, 0.06184483337402344, 0.06157513427734375, 0.061556640625, 0.06180326461791992, 0.061423614501953126, 0.06156224060058594, 0.06172713470458984, 0.06127017593383789, 0.061502655029296874, 0.06160406494140625, 0.06554598236083985, 0.06342863845825196, 0.06211376190185547, 0.06147715377807617, 0.06130265426635742, 0.061231006622314454, 0.06128271865844727, 0.06133964920043945, 0.06121065521240234, 0.06126793670654297, 0.061231136322021484, 0.061065185546875, 0.060947711944580076, 0.06101174545288086, 0.0613078727722168, 0.06163587188720703, 0.06171311950683594, 0.06195513534545898, 0.06190095901489258, 0.06173740768432617, 0.06152431869506836, 0.06144553756713867, 0.06122761535644531, 0.061284320831298825, 0.06124755096435547, 0.061269153594970704, 0.06145267105102539, 0.061560478210449215, 0.061413280487060545, 0.06125600051879883, 0.0614356803894043, 0.061494049072265626, 0.0617155532836914, 0.061578174591064454, 0.06181600189208984, 0.06171065521240234, 0.06176816177368164, 0.06145792007446289, 0.06157980728149414, 0.06180659103393555, 0.061689823150634766, 0.061394878387451175, 0.061454017639160155, 0.06149363327026367, 0.061400062561035154, 0.061321247100830076, 0.061468830108642576, 0.06176403045654297, 0.061731231689453124, 0.06167958450317383, 0.06171852874755859, 0.06180659103393555, 0.06156595230102539, 0.06129971313476563, 0.061287742614746094, 0.061440704345703125, 0.06165673446655273, 0.06137216186523437, 0.061603839874267576, 0.061506145477294924, 0.061530113220214844, 0.06177382278442383, 0.06155632019042969, 0.06560559844970704, 0.06320329666137696, 0.062007328033447266, 0.061638656616210936, 0.06126387023925781, 0.061445568084716795, 0.061432384490966795, 0.06149324798583984, 0.061290496826171874, 0.06150348663330078, 0.06137036895751953, 0.06156412887573242, 0.06140396881103516, 0.06138876724243164, 0.06174518585205078, 0.06177382278442383, 0.062182910919189455, 
0.062304737091064454, 0.061892608642578124, 0.06167552185058594, 0.0615546875, 0.06159564971923828, 0.06129401779174805, 0.06124604797363281, 0.061257694244384764, 0.06134486389160156, 0.061295520782470705, 0.061489151000976565, 0.0614093132019043, 0.06103241729736328, 0.0612720947265625, 0.0615230712890625, 0.061645278930664064, 0.06199318313598633, 0.06179024124145508, 0.061730209350585936, 0.061794784545898436, 0.061800704956054685, 0.061742271423339844, 0.06150537490844726, 0.06150783920288086, 0.06152265548706055, 0.061373950958251954, 0.06139136123657227, 0.06160179138183594, 0.061417217254638674, 0.06169830322265625, 0.06147622299194336, 0.06151628875732422, 0.06152764892578125, 0.06185219192504883, 0.06211174392700195, 0.061818878173828126, 0.061712383270263675, 0.06159097671508789, 0.06159622573852539, 0.061400192260742184, 0.061524063110351565, 0.061426464080810546, 0.061411136627197264, 0.06152211380004883, 0.06136835098266601, 0.06165654373168945, 0.06534742736816407, 0.06290099334716796, 0.06167705535888672, 0.06150502395629883, 0.0613869743347168, 0.06128924942016602, 0.061515777587890626, 0.061464576721191405, 0.06145024108886719, 0.061470718383789064, 0.061394622802734375, 0.06137887954711914, 0.06157721710205078, 0.06123110580444336, 0.0615464973449707, 0.061949600219726564, 0.06207113647460937, 0.06198409652709961, 0.061878944396972654, 0.0617938232421875, 0.06158982467651367, 0.06139644622802734, 0.061207168579101565, 0.06133107376098633, 0.06148912048339844, 0.061353759765625, 0.06118060684204102, 0.061532161712646485, 0.061650974273681644, 0.06144204711914063, 0.0618106575012207, 0.061738014221191406, 0.06177824020385742, 0.06173331069946289, 0.06177199935913086, 0.06186188888549805, 0.062000801086425784, 0.06152771377563476, 0.06146937561035156, 0.06128639984130859, 0.06111638259887695, 0.061408767700195314, 0.06132585525512695, 0.06141299057006836, 0.061480575561523435, 0.06132515335083008, 0.061440929412841794, 0.061443424224853514, 0.061750942230224606, 0.06185881423950195, 0.06175129699707031, 0.06155673599243164, 0.06183935928344726, 0.061868030548095705, 0.06157516860961914, 0.0614824333190918, 0.06139894485473633, 0.061303489685058596, 0.0614870719909668, 0.06132294464111328, 0.06129900741577148, 0.061265918731689455, 0.06146665573120117, 0.0649062728881836, 0.06303334426879882, 0.06179840087890625, 0.06136422348022461, 0.061351295471191405, 0.06111433410644531, 0.061507678985595705, 0.06155728149414062, 0.06154025650024414, 0.06163241577148437, 0.06170646286010742, 0.06116348648071289, 0.061192192077636716, 0.06120819091796875, 0.06150182342529297, 0.06216447830200195, 0.062063102722167966, 0.06198428726196289, 0.06195574569702148, 0.06160262298583984, 0.06147894287109375, 0.06144160079956055, 0.061604095458984376, 0.06153792190551758, 0.061680191040039065, 0.061360095977783205, 0.061306880950927733, 0.06150348663330078, 0.06137449645996094, 0.06153420639038086, 0.061582592010498045, 0.06157583999633789, 0.061827167510986325, 0.06184307098388672, 0.06171990585327149, 0.06151663970947266, 0.06147884750366211, 0.06125593566894531, 0.06162633514404297, 0.061503040313720704, 0.061637054443359374, 0.061638656616210936, 0.06147686386108398, 0.06153823852539062, 0.061554206848144534, 0.06155094528198242, 0.0617760009765625, 0.06164691162109375, 0.061655040740966796, 0.06196428680419922, 0.061851646423339846, 0.06170009613037109, 0.06160105514526367, 0.06165167999267578, 0.061580543518066404, 0.06167350387573242, 0.06150201416015625, 0.06155059051513672, 
0.06152207946777344, 0.061626014709472654, 0.06152816009521484, 0.06168787384033203, 0.061542625427246096, 0.06508134460449219, 0.0629227523803711, 0.06164067077636719, 0.06132499313354492, 0.061208927154541015, 0.06129199981689453, 0.06137500762939453, 0.0613642578125, 0.061423583984375, 0.061295967102050784, 0.061196094512939454, 0.06112956619262695, 0.06130278396606445, 0.061224960327148435, 0.06165507125854492, 0.06210355377197266, 0.06235123062133789, 0.062344993591308594, 0.06209977722167969, 0.061960193634033205, 0.06183695983886719, 0.06165055847167969, 0.06130467224121094, 0.06117875289916992, 0.06148303985595703, 0.06153740692138672, 0.06129135894775391, 0.061327392578125, 0.06129660797119141, 0.06133059310913086, 0.061730751037597655, 0.06169283294677735, 0.06190681457519531, 0.061914558410644534, 0.062111774444580076, 0.06208377456665039, 0.06176927947998047, 0.061488800048828125, 0.06176787185668945, 0.06163475036621094, 0.06146892929077148, 0.06158963012695313, 0.06142348861694336, 0.06163264083862305, 0.06146640014648438, 0.061266143798828124, 0.06144617462158203, 0.061431774139404295, 0.06168166351318359, 0.06182454299926758, 0.06192585754394531, 0.061917217254638675, 0.062098495483398436, 0.06185257720947265, 0.06182876968383789, 0.06162467193603516, 0.06157926559448242, 0.061669055938720706, 0.06150921630859375, 0.06137519836425781, 0.061869953155517576, 0.061580448150634765, 0.061496288299560546]",tokens/s,16.244175644248,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1054, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 856, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 596, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 426, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in <listcomp> self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 955, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 707, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 707, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 504, in __init__ self.self_attn = MISTRAL_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 275, in __init__ super().__init__(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 196, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 162, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in 
range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", 
line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in <listcomp> [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 164, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model
= cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in <listcomp> self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115,
in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1141, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 944, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 677, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 500, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 162, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = 
Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 982, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 555, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 312, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 219, in 
__init__ self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, 
in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1054, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 856, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 596, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 426, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1522, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1613, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpd5m36m0h/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1068, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 634, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 230, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, 
in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 560.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in <listcomp> self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,839.00416,3354.329088,0.0,2959.081472,2957.493248,s,1,7.4474365234375,7.4474365234375,0.0,7.4474365234375,7.4474365234375,7.4474365234375,7.4474365234375,[7.4474365234375],,kWh,9.354730991644071e-06,1.02380170073714e-06,2.9752801579963073e-06,1.3353812850377518e-05,,MB,1148.882944,3423.535104,0.0,3017.801728,2552.885248,s,10,0.609890682220459,0.060989068222045896,0.003264355478831742,0.06088916778564453,0.06263210525512695,0.0660835391998291,0.06884468635559082,"[0.06953497314453125, 0.06046691131591797, 0.061311424255371096, 0.05996384048461914, 0.06186511993408203, 0.06173257446289063, 0.057616863250732425, 0.05804412841796875, 0.05776764678955078, 0.06158720016479492]",tokens/s,4197.473538503134,kWh,2.2473518942627797e-06,2.4783270569686913e-07,1.4945033323538555e-06,3.989687932313504e-06,tokens/kWh,64165419.537350394,MB,1158.61504,3423.535104,0.0,3017.801728,2552.887808,s,10,14.256443725585939,1.4256443725585934,0.0115770949596713,1.4310405883789064,1.4351067993164062,1.4353154846191407,1.4354824328613283,"[1.4318619384765625, 1.394968017578125, 1.4350604248046874, 1.43022119140625, 1.4318599853515626, 1.417967529296875, 1.42188232421875, 1.4320689697265625, 1.435524169921875, 1.4250291748046875]",tokens/s,44.190543737730586,kWh,4.106144405865308e-05,4.528718605831331e-06,2.2645821535445745e-05,6.823598419993017e-05,tokens/kWh,923266.5248208506,,s,630,14.253904968261713,0.022625245981367805,0.0005464244241510381,0.022606528282165528,0.02291669692993164,0.023132241344451902,0.024322863998413095,"[0.023183231353759767, 0.02291663932800293, 0.02278883171081543, 0.022808576583862306, 0.02265497589111328, 0.022640640258789063, 0.02253363227844238, 0.02261427116394043, 0.02275312042236328, 0.022503776550292967, 0.02271808052062988, 0.02297702407836914, 0.02273683166503906, 0.024712480545043946, 0.023758975982666016, 0.022856159210205076, 0.023077280044555663, 0.022898080825805665, 0.022765344619750976, 0.022575647354125976, 0.022521600723266602, 0.022378751754760742, 0.022550527572631835, 0.022593408584594726, 0.02283942413330078, 0.022543487548828126, 0.022626623153686524, 0.022569536209106445, 0.022713375091552735, 0.022590431213378906, 0.02249932861328125, 
0.023838720321655273, 0.02262182426452637, 0.022714752197265625, 0.022550527572631835, 0.022581247329711913, 0.02278201675415039, 0.02258732795715332, 0.022398399353027343, 0.02254729652404785, 0.022716127395629882, 0.022478847503662108, 0.022501375198364256, 0.02249318313598633, 0.02269923210144043, 0.022661376953125, 0.02268623924255371, 0.022560768127441407, 0.022579200744628908, 0.02261008071899414, 0.02271241569519043, 0.022654720306396484, 0.022686847686767576, 0.022614879608154295, 0.022592992782592775, 0.02287468719482422, 0.022529247283935547, 0.022560800552368164, 0.022756095886230468, 0.022668800354003905, 0.022426080703735352, 0.022487071990966796, 0.022548479080200197, 0.023119903564453124, 0.022724576950073242, 0.022409215927124023, 0.02220649528503418, 0.02205251121520996, 0.0221146240234375, 0.022192127227783204, 0.022208511352539064, 0.02207744026184082, 0.022214496612548828, 0.022020544052124023, 0.0223189754486084, 0.022269088745117186, 0.022164159774780274, 0.022280191421508787, 0.022148351669311523, 0.022150079727172853, 0.022097728729248048, 0.02221670341491699, 0.022181888580322266, 0.022177791595458983, 0.022071296691894532, 0.022013952255249023, 0.02191302490234375, 0.022032960891723633, 0.022022144317626953, 0.022079488754272462, 0.022066783905029298, 0.022063488006591796, 0.02219603157043457, 0.022224863052368163, 0.022044927597045898, 0.022024192810058595, 0.02200707244873047, 0.022225631713867187, 0.02215711975097656, 0.022026016235351564, 0.02234432029724121, 0.022028064727783202, 0.022296607971191405, 0.021916959762573244, 0.02265567970275879, 0.02254435157775879, 0.022071327209472656, 0.022071296691894532, 0.02188483238220215, 0.022011999130249024, 0.02260915184020996, 0.02194918441772461, 0.02205708885192871, 0.022245248794555663, 0.022113727569580077, 0.022206943511962892, 0.02203446388244629, 0.021958719253540038, 0.021907360076904296, 0.021928031921386718, 0.021923391342163086, 0.02188470458984375, 0.02195248031616211, 0.021940031051635743, 0.021855104446411134, 0.021863935470581054, 0.022661951065063475, 0.022147071838378905, 0.022763519287109374, 0.02294988822937012, 0.022845439910888672, 0.022405120849609376, 0.022355968475341798, 0.022202560424804688, 0.022124191284179688, 0.021919904708862306, 0.022120447158813478, 0.022197887420654296, 0.022339712142944335, 0.022354175567626953, 0.022471935272216796, 0.02253081512451172, 0.02295132827758789, 0.02255523109436035, 0.022994943618774414, 0.022799999237060546, 0.022623903274536134, 0.023065471649169923, 0.022785888671875, 0.022597280502319336, 0.02387798309326172, 0.0240863037109375, 0.02277596855163574, 0.0229039363861084, 0.022702592849731446, 0.022706623077392577, 0.022665216445922853, 0.022751232147216797, 0.023289503097534178, 0.022743392944335937, 0.022934560775756837, 0.02271536064147949, 0.022854719161987305, 0.022920127868652343, 0.02285478401184082, 0.023030656814575196, 0.02276927947998047, 0.022881887435913087, 0.022795040130615233, 0.022810047149658202, 0.022859807968139648, 0.023675424575805664, 0.02272217559814453, 0.022903167724609375, 0.022607200622558592, 0.022772319793701173, 0.022681663513183594, 0.022754848480224608, 0.023497184753417968, 0.022666976928710936, 0.02283535957336426, 0.02256230354309082, 0.022827648162841798, 0.022626304626464845, 0.022640640258789063, 0.022769664764404295, 0.023225664138793945, 0.023077568054199218, 0.023259136199951173, 0.023173280715942383, 0.022544384002685547, 0.02251158332824707, 0.022652959823608397, 0.022775808334350587, 0.02290892791748047, 
0.02257449531555176, 0.02256752014160156, 0.02261561584472656, 0.022364288330078124, 0.02256108856201172, 0.022660736083984376, 0.022800575256347655, 0.022456512451171876, 0.022605823516845702, 0.02245631980895996, 0.022345024108886717, 0.022712095260620117, 0.022649759292602538, 0.022435039520263673, 0.022591327667236327, 0.022688703536987306, 0.022552448272705077, 0.022610048294067382, 0.022529119491577147, 0.02229916763305664, 0.02257475280761719, 0.023018207550048828, 0.022540288925170897, 0.022452224731445314, 0.02272220802307129, 0.02288243293762207, 0.0227740478515625, 0.022446016311645507, 0.02257935905456543, 0.022486656188964844, 0.022621728897094726, 0.022645023345947264, 0.022602144241333007, 0.022900415420532227, 0.023608896255493166, 0.02275766372680664, 0.02283692741394043, 0.022950687408447266, 0.022642688751220705, 0.02249068832397461, 0.022573503494262695, 0.022697984695434572, 0.022753183364868163, 0.022863967895507813, 0.02411427116394043, 0.022774688720703123, 0.022840927124023438, 0.02272502326965332, 0.0228351993560791, 0.022693151473999022, 0.022702592849731446, 0.022713823318481444, 0.022968063354492186, 0.022815744400024415, 0.022587392807006838, 0.02269523239135742, 0.022446815490722655, 0.023310752868652345, 0.02254630470275879, 0.02267558479309082, 0.02251366424560547, 0.022603776931762694, 0.022486112594604493, 0.02218281555175781, 0.022349472045898437, 0.02285398483276367, 0.022614015579223632, 0.02247270393371582, 0.022338592529296875, 0.02245910453796387, 0.02238489532470703, 0.03100876808166504, 0.022345727920532226, 0.022437280654907226, 0.023004831314086913, 0.022756288528442383, 0.02308233642578125, 0.022631135940551758, 0.02263033676147461, 0.0225581111907959, 0.022931488037109374, 0.022914751052856445, 0.022610815048217773, 0.022796287536621093, 0.022519615173339842, 0.022605119705200197, 0.022644832611083986, 0.022493984222412108, 0.022519168853759767, 0.022454912185668946, 0.022534143447875975, 0.02243756866455078, 0.022460704803466798, 0.02245020866394043, 0.022250944137573243, 0.022656927108764647, 0.02285593605041504, 0.022728736877441407, 0.02263897514343262, 0.022519359588623045, 0.022444480895996093, 0.02263654327392578, 0.02262118339538574, 0.02248099136352539, 0.02244700813293457, 0.02270191955566406, 0.022631647109985352, 0.022609888076782228, 0.022504383087158204, 0.02238876724243164, 0.022629919052124022, 0.02243836784362793, 0.02244105529785156, 0.022649023056030275, 0.022704864501953127, 0.02275225639343262, 0.02248806381225586, 0.022542335510253905, 0.022619808197021484, 0.022589120864868164, 0.025994720458984374, 0.02296268844604492, 0.022968320846557616, 0.02265907287597656, 0.022701248168945313, 0.022462879180908203, 0.02231884765625, 0.022506143569946287, 0.02284726333618164, 0.022794336318969727, 0.022550655364990235, 0.022609920501708985, 0.022521087646484375, 0.022597984313964845, 0.02260121536254883, 0.022498207092285158, 0.02268342399597168, 0.02270185661315918, 0.022759359359741212, 0.022497791290283203, 0.02265907287597656, 0.02262015914916992, 0.02271177673339844, 0.022557216644287108, 0.022558719635009765, 0.022845632553100587, 0.022673215866088867, 0.02263859176635742, 0.022543807983398438, 0.022186559677124025, 0.021960704803466798, 0.022147071838378905, 0.02186979293823242, 0.02207823944091797, 0.021831680297851562, 0.02206915283203125, 0.02237654495239258, 0.022383615493774413, 0.022701055526733398, 0.022589439392089843, 0.024408063888549804, 0.023171072006225587, 0.022514879226684572, 0.0221909122467041, 
0.023010368347167968, 0.02240630340576172, 0.0226296329498291, 0.022368799209594725, 0.02232524871826172, 0.022621952056884765, 0.02206924819946289, 0.021881088256835938, 0.022013952255249023, 0.021975135803222655, 0.022089887619018554, 0.022036224365234374, 0.022023551940917967, 0.0218590087890625, 0.02200476837158203, 0.02197212791442871, 0.022021888732910156, 0.02192915153503418, 0.02197587203979492, 0.022379135131835936, 0.022173919677734376, 0.022226463317871092, 0.021964479446411132, 0.022118431091308594, 0.02220230484008789, 0.022063135147094726, 0.022002464294433595, 0.02198886489868164, 0.021981695175170898, 0.02195155143737793, 0.021860639572143556, 0.022002336502075195, 0.02205881690979004, 0.022085248947143556, 0.02190540885925293, 0.022053056716918946, 0.022276479721069335, 0.022228992462158204, 0.02286367988586426, 0.02215648078918457, 0.02227507209777832, 0.022317119598388672, 0.02263852882385254, 0.02293337631225586, 0.0223287353515625, 0.02227027130126953, 0.022438304901123047, 0.023226560592651366, 0.022771520614624022, 0.02260326385498047, 0.02274287986755371, 0.022722623825073243, 0.02278665542602539, 0.022824960708618162, 0.022734848022460938, 0.02272051239013672, 0.022755327224731444, 0.022731008529663085, 0.0229039363861084, 0.022833791732788086, 0.022837087631225585, 0.02263852882385254, 0.02268921661376953, 0.022838048934936524, 0.022664480209350586, 0.02273967933654785, 0.02291312026977539, 0.022699935913085938, 0.02267523193359375, 0.02268601608276367, 0.022827072143554686, 0.022847232818603517, 0.022917215347290038, 0.022722560882568358, 0.022648735046386717, 0.02264409637451172, 0.022553312301635743, 0.022644224166870116, 0.022808704376220703, 0.022563199996948242, 0.02332467269897461, 0.025610080718994142, 0.023443424224853515, 0.022732927322387696, 0.023035839080810548, 0.022710208892822267, 0.022505247116088867, 0.022814752578735352, 0.023142335891723632, 0.023289279937744142, 0.02302239990234375, 0.02292531204223633, 0.02270412826538086, 0.02264473533630371, 0.02268511962890625, 0.022563295364379884, 0.02272060775756836, 0.02319900894165039, 0.022688512802124024, 0.02257302474975586, 0.022744672775268555, 0.02250726318359375, 0.0226265926361084, 0.02274064064025879, 0.022556800842285157, 0.022534751892089845, 0.023566335678100587, 0.022595104217529298, 0.022617887496948243, 0.022663839340209962, 0.022497055053710937, 0.022749183654785156, 0.022453760147094725, 0.022385408401489258, 0.022571008682250978, 0.022766719818115233, 0.02268582344055176, 0.022639360427856445, 0.022816768646240236, 0.022585119247436523, 0.022714591979980468, 0.022579200744628908, 0.022603776931762694, 0.022605823516845702, 0.02292736053466797, 0.02288025665283203, 0.022551872253417968, 0.02256675148010254, 0.02298944091796875, 0.02272483253479004, 0.02275062370300293, 0.022651519775390625, 0.0227491512298584, 0.022577152252197266, 0.022429695129394533, 0.02254172706604004, 0.02267366409301758, 0.02273535919189453, 0.022665056228637695, 0.0225133113861084, 0.022769216537475587, 0.022749919891357422, 0.02280006408691406, 0.02266748809814453, 0.02268079948425293, 0.02297500801086426, 0.02262022399902344, 0.022608160018920898, 0.022691232681274414, 0.022590047836303712, 0.022584415435791014, 0.02245903968811035, 0.022585599899291993, 0.02260905647277832, 0.022614879608154295, 0.022439647674560546, 0.022442272186279297, 0.022374399185180666, 0.022431392669677735, 0.02278598403930664, 0.02276393508911133, 0.02267686462402344, 0.02257574462890625, 0.02258451271057129, 0.022764352798461913, 
0.02270137596130371, 0.02261020851135254, 0.022700096130371095, 0.02249558448791504, 0.02410905647277832, 0.022759424209594727, 0.02275702476501465, 0.02283113670349121, 0.02253446388244629, 0.02285753631591797, 0.022589792251586915, 0.022695648193359376, 0.022603904724121094, 0.022747135162353514, 0.02246451187133789, 0.022598943710327148, 0.02260585594177246, 0.022542976379394532, 0.022816831588745118, 0.022783296585083008, 0.02279078483581543, 0.0227205753326416, 0.022786048889160155, 0.022697984695434572, 0.02265907287597656, 0.02264473533630371, 0.022535999298095702, 0.022697439193725587, 0.02258208084106445, 0.023175071716308594, 0.026408960342407226, 0.02552217674255371, 0.02266726493835449, 0.02263033676147461, 0.02259564781188965, 0.022649856567382814, 0.022508544921875, 0.022542335510253905, 0.022421247482299806, 0.022503679275512695, 0.022815807342529297, 0.02278825569152832, 0.02292815971374512, 0.023346080780029296, 0.02264838409423828, 0.022837568283081054, 0.0225664005279541, 0.02256752014160156, 0.022618112564086915, 0.022576288223266603, 0.02247123146057129, 0.022712064743041993, 0.022574623107910155, 0.02245475196838379, 0.02256876754760742, 0.02247248077392578, 0.022536319732666017, 0.023003360748291016, 0.022938207626342775, 0.02254217529296875, 0.022457952499389647, 0.022590015411376955, 0.02239897537231445, 0.022582815170288085, 0.022581119537353516, 0.022594144821166992, 0.02254377555847168, 0.022599647521972657, 0.0223603515625, 0.022475103378295898, 0.022512704849243163, 0.022604736328125, 0.022504928588867188, 0.02260544013977051, 0.022481727600097656, 0.022407039642333985, 0.02289072036743164, 0.02266316795349121, 0.022679040908813477, 0.022471168518066405, 0.022425504684448243, 0.02259891128540039, 0.02252047920227051, 0.022554208755493164, 0.022524511337280274, 0.02270412826538086, 0.02275868797302246, 0.022628671646118165, 0.02253606414794922, 0.022464128494262697, 0.022829055786132812, 0.02246486473083496, 0.02262201690673828, 0.02249772834777832, 0.022668928146362306, 0.02251046371459961, 0.022877439498901368, 0.022794815063476564, 0.022838272094726563, 0.022537023544311523, 0.02256480026245117, 0.022585599899291993, 0.022648256301879884, 0.022693632125854492, 0.022816991806030272, 0.022650720596313477]",tokens/s,44.19841449783633,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 582, in __init__ self.transformer = CodeGenModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 400, in __init__ self.h = nn.ModuleList([CodeGenBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 400, in self.h = nn.ModuleList([CodeGenBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 258, in __init__ self.mlp = CodeGenMLP(inner_dim, config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 237, in __init__ self.fc_out = nn.Linear(intermediate_size, embed_dim) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 891, in __init__ self.transformer = GPTJModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 645, in __init__ self.h = nn.ModuleList([GPTJBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 645, in self.h = nn.ModuleList([GPTJBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 453, in __init__ self.mlp = GPTJMLP(inner_dim, config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 433, in __init__ self.fc_in = nn.Linear(embed_dim, intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.734208,11724.06272,0.0,11328.815104,11314.254848,s,1,7.628697265625,7.628697265625,0.0,7.628697265625,7.628697265625,7.628697265625,7.628697265625,[7.628697265625],,kWh,1.2234974120830581e-05,1.3241086368013376e-06,4.526114731999431e-06,1.808519748963135e-05,,MB,1110.388736,12177.047552,0.0,11771.314176,11713.906688,s,10,4.138964324951171,0.4138964324951172,0.012396790772144301,0.41192939758300784,0.41933658142089847,0.4330530258178711,0.44402618133544924,"[0.3938546142578125, 0.4104230041503906, 0.41178872680664064, 0.412070068359375, 0.41628848266601565, 0.44676947021484376, 0.4114878845214844, 0.41365005493164064, 0.4091081237792969, 0.4135238952636719]",tokens/s,618.5122168285905,kWh,1.1924313853666414e-05,1.315038087562178e-06,7.905161879679933e-06,2.1144513820908526e-05,tokens/kWh,12107159.434749316,MB,1115.181056,12281.905152,0.0,11876.171776,11829.476864,s,10,33.361275634765626,3.3361275634765626,0.0045100151832228605,3.3370075683593754,3.341362548828125,3.341722900390625,3.342011181640625,"[3.32912841796875, 3.328656982421875, 3.333444091796875, 3.3338837890625, 3.336612060546875, 3.341282470703125, 3.342083251953125, 3.34015478515625, 3.337403076171875, 3.338626708984375]",tokens/s,18.884169984899497,kWh,9.75254553630005e-05,1.075733135902759e-05,6.48060074003208e-05,0.0001730887941223489,tokens/kWh,363975.0355847303,,s,630,33.357723354339605,0.052948767229110474,0.0002874159301043155,0.05293332862854004,0.05319623527526856,0.0533000020980835,0.05435796005249024,"[0.05449523162841797, 0.05295654296875, 0.052574207305908206, 0.052775550842285156, 0.05268889617919922, 0.05261721420288086, 0.052590591430664066, 0.052760574340820314, 0.05286092758178711, 0.05254553604125976, 0.05263872146606445, 0.05270220947265625, 0.052514816284179686, 0.053272575378417966, 0.05257830429077148, 0.0526110725402832, 0.05279129409790039, 0.05256796646118164, 0.05268489456176758, 0.05286300659179687, 0.052727935791015625, 0.05266006469726563, 0.052674560546875, 0.05264179229736328, 0.05286707305908203, 0.05263894271850586, 0.05263030242919922, 0.05264384078979492, 0.05286431884765625, 
0.05262188720703125, 0.05265216064453125, 0.05264147186279297, 0.05304761505126953, 0.05284659194946289, 0.05277692794799805, 0.05289782333374023, 0.053129505157470704, 0.053010143280029294, 0.05290595245361328, 0.052932640075683594, 0.05292031860351563, 0.052961280822753906, 0.052879009246826175, 0.0529466552734375, 0.05275302505493164, 0.05283955383300781, 0.05302687835693359, 0.05290655899047852, 0.052933120727539064, 0.05282758331298828, 0.05286739349365235, 0.052843807220458984, 0.05286563110351562, 0.05288569641113281, 0.05292787170410156, 0.053242431640625, 0.0530145263671875, 0.052994014739990235, 0.05305347061157226, 0.052934783935546875, 0.052776161193847655, 0.052791969299316406, 0.05273964691162109, 0.05425999832153321, 0.053138656616210936, 0.05272809600830078, 0.05259724807739258, 0.052596736907958984, 0.05264384078979492, 0.052506622314453126, 0.052512767791748044, 0.05249039840698242, 0.05251055908203125, 0.052279296875, 0.052364768981933596, 0.05240067291259766, 0.05249331283569336, 0.052519935607910156, 0.052959232330322265, 0.05269708633422852, 0.052795520782470705, 0.052938816070556644, 0.05279520034790039, 0.05285059356689453, 0.052672607421875, 0.05256425476074219, 0.05253817749023437, 0.05256489562988281, 0.05248758316040039, 0.052642208099365234, 0.052571552276611325, 0.052666782379150394, 0.05269952011108398, 0.05268431854248047, 0.05276924896240234, 0.05285887908935547, 0.05284796905517578, 0.05291689682006836, 0.05286444854736328, 0.05307980728149414, 0.053176513671875, 0.05314179229736328, 0.05315545654296875, 0.05297430419921875, 0.053016574859619144, 0.052796798706054686, 0.05311065673828125, 0.05283712005615234, 0.052897342681884764, 0.05290233612060547, 0.05296332931518555, 0.052908031463623044, 0.05278310394287109, 0.052877311706542966, 0.05290598297119141, 0.05301273727416992, 0.053051136016845704, 0.05294102478027344, 0.05294163131713867, 0.05317116928100586, 0.05315129470825195, 0.053047969818115236, 0.05302463912963867, 0.053126785278320314, 0.05285823822021484, 0.05303968048095703, 0.0543590087890625, 0.05301164627075195, 0.052644577026367184, 0.05308015823364258, 0.0527011833190918, 0.052623329162597654, 0.052770206451416016, 0.052587135314941406, 0.053018112182617184, 0.05252761459350586, 0.052744094848632815, 0.052873119354248044, 0.05249862289428711, 0.052628833770751955, 0.05253936004638672, 0.052525760650634766, 0.052580352783203124, 0.05267388916015625, 0.052896415710449216, 0.05284067153930664, 0.05284636688232422, 0.05272780990600586, 0.05268889617919922, 0.052726879119873046, 0.05274252700805664, 0.0527611198425293, 0.052729854583740236, 0.05286905670166016, 0.05272377777099609, 0.05291136169433594, 0.052681056976318356, 0.05271318435668945, 0.05289849472045898, 0.05283430480957031, 0.05288889694213867, 0.05300009536743164, 0.05336483383178711, 0.05304595184326172, 0.05299100875854492, 0.053062175750732424, 0.05320684814453125, 0.05295577621459961, 0.053022048950195313, 0.05306435012817383, 0.053063617706298825, 0.05315795135498047, 0.052985855102539066, 0.052850719451904296, 0.053002208709716794, 0.05281923294067383, 0.05288832092285156, 0.05302054214477539, 0.053004383087158206, 0.05290313720703125, 0.05328108978271484, 0.05324233627319336, 0.05313702392578125, 0.05308659362792969, 0.05312470245361328, 0.053176734924316404, 0.052942848205566405, 0.05286092758178711, 0.05295513534545898, 0.05454188919067383, 0.053394176483154296, 0.05294873428344726, 0.052836353302001954, 0.05268204879760742, 0.05265478515625, 0.052647232055664066, 
0.05248684692382812, 0.052467041015625, 0.052533920288085935, 0.05246156692504883, 0.05264998245239258, 0.052601119995117185, 0.052663230895996095, 0.05264652633666992, 0.052805728912353515, 0.052674625396728514, 0.052724769592285156, 0.05294588851928711, 0.052881118774414065, 0.0528175048828125, 0.05274425506591797, 0.05264857482910156, 0.052678913116455076, 0.052829856872558596, 0.0527913932800293, 0.05304524612426758, 0.05282185745239258, 0.05275459289550781, 0.052676929473876956, 0.05279097747802734, 0.05279769515991211, 0.053217025756835935, 0.05315379333496094, 0.052918270111083986, 0.052928703308105465, 0.05301331329345703, 0.05308528137207031, 0.05304883193969727, 0.05315615844726562, 0.05302076721191406, 0.05304524612426758, 0.053034366607666014, 0.052807872772216796, 0.053158336639404294, 0.052969406127929684, 0.05276883316040039, 0.05291417694091797, 0.05308415985107422, 0.052934337615966796, 0.05323955154418945, 0.052935230255126954, 0.05302614212036133, 0.05301942443847656, 0.05293388748168945, 0.05304899215698242, 0.05309539031982422, 0.05323980712890625, 0.053096256256103515, 0.053026912689208984, 0.05302076721191406, 0.052951038360595705, 0.05299507141113281, 0.054322879791259764, 0.053108993530273436, 0.0527988166809082, 0.05276755142211914, 0.052744190216064454, 0.052787200927734375, 0.052746238708496096, 0.05267011260986328, 0.05267027282714844, 0.052797985076904294, 0.05277286529541016, 0.05267660903930664, 0.05262556838989258, 0.05291196823120117, 0.053101566314697264, 0.0528397102355957, 0.05277193450927734, 0.05304998397827149, 0.052967422485351565, 0.05288748931884766, 0.052827903747558594, 0.05266873550415039, 0.05274214553833008, 0.05271254348754883, 0.052771007537841794, 0.052775646209716795, 0.052760513305664065, 0.05277907180786133, 0.052756191253662106, 0.052717376708984375, 0.052932735443115234, 0.05274857711791992, 0.05292448043823242, 0.05289984130859375, 0.053100193023681644, 0.05303443145751953, 0.05325417709350586, 0.05317695999145508, 0.05317145538330078, 0.05303807830810547, 0.053071998596191404, 0.05301615905761719, 0.05297612762451172, 0.05296310424804687, 0.052950336456298826, 0.052935264587402345, 0.05285472106933594, 0.05301283264160156, 0.05297107315063477, 0.053096736907958984, 0.05312851333618164, 0.053432544708251956, 0.053127616882324216, 0.053065185546875, 0.05307756805419922, 0.05319164657592773, 0.053147647857666014, 0.053272575378417966, 0.05306067276000977, 0.05295756912231445, 0.05308883285522461, 0.05302272033691406, 0.053008384704589843, 0.05435539245605469, 0.05310726547241211, 0.052893695831298826, 0.05260003280639648, 0.05272182464599609, 0.052859519958496096, 0.05261280059814453, 0.052625728607177735, 0.05271347045898438, 0.052836353302001954, 0.052636768341064455, 0.0527184944152832, 0.05282428741455078, 0.052754207611083986, 0.05265167999267578, 0.05271078491210938, 0.05269631958007812, 0.052975582122802733, 0.05314534378051758, 0.05326816177368164, 0.053037376403808595, 0.05290003204345703, 0.052989761352539064, 0.052978721618652344, 0.05293769454956055, 0.05279743957519531, 0.05280883026123047, 0.052851585388183596, 0.05282815933227539, 0.05282979202270508, 0.05305795288085938, 0.05369036865234375, 0.05301862335205078, 0.05299168014526367, 0.053184833526611325, 0.05325174331665039, 0.053217632293701175, 0.05322751998901367, 0.05312694549560547, 0.053225406646728514, 0.053163936614990234, 0.053221279144287106, 0.05307644653320313, 0.05311862564086914, 0.05307625579833984, 0.05315798568725586, 0.053006591796875, 
0.053118686676025394, 0.05322099304199219, 0.05302924728393555, 0.05313945770263672, 0.053149696350097655, 0.05334220886230469, 0.0531328010559082, 0.05302924728393555, 0.05315724945068359, 0.05311884689331055, 0.05330614471435547, 0.053188545227050785, 0.05329321670532226, 0.05298128128051758, 0.053026302337646485, 0.05317731094360351, 0.054775169372558594, 0.05361151885986328, 0.053065727233886716, 0.053114879608154295, 0.05275804901123047, 0.05285116958618164, 0.05283430480957031, 0.05305344009399414, 0.05305344009399414, 0.05288345718383789, 0.0526541748046875, 0.052845951080322265, 0.052634143829345705, 0.052735774993896485, 0.05278332901000977, 0.05283225631713867, 0.05274009704589844, 0.05286502456665039, 0.053272575378417966, 0.053026817321777345, 0.05311078262329102, 0.05280495834350586, 0.052814495086669924, 0.05266470336914063, 0.05283187103271485, 0.052870529174804684, 0.052907745361328126, 0.05280649566650391, 0.05270044708251953, 0.05274294281005859, 0.05286656188964844, 0.0529224967956543, 0.05287155151367187, 0.05289984130859375, 0.05300223922729492, 0.053187679290771485, 0.05316009521484375, 0.0532487678527832, 0.05308415985107422, 0.05328051376342773, 0.05356364822387695, 0.05313945770263672, 0.053122814178466794, 0.053139713287353514, 0.053020286560058597, 0.05316960144042969, 0.05304825592041015, 0.053133312225341796, 0.05303500747680664, 0.053050559997558595, 0.053001022338867186, 0.053184513092041016, 0.05311888122558594, 0.05335868835449219, 0.0530145263671875, 0.05319007873535156, 0.05333871841430664, 0.05332783889770508, 0.05326233673095703, 0.05321113586425781, 0.05290972900390625, 0.05302921676635742, 0.05315910339355469, 0.054681598663330076, 0.05352470397949219, 0.052924190521240234, 0.053067424774169925, 0.05276704025268555, 0.05282819366455078, 0.05284659194946289, 0.05280767822265625, 0.05286659240722656, 0.05286284637451172, 0.05291251373291016, 0.05287913513183594, 0.05276102447509766, 0.05289318466186523, 0.05283670425415039, 0.05271567916870117, 0.05286297607421875, 0.05287740707397461, 0.0531959342956543, 0.05315068817138672, 0.05305545425415039, 0.052853790283203125, 0.052879199981689454, 0.05281273651123047, 0.05286902236938477, 0.05294873428344726, 0.05327289581298828, 0.05294083023071289, 0.05274995040893555, 0.052785537719726563, 0.05280972671508789, 0.05282928085327149, 0.05285766220092773, 0.05300028610229492, 0.05297151947021484, 0.05310259246826172, 0.05317631912231445, 0.05320073699951172, 0.05334956741333008, 0.05310927963256836, 0.052971969604492186, 0.05305487823486328, 0.052988800048828125, 0.05301353454589844, 0.05306671905517578, 0.05293353652954102, 0.05291455841064453, 0.052894142150878905, 0.05310787200927734, 0.05296774291992187, 0.0530560302734375, 0.053022270202636716, 0.05307436752319336, 0.05304844665527344, 0.05304601669311523, 0.05312217712402344, 0.053063934326171874, 0.0532938232421875, 0.0531346549987793, 0.05330505752563477, 0.05294947052001953, 0.05294742584228516, 0.052987934112548825, 0.05445257568359375, 0.05312307357788086, 0.05288473510742187, 0.05292047882080078, 0.052684993743896485, 0.05282656097412109, 0.05285254287719727, 0.052949153900146484, 0.0526376953125, 0.0526827507019043, 0.052685920715332034, 0.05293967819213867, 0.05280924987792969, 0.053345855712890626, 0.05276671981811523, 0.0529552001953125, 0.05270937728881836, 0.05282902526855469, 0.05302864074707031, 0.05301065444946289, 0.05296236801147461, 0.052706241607666016, 0.052698368072509764, 0.05262124633789062, 0.052783935546875, 0.052802623748779295, 
0.05273491287231445, 0.05269094467163086, 0.05266636657714844, 0.05268479919433594, 0.05273369598388672, 0.052754688262939456, 0.05284044647216797, 0.052879070281982424, 0.05294291305541992, 0.053075489044189454, 0.053193119049072264, 0.0531827507019043, 0.05305567932128906, 0.05306553649902344, 0.05302272033691406, 0.05306924819946289, 0.05293952178955078, 0.05286614227294922, 0.05294768142700195, 0.05297356796264648, 0.0529409294128418, 0.05305740737915039, 0.05298591995239258, 0.0529788818359375, 0.052947711944580075, 0.05308121490478516, 0.053109630584716794, 0.05308755111694336, 0.05322208023071289, 0.05361891174316406, 0.05335631942749024, 0.05327193450927734, 0.05332380676269531, 0.05313187026977539, 0.053038654327392576, 0.05292281723022461, 0.05297151947021484, 0.05467548751831055, 0.05331600189208984, 0.05300611114501953, 0.05288959884643555, 0.05271084976196289, 0.052722240447998045, 0.052741310119628904, 0.05268563079833984, 0.05269913482666016, 0.05282182312011719, 0.052670654296875, 0.05275353622436523, 0.05262015914916992, 0.052779006958007815, 0.05275033569335937, 0.052746238708496096, 0.05291212844848633, 0.05336012649536133, 0.053036544799804686, 0.05310073471069336, 0.05303788757324219, 0.05288959884643555, 0.05280767822265625, 0.052668479919433596, 0.05279913711547852, 0.05272751998901367, 0.052806209564208985, 0.05264998245239258, 0.052757503509521485, 0.05282918548583984, 0.05278105545043945, 0.05269094467163086, 0.05297711944580078, 0.052953216552734376, 0.052942657470703126, 0.053198944091796874, 0.053139041900634766, 0.05313833618164063, 0.05313558578491211, 0.0531022720336914, 0.053356639862060545, 0.05296294403076172, 0.0528858871459961, 0.053008384704589843, 0.053131263732910154, 0.053043201446533204, 0.05292851257324219, 0.05303910446166992, 0.05326985549926758, 0.05310531234741211, 0.053116928100585936, 0.053071937561035155, 0.05308204650878906, 0.053231616973876954, 0.05312220764160156, 0.05317446517944336, 0.053119647979736326, 0.05332729721069336, 0.053101119995117185, 0.0531827507019043, 0.0529917106628418, 0.05295513534545898, 0.053043201446533204]",tokens/s,18.886180969482787,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = 
scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1153, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 918, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 918, in [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 696, in __init__ self.mlp = Qwen2MoeSparseMoeBlock(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 634, in __init__ [Qwen2MoeMLP(config, intermediate_size=config.moe_intermediate_size) for _ in range(self.num_experts)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 634, in [Qwen2MoeMLP(config, intermediate_size=config.moe_intermediate_size) for _ in range(self.num_experts)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 240, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 12.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", 
line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.96768,718.209024,0.0,322.961408,314.743808,s,1,7.3543603515625,7.3543603515625,0.0,7.3543603515625,7.3543603515625,7.3543603515625,7.3543603515625,[7.3543603515625],,kWh,4.528132787478019e-06,4.921199367924644e-07,1.0097230300071258e-06,6.02997575427761e-06,,MB,1109.393408,810.483712,0.0,404.750336,391.119872,s,28,0.23511241626739496,0.00839687200954982,5.0066419832193083e-05,0.008390496253967285,0.008446630477905273,0.008482257795333861,0.008542315378189086,"[0.008561599731445312, 0.008347359657287598, 0.008490176200866699, 0.008378623962402344, 0.00836246395111084, 0.008417216300964356, 0.008345791816711426, 0.008420000076293946, 0.008392512321472168, 0.008333632469177246, 0.00831372833251953, 0.008388480186462403, 0.008340831756591796, 0.00840828800201416, 0.008381471633911133, 0.008366623878479003, 0.008437088012695312, 0.008401503562927246, 0.008437664031982421, 0.008411968231201172, 0.008467552185058593, 0.008403200149536133, 0.008402400016784668, 0.008382783889770509, 0.008385055541992188, 0.008348480224609375, 0.008399552345275878, 0.008386367797851563]",tokens/s,30487.543421984927,kWh,2.677828497402986e-07,2.9515348800167903e-08,1.7613773129606e-07,4.734359298365265e-07,tokens/kWh,540727865.9403706,MB,1119.39584,823.066624,0.0,417.333248,391.122432,s,28,10.094631469726561,0.3605225524902344,0.02247068146145327,0.3560248718261719,0.3585250762939453,0.35991512908935547,0.4454740164184571,"[0.4770150146484375, 0.35675845336914064, 0.35692742919921877, 0.35595477294921873, 0.3581532897949219, 0.35647695922851563, 0.35771258544921875, 0.359392578125, 0.3544256591796875, 0.35476324462890624, 0.3531097717285156, 0.35399282836914064, 0.354876708984375, 0.3577764892578125, 0.3552135009765625, 0.35681475830078124, 0.3555978088378906, 0.3551192626953125, 0.3559044494628906, 0.35580642700195314, 0.3554816589355469, 0.356094970703125, 0.35668875122070315, 0.3554750671386719, 0.3562126159667969, 0.3550398864746094, 0.36019650268554687, 
0.3576500244140625]",tokens/s,174.74634961069876,kWh,1.0572432539457617e-05,1.1659751416953781e-06,4.684432000777751e-06,1.6422839681930743e-05,tokens/kWh,3836120.9888272765,,s,1764,10.082759708881394,0.005715850175102823,0.0028396695228811914,0.0056310720443725586,0.005697811079025269,0.0057735600233078,0.006137980718612669,"[0.005451935768127442, 0.005694911956787109, 0.005716127872467041, 0.005833183765411377, 0.005695487976074219, 0.005738399982452393, 0.0056648640632629395, 0.005642240047454834, 0.005666783809661865, 0.005847072124481201, 0.005715583801269531, 0.005724544048309326, 0.005670911788940429, 0.005752831935882568, 0.00588972806930542, 0.0057432641983032225, 0.005648032188415527, 0.12485836791992187, 0.005994336128234863, 0.005746848106384277, 0.005677055835723877, 0.005640096187591552, 0.005617760181427002, 0.005640416145324707, 0.005646111965179443, 0.00564134407043457, 0.005684095859527588, 0.005607423782348633, 0.0056258559226989744, 0.005586080074310303, 0.005639008045196533, 0.005646336078643799, 0.005631999969482422, 0.0056975998878479005, 0.005652416229248047, 0.005857279777526855, 0.005732160091400146, 0.0056724481582641605, 0.005646080017089844, 0.0056063361167907715, 0.005668863773345947, 0.005616991996765137, 0.005664735794067383, 0.005669472217559815, 0.005745759963989258, 0.005702239990234375, 0.005610208034515381, 0.005725120067596436, 0.005620192050933838, 0.00561900806427002, 0.005652607917785645, 0.005610655784606933, 0.00562553596496582, 0.005668320178985596, 0.005626399993896484, 0.005594336032867432, 0.0056096000671386715, 0.005651103973388672, 0.005603040218353271, 0.005607711791992188, 0.005594751834869385, 0.005601151943206787, 0.005672959804534912, 0.0053487358093261715, 0.00566048002243042, 0.005829823970794678, 0.00569920015335083, 0.005656479835510254, 0.00567084789276123, 0.00575107192993164, 0.005640192031860352, 0.005719808101654053, 0.005639776229858399, 0.005657023906707763, 0.005617887973785401, 0.005703680038452149, 0.005678336143493653, 0.005645055770874024, 0.005648672103881836, 0.005684959888458252, 0.0056295361518859865, 0.005683360099792481, 0.005615647792816162, 0.005832255840301514, 0.0056162881851196285, 0.0056258559226989744, 0.0056293120384216305, 0.005704319953918457, 0.0056258559226989744, 0.005584479808807373, 0.005620128154754639, 0.005591040134429932, 0.005576704025268555, 0.005662399768829346, 0.005558591842651367, 0.0056442880630493165, 0.005581151962280273, 0.006147456169128418, 0.0056118078231811525, 0.005596896171569824, 0.005625440120697022, 0.005673664093017578, 0.005819519996643066, 0.005682047843933106, 0.005592671871185303, 0.005717663764953613, 0.005618239879608154, 0.005618207931518555, 0.005633696079254151, 0.005638112068176269, 0.005595168113708496, 0.00559884786605835, 0.005642240047454834, 0.005654911994934082, 0.005627200126647949, 0.005609951972961426, 0.005610879898071289, 0.005659488201141358, 0.005621664047241211, 0.005635551929473877, 0.005616256237030029, 0.00561900806427002, 0.005775743961334228, 0.00571014404296875, 0.0056852478981018065, 0.005670911788940429, 0.005462143898010254, 0.005672351837158203, 0.0056938881874084475, 0.005642943859100342, 0.005635807991027832, 0.005670911788940429, 0.005609471797943116, 0.005649983882904053, 0.005628352165222168, 0.005670239925384521, 0.005642015933990479, 0.005604512214660644, 0.005652192115783691, 0.005644224166870117, 0.005637663841247559, 0.005656991958618164, 0.0056070399284362795, 0.005659135818481445, 0.005647359848022461, 0.005661695957183838, 
0.005672639846801758, 0.005637728214263916, 0.005648575782775879, 0.00561411190032959, 0.0057712640762329105, 0.0056442880630493165, 0.005677055835723877, 0.005649856090545654, 0.005614143848419189, 0.005646207809448242, 0.005642303943634033, 0.005602719783782959, 0.005648672103881836, 0.005681280136108398, 0.0056302080154418946, 0.005625247955322266, 0.0056431999206542965, 0.005610559940338135, 0.005598048210144043, 0.005646016120910645, 0.005592864036560058, 0.005897503852844238, 0.005659647941589355, 0.00566921615600586, 0.005752480030059815, 0.005650207996368408, 0.006117599964141846, 0.00565772819519043, 0.005741439819335938, 0.005654079914093018, 0.005665215969085693, 0.0056483840942382815, 0.005677055835723877, 0.005657599925994873, 0.005628960132598877, 0.005646304130554199, 0.005646336078643799, 0.005611072063446045, 0.005717472076416015, 0.005589983940124512, 0.005621503829956054, 0.005628159999847412, 0.005701632022857666, 0.005383647918701172, 0.005673503875732422, 0.005656576156616211, 0.005604351997375488, 0.005631328105926514, 0.005637440204620361, 0.005618239879608154, 0.006198400020599365, 0.005634751796722412, 0.005666656017303467, 0.0056475200653076174, 0.005649375915527344, 0.005646336078643799, 0.0056852478981018065, 0.0056483840942382815, 0.00566476821899414, 0.0056234879493713376, 0.00561359977722168, 0.005646431922912597, 0.0057112002372741695, 0.005601696014404297, 0.005632448196411133, 0.00561356782913208, 0.005638144016265869, 0.005622943878173828, 0.005605663776397705, 0.005655104160308838, 0.005598495960235596, 0.005700319766998291, 0.005609471797943116, 0.005598464012145996, 0.005632768154144287, 0.005642111778259277, 0.0056096000671386715, 0.0056135039329528805, 0.005603392124176026, 0.005705728054046631, 0.005608863830566406, 0.0056735677719116214, 0.005622911930084229, 0.0056265277862548825, 0.005627327919006347, 0.005628064155578614, 0.005742720127105713, 0.005609983921051025, 0.005632319927215576, 0.00563375997543335, 0.005650400161743164, 0.00563318395614624, 0.005595168113708496, 0.005622015953063965, 0.005632639884948731, 0.005698592185974121, 0.00565340805053711, 0.00562992000579834, 0.005658656120300293, 0.005629280090332031, 0.005730976104736328, 0.005600992202758789, 0.005593376159667969, 0.005635104179382324, 0.005600224018096924, 0.00563750410079956, 0.0053547840118408204, 0.005625919818878174, 0.005614336013793945, 0.005592127799987793, 0.005851583957672119, 0.0055996479988098145, 0.005638144016265869, 0.005631999969482422, 0.005609663963317871, 0.005643871784210205, 0.005616991996765137, 0.005630847930908203, 0.005634047985076904, 0.0056566400527954105, 0.005642399787902832, 0.005639071941375732, 0.005694047927856445, 0.005653120040893554, 0.005654176235198974, 0.005654528141021729, 0.0064386558532714844, 0.0060900158882141115, 0.005679232120513916, 0.0057721281051635745, 0.005627456188201904, 0.005675456047058105, 0.005650207996368408, 0.0056566400527954105, 0.005642399787902832, 0.0056236801147460935, 0.005717887878417969, 0.005633696079254151, 0.005647071838378906, 0.005633344173431396, 0.005812128067016601, 0.005694111824035644, 0.005626976013183594, 0.005659552097320557, 0.005713888168334961, 0.005675039768218994, 0.005768223762512207, 0.0057704000473022465, 0.00564192008972168, 0.005763199806213379, 0.005664063930511475, 0.005603744029998779, 0.005640480041503906, 0.00559660816192627, 0.005674848079681397, 0.005652224063873291, 0.005618336200714111, 0.005638463973999024, 0.0056258559226989744, 0.005609471797943116, 0.005650432109832764, 
0.005611519813537597, 0.0056863040924072265, 0.005667808055877685, 0.005612576007843017, 0.00563647985458374, 0.00562992000579834, 0.0056408319473266605, 0.0060208959579467775, 0.005398528099060058, 0.005647903919219971, 0.005673439979553223, 0.005654528141021729, 0.0056436161994934085, 0.005665440082550049, 0.005638144016265869, 0.005645408153533936, 0.005747615814208984, 0.005658624172210694, 0.005678976058959961, 0.005658207893371582, 0.005701759815216065, 0.005628159999847412, 0.0056505918502807615, 0.00561356782913208, 0.005611839771270752, 0.005738175868988037, 0.00559830379486084, 0.00565340805053711, 0.005636159896850586, 0.005669919967651367, 0.005665760040283203, 0.005622911930084229, 0.0056650562286376955, 0.005895711898803711, 0.00567193603515625, 0.005675007820129394, 0.005646560192108154, 0.005658400058746338, 0.005636096000671387, 0.005645823955535889, 0.005622208118438721, 0.005627552032470703, 0.005656991958618164, 0.005623807907104492, 0.005623807907104492, 0.0056258559226989744, 0.005620736122131348, 0.0056824002265930176, 0.005569568157196045, 0.005622879981994629, 0.005609119892120361, 0.005601280212402344, 0.005591040134429932, 0.005674975872039795, 0.00563375997543335, 0.00601529598236084, 0.005791744232177734, 0.005816319942474365, 0.005626976013183594, 0.005622655868530274, 0.0056945281028747555, 0.005658944129943847, 0.005583712100982666, 0.00562275218963623, 0.005596000194549561, 0.005590943813323974, 0.0056481599807739254, 0.005566976070404053, 0.005661664009094239, 0.005583072185516357, 0.005581727981567383, 0.005351871967315674, 0.0055927357673645015, 0.005861663818359375, 0.005599232196807862, 0.005593088150024414, 0.0055808000564575196, 0.005574656009674072, 0.005601280212402344, 0.0055668802261352535, 0.005585984230041504, 0.005609536170959472, 0.005597536087036133, 0.005701759815216065, 0.005819456100463867, 0.006220736026763916, 0.006209536075592041, 0.006112383842468262, 0.005630847930908203, 0.005658976078033447, 0.005942431926727295, 0.006500703811645508, 0.005635263919830322, 0.00566806411743164, 0.00565993595123291, 0.005824704170227051, 0.0056293439865112305, 0.005583744049072265, 0.005662432193756103, 0.005600736141204834, 0.005636991977691651, 0.00562716817855835, 0.005610144138336182, 0.00562716817855835, 0.005579487800598144, 0.005614943981170654, 0.005626656055450439, 0.005584767818450928, 0.00566476821899414, 0.005595136165618897, 0.005623807907104492, 0.005607423782348633, 0.005611519813537597, 0.0056274237632751465, 0.005595615863800049, 0.005617504119873047, 0.005621920108795166, 0.00561356782913208, 0.005627903938293457, 0.005601471900939942, 0.005625664234161377, 0.005611519813537597, 0.005642240047454834, 0.005631999969482422, 0.005606527805328369, 0.005675456047058105, 0.005648575782775879, 0.005651936054229736, 0.0056368961334228515, 0.00561356782913208, 0.005621632099151611, 0.005621664047241211, 0.005624032020568848, 0.005595136165618897, 0.0054579200744628905, 0.0056179518699646, 0.005590752124786377, 0.005627903938293457, 0.005619775772094726, 0.005631936073303223, 0.005582848072052002, 0.005594783782958984, 0.0055937919616699215, 0.0055968317985534664, 0.005603328227996827, 0.005595136165618897, 0.005574016094207763, 0.005615615844726562, 0.005590784072875977, 0.005890944004058838, 0.006743231773376465, 0.0072341117858886715, 0.007020607948303222, 0.005933248043060303, 0.005666816234588623, 0.005607264041900635, 0.005623968124389649, 0.005601280212402344, 0.005635488033294678, 0.005896800041198731, 0.005623839855194092, 
0.00566048002243042, 0.005616896152496338, 0.005601439952850342, 0.00566326379776001, 0.005600575923919678, 0.0056143999099731445, 0.005635935783386231, 0.005613791942596436, 0.005615359783172608, 0.005619743824005127, 0.0056154241561889645, 0.005615327835083008, 0.005630688190460205, 0.005600895881652832, 0.005638688087463379, 0.005617504119873047, 0.005627903938293457, 0.005611519813537597, 0.005607168197631836, 0.005603583812713623, 0.00566048002243042, 0.005601471900939942, 0.005636096000671387, 0.005638144016265869, 0.005668863773345947, 0.005622111797332764, 0.005592639923095703, 0.005636191844940185, 0.005596288204193115, 0.005617695808410645, 0.0057923197746276855, 0.005624095916748047, 0.005634047985076904, 0.005629024028778076, 0.005600160121917725, 0.005645311832427978, 0.0053821439743042, 0.005654528141021729, 0.005611199855804444, 0.005638463973999024, 0.0056863360404968265, 0.005675968170166016, 0.00562716817855835, 0.005627967834472656, 0.005765024185180664, 0.0056241598129272465, 0.005613984107971192, 0.0056217598915100095, 0.005694464206695556, 0.00564089584350586, 0.005593728065490722, 0.005627711772918701, 0.00559500789642334, 0.00557260799407959, 0.005636096000671387, 0.005580031871795654, 0.005630080223083496, 0.005605120182037354, 0.005630847930908203, 0.005646336078643799, 0.005609471797943116, 0.005633215904235839, 0.005614208221435547, 0.0056302080154418946, 0.00560697603225708, 0.0056910080909729, 0.005626207828521729, 0.005607840061187744, 0.0056356477737426755, 0.005591487884521485, 0.005600768089294434, 0.0056161279678344726, 0.005627520084381103, 0.005617536067962646, 0.005609983921051025, 0.005580671787261963, 0.005673056125640869, 0.005580192089080811, 0.005642784118652344, 0.005609151840209961, 0.005581215858459472, 0.005631999969482422, 0.005601280212402344, 0.0056200637817382815, 0.005588384151458741, 0.005654784202575683, 0.005650432109832764, 0.005602943897247315, 0.005619808197021485, 0.005597472190856934, 0.005605184078216553, 0.005611711978912353, 0.005601280212402344, 0.005654528141021729, 0.005615039825439453, 0.005571135997772217, 0.005619711875915527, 0.005582047939300537, 0.005603231906890869, 0.005328896045684814, 0.005587007999420166, 0.0057134079933166505, 0.005808576107025147, 0.005640543937683105, 0.005611167907714844, 0.005603328227996827, 0.005582240104675293, 0.005572224140167236, 0.005599967956542969, 0.005561600208282471, 0.005635072231292724, 0.005791168212890625, 0.005562943935394287, 0.005598239898681641, 0.005573599815368652, 0.005601280212402344, 0.005603295803070068, 0.005590496063232422, 0.00571449613571167, 0.005748479843139648, 0.005908448219299317, 0.005828479766845703, 0.005654943943023682, 0.005672959804534912, 0.005621344089508057, 0.005613984107971192, 0.005588607788085937, 0.005615200042724609, 0.005628704071044922, 0.005603328227996827, 0.005594783782958984, 0.005584415912628174, 0.005616543769836425, 0.0056109437942504885, 0.005659103870391846, 0.005634047985076904, 0.005586944103240967, 0.005629951953887939, 0.005659711837768555, 0.005616576194763184, 0.005606431961059571, 0.005612512111663818, 0.005652480125427246, 0.005611519813537597, 0.005621632099151611, 0.005603456020355225, 0.005615615844726562, 0.0056217598915100095, 0.005594560146331787, 0.0056059517860412595, 0.005599135875701904, 0.005616928100585937, 0.005638688087463379, 0.0055668802261352535, 0.005640160083770752, 0.005582752227783203, 0.0056217598915100095, 0.005620960235595703, 0.005571360111236572, 0.0055808000564575196, 0.005596320152282715, 
0.005585855960845947, 0.005375936031341553, 0.005572800159454346, 0.005634751796722412, 0.005596255779266358, 0.005602208137512207, 0.00556441593170166, 0.005563936233520508, 0.005577184200286865, 0.0055623679161071774, 0.005613247871398926, 0.005595680236816406, 0.005551904201507568, 0.005631872177124023, 0.005574783802032471, 0.005588992118835449, 0.0055582718849182125, 0.005554175853729248, 0.005576704025268555, 0.005625152111053467, 0.005554463863372803, 0.005603744029998779, 0.005564703941345215, 0.005600992202758789, 0.005583136081695556, 0.005590464115142822, 0.0055790400505065915, 0.005560287952423096, 0.005601471900939942, 0.005729983806610108, 0.005621856212615967, 0.005621823787689209, 0.005582848072052002, 0.005611519813537597, 0.005597184181213379, 0.005582431793212891, 0.005625823974609375, 0.0056936640739440915, 0.005648608207702637, 0.005609471797943116, 0.005612544059753418, 0.005598207950592041, 0.005588992118835449, 0.005619711875915527, 0.005594399929046631, 0.0055938239097595216, 0.005576704025268555, 0.0055474557876586915, 0.005591616153717041, 0.0055848960876464845, 0.005658048152923584, 0.005597536087036133, 0.005578976154327392, 0.005595136165618897, 0.005615520000457764, 0.005625951766967774, 0.0055848960876464845, 0.005586016178131104, 0.005907360076904297, 0.00556441593170166, 0.005599391937255859, 0.005594560146331787, 0.005580543994903564, 0.005618656158447265, 0.005347455978393555, 0.005714111804962159, 0.005601280212402344, 0.005615615844726562, 0.0055922560691833495, 0.005575488090515137, 0.005606656074523926, 0.005620480060577393, 0.005619904041290283, 0.005605184078216553, 0.0055920639038085935, 0.005618688106536865, 0.005592991828918457, 0.005619391918182373, 0.005583263874053955, 0.005570559978485107, 0.005586368083953857, 0.005589087963104248, 0.005625984191894531, 0.0056629438400268555, 0.005606527805328369, 0.005643551826477051, 0.005572319984436035, 0.005596511840820313, 0.00563267183303833, 0.005607423782348633, 0.005597087860107422, 0.005609568119049072, 0.005629439830780029, 0.0055895037651062015, 0.005590688228607177, 0.00564463996887207, 0.0055808000564575196, 0.005623839855194092, 0.005581952095031738, 0.005581920146942139, 0.00562713623046875, 0.005626368045806885, 0.005607552051544189, 0.005607295989990235, 0.0055760002136230466, 0.005628608226776123, 0.005578303813934326, 0.005639840126037598, 0.005589600086212158, 0.005580671787261963, 0.005603519916534424, 0.005578048229217529, 0.005658432006835938, 0.005609471797943116, 0.005600607872009277, 0.0056459841728210445, 0.005607423782348633, 0.005623807907104492, 0.005599232196807862, 0.005689343929290771, 0.005613247871398926, 0.0055790719985961915, 0.005605375766754151, 0.0057935361862182615, 0.005843200206756592, 0.005621407985687256, 0.005589344024658203, 0.005359615802764893, 0.005588992118835449, 0.005627903938293457, 0.005600351810455323, 0.005607935905456543, 0.00558735990524292, 0.0055764479637145994, 0.00561568021774292, 0.005962080001831055, 0.005650271892547607, 0.00561356782913208, 0.005608799934387207, 0.005634719848632812, 0.005581056118011475, 0.005631455898284912, 0.0055957441329956056, 0.005596864223480225, 0.005619487762451172, 0.005582143783569336, 0.005630879878997803, 0.005605375766754151, 0.005615231990814209, 0.005630239963531494, 0.005602431774139404, 0.005630943775177002, 0.005629951953887939, 0.005631040096282959, 0.005640960216522216, 0.005596704006195068, 0.00559990406036377, 0.005619584083557129, 0.005633408069610595, 0.0055857281684875484, 0.005574016094207763, 
0.005589568138122559, 0.005603328227996827, 0.005576704025268555, 0.005641983985900879, 0.005595232009887695, 0.005625376224517822, 0.005601920127868652, 0.005615744113922119, 0.005615488052368164, 0.005597184181213379, 0.005728256225585937, 0.005671040058135986, 0.005633408069610595, 0.005808127880096436, 0.0055976958274841304, 0.005701632022857666, 0.005605216026306152, 0.005673120021820069, 0.005617152214050293, 0.005607935905456543, 0.0056258559226989744, 0.005607295989990235, 0.005611648082733154, 0.005658624172210694, 0.0056852478981018065, 0.005679103851318359, 0.005660672187805176, 0.005639552116394043, 0.005730432033538818, 0.0053366079330444335, 0.005619616031646729, 0.005648287773132324, 0.005702527999877929, 0.00559827184677124, 0.005630559921264648, 0.005577087879180908, 0.0056044158935546875, 0.005632768154144287, 0.005777279853820801, 0.005920063972473144, 0.005594079971313477, 0.005649759769439698, 0.005605663776397705, 0.005640575885772705, 0.0056135358810424805, 0.005608575820922852, 0.005626016139984131, 0.006284031867980957, 0.006352543830871582, 0.006281023979187012, 0.005992512226104737, 0.005953440189361572, 0.005671487808227539, 0.005652480125427246, 0.005664480209350586, 0.005693727970123291, 0.005685408115386963, 0.005635583877563476, 0.005593311786651611, 0.005654655933380127, 0.005595136165618897, 0.005643551826477051, 0.005616352081298828, 0.005612800121307373, 0.005616384029388428, 0.0055848960876464845, 0.00564192008972168, 0.005636415958404541, 0.0055848960876464845, 0.005705728054046631, 0.005818143844604492, 0.005635488033294678, 0.005607391834259033, 0.005608287811279297, 0.005614880084991455, 0.0055979199409484865, 0.005593088150024414, 0.005605375766754151, 0.005615615844726562, 0.00562332820892334, 0.005609856128692627, 0.005625984191894531, 0.005609439849853516, 0.005595136165618897, 0.005617504119873047, 0.005593247890472412, 0.005637440204620361, 0.0056031041145324706, 0.005601439952850342, 0.005611519813537597, 0.005579520225524903, 0.005607423782348633, 0.0055354881286621095, 0.0056302080154418946, 0.005605375766754151, 0.005625279903411865, 0.005620287895202637, 0.005609471797943116, 0.005631999969482422, 0.005592607975006104, 0.0056427202224731444, 0.0056258559226989744, 0.005642240047454834, 0.005650239944458008, 0.005594719886779785, 0.005634655952453613, 0.005621471881866455, 0.005595424175262451, 0.005646207809448242, 0.0056152639389038085, 0.005620192050933838, 0.005603328227996827, 0.005611519813537597, 0.005638144016265869, 0.005593088150024414, 0.005633664131164551, 0.0056221442222595215, 0.005589248180389405, 0.005627647876739502, 0.005605728149414063, 0.005602975845336914, 0.005596704006195068, 0.005595104217529297, 0.0056427521705627445, 0.005609471797943116, 0.005619711875915527, 0.005640192031860352, 0.0056154241561889645, 0.005628032207489014, 0.00560748815536499, 0.005602880001068115, 0.005626304149627686, 0.006440959930419922, 0.005617568016052246, 0.005811647891998291, 0.0056241598129272465, 0.005601600170135498, 0.005647647857666016, 0.005601215839385987, 0.00560649585723877, 0.005590720176696777, 0.005574656009674072, 0.005638144016265869, 0.005619711875915527, 0.005631999969482422, 0.005607423782348633, 0.005605375766754151, 0.005633855819702148, 0.005606688022613525, 0.005602208137512207, 0.005615488052368164, 0.005611648082733154, 0.00563750410079956, 0.005589632034301758, 0.005623807907104492, 0.005357696056365967, 0.005615392208099365, 0.005610144138336182, 0.005662720203399658, 0.005636127948760986, 0.00562172794342041, 
0.0056258878707885745, 0.005606880187988281, 0.005597311973571778, 0.005630335807800293, 0.005605375766754151, 0.005617631912231445, 0.005586976051330566, 0.005574656009674072, 0.005621503829956054, 0.005589248180389405, 0.005646624088287353, 0.00561078405380249, 0.005677504062652588, 0.005631872177124023, 0.005640160083770752, 0.005658783912658691, 0.005633952140808106, 0.005636191844940185, 0.005627903938293457, 0.005605375766754151, 0.005607423782348633, 0.005647679805755615, 0.005659167766571045, 0.005640192031860352, 0.0056303682327270505, 0.005684991836547851, 0.005611008167266846, 0.0056254081726074216, 0.005742847919464111, 0.00564521598815918, 0.00566044807434082, 0.005623167991638184, 0.00564195203781128, 0.005622687816619873, 0.005656576156616211, 0.005615615844726562, 0.005617919921875, 0.005653439998626709, 0.005621920108795166, 0.005859839916229248, 0.006127776145935059, 0.005861343860626221, 0.005633056163787842, 0.005645408153533936, 0.005681056022644043, 0.005660672187805176, 0.005675072193145752, 0.005652160167694092, 0.005671040058135986, 0.005619679927825928, 0.0057077760696411135, 0.005691391944885254, 0.00563420820236206, 0.005642240047454834, 0.005809760093688965, 0.0060661759376525876, 0.005627744197845459, 0.005402912139892578, 0.005650432109832764, 0.0056310720443725586, 0.005659552097320557, 0.005699584007263184, 0.005703392028808593, 0.005613855838775635, 0.005617631912231445, 0.005639616012573242, 0.005603456020355225, 0.005628384113311767, 0.005604351997375488, 0.005596159934997558, 0.005651552200317383, 0.005622464179992676, 0.005635903835296631, 0.00564028787612915, 0.0057736320495605465, 0.005648575782775879, 0.005601088047027588, 0.005692575931549072, 0.005643104076385498, 0.005672959804534912, 0.005646336078643799, 0.0056640000343322755, 0.0059028158187866215, 0.0056239042282104496, 0.005629119873046875, 0.005641215801239013, 0.005639935970306396, 0.0056343040466308595, 0.005616640090942383, 0.0056492481231689455, 0.0056440639495849605, 0.005651904106140137, 0.00561407995223999, 0.005625311851501465, 0.005635039806365967, 0.005624896049499512, 0.005618624210357666, 0.005627264022827148, 0.005617472171783447, 0.005647168159484863, 0.00563750410079956, 0.005636735916137696, 0.005650432109832764, 0.005627456188201904, 0.005652927875518799, 0.005603328227996827, 0.005670015811920166, 0.005622655868530274, 0.005640192031860352, 0.005624864101409912, 0.005604320049285889, 0.0056258559226989744, 0.005612544059753418, 0.005628032207489014, 0.005606272220611572, 0.005631999969482422, 0.005609471797943116, 0.005629568099975586, 0.00564467191696167, 0.0056442880630493165, 0.005392384052276611, 0.005628223896026611, 0.00562278413772583, 0.005644224166870117, 0.005609344005584717, 0.00564518404006958, 0.005613152027130127, 0.005597599983215332, 0.0056258559226989744, 0.0056137280464172365, 0.005595200061798096, 0.005631775856018067, 0.005588992118835449, 0.005666944026947021, 0.00559500789642334, 0.005629727840423584, 0.005615359783172608, 0.005591519832611084, 0.005651679992675782, 0.0056061758995056155, 0.005640192031860352, 0.005605375766754151, 0.005584991931915283, 0.005654431819915771, 0.005826560020446778, 0.005627520084381103, 0.0056180481910705566, 0.005630015850067139, 0.005613024234771728, 0.00559113597869873, 0.0056236801147460935, 0.005609536170959472, 0.005638591766357422, 0.005629568099975586, 0.005754784107208252, 0.0056835517883300785, 0.005652607917785645, 0.005662720203399658, 0.005637695789337158, 0.005605823993682861, 0.005654528141021729, 
0.005618847846984863, 0.005651296138763428, 0.0056483840942382815, 0.005646016120910645, 0.005632415771484375, 0.005599135875701904, 0.005643743991851807, 0.005634592056274414, 0.005645919799804688, 0.005633600234985351, 0.005624095916748047, 0.005687520027160644, 0.005640543937683105, 0.005658432006835938, 0.00564243221282959, 0.005607423782348633, 0.005652480125427246, 0.005600927829742431, 0.005658976078033447, 0.005617663860321045, 0.005599232196807862, 0.005646336078643799, 0.005373248100280762, 0.005667808055877685, 0.005599391937255859, 0.00562937593460083, 0.005628320217132568, 0.0056211199760437015, 0.005627711772918701, 0.005606207847595215, 0.005609471797943116, 0.005586559772491455, 0.005560704231262207, 0.005619711875915527, 0.005566463947296142, 0.005599232196807862, 0.005583936214447021, 0.0055799040794372555, 0.005639071941375732, 0.005593311786651611, 0.005688000202178955, 0.005631999969482422, 0.005623807907104492, 0.005658239841461182, 0.005621600151062012, 0.005698336124420166, 0.005666336059570312, 0.005676928043365478, 0.005670591831207276, 0.005666528224945068, 0.005673920154571533, 0.005637663841247559, 0.005816800117492676, 0.00568236780166626, 0.005868447780609131, 0.00567849588394165, 0.00563865613937378, 0.005652480125427246, 0.005619872093200684, 0.00564796781539917, 0.005635903835296631, 0.0056406397819519045, 0.0057051520347595214, 0.005878335952758789, 0.0057019200325012205, 0.005631103992462159, 0.005673439979553223, 0.005615744113922119, 0.005610847949981689, 0.005648799896240234, 0.005646592140197754, 0.005660672187805176, 0.005642240047454834, 0.005629439830780029, 0.00565670394897461, 0.005616000175476074, 0.005628159999847412, 0.005633791923522949, 0.005629759788513183, 0.005635488033294678, 0.005613408088684082, 0.005641151905059814, 0.005627903938293457, 0.005650432109832764, 0.005628096103668213, 0.005391615867614746, 0.005603936195373535, 0.0056068158149719235, 0.005608191967010498, 0.005625631809234619, 0.005613247871398926, 0.0056241598129272465, 0.005600736141204834, 0.00563478422164917, 0.005750239849090576, 0.005603871822357178, 0.005610879898071289, 0.005611616134643555, 0.005654111862182617, 0.005636223793029785, 0.00563647985458374, 0.005613887786865234, 0.0056096000671386715, 0.005654016017913818, 0.005614240169525147, 0.005646175861358642, 0.0056258559226989744, 0.005621344089508057, 0.005663392066955567, 0.005615231990814209, 0.005680992126464844, 0.005644576072692871, 0.005668032169342041, 0.005655295848846435, 0.005627967834472656, 0.00566476821899414, 0.005615615844726562, 0.005635200023651123, 0.005624767780303955, 0.005627264022827148, 0.005717984199523926, 0.0056341438293457035, 0.005664544105529785, 0.0056464638710021975, 0.005617695808410645, 0.005681727886199951, 0.005624959945678711, 0.005687424182891846, 0.005649151802062988, 0.005652671813964844, 0.005670752048492432, 0.005657919883728027, 0.005671008110046387, 0.005624383926391602, 0.005765120029449463, 0.005711391925811767, 0.005634528160095215, 0.005660287857055664, 0.005626239776611328, 0.005668863773345947, 0.005634047985076904, 0.005666336059570312, 0.005622591972351074, 0.005610879898071289, 0.005744448184967041, 0.005611487865447998, 0.0056570878028869625, 0.005646336078643799, 0.005398591995239258, 0.005645023822784424, 0.005597280025482178, 0.005656544208526611, 0.005597216129302978, 0.005633887767791748, 0.005626016139984131, 0.005608640193939209, 0.0056360640525817875, 0.00561033582687378, 0.005647552013397217, 0.00565715217590332, 0.0058178558349609374, 
0.005609856128692627, 0.0056277761459350584, 0.005628416061401367, 0.005615615844726562, 0.005629951953887939, 0.005611519813537597, 0.005615615844726562, 0.005634047985076904, 0.005609471797943116, 0.00562713623046875, 0.005624576091766358, 0.005608511924743652, 0.005651391983032226, 0.00563097620010376, 0.005670976161956787, 0.005669727802276612, 0.005640255928039551, 0.005724192142486572, 0.005666207790374756, 0.005723936080932617, 0.005642240047454834, 0.005661375999450683, 0.005699711799621582, 0.005635488033294678, 0.005679711818695068, 0.005642240047454834, 0.005646240234375, 0.0056054720878601074, 0.005605120182037354, 0.005646592140197754, 0.0056146240234375, 0.005630943775177002, 0.005633503913879395, 0.005596896171569824, 0.005672863960266113, 0.0056267518997192385, 0.005643455982208252, 0.005632863998413086, 0.005673247814178467, 0.005649663925170898, 0.005616096019744873, 0.0056622719764709475, 0.005607359886169434, 0.005636608123779297, 0.005610976219177246, 0.005618207931518555, 0.005633535861968994, 0.0056202239990234375, 0.005636096000671387, 0.005631999969482422, 0.005351679801940918, 0.0056003198623657225, 0.005632991790771484, 0.005727519989013672, 0.0056388797760009764, 0.005646336078643799, 0.005638144016265869, 0.0056258559226989744, 0.005672095775604248, 0.00567145586013794, 0.005695648193359375, 0.005652544021606446, 0.0056341438293457035, 0.0056433920860290525, 0.005633056163787842, 0.005653535842895508, 0.005619904041290283, 0.005628255844116211, 0.005628128051757813, 0.0056098241806030276, 0.005678815841674805, 0.005650432109832764, 0.005643487930297852, 0.005616415977478027, 0.005648064136505127, 0.005635935783386231, 0.005601759910583496, 0.0056418561935424804, 0.005936927795410156, 0.005642848014831543, 0.0056250238418579105, 0.005624127864837646, 0.005634560108184815, 0.005629951953887939, 0.005646336078643799, 0.005619711875915527, 0.00561359977722168, 0.005664735794067383, 0.005609471797943116, 0.0056497278213500975, 0.005613791942596436, 0.0056232957839965824, 0.005667520046234131, 0.005625984191894531, 0.005632160186767578, 0.0056638078689575195, 0.005634943962097168, 0.005625823974609375, 0.005637631893157959, 0.0056735677719116214, 0.005656576156616211, 0.005687295913696289, 0.005697535991668701, 0.00564415979385376, 0.005662112236022949, 0.005637951850891113, 0.005651360034942627, 0.005748608112335205, 0.005675136089324951, 0.0056516480445861815, 0.005632448196411133, 0.005626239776611328, 0.005699584007263184, 0.005359712123870849, 0.005623167991638184, 0.0056284480094909665, 0.0056112961769104, 0.005625088214874268, 0.005630655765533447, 0.005624095916748047, 0.005631999969482422, 0.005639872074127197, 0.0056112961769104, 0.005655072212219238, 0.005627071857452393, 0.005654911994934082, 0.005632448196411133, 0.005637951850891113, 0.005666240215301514, 0.005645055770874024, 0.0056442880630493165, 0.005633696079254151, 0.00559116792678833, 0.005650527954101562, 0.0056275839805603025, 0.005645088195800781, 0.005648032188415527, 0.005647456169128418, 0.005626783847808838, 0.005641791820526123, 0.005642687797546387, 0.005654528141021729, 0.005666719913482666, 0.005681248188018799, 0.005616703987121582, 0.005692351818084717, 0.005592864036560058, 0.0059836478233337405, 0.006132415771484375, 0.005727935791015625, 0.005654047966003418, 0.005653120040893554, 0.005643775939941406, 0.005660575866699219, 0.005641088008880615, 0.005658624172210694, 0.005666816234588623, 0.005648447990417481, 0.0056070079803466795, 0.0056733121871948245, 0.005619711875915527, 
0.00564134407043457, 0.005649280071258545, 0.005631648063659668, 0.005693439960479736, 0.0056200637817382815, 0.005695712089538574, 0.005643743991851807, 0.00566918420791626, 0.00564195203781128, 0.005624032020568848, 0.005900288105010986, 0.005596640110015869, 0.005650176048278809, 0.005622432231903076, 0.0055912318229675295, 0.005355616092681884, 0.005615615844726562, 0.005631999969482422, 0.005607135772705078, 0.005611680030822754, 0.005619584083557129, 0.005613632202148437, 0.005640384197235108, 0.005605375766754151, 0.005611519813537597, 0.005601280212402344, 0.005688576221466064, 0.005614336013793945, 0.005607423782348633, 0.0056442880630493165, 0.005631999969482422, 0.0056239042282104496, 0.005652383804321289, 0.0056295042037963865, 0.0056713600158691405, 0.005662975788116455, 0.005635776042938232, 0.00560748815536499, 0.005619711875915527, 0.005683199882507324, 0.005601280212402344, 0.005617663860321045, 0.005618720054626465, 0.0055797438621521, 0.005756896018981933, 0.005601344108581543, 0.005631968021392823, 0.005629951953887939, 0.00561897611618042, 0.005646399974822998, 0.005585440158843994, 0.005641632080078125, 0.0056388797760009764, 0.005816256046295166, 0.0056341118812561035, 0.005597184181213379, 0.005629856109619141, 0.005622975826263428, 0.005854112148284912, 0.005631135940551758, 0.00562604808807373, 0.005663392066955567, 0.005687295913696289, 0.0056217598915100095, 0.0056217598915100095, 0.005758848190307617, 0.005726336002349853, 0.00560038423538208, 0.005608320236206055, 0.005636288166046143, 0.00562332820892334, 0.005646624088287353, 0.005607423782348633, 0.005659904003143311, 0.005585663795471192, 0.005621024131774902, 0.005611487865447998, 0.005690112113952637, 0.005453855991363526, 0.005606304168701172, 0.0056375679969787595, 0.005620160102844238, 0.005621183872222901, 0.005635776042938232, 0.005622367858886718, 0.005662528038024903, 0.005937280178070069, 0.00563043212890625, 0.0056130561828613285, 0.005636000156402588, 0.005609151840209961, 0.005593472003936768, 0.005627456188201904, 0.005616608142852783, 0.005656383991241455, 0.005609632015228272, 0.0055931200981140135, 0.005646336078643799, 0.005617663860321045, 0.005658624172210694, 0.0056557440757751464, 0.005640768051147461, 0.005670591831207276, 0.00563046407699585, 0.005632031917572022, 0.005650527954101562, 0.005650368213653565, 0.005703680038452149, 0.005650303840637207, 0.0056730880737304685, 0.005679103851318359, 0.005654528141021729, 0.005650368213653565, 0.0056310720443725586, 0.005663712024688721, 0.005848159790039062, 0.005644544124603272, 0.005648096084594726, 0.005657536029815674, 0.00579744005203247, 0.005595327854156494, 0.005713568210601806, 0.005626463890075683, 0.005631999969482422, 0.005636096000671387, 0.005617472171783447, 0.005642079830169678, 0.00560368013381958, 0.005652736186981202, 0.005630879878997803, 0.005639008045196533, 0.005669087886810303, 0.00566044807434082, 0.005652480125427246, 0.005661856174468994, 0.005663584232330323, 0.005618879795074463, 0.005614175796508789, 0.005652031898498535, 0.00562662410736084, 0.005636000156402588, 0.005375999927520752, 0.0056315197944641114, 0.0056713919639587405, 0.005623807907104492, 0.0056770238876342775, 0.005619743824005127, 0.0056341438293457035, 0.005662623882293701, 0.005659808158874511, 0.005697887897491455, 0.005615871906280517, 0.005660927772521972, 0.005638144016265869, 0.0056217598915100095, 0.00565177583694458, 0.005618368148803711, 0.005644320011138916, 0.0056295042037963865, 0.005616032123565674, 0.005615359783172608, 
0.005607583999633789, 0.0056341438293457035, 0.005613344192504883, 0.0056304001808166505, 0.005625472068786621, 0.005598432064056397, 0.005622719764709473, 0.005629216194152832, 0.005669600009918213, 0.005627520084381103, 0.005623968124389649, 0.005648672103881836, 0.005606592178344727, 0.005630720138549805, 0.005629951953887939, 0.005588736057281494, 0.005650047779083252, 0.0055747518539428715, 0.00564031982421875, 0.005608191967010498, 0.005797120094299316, 0.005656991958618164, 0.005595136165618897, 0.005638144016265869, 0.005672192096710205, 0.005630144119262695, 0.005636191844940185, 0.005612095832824707, 0.005643455982208252, 0.005626656055450439, 0.005631872177124023, 0.005635615825653076, 0.005594816207885742, 0.005630815982818603, 0.005617728233337402, 0.005615551948547363, 0.005640192031860352, 0.005582848072052002, 0.005607423782348633, 0.005590623855590821, 0.005623839855194092, 0.0055972480773925784, 0.005695807933807373, 0.005393184185028076, 0.005619711875915527, 0.005691391944885254, 0.005646336078643799, 0.005634047985076904, 0.005646336078643799, 0.0055948801040649416, 0.005628032207489014, 0.005593215942382813, 0.005654528141021729, 0.005608672142028808, 0.005633056163787842, 0.005641759872436524, 0.005624032020568848, 0.005621503829956054, 0.00578326416015625, 0.006793248176574707, 0.005968480110168457, 0.006121535778045654, 0.006065279960632324, 0.006225823879241943, 0.005679935932159424, 0.005709824085235596, 0.005649792194366455, 0.005668672084808349, 0.005650335788726806, 0.005661824226379395, 0.005689023971557617, 0.00562992000579834, 0.005666111946105957, 0.005671743869781494, 0.005660223960876465, 0.00564899206161499, 0.005675903797149658, 0.005711935997009278, 0.00565340805053711, 0.005711775779724121, 0.005658783912658691, 0.00566864013671875, 0.00566921615600586, 0.005686079978942871, 0.0056863360404968265, 0.00565180778503418, 0.005661280155181885, 0.005638144016265869, 0.0056275839805603025, 0.005677375793457031, 0.005662720203399658, 0.005703616142272949, 0.005717376232147217, 0.005651135921478271, 0.005695487976074219, 0.005649759769439698, 0.005667232036590576, 0.00566707181930542, 0.005662720203399658, 0.0056217598915100095, 0.005629951953887939, 0.005713920116424561, 0.006149824142456055, 0.005664351940155029, 0.005663455963134766, 0.005689343929290771, 0.005354015827178955, 0.005670911788940429, 0.005636096000671387, 0.005616703987121582, 0.005655007839202881, 0.005667295932769776, 0.0056741762161254886, 0.005665599822998047, 0.0056399679183959965, 0.005656511783599854, 0.00562614393234253, 0.005640192031860352, 0.005623519897460938, 0.005597472190856934, 0.00564793586730957, 0.005618112087249756, 0.005654528141021729, 0.00565772819519043, 0.005661568164825439, 0.005623807907104492, 0.005607423782348633, 0.005684607982635498, 0.005642879962921143, 0.005670911788940429, 0.0059169921875, 0.005620960235595703, 0.0061567678451538085, 0.0056260800361633305, 0.005664544105529785, 0.005631999969482422, 0.005646336078643799, 0.005646336078643799, 0.005640192031860352, 0.005671040058135986, 0.005652160167694092, 0.005652671813964844, 0.005668416023254395, 0.005740992069244385, 0.005679103851318359, 0.005678431987762451, 0.0056900157928466795, 0.005641600131988525, 0.005773151874542236, 0.005727007865905762, 0.005675007820129394, 0.005646336078643799, 0.005645567893981933, 0.0056921601295471195, 0.0056442880630493165, 0.00566431999206543, 0.005652575969696045, 0.00563750410079956, 0.005667808055877685, 0.005656576156616211, 0.005700640201568604, 
0.005724319934844971, 0.005716032028198243, 0.005663487911224365, 0.00564134407043457, 0.005685887813568115, 0.005666975975036621, 0.0056976318359375, 0.005817728042602539]",tokens/s,174.95210150115787,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,742.989824,11799.560192,0.0,11404.312576,11388.314624,s,1,7.22953173828125,7.22953173828125,0.0,7.22953173828125,7.22953173828125,7.22953173828125,7.22953173828125,[7.22953173828125],,kWh,6.060062895841157e-06,6.604853918541069e-07,3.1891692180013864e-06,9.90971750569665e-06,,MB,1045.102592,11812.143104,0.0,11406.409728,11107.92192,s,10,4.078596130371094,0.4078596130371094,0.006914636060580647,0.4093941650390625,0.41454249267578125,0.4161641296386719,0.4174614392089844,"[0.3908428344726563, 0.40325701904296873, 0.4048968811035156, 0.40763894653320315, 0.4104405212402344, 0.41418212890625, 0.40905242919921875, 0.41076370239257814, 0.40973590087890627, 0.4177857666015625]",tokens/s,627.6669516104005,kWh,1.1848457179999816e-05,1.3057261868365596e-06,7.91846189032011e-06,2.1072645257156487e-05,tokens/kWh,12148451.078445397,MB,1049.894912,11814.240256,0.0,11408.50688,11305.031168,s,10,33.25393603515625,3.325393603515625,0.004680598481783512,3.3244874267578126,3.331663720703125,3.3319961669921874,3.332262124023438,"[3.32085595703125, 3.32172607421875, 3.32499609375, 3.320064453125, 3.3196875, 3.323978759765625, 3.32793017578125, 3.33232861328125, 3.33158984375, 3.330778564453125]",tokens/s,18.94512575395467,kWh,9.727351534833359e-05,1.0730509639033732e-05,6.439720707327956e-05,0.0001724012320606469,tokens/kWh,365426.6228088092,,s,630,33.250572078704806,0.05277868583921402,0.00030674911309846537,0.05275551986694336,0.053081284332275394,0.05319347534179687,0.054124218521118164,"[0.0541383056640625, 0.05286969757080078, 0.05241062545776367, 0.05242035293579102, 0.052539424896240236, 0.052261089324951174, 0.05237136077880859, 0.052418399810791015, 0.05233817672729492, 0.052375518798828125, 0.05235324859619141, 0.052402145385742185, 0.05252556610107422, 0.05238297653198242, 0.05253529739379883, 0.05237820816040039, 0.05243628692626953, 0.052414497375488284, 0.05265177536010742, 0.05280767822265625, 0.05262160110473633, 0.05304336166381836, 0.052717151641845705, 0.052778976440429684, 0.05249491119384766, 0.05253279876708984, 0.052677471160888674, 0.052567424774169924, 0.052643966674804685, 0.052544033050537106, 0.05244662475585937, 0.05290854263305664, 0.052652320861816405, 0.05259030532836914, 0.05252505493164063, 0.052580352783203124, 0.05269852828979492, 0.05272431945800781, 0.053174270629882815, 0.05282179260253906, 0.053018753051757815, 0.052811870574951174, 0.05289372634887695, 0.0526520004272461, 0.05276435089111328, 0.05268259048461914, 0.052684257507324216, 0.052634624481201174, 0.05271756744384765, 0.052739585876464844, 0.05284710311889648, 0.05268889617919922, 0.05295718383789062, 0.05281932830810547, 
0.05289843368530273, 0.05272371292114258, 0.05284592056274414, 0.05336131286621094, 0.05306982421875, 0.053190689086914066, 0.052962303161621094, 0.052933120727539064, 0.05286550521850586, 0.054295841217041015, 0.05278310394287109, 0.05225545501708984, 0.05222348785400391, 0.05213471984863281, 0.05276435089111328, 0.05241856002807617, 0.05244518280029297, 0.0523612174987793, 0.05259823989868164, 0.05237583923339844, 0.05227657699584961, 0.05245225524902344, 0.05256185531616211, 0.05246105575561524, 0.05245990371704102, 0.0522856330871582, 0.05280972671508789, 0.05276387023925781, 0.052833057403564455, 0.05261711883544922, 0.05257030487060547, 0.05251478576660156, 0.05252822494506836, 0.052620094299316404, 0.05253500747680664, 0.05258812713623047, 0.05267324829101563, 0.052838401794433595, 0.052598785400390625, 0.05249017715454102, 0.05257353591918945, 0.05297020721435547, 0.05268889617919922, 0.05284044647216797, 0.05280115127563476, 0.05288179016113281, 0.053029022216796874, 0.05291356658935547, 0.052753952026367186, 0.05279212951660156, 0.05269014358520508, 0.052716415405273435, 0.05270233535766602, 0.05283651351928711, 0.05289884948730469, 0.052725440979003904, 0.05300569534301758, 0.052718208312988284, 0.05275033569335937, 0.0526295051574707, 0.05269289779663086, 0.05280547332763672, 0.05272003173828125, 0.05290387344360352, 0.05283190536499023, 0.053141761779785156, 0.05346918487548828, 0.053028865814208986, 0.052942848205566405, 0.053008384704589843, 0.05283375930786133, 0.05295772933959961, 0.0541822738647461, 0.05274176025390625, 0.05234732818603516, 0.052550945281982425, 0.05255356979370117, 0.05251718521118164, 0.05244704055786133, 0.05243376159667969, 0.05239174270629883, 0.05232963180541992, 0.052342655181884766, 0.052356063842773436, 0.052547584533691405, 0.05264169692993164, 0.05256614303588867, 0.052591743469238283, 0.05249110412597656, 0.052542720794677734, 0.05268278503417969, 0.05272003173828125, 0.05251308822631836, 0.052518753051757815, 0.05262556838989258, 0.05255782318115235, 0.05266960144042969, 0.05263446426391601, 0.05271779251098633, 0.052727584838867185, 0.05248006439208985, 0.05255161666870117, 0.0526025276184082, 0.05267695999145508, 0.05294230270385742, 0.052908065795898435, 0.05303123092651367, 0.05357177734375, 0.05297488021850586, 0.052861663818359376, 0.05295539093017578, 0.05288499069213867, 0.053065982818603516, 0.052876960754394534, 0.05283795166015625, 0.05278799819946289, 0.0530882568359375, 0.05285174560546875, 0.05284281539916992, 0.05268764877319336, 0.05266985702514648, 0.05278153610229492, 0.052770782470703125, 0.05302217483520508, 0.05281849670410156, 0.05310025787353516, 0.05289769744873047, 0.05295756912231445, 0.05297151947021484, 0.05296102523803711, 0.052888961791992185, 0.0530645751953125, 0.053065727233886716, 0.052951038360595705, 0.05332992172241211, 0.05417824172973633, 0.05289571380615234, 0.05244675064086914, 0.05241219329833984, 0.052550430297851565, 0.05248604965209961, 0.05226015853881836, 0.05234479904174805, 0.052349056243896484, 0.05243145751953125, 0.05271142578125, 0.05235302352905274, 0.05244480133056641, 0.052440608978271484, 0.05248700714111328, 0.0523691520690918, 0.052289249420166016, 0.05249897766113281, 0.052850368499755856, 0.05290611267089844, 0.052828254699707033, 0.052533344268798826, 0.052478240966796874, 0.05251430511474609, 0.05239318466186523, 0.052429824829101565, 0.05249622344970703, 0.052488449096679685, 0.052442623138427735, 0.052731391906738284, 0.05265296173095703, 0.05241439819335938, 
0.052496158599853515, 0.05253763198852539, 0.052545055389404294, 0.05320268630981445, 0.05318729782104492, 0.052910079956054686, 0.052921409606933596, 0.05282297515869141, 0.05267251205444336, 0.05267171096801758, 0.05290063858032226, 0.05263161468505859, 0.052770751953125, 0.05267865753173828, 0.05271551895141602, 0.05280691146850586, 0.05285145568847656, 0.052733535766601565, 0.052744449615478514, 0.05276073455810547, 0.05272576141357422, 0.05313238525390625, 0.052916385650634765, 0.052813888549804684, 0.05304143905639649, 0.052922782897949217, 0.0530882568359375, 0.052909854888916016, 0.05281814575195312, 0.05285273742675781, 0.052891647338867184, 0.054089729309082034, 0.05288547134399414, 0.05237251281738281, 0.05233337783813476, 0.05233478546142578, 0.052393600463867186, 0.05241241455078125, 0.0523823356628418, 0.05242227172851562, 0.05236083221435547, 0.05278908920288086, 0.05237036895751953, 0.052472671508789065, 0.05242902374267578, 0.05275920104980469, 0.052555679321289066, 0.05271356964111328, 0.0526192626953125, 0.05252048110961914, 0.05265251159667969, 0.05267670440673828, 0.05266217422485352, 0.05276163101196289, 0.05257321548461914, 0.052557758331298825, 0.052499584197998043, 0.05245792007446289, 0.05243948745727539, 0.05244927978515625, 0.05246771240234375, 0.0525513916015625, 0.052494625091552734, 0.05247180938720703, 0.05246361541748047, 0.05320028686523438, 0.0529920654296875, 0.052733535766601565, 0.05265708923339844, 0.05290393447875977, 0.052724864959716795, 0.05337948989868164, 0.052705760955810546, 0.05294899368286133, 0.05284864044189453, 0.052872543334960935, 0.052568737030029296, 0.05271347045898438, 0.05271273422241211, 0.05285551834106445, 0.052708576202392575, 0.05266716766357422, 0.05255782318115235, 0.05283356857299805, 0.052711776733398434, 0.053072158813476565, 0.052754528045654295, 0.05274185562133789, 0.05279260635375976, 0.05277312088012695, 0.05285289764404297, 0.05288201522827148, 0.05281587219238281, 0.052967422485351565, 0.05408150482177734, 0.052770591735839846, 0.05258671951293945, 0.05242879867553711, 0.052424705505371094, 0.052391712188720706, 0.05248255920410156, 0.05252243041992188, 0.05233078384399414, 0.05242675018310547, 0.05241424179077148, 0.05258204650878906, 0.0529697265625, 0.05251513671875, 0.05253529739379883, 0.05252710342407227, 0.05259468841552734, 0.052711166381835935, 0.05284889602661133, 0.052830432891845705, 0.05274591827392578, 0.052676288604736325, 0.05271798324584961, 0.05254553604125976, 0.05266783905029297, 0.05261983871459961, 0.05264787292480469, 0.052617279052734375, 0.052623489379882815, 0.05266960144042969, 0.05278793716430664, 0.05263359832763672, 0.05275651168823242, 0.0525700798034668, 0.05270528030395508, 0.052682369232177735, 0.052834686279296876, 0.05288140869140625, 0.05290188980102539, 0.05278307342529297, 0.052817119598388675, 0.05302777481079102, 0.052786815643310545, 0.05273420715332031, 0.05282406234741211, 0.05277084732055664, 0.05295491027832031, 0.052744384765625, 0.053053760528564455, 0.05283808135986328, 0.05270528030395508, 0.05282611083984375, 0.05281792068481445, 0.05300617599487305, 0.05300204849243164, 0.05279369735717773, 0.052999488830566405, 0.05307696151733399, 0.052870880126953124, 0.05287097549438476, 0.053231136322021484, 0.05285520172119141, 0.053017887115478515, 0.05406307220458984, 0.05285583877563477, 0.05234790420532227, 0.052397247314453124, 0.052420894622802736, 0.05233513641357422, 0.05240537643432617, 0.05258329772949219, 0.052569534301757814, 0.05259273529052735, 
0.05240812683105469, 0.05234960174560547, 0.05247334289550781, 0.0528983039855957, 0.052566017150878906, 0.05257212829589844, 0.05280361557006836, 0.05270937728881836, 0.05275839996337891, 0.05278937530517578, 0.052746238708496096, 0.05259823989868164, 0.05263824081420899, 0.05264003372192383, 0.052737567901611326, 0.05265340805053711, 0.05283107376098633, 0.052762622833251956, 0.05260489654541016, 0.05267027282714844, 0.052863201141357424, 0.05324579238891602, 0.05301776123046875, 0.052863998413085936, 0.05271756744384765, 0.05290963363647461, 0.05293072128295898, 0.05283663940429688, 0.05303068923950195, 0.052989246368408204, 0.052978591918945314, 0.052899486541748045, 0.05295548629760742, 0.05285472106933594, 0.05299820709228516, 0.053017791748046876, 0.05292319869995117, 0.05290800094604492, 0.052875297546386715, 0.052784961700439455, 0.05288191986083984, 0.052802398681640626, 0.05292486572265625, 0.05304348754882812, 0.05314982223510742, 0.05293011093139648, 0.05305785751342774, 0.053055553436279296, 0.05322348785400391, 0.05311654281616211, 0.05297628784179687, 0.05293641662597656, 0.05308124923706055, 0.05461270523071289, 0.053144832611083985, 0.05265071868896484, 0.05251686477661133, 0.05243417739868164, 0.05247286224365234, 0.052538944244384767, 0.052396190643310546, 0.05261529541015625, 0.05266729736328125, 0.05251379013061523, 0.05374358367919922, 0.052701343536376954, 0.05273788833618164, 0.05269417572021484, 0.05274915313720703, 0.05264777755737305, 0.05265423965454102, 0.05267827224731445, 0.05319510269165039, 0.05292233657836914, 0.05308422470092773, 0.05280767822265625, 0.05284659194946289, 0.05279888153076172, 0.05276732635498047, 0.05276374435424805, 0.052809982299804686, 0.05270595169067383, 0.05268668746948242, 0.052719425201416016, 0.05276079940795898, 0.052754047393798825, 0.05283891296386719, 0.052868640899658204, 0.0528421745300293, 0.05279414367675781, 0.05278617477416992, 0.05300128173828125, 0.05289971160888672, 0.05314156723022461, 0.05289267349243164, 0.05293913650512695, 0.052738494873046875, 0.052961280822753906, 0.0527341423034668, 0.05281788635253906, 0.052799518585205076, 0.052811775207519535, 0.05273011016845703, 0.052813568115234376, 0.05289567947387695, 0.052916385650634765, 0.05315081787109375, 0.05319148635864258, 0.0529837760925293, 0.05310262298583984, 0.05332735824584961, 0.05320755386352539, 0.05337699127197266, 0.053346046447753905, 0.05318070220947266, 0.053106689453125, 0.05466521453857422, 0.05334339141845703, 0.052703102111816405, 0.05247894287109375, 0.05252828979492188, 0.05254377746582031, 0.052410816192626955, 0.052627582550048825, 0.05255168151855469, 0.05253324890136719, 0.052502113342285155, 0.05256233596801758, 0.052587646484375, 0.05255487823486328, 0.052612545013427735, 0.05280185699462891, 0.052482177734375, 0.05268876647949219, 0.052728031158447264, 0.05289539337158203, 0.05286310577392578, 0.05277494430541992, 0.05271708679199219, 0.05267327880859375, 0.052676063537597656, 0.05277030563354492, 0.05288828659057617, 0.052709022521972654, 0.05279792022705078, 0.05270105743408203, 0.05278464126586914, 0.05269903945922851, 0.05286563110351562, 0.052827327728271485, 0.05291228866577148, 0.05285270309448242, 0.05293331146240234, 0.052883712768554685, 0.05298688125610351, 0.0529409294128418, 0.05295798492431641, 0.053319454193115234, 0.05304121780395508, 0.0530063362121582, 0.05297971343994141, 0.05307494354248047, 0.05297840118408203, 0.052834465026855466, 0.05283654403686523, 0.05287519836425781, 0.052983806610107424, 
0.052822017669677736, 0.052956768035888675, 0.05294736099243164, 0.053182464599609375, 0.05314787292480469, 0.053106464385986325, 0.05320083236694336, 0.053137248992919925, 0.05315974426269531, 0.05315353775024414, 0.05341865539550781, 0.05304681777954102, 0.05451468658447266, 0.05318656158447266, 0.052654079437255856, 0.05249871826171875, 0.052536991119384764, 0.05242585754394531, 0.05259564971923828, 0.053082015991210936, 0.05249187088012695, 0.052679168701171876, 0.05255708694458008, 0.05245836639404297, 0.05275222396850586, 0.05255478286743164, 0.052591743469238283, 0.05266211318969727, 0.05263359832763672, 0.05268428802490235, 0.05275289535522461, 0.053082111358642575, 0.05294668960571289, 0.05282822418212891, 0.05271897506713867, 0.05253116989135742, 0.05270943832397461, 0.05268339157104492, 0.052842655181884766, 0.05267804718017578, 0.052660831451416014, 0.052719615936279295, 0.05269081497192383, 0.05266998291015625, 0.0528554573059082, 0.05276627349853515, 0.052913887023925785, 0.052947616577148436, 0.05287116622924805, 0.052789249420166016, 0.053037120819091794, 0.05297760009765625, 0.05308160018920898, 0.05293721771240235, 0.052967422485351565, 0.05287936019897461, 0.05300428771972656, 0.052848190307617185, 0.05299244689941406, 0.052754432678222656, 0.052819969177246094, 0.05288905715942383, 0.05279388809204102, 0.05301248168945313, 0.05294079971313476, 0.053032958984375, 0.05303910446166992, 0.05296332931518555, 0.053125118255615236, 0.05315996932983398, 0.05339907073974609, 0.05335254287719727, 0.05317232131958008, 0.05292262268066406, 0.05307148742675781]",tokens/s,18.94704243009041,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciLM-7B/c3c9f4226801dc0433f32aebffe0aac68ee2f051/modeling_decilm.py"", line 311, in __init__ self.model = DeciLMModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciLM-7B/c3c9f4226801dc0433f32aebffe0aac68ee2f051/modeling_decilm.py"", line 182, in __init__ self.layers = nn.ModuleList([DeciLMDecoderLayer(config, layer_idx) for layer_idx File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciLM-7B/c3c9f4226801dc0433f32aebffe0aac68ee2f051/modeling_decilm.py"", line 182, in self.layers = nn.ModuleList([DeciLMDecoderLayer(config, layer_idx) for layer_idx File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciLM-7B/c3c9f4226801dc0433f32aebffe0aac68ee2f051/modeling_decilm.py"", line 149, in __init__ self.mlp = LlamaMLP(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciLM-7B/c3c9f4226801dc0433f32aebffe0aac68ee2f051/transformers_v4_35_2__modeling_llama.py"", line 236, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,742.764544,6193.872896,0.0,5798.62528,5774.685184,s,1,7.30814697265625,7.30814697265625,0.0,7.30814697265625,7.30814697265625,7.30814697265625,7.30814697265625,[7.30814697265625],,kWh,4.32576990416654e-06,4.700026609990046e-07,2.377224124001931e-06,7.172996689167475e-06,,MB,1047.277568,6206.455808,0.0,5800.722432,5525.736448,s,10,2.3927240905761713,0.23927240905761718,0.009610978361010573,0.2413498229980469,0.24558556365966797,0.24645138931274413,0.24714404983520508,"[0.2117969207763672, 0.2372667236328125, 0.2394752655029297, 0.24016172790527343, 0.2419894714355469, 0.24731721496582032, 0.24520982360839844, 0.24539315795898436, 0.24071017456054689, 0.2434036102294922]",tokens/s,1069.9102374915062,kWh,6.58644672194437e-06,7.263188448296269e-07,4.35650965804443e-06,1.1669275224818428e-05,tokens/kWh,21937952.020836264,MB,1051.738112,6208.55296,0.0,5802.819584,5623.660032,s,10,17.640743408203125,1.7640743408203126,0.002191758134030772,1.76315966796875,1.7671169555664061,1.767554815673828,1.7679051037597657,"[1.76262060546875, 1.7623641357421875, 1.7670196533203124, 1.761020751953125, 1.7628199462890626, 1.7648861083984375, 1.76799267578125, 1.7661329345703125, 1.76238720703125, 1.7634993896484374]",tokens/s,35.71278065906472,kWh,5.2048872468055586e-05,5.739458893656275e-06,3.445896583875587e-05,9.224729720046773e-05,tokens/kWh,682946.8386818011,,s,630,17.638176202774062,0.02799710508376833,0.00031267521339151144,0.02794905662536621,0.02817639675140381,0.028387158775329588,0.029773395805358892,"[0.029443679809570314, 0.028571552276611328, 0.027992576599121095, 0.027808864593505858, 0.02789686393737793, 0.027770912170410156, 0.027739200592041015, 0.027689472198486328, 0.027746591567993164, 0.027676671981811524, 0.02773401641845703, 0.02784867286682129, 0.027787296295166016, 0.02779961585998535, 0.02803296089172363, 0.0278154239654541, 0.02775059127807617, 0.02770102310180664, 0.02777142333984375, 0.027777023315429687, 0.027808927536010743, 0.028105567932128907, 0.02825356864929199, 0.028070240020751952, 0.027837823867797852, 0.02789878463745117, 0.02776678466796875, 0.027826175689697266, 0.027846656799316406, 0.02788675117492676, 0.02789401626586914, 0.027859264373779297, 0.027896095275878906, 0.027935007095336913, 0.02852854347229004, 0.02844633674621582, 0.028176511764526367, 0.028033151626586914, 0.027996095657348632, 0.02821126365661621, 0.028190656661987303, 0.028203008651733398, 0.027992063522338868, 0.028090368270874022, 0.027992095947265625, 0.027918560028076172, 0.02821865653991699, 0.0281246395111084, 0.027947488784790038, 0.02786886405944824, 0.027888479232788085, 0.027936767578125, 0.02787942314147949, 0.027903711318969727, 0.027988256454467772, 0.02800614356994629, 0.027918687820434572, 0.0279215030670166, 
0.027949312210083007, 0.028006399154663086, 0.02790166473388672, 0.027915103912353516, 0.027994400024414064, 0.02965711975097656, 0.028714656829833984, 0.028100479125976564, 0.027877824783325195, 0.02780182456970215, 0.02776655960083008, 0.02771046447753906, 0.02781110382080078, 0.027743711471557617, 0.027674720764160155, 0.027699359893798826, 0.02783251190185547, 0.027789119720458985, 0.027770559310913087, 0.027732383728027343, 0.02778009605407715, 0.027673664093017577, 0.027684000015258788, 0.027780832290649413, 0.02774963188171387, 0.027659488677978517, 0.027762304306030272, 0.027835264205932617, 0.028010240554809572, 0.027754751205444336, 0.02776412773132324, 0.027880224227905273, 0.027821887969970704, 0.02782931137084961, 0.027871295928955077, 0.027858911514282228, 0.027855775833129884, 0.027867136001586915, 0.02799523162841797, 0.028023712158203123, 0.028037120819091797, 0.028163871765136718, 0.02815407943725586, 0.02814361572265625, 0.028098880767822267, 0.028174144744873047, 0.02891526412963867, 0.028252096176147462, 0.028002687454223633, 0.027975679397583008, 0.027850751876831056, 0.02790768051147461, 0.0280231990814209, 0.027973440170288084, 0.02785708808898926, 0.0279552001953125, 0.028215295791625978, 0.028062816619873046, 0.02811996841430664, 0.028019968032836913, 0.028074464797973632, 0.027971872329711912, 0.028000448226928713, 0.02789561653137207, 0.027987968444824218, 0.028006399154663086, 0.028024927139282226, 0.02810665512084961, 0.03003664016723633, 0.029128608703613282, 0.028288192749023437, 0.028132223129272462, 0.028026687622070313, 0.02781724739074707, 0.0277410888671875, 0.02798124885559082, 0.027775552749633788, 0.027807743072509765, 0.027850656509399413, 0.027868640899658202, 0.027816287994384764, 0.027851200103759764, 0.027843679428100586, 0.02793343925476074, 0.02778748893737793, 0.027842336654663086, 0.027757984161376953, 0.02784111976623535, 0.027804927825927736, 0.02794099235534668, 0.027890304565429687, 0.02783395195007324, 0.02781430435180664, 0.027852832794189455, 0.027858911514282228, 0.027879520416259764, 0.02803049659729004, 0.029004159927368163, 0.028035072326660155, 0.027971263885498046, 0.027991840362548828, 0.028027423858642576, 0.02816409683227539, 0.028089792251586913, 0.02815648078918457, 0.028012544631958007, 0.028051456451416015, 0.028039167404174805, 0.028008159637451173, 0.027990304946899414, 0.02792857551574707, 0.027978784561157228, 0.02790483283996582, 0.027936927795410155, 0.028025983810424804, 0.027967552185058593, 0.027942880630493164, 0.027911008834838866, 0.027987968444824218, 0.028458400726318358, 0.028442975997924804, 0.028080127716064454, 0.028004608154296874, 0.0279837760925293, 0.028059776306152345, 0.028100448608398436, 0.02808844757080078, 0.027977567672729492, 0.028049631118774412, 0.028123071670532226, 0.028211200714111328, 0.029952224731445314, 0.028782495498657225, 0.02820355224609375, 0.02788483238220215, 0.027914016723632813, 0.0277142391204834, 0.027641855239868163, 0.027703296661376952, 0.027711488723754882, 0.02772377586364746, 0.027820287704467775, 0.027768287658691406, 0.027730207443237304, 0.027835968017578126, 0.027783008575439454, 0.027781280517578125, 0.027790943145751954, 0.027753343582153322, 0.027891328811645508, 0.02777052879333496, 0.028027584075927734, 0.027891712188720705, 0.02778508758544922, 0.027773056030273437, 0.02775654411315918, 0.02789583969116211, 0.02777225685119629, 0.027763328552246093, 0.027799264907836914, 0.027834144592285157, 0.02783078384399414, 0.02777529525756836, 0.02786604881286621, 
0.027984607696533204, 0.028221376419067384, 0.028030815124511718, 0.02806809616088867, 0.02802284812927246, 0.027966623306274415, 0.02812291145324707, 0.028035167694091798, 0.02793769645690918, 0.027918399810791014, 0.027848224639892578, 0.027873952865600585, 0.027918079376220702, 0.02802284812927246, 0.02807539176940918, 0.027909791946411133, 0.02795564842224121, 0.027988447189331054, 0.028108192443847657, 0.027880096435546876, 0.027940959930419923, 0.027905696868896483, 0.027959487915039063, 0.027989023208618163, 0.028122079849243163, 0.02800828742980957, 0.027977888107299804, 0.027999359130859373, 0.028023679733276366, 0.02803264045715332, 0.029866783142089844, 0.02874563217163086, 0.02847145652770996, 0.028131519317626953, 0.02785251235961914, 0.02781337547302246, 0.02802943992614746, 0.027842336654663086, 0.02774822425842285, 0.027828575134277344, 0.02788688087463379, 0.027909055709838867, 0.02772764778137207, 0.02775859260559082, 0.02788262367248535, 0.02782912063598633, 0.027840511322021484, 0.028063743591308594, 0.027953216552734375, 0.028078144073486327, 0.027840639114379884, 0.02779680061340332, 0.027750688552856444, 0.027821344375610353, 0.02782912063598633, 0.027817983627319336, 0.027786815643310547, 0.02776054382324219, 0.027836671829223635, 0.027836704254150392, 0.02774220848083496, 0.02778688049316406, 0.028008832931518554, 0.028057600021362306, 0.02813337516784668, 0.028106752395629882, 0.028106719970703124, 0.027991167068481447, 0.028171167373657227, 0.02812495994567871, 0.02795132827758789, 0.02789580726623535, 0.027906047821044923, 0.027891712188720705, 0.02822524833679199, 0.02794268798828125, 0.02802070426940918, 0.02795779228210449, 0.027884960174560547, 0.027902143478393555, 0.0278799991607666, 0.027905887603759765, 0.02796134376525879, 0.02793471908569336, 0.02796134376525879, 0.027850879669189452, 0.027968896865844726, 0.028123647689819335, 0.02794495964050293, 0.027979904174804688, 0.02797350311279297, 0.027917728424072266, 0.028029472351074218, 0.029911008834838868, 0.02967747116088867, 0.028435968399047853, 0.028311391830444337, 0.027951871871948242, 0.02782841682434082, 0.02768227195739746, 0.02777948760986328, 0.027704832077026367, 0.0277774715423584, 0.02774233627319336, 0.027694976806640625, 0.027792383193969726, 0.02776166343688965, 0.02774220848083496, 0.02773606491088867, 0.027812864303588865, 0.027823104858398437, 0.027686975479125978, 0.0278035831451416, 0.02829120063781738, 0.02816806411743164, 0.02779136085510254, 0.02792188835144043, 0.027773471832275392, 0.027768831253051757, 0.027844608306884764, 0.02790809631347656, 0.0278853759765625, 0.027888896942138672, 0.027929536819458006, 0.027830272674560546, 0.027899232864379883, 0.028008319854736327, 0.0279968318939209, 0.02810086441040039, 0.02799203109741211, 0.02801888084411621, 0.028101919174194336, 0.028114463806152342, 0.027998559951782225, 0.028013120651245116, 0.02815999984741211, 0.028128992080688475, 0.028018848419189453, 0.028219520568847658, 0.02811087989807129, 0.028026847839355468, 0.028016767501831054, 0.02800774383544922, 0.02793734359741211, 0.027983871459960938, 0.027926528930664062, 0.028039167404174805, 0.028026847839355468, 0.027977983474731447, 0.027981216430664063, 0.028245471954345704, 0.0279715518951416, 0.027988927841186523, 0.027987167358398436, 0.027995136260986327, 0.02799007987976074, 0.029646879196166993, 0.02871891212463379, 0.02826678466796875, 0.028129152297973633, 0.027922271728515625, 0.02784787178039551, 0.027771871566772462, 0.027811840057373048, 
0.027785152435302735, 0.027886655807495116, 0.02798899269104004, 0.027795583724975585, 0.027916160583496094, 0.028022783279418945, 0.02794643211364746, 0.027846656799316406, 0.0279205436706543, 0.027876064300537108, 0.027932544708251954, 0.027846527099609376, 0.02783843231201172, 0.027876895904541017, 0.02795155143737793, 0.027932191848754884, 0.027951583862304688, 0.027985183715820313, 0.027863807678222656, 0.02784272003173828, 0.027879104614257813, 0.027855039596557617, 0.027789375305175782, 0.027862207412719726, 0.02793747138977051, 0.027992063522338868, 0.0281529598236084, 0.028181375503540038, 0.028096511840820314, 0.028042816162109376, 0.028105152130126952, 0.028077791213989258, 0.02948739242553711, 0.028118528366088868, 0.028002815246582033, 0.028010751724243162, 0.027967039108276366, 0.028028703689575194, 0.028020479202270507, 0.028015264511108397, 0.028071327209472655, 0.028127359390258788, 0.028112895965576173, 0.028071392059326173, 0.028103967666625977, 0.02812928009033203, 0.028184288024902342, 0.028209152221679686, 0.028160032272338868, 0.028196832656860352, 0.02798703956604004, 0.02809116744995117, 0.0281396484375, 0.028188928604125977, 0.028210880279541016, 0.02993731117248535, 0.028737024307250978, 0.02841846466064453, 0.0281112003326416, 0.027872383117675783, 0.027909088134765624, 0.027746368408203125, 0.02787708854675293, 0.02776038360595703, 0.027898208618164062, 0.027870912551879883, 0.027785728454589844, 0.02788140869140625, 0.027875328063964845, 0.027867136001586915, 0.027737632751464843, 0.027818464279174806, 0.02779862403869629, 0.027819936752319335, 0.02787833595275879, 0.02790928077697754, 0.028007328033447267, 0.02794905662536621, 0.027891712188720705, 0.027881471633911133, 0.027897823333740236, 0.02779743957519531, 0.027879680633544922, 0.02908361625671387, 0.02802060890197754, 0.02798201560974121, 0.02798182487487793, 0.027983680725097656, 0.028057600021362306, 0.02823097610473633, 0.02813164710998535, 0.028037504196166994, 0.02817638397216797, 0.02811635208129883, 0.02801318359375, 0.028001855850219727, 0.028045759201049805, 0.02797772789001465, 0.027950815200805664, 0.02808665657043457, 0.028022687911987306, 0.028006336212158204, 0.027955263137817384, 0.028005504608154298, 0.02795814323425293, 0.028006399154663086, 0.02797590446472168, 0.02808399963378906, 0.0279564151763916, 0.027960128784179687, 0.02794905662536621, 0.028080127716064454, 0.028026880264282225, 0.027960447311401366, 0.02800320053100586, 0.027987968444824218, 0.027983007431030275, 0.028234592437744142, 0.029812576293945313, 0.02880953598022461, 0.02834889602661133, 0.0279117431640625, 0.028043712615966797, 0.027821760177612304, 0.027699520111083984, 0.02779136085510254, 0.02775654411315918, 0.027942783355712892, 0.027795743942260743, 0.027720575332641603, 0.02779849624633789, 0.027813791275024414, 0.027770975112915038, 0.02777907180786133, 0.02775654411315918, 0.027703296661376952, 0.027796863555908203, 0.027800384521484374, 0.027821887969970704, 0.027797504425048827, 0.02782339286804199, 0.027810495376586915, 0.027821439743041992, 0.027744672775268556, 0.02777891159057617, 0.02778972816467285, 0.027897151947021484, 0.02782908821105957, 0.02794713592529297, 0.027846015930175782, 0.027864479064941407, 0.027986656188964842, 0.02833430480957031, 0.02815795135498047, 0.028065792083740236, 0.028176223754882813, 0.028078208923339842, 0.02802403259277344, 0.028213920593261718, 0.028141727447509767, 0.02792959976196289, 0.027988832473754884, 0.028078559875488282, 0.028057376861572267, 
0.028037023544311524, 0.02795699119567871, 0.027955455780029295, 0.027998207092285156, 0.02790934371948242, 0.027950944900512694, 0.027899999618530274, 0.02792108726501465, 0.027936735153198243, 0.027914560317993165, 0.02795039939880371, 0.027946624755859375, 0.027973663330078124, 0.028000383377075194, 0.027994911193847657, 0.02800873565673828, 0.028097248077392577, 0.029820928573608397, 0.028690240859985353, 0.028274879455566407, 0.02795929527282715, 0.02796771240234375, 0.027823904037475585, 0.027788736343383788, 0.027786111831665038, 0.027862720489501953, 0.027941024780273438, 0.02780143928527832, 0.027711488723754882, 0.02775449562072754, 0.02775449562072754, 0.027686464309692384, 0.02778067207336426, 0.027896703720092772, 0.027856992721557616, 0.02784675216674805, 0.02789356803894043, 0.02775040054321289, 0.027803712844848633, 0.0278035831451416, 0.027850751876831056, 0.027805152893066405, 0.027872800827026367, 0.02778828811645508, 0.02794291114807129, 0.027966720581054687, 0.027931392669677733, 0.027945119857788085, 0.027912031173706053, 0.027933952331542968, 0.027919103622436523, 0.02814156723022461, 0.02806915283203125, 0.028011232376098632, 0.02809196853637695, 0.028127359390258788, 0.02801020812988281, 0.0279736328125, 0.028240480422973634, 0.028536159515380858, 0.02796406364440918, 0.028086271286010742, 0.02806537628173828, 0.028131872177124023, 0.028052671432495117, 0.028035776138305664, 0.027901952743530273, 0.027937919616699218, 0.02795814323425293, 0.02795484733581543, 0.027935071945190428, 0.027962656021118165, 0.028005119323730468, 0.027985887527465822, 0.02798201560974121, 0.027914079666137695, 0.02796544075012207, 0.02799190330505371, 0.02798195266723633, 0.028090368270874022]",tokens/s,35.717978591285224,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,818.118656,2127.495168,0.0,1732.247552,1728.316416,s,1,7.90771826171875,7.90771826171875,0.0,7.90771826171875,7.90771826171875,7.90771826171875,7.90771826171875,[7.90771826171875],,kWh,9.499211254145242e-06,1.040675102756e-06,3.082502466000836e-06,1.3622388822902077e-05,,MB,1110.9376,2328.82176,0.0,1923.088384,1891.2,s,10,0.48508626937866217,0.048508626937866214,0.0029105012856569454,0.047908863067626956,0.04953007087707519,0.05320087604522704,0.05613752017974854,"[0.056871681213378905, 0.048147838592529295, 0.04766988754272461, 0.0456190071105957, 0.04682697677612305, 0.04732592010498047, 0.048253406524658204, 0.047424320220947266, 0.04871433639526367, 0.04823289489746094]",tokens/s,5277.411795800891,kWh,1.8499921940927998e-06,2.040192039538802e-07,1.2289935992278652e-06,3.2830049972745454e-06,tokens/kWh,77977340.94603075,MB,1120.669696,2328.82176,0.0,1923.088384,1895.80032,s,10,12.342516357421875,1.2342516357421873,0.010719832370455143,1.2334990234375,1.2469368408203125,1.2498820556640624,1.2522382275390624,"[1.24167626953125, 1.2302158203125, 1.239170654296875, 1.2462823486328125, 1.2367822265625, 1.2528272705078125, 1.228632080078125, 1.227983154296875, 1.2144898681640626, 1.2244566650390625]",tokens/s,51.04307596247703,kWh,3.597472274965803e-05,3.967591333625105e-06,1.8983994793373237e-05,5.892630887665637e-05,tokens/kWh,1069131.9582204039,,s,630,12.339980672836315,0.019587270909263974,0.00045158069869127306,0.019530303955078127,0.01990556468963623,0.020072346115112303,0.02086324857711792,"[0.019812416076660157, 0.019705663681030272, 0.019674335479736328, 0.01954867172241211, 0.019330944061279297, 0.020469568252563478, 0.02086579132080078, 0.019900543212890625, 0.01983852767944336, 0.01953638458251953, 0.019448863983154298, 0.0195001277923584, 0.019581663131713868, 0.019593408584594726, 0.01961859130859375, 0.019644416809082032, 0.01983888053894043, 0.019578336715698242, 0.01976179122924805, 0.019737920761108398, 0.019557056427001954, 0.020711423873901368, 0.021198816299438476, 0.019617824554443358, 0.019466239929199217, 0.019545856475830077, 0.019523839950561523, 0.01981439971923828, 0.019611648559570313, 0.019566848754882814, 0.019605119705200194, 0.019908992767333985, 0.019617183685302735, 0.01945840072631836, 0.019310592651367187, 0.019538047790527344, 0.019540128707885743, 0.019650495529174805, 0.01965648078918457, 0.01973446464538574, 0.019793983459472655, 0.01971548843383789, 0.019530176162719726, 0.019464351654052733, 0.01949894332885742, 0.01960870361328125, 0.01951584053039551, 0.01951590347290039, 0.019480064392089845, 0.019564447402954103, 0.019626592636108397, 0.019834880828857423, 0.01981177520751953, 0.019692256927490236, 0.01969136047363281, 0.019765247344970704, 0.019911712646484375, 0.019792863845825195, 
0.01966640090942383, 0.019493408203125, 0.019562496185302734, 0.01957606315612793, 0.01969843292236328, 0.019678848266601563, 0.019524063110351562, 0.019511295318603517, 0.019324928283691405, 0.01941663932800293, 0.019515104293823242, 0.01956732749938965, 0.019517440795898438, 0.019328479766845704, 0.019384735107421874, 0.019412960052490234, 0.019599103927612306, 0.01948099136352539, 0.019326175689697266, 0.01927779197692871, 0.01957356834411621, 0.01943280029296875, 0.019587743759155275, 0.02006220817565918, 0.019517440795898438, 0.01957587242126465, 0.019624895095825195, 0.019646368026733398, 0.01965411186218262, 0.019597087860107422, 0.0193670711517334, 0.019575519561767576, 0.019530431747436523, 0.019435808181762694, 0.01941209602355957, 0.01928598403930664, 0.019555103302001952, 0.01952479934692383, 0.019637216567993165, 0.019605472564697267, 0.019529792785644533, 0.019593151092529296, 0.01942118453979492, 0.01951091194152832, 0.019318304061889648, 0.0194747200012207, 0.019573343276977538, 0.01951299285888672, 0.019451488494873048, 0.019372127532958985, 0.019557088851928712, 0.019744672775268556, 0.019781631469726564, 0.019568384170532225, 0.01992483139038086, 0.019501472473144533, 0.01936720085144043, 0.019409631729125975, 0.019597503662109376, 0.01942310333251953, 0.019425472259521483, 0.019443456649780273, 0.01956003189086914, 0.019443391799926758, 0.019442399978637694, 0.02000486373901367, 0.019402463912963866, 0.0195382080078125, 0.019469087600708007, 0.019619840621948242, 0.01944576072692871, 0.01977289581298828, 0.019482431411743165, 0.020191776275634767, 0.02085702323913574, 0.019685375213623048, 0.01961369514465332, 0.01950627136230469, 0.019452192306518554, 0.019260063171386718, 0.019414655685424803, 0.01946454429626465, 0.019826688766479493, 0.01928390312194824, 0.01918777656555176, 0.019335168838500977, 0.019343360900878907, 0.01927529525756836, 0.019466720581054687, 0.019537919998168944, 0.019728384017944335, 0.019806367874145508, 0.019565792083740235, 0.01952422332763672, 0.019351232528686525, 0.019390783309936523, 0.01956211280822754, 0.019507007598876955, 0.01941766357421875, 0.019363840103149413, 0.01927577590942383, 0.0194969596862793, 0.019306495666503908, 0.01927987289428711, 0.019414688110351564, 0.0196713924407959, 0.019738624572753907, 0.019647680282592773, 0.019612031936645506, 0.01945849609375, 0.01945599937438965, 0.02042790412902832, 0.019753856658935545, 0.01964195251464844, 0.019612064361572267, 0.019688608169555664, 0.01966374397277832, 0.020831615447998046, 0.02157606315612793, 0.020117727279663086, 0.019996480941772463, 0.019928319931030274, 0.020118463516235353, 0.019701984405517577, 0.01965648078918457, 0.019777536392211914, 0.01984547233581543, 0.019679967880249023, 0.019657663345336914, 0.01961759948730469, 0.019537471771240236, 0.019827552795410156, 0.01962940788269043, 0.019669696807861327, 0.019953056335449217, 0.019628543853759766, 0.019597375869750976, 0.019766271591186522, 0.019853311538696287, 0.01990518379211426, 0.019775903701782227, 0.019691328048706054, 0.019640064239501952, 0.01958540725708008, 0.01982796859741211, 0.019710847854614258, 0.019709823608398437, 0.019568479537963868, 0.019514751434326173, 0.019859903335571288, 0.019670751571655272, 0.019626720428466797, 0.019706975936889647, 0.019616479873657226, 0.019805503845214845, 0.01995475196838379, 0.019903488159179687, 0.019913440704345704, 0.019910655975341796, 0.020494335174560546, 0.019853311538696287, 0.01973587226867676, 0.019932064056396484, 0.019552095413208008, 
0.01975699234008789, 0.019773439407348634, 0.019877824783325195, 0.0201646728515625, 0.02000486373901367, 0.020002815246582033, 0.020330495834350586, 0.019965951919555663, 0.019933183670043944, 0.019937280654907227, 0.01986355209350586, 0.019958944320678712, 0.01985174369812012, 0.019871551513671874, 0.019755456924438478, 0.019779136657714844, 0.019685407638549805, 0.019509183883666993, 0.01955718421936035, 0.019519264221191407, 0.019793920516967774, 0.019775199890136718, 0.019566335678100587, 0.01959494400024414, 0.01967804718017578, 0.01963212776184082, 0.01963212776184082, 0.019525632858276368, 0.019501056671142578, 0.01982259178161621, 0.019886079788208007, 0.019711999893188475, 0.01964851188659668, 0.019625120162963867, 0.019503007888793944, 0.01937504005432129, 0.01930201530456543, 0.019267135620117188, 0.01944403266906738, 0.019579231262207033, 0.019525184631347656, 0.01957539176940918, 0.01967513656616211, 0.019684831619262697, 0.019632671356201174, 0.019599552154541015, 0.019656192779541014, 0.01986137580871582, 0.019732032775878906, 0.01990233612060547, 0.019679231643676756, 0.019647487640380858, 0.019512928009033204, 0.01971241569519043, 0.01960550308227539, 0.020002815246582033, 0.019564544677734375, 0.019486719131469727, 0.019441471099853516, 0.019546112060546874, 0.01964771270751953, 0.019746816635131836, 0.019745567321777343, 0.01970195198059082, 0.01979167938232422, 0.019599552154541015, 0.019695615768432616, 0.019762752532958984, 0.019816896438598634, 0.02004787254333496, 0.01954310417175293, 0.01939708709716797, 0.019503583908081056, 0.019451776504516603, 0.019779136657714844, 0.0198699836730957, 0.019851200103759764, 0.019763551712036132, 0.019659839630126952, 0.019915103912353516, 0.019663455963134766, 0.019548160552978516, 0.019475616455078126, 0.019438175201416014, 0.019484928131103516, 0.01945756721496582, 0.01946054458618164, 0.01955574417114258, 0.01959766387939453, 0.01952195167541504, 0.019593088150024415, 0.019492416381835936, 0.019562944412231446, 0.020164575576782227, 0.01981439971923828, 0.01967513656616211, 0.01957475280761719, 0.01961974334716797, 0.020203647613525392, 0.027420448303222655, 0.021888511657714844, 0.019779680252075195, 0.020435232162475586, 0.019833183288574217, 0.019912704467773438, 0.019854623794555663, 0.020156768798828124, 0.020091264724731446, 0.019713247299194336, 0.01957484817504883, 0.01959017562866211, 0.019619232177734376, 0.019826208114624023, 0.01963033676147461, 0.019452415466308593, 0.019442911148071288, 0.019478368759155273, 0.019630624771118165, 0.019357791900634767, 0.019720512390136717, 0.020288639068603516, 0.019825504302978515, 0.019886112213134764, 0.019459999084472657, 0.01944585609436035, 0.019501056671142578, 0.01960515213012695, 0.02037705612182617, 0.01965679931640625, 0.01939740753173828, 0.01939263916015625, 0.019355199813842774, 0.01946451187133789, 0.019494911193847657, 0.01927724838256836, 0.019249727249145508, 0.01951670455932617, 0.019647199630737303, 0.019689472198486328, 0.01943552017211914, 0.01941913604736328, 0.020133184432983398, 0.021340959548950194, 0.019939231872558593, 0.01986284828186035, 0.019925376892089845, 0.019730655670166016, 0.01960540771484375, 0.019660991668701173, 0.019572736740112305, 0.019382272720336914, 0.019288063049316406, 0.019517440795898438, 0.020083711624145507, 0.020059135437011717, 0.019608671188354493, 0.01992176055908203, 0.019710016250610352, 0.019525568008422853, 0.019580703735351562, 0.019800191879272462, 0.019455904006958007, 0.019377952575683595, 0.0193089599609375, 
0.019570688247680663, 0.019535680770874024, 0.019398847579956056, 0.01927577590942383, 0.019697664260864257, 0.01932195281982422, 0.019790752410888672, 0.0196997127532959, 0.019552255630493166, 0.019525568008422853, 0.01940275192260742, 0.019412479400634765, 0.019446176528930666, 0.01942252731323242, 0.019389280319213865, 0.019316736221313476, 0.01922867202758789, 0.01941094398498535, 0.019343360900878907, 0.01939241600036621, 0.019341407775878908, 0.01947644805908203, 0.019638303756713868, 0.019689151763916016, 0.019580352783203126, 0.01968124771118164, 0.019585952758789063, 0.019597312927246095, 0.01947161674499512, 0.019518016815185547, 0.019476512908935546, 0.019436800003051757, 0.01948054313659668, 0.01955936050415039, 0.0193832950592041, 0.019497983932495116, 0.01960140800476074, 0.01985740852355957, 0.019805952072143553, 0.019699296951293944, 0.019505023956298828, 0.01946659278869629, 0.019496927261352538, 0.0194069766998291, 0.01936345672607422, 0.019314783096313477, 0.019286655426025392, 0.01952902412414551, 0.01948847961425781, 0.01943440055847168, 0.019360927581787108, 0.019380352020263673, 0.019385120391845704, 0.019429567337036133, 0.019350431442260743, 0.01984921646118164, 0.019939327239990236, 0.019949567794799804, 0.019838016510009767, 0.01976211166381836, 0.019789663314819336, 0.019663007736206054, 0.019542015075683594, 0.019352832794189454, 0.01931248092651367, 0.019272607803344728, 0.01934115219116211, 0.02026019287109375, 0.019282751083374024, 0.01927577590942383, 0.01935478401184082, 0.01928201675415039, 0.019244096755981446, 0.01928876876831055, 0.019339935302734375, 0.019280223846435546, 0.019454015731811523, 0.01953171157836914, 0.0195379524230957, 0.019353567123413087, 0.019331071853637697, 0.01942732810974121, 0.019529727935791014, 0.019331071853637697, 0.02199519920349121, 0.019581247329711914, 0.019335168838500977, 0.019664896011352538, 0.019359743118286133, 0.01929360008239746, 0.019278047561645507, 0.01938470458984375, 0.019363840103149413, 0.019455808639526367, 0.019504640579223635, 0.01944646453857422, 0.019466239929199217, 0.019357696533203125, 0.019171327590942384, 0.019269567489624023, 0.019229791641235353, 0.019346399307250975, 0.01921558380126953, 0.01924892807006836, 0.019259584426879882, 0.019102527618408204, 0.019264896392822264, 0.019452512741088866, 0.01922380828857422, 0.019234943389892577, 0.019174272537231446, 0.020500255584716798, 0.019840576171875, 0.01998054313659668, 0.01927596855163574, 0.019269632339477538, 0.01919811248779297, 0.01927939224243164, 0.019228479385375977, 0.019286144256591798, 0.019247167587280272, 0.01930031967163086, 0.019257375717163086, 0.019080543518066408, 0.019159711837768555, 0.019212223052978514, 0.01912166404724121, 0.019226528167724608, 0.019077791213989257, 0.019147775650024415, 0.019212383270263672, 0.019465120315551757, 0.01920204734802246, 0.019164255142211914, 0.01913484764099121, 0.01951798439025879, 0.019194976806640625, 0.019213375091552735, 0.019060575485229492, 0.019191808700561523, 0.01923891258239746, 0.01924870491027832, 0.01927212715148926, 0.019197887420654296, 0.01929427146911621, 0.019451904296875, 0.019400703430175782, 0.019234560012817384, 0.01920742416381836, 0.019194879531860352, 0.019224576950073242, 0.019258752822875976, 0.019260032653808594, 0.019247007369995118, 0.01925948715209961, 0.01920796775817871, 0.019334688186645507, 0.01926144027709961, 0.019251903533935546, 0.019167327880859376, 0.019211551666259766, 0.019399391174316407, 0.019759008407592774, 0.01940787124633789, 
0.019243808746337892, 0.01916486358642578, 0.019185344696044923, 0.019573600769042968, 0.019156991958618166, 0.01922662353515625, 0.01920204734802246, 0.019277023315429687, 0.019385120391845704, 0.02008064079284668, 0.01925065612792969, 0.019330656051635742, 0.019352224349975584, 0.019329536437988282, 0.019299552917480468, 0.01925155258178711, 0.019240192413330078, 0.019551744461059572, 0.019479360580444336, 0.01919990348815918, 0.019531871795654295, 0.019371936798095703, 0.019929183959960937, 0.019296255111694336, 0.019318784713745117, 0.019320831298828126, 0.01934262466430664, 0.01942710494995117, 0.019311552047729493, 0.01943731117248535, 0.019506591796875, 0.019460960388183592, 0.019404800415039062, 0.019191808700561523, 0.0191441593170166, 0.019188255310058595, 0.0196011848449707, 0.01970812797546387, 0.019826688766479493, 0.019688608169555664, 0.019453887939453126, 0.019518367767333983, 0.019314687728881837, 0.019416767120361327, 0.01938163185119629, 0.01936684799194336, 0.020107616424560548, 0.01946998405456543, 0.01966080093383789, 0.0192491512298584, 0.019220224380493166, 0.019259647369384767, 0.019277824401855468, 0.019663999557495117, 0.01927891159057617, 0.019241823196411132, 0.019403968811035156, 0.0192325439453125, 0.019158559799194334, 0.019164703369140626, 0.019177471160888672, 0.01919276809692383, 0.019180864334106446, 0.019253952026367187, 0.019406688690185546, 0.019194015502929686, 0.019238304138183594, 0.019276384353637696, 0.01958502388000488, 0.01934329605102539, 0.019250560760498046, 0.019255136489868162, 0.019677728652954102, 0.019296575546264648, 0.019269632339477538, 0.020627231597900392, 0.020057695388793945, 0.019626623153686525, 0.01962188720703125, 0.01959446334838867]",tokens/s,51.05356456406804,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,817.729536,6174.998528,0.0,5779.750912,5773.960192,s,1,7.57930224609375,7.57930224609375,0.0,7.57930224609375,7.57930224609375,7.57930224609375,7.57930224609375,[7.57930224609375],,kWh,1.031787790832368e-05,1.1308835693605839e-06,4.427225764006004e-06,1.587598724169027e-05,,MB,1109.594112,6491.66848,0.0,6085.935104,6038.345728,s,10,2.3674476165771483,0.23674476165771483,0.012145732333113422,0.24003164672851562,0.24437044525146484,0.2444778022766113,0.2445636878967285,"[0.20128944396972656, 0.23908291625976563, 0.2431251220703125, 0.24098037719726562, 0.24434658813476562, 0.2358023681640625, 0.23840220642089843, 0.23757420349121094, 0.2422592315673828, 0.2445851593017578]",tokens/s,1081.3333237342094,kWh,6.383036748641098e-06,7.038957026354253e-07,4.221482845782736e-06,1.130841529705926e-05,tokens/kWh,22638008.357065953,MB,1114.431488,6512.64,0.0,6106.906624,6086.544896,s,10,18.189046997070317,1.8189046997070313,0.002654217950322588,1.818224365234375,1.8220148559570313,1.8222690246582032,1.8224723596191408,"[1.8154215087890626, 1.818479248046875, 1.817969482421875, 1.815954345703125, 
1.822523193359375, 1.8219407958984375, 1.8219583740234375, 1.8213411865234375, 1.81760107421875, 1.8158577880859375]",tokens/s,34.63622916041029,kWh,5.362022793344189e-05,5.914190468179385e-06,3.549668781761704e-05,9.503110621923831e-05,tokens/kWh,662940.8254457017,,s,630,18.185909311294548,0.028866522716340565,0.0002932959952372305,0.028806336402893068,0.029048799324035642,0.029152521705627443,0.030631099720001224,"[0.030439424514770507, 0.029577215194702147, 0.029061119079589845, 0.028778495788574218, 0.028741247177124025, 0.028761888504028322, 0.028666080474853514, 0.02862076759338379, 0.028575328826904296, 0.028547903060913087, 0.0285980167388916, 0.028608768463134766, 0.02877440071105957, 0.02878054428100586, 0.028684255599975585, 0.028745376586914062, 0.02866009521484375, 0.028655616760253907, 0.028535839080810546, 0.028687328338623048, 0.028677791595458985, 0.028649791717529297, 0.02860611152648926, 0.028621055603027343, 0.028801151275634766, 0.02874336051940918, 0.028725568771362304, 0.028696128845214844, 0.028682687759399413, 0.02873958396911621, 0.028685407638549806, 0.028771167755126954, 0.02884819221496582, 0.02881657600402832, 0.028826175689697267, 0.02907321548461914, 0.029012224197387696, 0.029159616470336915, 0.02888915252685547, 0.028945760726928713, 0.02879756736755371, 0.028757440567016603, 0.028709407806396484, 0.02875094413757324, 0.028721887588500975, 0.028811456680297852, 0.028807167053222657, 0.028862464904785157, 0.02883737564086914, 0.028744192123413087, 0.02875596809387207, 0.028755359649658203, 0.028785247802734375, 0.028887359619140626, 0.028808544158935547, 0.028736991882324217, 0.02885264015197754, 0.028909536361694337, 0.028930559158325195, 0.028878400802612305, 0.028897727966308594, 0.02893414306640625, 0.028735231399536133, 0.030709760665893555, 0.029738239288330078, 0.029081663131713866, 0.02891436767578125, 0.02869862365722656, 0.02870681571960449, 0.02865344047546387, 0.028776575088500976, 0.028708864212036132, 0.02870681571960449, 0.028622848510742187, 0.028516351699829103, 0.028704767227172853, 0.028659711837768553, 0.028740863800048828, 0.02864566421508789, 0.028596704483032226, 0.02865337562561035, 0.028733631134033204, 0.02871500778198242, 0.02873107147216797, 0.028655935287475585, 0.028940288543701172, 0.028618080139160156, 0.028738016128540038, 0.028700544357299806, 0.028721471786499024, 0.028708192825317384, 0.028711584091186522, 0.028905536651611326, 0.02885215950012207, 0.028872480392456056, 0.02888630485534668, 0.028923072814941407, 0.028931615829467773, 0.028964160919189453, 0.0289719352722168, 0.029073408126831055, 0.028901056289672853, 0.028836160659790038, 0.029100032806396486, 0.02894643211364746, 0.028848127365112306, 0.028817407608032225, 0.028824800491333007, 0.02875276756286621, 0.028757312774658202, 0.028815967559814453, 0.02894643211364746, 0.028888479232788086, 0.029149599075317383, 0.0289334716796875, 0.02890982437133789, 0.028840192794799803, 0.028915327072143556, 0.028838367462158204, 0.02889753532409668, 0.028831584930419922, 0.028825664520263673, 0.028724319458007814, 0.028959743499755858, 0.02883500862121582, 0.028834432601928712, 0.03068511962890625, 0.029756959915161134, 0.029282848358154298, 0.028835840225219726, 0.02872025680541992, 0.028674079895019532, 0.02870467185974121, 0.02870377540588379, 0.028702463150024414, 0.028508031845092773, 0.028570112228393556, 0.028591903686523437, 0.02866489601135254, 0.028709823608398438, 0.028765663146972657, 0.028669567108154298, 0.02871776008605957, 0.02872038459777832, 
0.028729343414306642, 0.028711904525756837, 0.02872319984436035, 0.028667104721069335, 0.02876032066345215, 0.02870662307739258, 0.028752031326293944, 0.02892857551574707, 0.0287457275390625, 0.028843551635742187, 0.02880988883972168, 0.02877129554748535, 0.028709344863891602, 0.0287379207611084, 0.028872735977172853, 0.028924928665161134, 0.02897977638244629, 0.029137504577636718, 0.029220672607421876, 0.02905824089050293, 0.028986175537109374, 0.028895360946655273, 0.028732959747314452, 0.028706592559814455, 0.028667455673217775, 0.028799808502197266, 0.02874387168884277, 0.028805343627929688, 0.02884940719604492, 0.028914207458496093, 0.029234399795532228, 0.028875551223754882, 0.02874387168884277, 0.028743488311767578, 0.028805152893066406, 0.028767391204833983, 0.02881990432739258, 0.028968448638916015, 0.02888172721862793, 0.028921920776367186, 0.028880447387695313, 0.028848575592041015, 0.028753183364868165, 0.028771360397338866, 0.028781728744506838, 0.030577407836914063, 0.029626367568969726, 0.02905023956298828, 0.028690240859985353, 0.028605247497558595, 0.028618335723876953, 0.02858345603942871, 0.02854092788696289, 0.028541120529174804, 0.028641408920288085, 0.02858451271057129, 0.028628992080688476, 0.028626623153686522, 0.028542911529541016, 0.028655391693115234, 0.028834400177001954, 0.029147136688232423, 0.028675743103027344, 0.028739936828613283, 0.02869171142578125, 0.02874448013305664, 0.028728736877441406, 0.02874220848083496, 0.02871839904785156, 0.028764928817749023, 0.028713983535766603, 0.028719968795776367, 0.028622943878173827, 0.02855740737915039, 0.028655519485473634, 0.02880512046813965, 0.029430784225463868, 0.028821535110473633, 0.028967008590698243, 0.029035392761230468, 0.029057024002075195, 0.02898054313659668, 0.029007808685302734, 0.028895999908447264, 0.028915712356567383, 0.028823551177978517, 0.028794591903686523, 0.02874163246154785, 0.028737823486328126, 0.028807039260864257, 0.028784767150878906, 0.028716928482055665, 0.028749952316284178, 0.028794879913330077, 0.028692480087280273, 0.028769472122192382, 0.028774528503417968, 0.028954399108886718, 0.028743743896484375, 0.029012960433959963, 0.028806880950927736, 0.028739744186401368, 0.028835840225219726, 0.028784576416015624, 0.028813119888305663, 0.028815616607666017, 0.02879283142089844, 0.028725248336791992, 0.030593952178955077, 0.029666879653930663, 0.029003488540649415, 0.028742271423339842, 0.028512479782104493, 0.02851171112060547, 0.028549184799194337, 0.02873788833618164, 0.028784255981445312, 0.02875164794921875, 0.02887548828125, 0.0287903995513916, 0.028780799865722656, 0.02874982452392578, 0.02874982452392578, 0.028705888748168946, 0.0287425594329834, 0.028738847732543947, 0.028797664642333985, 0.028907520294189453, 0.028778495788574218, 0.02873139190673828, 0.02877961540222168, 0.02874435234069824, 0.028806720733642578, 0.02895270347595215, 0.028800575256347657, 0.02876927947998047, 0.028827360153198242, 0.028879295349121092, 0.028856351852416993, 0.028955968856811523, 0.028903776168823243, 0.029568960189819336, 0.02915760040283203, 0.02908723258972168, 0.029053440093994142, 0.029060096740722657, 0.029025407791137697, 0.02904051208496094, 0.02901580810546875, 0.02898150444030762, 0.028868160247802734, 0.0290263671875, 0.02889971160888672, 0.028953855514526367, 0.028871423721313478, 0.028932096481323243, 0.02893519973754883, 0.028953088760375976, 0.028901439666748047, 0.028901792526245116, 0.028929407119750977, 0.028918399810791015, 0.028999584197998047, 0.029114463806152343, 
0.028911808013916015, 0.028935583114624023, 0.028879072189331053, 0.028952224731445313, 0.02897769546508789, 0.02884422492980957, 0.02901100730895996, 0.03085094451904297, 0.029772287368774415, 0.02911884880065918, 0.028860416412353516, 0.028786687850952147, 0.028706464767456055, 0.028651456832885742, 0.028719520568847655, 0.02874163246154785, 0.028817407608032225, 0.0287554874420166, 0.02874361610412598, 0.028776384353637694, 0.02867897605895996, 0.02874959945678711, 0.02868230438232422, 0.028721216201782227, 0.028815231323242187, 0.02895462417602539, 0.02900480079650879, 0.028824703216552734, 0.028760223388671874, 0.028751392364501954, 0.028686527252197266, 0.028844127655029295, 0.028983455657958984, 0.0288656005859375, 0.02892870330810547, 0.02881926345825195, 0.028952768325805664, 0.028825408935546876, 0.028833663940429688, 0.02903481674194336, 0.029097471237182617, 0.029097663879394532, 0.02904863929748535, 0.02896691131591797, 0.028903839111328124, 0.02891961669921875, 0.02899843215942383, 0.028893184661865235, 0.028821504592895508, 0.02890713691711426, 0.02888742446899414, 0.0287457275390625, 0.028825599670410155, 0.02891302490234375, 0.028805952072143554, 0.02890150451660156, 0.028868288040161134, 0.02880102348327637, 0.0287903995513916, 0.029024639129638673, 0.02887641525268555, 0.02891542434692383, 0.028961599349975584, 0.028888927459716798, 0.029097984313964844, 0.028868608474731446, 0.028903423309326173, 0.029041791915893556, 0.028922752380371095, 0.028901632308959962, 0.030714591979980468, 0.029648000717163087, 0.029075872421264647, 0.028886655807495117, 0.028729440689086914, 0.028695552825927735, 0.028726911544799803, 0.02872902488708496, 0.02876153564453125, 0.028768543243408204, 0.028693376541137697, 0.028737152099609375, 0.028776351928710937, 0.028639167785644532, 0.028799327850341797, 0.028674079895019532, 0.028730911254882814, 0.02870524787902832, 0.028753440856933595, 0.028715551376342772, 0.028815391540527344, 0.02885843276977539, 0.028814912796020508, 0.02879747200012207, 0.029027999877929686, 0.028864608764648438, 0.02880886459350586, 0.028797279357910155, 0.028792032241821287, 0.02879280090332031, 0.02886079978942871, 0.028859935760498046, 0.028881824493408204, 0.028917760848999025, 0.02926313591003418, 0.029061855316162108, 0.029056768417358398, 0.028960639953613282, 0.029030784606933594, 0.02899715232849121, 0.028813791275024415, 0.028919807434082033, 0.028809215545654295, 0.028796096801757813, 0.02899795150756836, 0.028789247512817383, 0.028983327865600587, 0.028959808349609376, 0.02896784019470215, 0.028817407608032225, 0.028957984924316407, 0.02887343978881836, 0.028899328231811523, 0.028900928497314453, 0.029280832290649414, 0.028904800415039063, 0.028936256408691408, 0.02892608070373535, 0.028896799087524416, 0.02902931213378906, 0.028900447845458983, 0.028959520339965822, 0.029071104049682616, 0.030795743942260742, 0.02978019142150879, 0.029081375122070312, 0.028708864212036132, 0.02876438331604004, 0.028681024551391602, 0.028731679916381835, 0.028720928192138673, 0.028724128723144532, 0.028729055404663088, 0.028780799865722656, 0.028673152923583984, 0.028824480056762695, 0.02868751907348633, 0.02873619270324707, 0.02869660758972168, 0.028712671279907228, 0.02871548843383789, 0.028738943099975586, 0.028777023315429688, 0.028770303726196288, 0.028896480560302733, 0.028803871154785155, 0.02879897689819336, 0.028763551712036133, 0.0291549129486084, 0.028758495330810548, 0.02883404731750488, 0.02885865592956543, 0.02874982452392578, 0.028858367919921874, 
0.028931615829467773, 0.029001792907714843, 0.029075872421264647, 0.029042688369750977, 0.02900105667114258, 0.02899193572998047, 0.029013919830322265, 0.029339967727661134, 0.02894220733642578, 0.02875200080871582, 0.028896896362304688, 0.0288439998626709, 0.028815263748168944, 0.02886092758178711, 0.028923904418945313, 0.028762111663818358, 0.028915552139282225, 0.02895894432067871, 0.028728256225585936, 0.028913951873779296, 0.028887775421142577, 0.028960639953613282, 0.028829471588134765, 0.028954656600952148, 0.028872415542602538, 0.028938720703125, 0.02891788864135742, 0.028932319641113282, 0.02882966423034668, 0.029035743713378907, 0.028971616744995116, 0.02885807991027832, 0.0308175048828125, 0.029647712707519532, 0.029142976760864258, 0.028788000106811523, 0.028797504425048828, 0.029034656524658205, 0.02894438362121582, 0.028645376205444335, 0.02871500778198242, 0.028604415893554686, 0.02869001579284668, 0.028695968627929686, 0.028744991302490235, 0.028663520812988282, 0.028657663345336915, 0.02855891227722168, 0.029085535049438477, 0.029118751525878905, 0.02866217613220215, 0.02873436737060547, 0.028682432174682616, 0.02910700798034668, 0.029087039947509767, 0.028707168579101563, 0.028788223266601562, 0.028746591567993165, 0.02869990348815918, 0.028713056564331055, 0.028719423294067382, 0.0287542724609375, 0.028735488891601563, 0.028747039794921873, 0.028809471130371092, 0.02895414352416992, 0.028978111267089844, 0.0289234561920166, 0.02882431983947754, 0.02889491271972656, 0.028823392868041992, 0.028789024353027343, 0.028915679931640625, 0.028835744857788087, 0.028684160232543946, 0.02876198387145996, 0.028762367248535155, 0.028730880737304686, 0.028708736419677736, 0.028756607055664064, 0.028758079528808593, 0.028676031112670898, 0.02879283142089844, 0.02877440071105957, 0.02877449607849121, 0.02871900749206543, 0.02883516883850098, 0.02883407974243164, 0.028976543426513672, 0.02878767967224121, 0.028833791732788085, 0.028694271087646484, 0.028782880783081055, 0.028914688110351562, 0.02878563117980957, 0.030646272659301758, 0.029595008850097658, 0.02903654479980469, 0.028760704040527343, 0.028604415893554686, 0.028506111145019532, 0.028635168075561525, 0.028770559310913085, 0.028714719772338866, 0.02867967987060547, 0.028690944671630858, 0.028563232421875, 0.028610847473144532, 0.028565023422241213, 0.028729759216308593, 0.02867571258544922, 0.028573631286621094, 0.02864975929260254, 0.028586143493652343, 0.02857164764404297, 0.02872425651550293, 0.028675039291381835, 0.028907520294189453, 0.029618175506591796, 0.028811103820800783, 0.02866102409362793, 0.028671871185302733, 0.02863747215270996, 0.028848575592041015, 0.028739871978759764, 0.028704767227172853, 0.02880499267578125, 0.02879302406311035, 0.028821439743041993, 0.028876800537109375, 0.0289751033782959, 0.028887231826782225, 0.02889299201965332, 0.02892812728881836, 0.02879270362854004, 0.028729343414306642, 0.02876380729675293, 0.02867030334472656, 0.028686656951904296, 0.028714208602905272, 0.02873788833618164, 0.028745023727416993, 0.028723552703857423, 0.028799455642700197, 0.028688383102416993, 0.02877408027648926, 0.028733024597167967, 0.028727584838867188, 0.028706335067749025, 0.028859296798706056, 0.028938175201416016, 0.028977216720581053, 0.029085695266723634, 0.028866336822509765, 0.028893407821655274, 0.02896294403076172, 0.02900099182128906, 0.028803647994995116]",tokens/s,34.642205083951,, 
float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 955, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 707, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 707, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 506, in __init__ self.mlp = MistralMLP(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 147, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", 
line 760, in <listcomp> [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 555, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 216, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file 
modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in <listcomp> [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ 
self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 164, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.50 GiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1068, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 632, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 300, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 64.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 555, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 216, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, 
in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 
162, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 536.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.074752,1129.250816,0.0,734.0032,709.336064,s,1,7.43693310546875,7.43693310546875,0.0,7.43693310546875,7.43693310546875,7.43693310546875,7.43693310546875,[7.43693310546875],,kWh,5.201868145816964e-06,5.662632469932491e-07,1.980001584002411e-06,7.748132976812624e-06,,MB,1107.88608,1276.051456,0.0,870.31808,809.960448,s,15,0.25941091346740724,0.01729406089782715,0.0005297255663439621,0.01726335906982422,0.01760234909057617,0.01818601589202881,0.018896005973815917,"[0.019073503494262695, 0.01717180824279785, 0.01726335906982422, 0.017280704498291017, 0.0169836483001709, 0.017047456741333008, 0.01691372871398926, 0.01683955192565918, 0.017281055450439453, 0.01729737663269043, 0.0178056640625, 0.017280031204223632, 0.016884416580200196, 0.016996864318847657, 0.017291744232177733]",tokens/s,14802.769662512534,kWh,6.447960063649882e-07,7.107573282732315e-08,4.25537500105971e-07,1.1414092392982822e-06,tokens/kWh,224284149.0904561,MB,1117.888512,1311.70304,0.0,905.969664,809.963008,s,15,9.905081726074219,0.6603387817382812,0.011113331088490127,0.6602666015625,0.672424609375,0.6754632690429687,0.6802393334960937,"[0.6602666015625, 0.6636019897460937, 0.6598812255859375, 0.663210693359375, 0.6421433715820313, 0.6421513671875, 0.6425101318359375, 0.6600648803710938, 0.6729046630859375, 0.681433349609375, 0.6664047241210938, 0.6534988403320312, 0.6589554443359374, 0.6717045288085938, 0.6663499145507813]",tokens/s,95.40557323342162,kWh,1.8989165730301262e-05,2.0941903259328287e-06,8.994136702293818e-06,3.0077492758527917e-05,tokens/kWh,2094589.482766562,,s,945,9.89892097187043,0.010475048647481926,0.00034942214293159305,0.010455455780029297,0.010799897193908692,0.010900223731994629,0.011734079666137695,"[0.010524831771850585, 0.011044447898864745, 0.010804767608642579, 0.01053769588470459, 0.010745183944702148, 0.010670751571655273, 0.010538944244384766, 0.010553279876708984, 0.010522751808166505, 0.010623040199279786, 0.010565567970275878, 0.01033407974243164, 0.010340703964233398, 0.01020633602142334, 0.010228128433227538, 0.010248448371887207, 0.010231807708740234, 0.010229791641235352, 0.010223584175109863, 0.010192831993103028, 0.010149951934814452, 0.010299136161804199, 0.010492064476013184, 0.010332256317138673, 0.010460960388183593, 
0.010344672203063964, 0.010338303565979003, 0.010264575958251953, 0.01027187156677246, 0.010599007606506347, 0.010914079666137695, 0.010866687774658204, 0.010795007705688477, 0.01064857578277588, 0.010570624351501464, 0.01064179229736328, 0.010612480163574219, 0.01053286361694336, 0.010628992080688477, 0.010574175834655761, 0.010753120422363281, 0.010563584327697753, 0.010381728172302247, 0.010374879837036133, 0.010424896240234375, 0.010231871604919434, 0.01022969627380371, 0.010226976394653321, 0.010394335746765137, 0.010491904258728027, 0.010414079666137695, 0.010389727592468262, 0.010287967681884766, 0.010316736221313476, 0.010220959663391113, 0.010297951698303222, 0.01044863986968994, 0.010514687538146973, 0.010532032012939454, 0.010435680389404296, 0.01058521556854248, 0.010692352294921876, 0.010808159828186035, 0.010562175750732422, 0.010714816093444825, 0.010645824432373047, 0.01053600025177002, 0.010519488334655762, 0.010887040138244628, 0.01085632038116455, 0.010617055892944337, 0.010618240356445312, 0.010597023963928223, 0.010751999855041505, 0.010502143859863282, 0.010500096321105956, 0.010448896408081054, 0.010335359573364258, 0.010203712463378907, 0.01036847972869873, 0.010305919647216798, 0.010455455780029297, 0.010528575897216797, 0.010365183830261231, 0.01032192039489746, 0.010554880142211913, 0.010589856147766114, 0.010404640197753907, 0.010297408103942871, 0.010327296257019043, 0.0102259521484375, 0.010539487838745117, 0.010960351943969727, 0.01083683204650879, 0.010840928077697754, 0.010965855598449707, 0.01082367992401123, 0.010553631782531738, 0.010542816162109375, 0.010512384414672851, 0.010647551536560058, 0.0106496000289917, 0.010687999725341797, 0.010473919868469237, 0.010450431823730469, 0.010472000122070313, 0.01069046401977539, 0.0107357120513916, 0.010489439964294434, 0.010465632438659668, 0.010470879554748536, 0.010446592330932617, 0.010420160293579101, 0.010240032196044921, 0.010281855583190919, 0.010358688354492187, 0.010215200424194337, 0.010230079650878906, 0.010329983711242675, 0.010254464149475097, 0.010201087951660156, 0.010233856201171876, 0.01031372833251953, 0.01070473575592041, 0.011079327583312988, 0.010897919654846192, 0.010456864356994628, 0.010522720336914062, 0.01052070426940918, 0.010590208053588868, 0.010569184303283692, 0.010629664421081543, 0.010823264122009277, 0.010660160064697265, 0.010504287719726562, 0.010473695755004883, 0.010469152450561523, 0.010438976287841796, 0.010337984085083008, 0.010372447967529296, 0.010510751724243164, 0.01049129581451416, 0.010369888305664062, 0.010210304260253907, 0.010255071640014648, 0.010192543983459472, 0.010182304382324219, 0.010142687797546387, 0.01010649585723877, 0.010172320365905761, 0.01024227237701416, 0.010199040412902831, 0.010194527626037597, 0.010254752159118653, 0.010178655624389648, 0.01028707218170166, 0.010684096336364746, 0.010882559776306153, 0.010722304344177246, 0.010713055610656738, 0.010653727531433106, 0.010561408042907715, 0.010559840202331543, 0.010671903610229492, 0.010686464309692383, 0.010729472160339355, 0.010584063529968261, 0.010483584403991698, 0.010444928169250488, 0.010520575523376464, 0.010780256271362304, 0.010393183708190918, 0.01028211212158203, 0.010316767692565919, 0.010276960372924806, 0.010239871978759765, 0.010244864463806153, 0.010446463584899902, 0.010401984214782715, 0.010434752464294433, 0.011067744255065917, 0.010900799751281737, 0.010617312431335449, 0.010598272323608398, 0.010414079666137695, 0.010274208068847657, 0.010244704246520997, 
0.010274720191955567, 0.01090783977508545, 0.010463359832763672, 0.010580415725708009, 0.010575072288513183, 0.010600607872009277, 0.01046787166595459, 0.010377311706542968, 0.0103155517578125, 0.010293472290039062, 0.0102194242477417, 0.010303584098815918, 0.010467328071594239, 0.010647520065307618, 0.010472831726074218, 0.010435232162475586, 0.010393600463867187, 0.0104017915725708, 0.010264479637145996, 0.010223615646362304, 0.010192864418029786, 0.010254048347473144, 0.010555808067321776, 0.010784511566162109, 0.010635392189025879, 0.010571999549865723, 0.010763392448425294, 0.01072822380065918, 0.010698080062866211, 0.010678943634033202, 0.01067523193359375, 0.010715488433837891, 0.010613375663757325, 0.0107642879486084, 0.010505408287048339, 0.01054751968383789, 0.01053542423248291, 0.01043455982208252, 0.010528767585754394, 0.010347840309143066, 0.010326720237731934, 0.010420224189758302, 0.010548576354980469, 0.010561375617980958, 0.010494688034057616, 0.01040345573425293, 0.010531231880187989, 0.010448479652404785, 0.010158559799194337, 0.010239999771118164, 0.01073971176147461, 0.010825568199157715, 0.010649760246276856, 0.010462271690368652, 0.010442815780639648, 0.01073036766052246, 0.010695679664611817, 0.011336095809936523, 0.010687071800231934, 0.010659839630126953, 0.010616064071655273, 0.010724096298217774, 0.010569567680358887, 0.010330240249633789, 0.010316991806030273, 0.010115008354187011, 0.01014742374420166, 0.010104384422302246, 0.01036137580871582, 0.010434464454650879, 0.010238431930541992, 0.010141695976257324, 0.010199007987976074, 0.010194623947143554, 0.010278528213500976, 0.0105315523147583, 0.010168319702148437, 0.01021350383758545, 0.010499839782714843, 0.010750080108642578, 0.010491904258728027, 0.010308735847473145, 0.01015078353881836, 0.010141823768615723, 0.010326047897338868, 0.010106752395629883, 0.010203104019165039, 0.010205183982849121, 0.010166272163391114, 0.010172384262084961, 0.010190688133239745, 0.010260416030883788, 0.010211584091186523, 0.010395648002624512, 0.010280960083007813, 0.010286272048950195, 0.010115424156188964, 0.010086496353149415, 0.010113311767578124, 0.010067999839782715, 0.010129728317260743, 0.010073311805725097, 0.010119711875915528, 0.010057056427001953, 0.010066592216491699, 0.010179903984069824, 0.01022156810760498, 0.010140352249145508, 0.010149375915527344, 0.010062335968017578, 0.010110943794250489, 0.010053664207458497, 0.010131456375122071, 0.010143744468688964, 0.010071136474609376, 0.01020406436920166, 0.010208992004394532, 0.010132800102233887, 0.010114015579223633, 0.010153056144714356, 0.010087455749511719, 0.010112192153930664, 0.010077887535095215, 0.010138463973999024, 0.010086560249328613, 0.010093791961669922, 0.01004419231414795, 0.010070015907287597, 0.010059071540832519, 0.010023776054382325, 0.0101212158203125, 0.010085791587829589, 0.010199647903442383, 0.01008358383178711, 0.010132224082946777, 0.010192895889282226, 0.010099871635437012, 0.01010364818572998, 0.010116671562194824, 0.010193344116210937, 0.010172160148620605, 0.010232288360595703, 0.01023363208770752, 0.01013759994506836, 0.010061823844909668, 0.01012940788269043, 0.010045632362365723, 0.010098496437072753, 0.01005568027496338, 0.010137727737426758, 0.010087583541870117, 0.01010694408416748, 0.010085023880004883, 0.010033056259155274, 0.01009059238433838, 0.010097824096679687, 0.010170623779296875, 0.010101344108581543, 0.011016192436218262, 0.010645503997802735, 0.011602016448974609, 0.01024300765991211, 0.010152607917785645, 
0.010113632202148438, 0.010464991569519043, 0.010153984069824219, 0.010180607795715332, 0.010149888038635254, 0.010158080101013184, 0.010202752113342286, 0.01026460838317871, 0.010336607933044433, 0.010225664138793946, 0.010185888290405273, 0.010113471984863282, 0.010113151550292968, 0.01013584041595459, 0.010145792007446289, 0.010082304000854492, 0.010145792007446289, 0.010085536003112792, 0.010154784202575684, 0.010350655555725098, 0.0101396484375, 0.010178720474243164, 0.010102527618408203, 0.010111071586608887, 0.010062975883483887, 0.010122112274169922, 0.01007414436340332, 0.010182623863220214, 0.010059359550476075, 0.01010483169555664, 0.010145792007446289, 0.01011302375793457, 0.010119168281555176, 0.010118783950805664, 0.010121600151062012, 0.010090496063232422, 0.010106880187988282, 0.010028127670288087, 0.010137760162353515, 0.01006492805480957, 0.010145376205444336, 0.010045696258544921, 0.010121088027954101, 0.010062047958374024, 0.010114080429077148, 0.010086688041687011, 0.010237631797790527, 0.010088640213012695, 0.01011193561553955, 0.010112671852111817, 0.010090496063232422, 0.010108832359313966, 0.010100640296936036, 0.010115263938903808, 0.010174464225769043, 0.010196800231933595, 0.014173919677734376, 0.010813568115234375, 0.010235456466674805, 0.01010153579711914, 0.010254336357116698, 0.010117119789123535, 0.01011302375793457, 0.010077919960021973, 0.010101023674011231, 0.010130463600158691, 0.010134495735168458, 0.010114720344543457, 0.010109215736389161, 0.01010044765472412, 0.010133855819702149, 0.010116191864013671, 0.01012009620666504, 0.010112480163574218, 0.010107423782348633, 0.010076160430908204, 0.010110112190246582, 0.010097503662109375, 0.010089792251586915, 0.0100765438079834, 0.010096960067749024, 0.010143967628479003, 0.010171296119689942, 0.010248255729675294, 0.010168448448181152, 0.010164928436279298, 0.010136608123779298, 0.010064607620239257, 0.010115103721618652, 0.01011734390258789, 0.010076383590698242, 0.01005452823638916, 0.010086400032043457, 0.010102687835693359, 0.01017369556427002, 0.010133824348449706, 0.010314271926879883, 0.010141695976257324, 0.01011302375793457, 0.010123135566711426, 0.01010912036895752, 0.010148927688598633, 0.01013644790649414, 0.010188672065734863, 0.010110783576965331, 0.010139967918395996, 0.010077695846557617, 0.010117664337158203, 0.010153951644897461, 0.010141695976257324, 0.010138879776000977, 0.010130080223083496, 0.010384575843811035, 0.010146656036376954, 0.01031174373626709, 0.010168160438537598, 0.010154144287109375, 0.010094112396240234, 0.010371552467346192, 0.010589664459228515, 0.010629664421081543, 0.010711039543151855, 0.01098134422302246, 0.010649632453918458, 0.010754048347473144, 0.01072332763671875, 0.01067580795288086, 0.010750368118286132, 0.010671327590942383, 0.010705696105957032, 0.010974559783935547, 0.010809856414794922, 0.010791071891784668, 0.010690560340881347, 0.01093120002746582, 0.01084006404876709, 0.010871135711669921, 0.010869279861450195, 0.01081603240966797, 0.01072208023071289, 0.010687264442443847, 0.010685855865478516, 0.010637120246887207, 0.010606816291809082, 0.010611295700073242, 0.010577119827270508, 0.01043507194519043, 0.010393280029296875, 0.010554080009460448, 0.010831392288208008, 0.010811648368835448, 0.010841664314270019, 0.010687135696411132, 0.010641280174255372, 0.010473440170288086, 0.01173094367980957, 0.010421728134155274, 0.010420767784118653, 0.010506239891052246, 0.010620896339416503, 0.010629152297973633, 0.010471424102783204, 0.010521856307983399, 
0.010470144271850586, 0.010674176216125488, 0.01075814437866211, 0.010626079559326172, 0.010657024383544923, 0.010755807876586913, 0.010580032348632813, 0.010526656150817871, 0.01061580753326416, 0.010705727577209473, 0.01042198371887207, 0.010441184043884277, 0.01049180793762207, 0.010648768424987793, 0.010657952308654786, 0.01076095962524414, 0.010848256111145019, 0.010907487869262695, 0.01077286434173584, 0.010581791877746582, 0.01063526439666748, 0.01073523235321045, 0.010561920166015626, 0.01054319953918457, 0.010606399536132812, 0.01075823974609375, 0.01073151969909668, 0.010825728416442871, 0.010672127723693848, 0.010721280097961425, 0.010612640380859375, 0.011736543655395507, 0.010707584381103516, 0.010795007705688477, 0.01075609588623047, 0.010532896041870117, 0.010577792167663575, 0.01058137607574463, 0.010705632209777832, 0.01064252758026123, 0.010522815704345703, 0.010433247566223144, 0.010495776176452637, 0.010909536361694335, 0.010805631637573242, 0.010820799827575684, 0.010730175971984864, 0.010681728363037109, 0.010714879989624023, 0.010604960441589355, 0.010606847763061523, 0.01059670352935791, 0.010636832237243652, 0.010678367614746094, 0.010394240379333496, 0.010662079811096192, 0.010668031692504883, 0.010630847930908204, 0.010744031906127929, 0.010736736297607422, 0.010791935920715333, 0.010707200050354004, 0.010648736000061035, 0.010588768005371094, 0.010800959587097168, 0.010748255729675292, 0.010786656379699708, 0.010704319953918457, 0.011899359703063965, 0.01069257640838623, 0.010632896423339843, 0.010624768257141114, 0.010676480293273926, 0.010631232261657715, 0.010627455711364746, 0.01076633644104004, 0.010704895973205567, 0.010751711845397948, 0.010864928245544433, 0.01070899200439453, 0.010693920135498048, 0.01059008026123047, 0.010574687957763671, 0.010579968452453613, 0.011530240058898926, 0.011794783592224122, 0.011101856231689453, 0.011558879852294921, 0.010742815971374511, 0.010646528244018554, 0.010691807746887208, 0.010523424148559571, 0.010675328254699707, 0.0106561279296875, 0.010590656280517578, 0.010953151702880859, 0.010806912422180176, 0.01074176025390625, 0.010696703910827637, 0.010602335929870605, 0.010631327629089355, 0.010792960166931152, 0.010727423667907715, 0.010620800018310547, 0.01065334415435791, 0.010861023902893067, 0.010798303604125977, 0.01062377643585205, 0.010659775733947753, 0.010749792098999024, 0.010733792304992676, 0.010757632255554199, 0.01066006374359131, 0.010600959777832031, 0.011044639587402344, 0.012755200386047364, 0.01128867244720459, 0.010575231552124023, 0.01075868797302246, 0.010573823928833008, 0.01056761646270752, 0.010555392265319824, 0.01083193588256836, 0.010665023803710937, 0.010771391868591308, 0.010734720230102539, 0.010641280174255372, 0.010673151969909669, 0.010705120086669922, 0.010657343864440918, 0.01073583984375, 0.011132927894592285, 0.010823904037475586, 0.010889344215393067, 0.01096992015838623, 0.01076921558380127, 0.010764320373535157, 0.010853568077087402, 0.010759296417236329, 0.010770079612731933, 0.010720352172851562, 0.010718144416809083, 0.010672127723693848, 0.010829824447631836, 0.010696703910827637, 0.010622976303100586, 0.01077222442626953, 0.0107010555267334, 0.010629119873046875, 0.01061513614654541, 0.010712415695190429, 0.010643136024475098, 0.010446623802185058, 0.010197855949401856, 0.010129376411437989, 0.010164256095886231, 0.010182592391967773, 0.010154047966003417, 0.01011302375793457, 0.01022150421142578, 0.010141119956970215, 0.010162816047668458, 0.010215392112731933, 
0.010086400032043457, 0.01017039966583252, 0.010590208053588868, 0.010972543716430664, 0.01087551975250244, 0.010643072128295898, 0.01056982421875, 0.01044099235534668, 0.01039731216430664, 0.010405920028686523, 0.01031935977935791, 0.010574687957763671, 0.010601696014404296, 0.010570528030395508, 0.0104017915725708, 0.010288415908813476, 0.010421088218688965, 0.010324000358581543, 0.01019878387451172, 0.0101048641204834, 0.010082304000854492, 0.010172160148620605, 0.010154399871826172, 0.010116959571838379, 0.010460543632507324, 0.010533056259155273, 0.01019545555114746, 0.010264255523681641, 0.010133760452270508, 0.01021951961517334, 0.010129695892333984, 0.010540767669677735, 0.01084620761871338, 0.010762240409851074, 0.010758272171020508, 0.01063481616973877, 0.010654239654541016, 0.010477343559265136, 0.01042841625213623, 0.010406144142150879, 0.010641375541687011, 0.010796832084655763, 0.010391551971435547, 0.010272543907165527, 0.010252511978149415, 0.010239232063293456, 0.01032374382019043, 0.01047651195526123, 0.010120512008666992, 0.010106559753417968, 0.010202112197875977, 0.010207232475280761, 0.010145440101623535, 0.01022339153289795, 0.010172991752624512, 0.010170368194580079, 0.010204959869384765, 0.010128671646118165, 0.010195903778076172, 0.010168319702148437, 0.010131391525268554, 0.010432576179504395, 0.010975232124328613, 0.011100128173828125, 0.010866720199584961, 0.010584063529968261, 0.010510335922241211, 0.010457375526428222, 0.010372832298278808, 0.010381312370300292, 0.010571167945861817, 0.010480223655700683, 0.01040505599975586, 0.010424192428588868, 0.010675135612487794, 0.01036291217803955, 0.01009660816192627, 0.010163264274597168, 0.010077119827270508, 0.010135552406311036, 0.010233792304992675, 0.010139360427856445, 0.010136896133422852, 0.010234848022460937, 0.010245696067810058, 0.010181056022644044, 0.010221152305603028, 0.010123680114746094, 0.010196864128112793, 0.01038144016265869, 0.010864416122436523, 0.010905823707580566, 0.010645503997802735, 0.010544639587402344, 0.010598655700683593, 0.010510592460632325, 0.010475520133972169, 0.010491519927978516, 0.010576255798339844, 0.010743807792663575, 0.010665984153747558, 0.010717503547668457, 0.010760064125061036, 0.010566656112670898, 0.010443584442138672, 0.010356736183166505, 0.010301440238952637, 0.010225664138793946, 0.010192447662353516, 0.010150336265563964, 0.010269696235656739, 0.010109951972961426, 0.010159296035766601, 0.010224448204040528, 0.010354432106018066, 0.010313088417053223, 0.010244864463806153, 0.0102457275390625, 0.010340736389160157, 0.010802720069885254, 0.01071769618988037, 0.011051136016845703, 0.010731103897094727, 0.010645376205444337, 0.010532928466796874, 0.010481984138488769, 0.010473183631896973, 0.010508735656738282, 0.010599455833435058, 0.010515423774719238, 0.010751999855041505, 0.01067625617980957, 0.010505887985229492, 0.010448448181152344, 0.010431232452392579, 0.010364128112792968, 0.010410335540771485, 0.010445247650146485, 0.010391136169433594, 0.010615200042724609, 0.010727392196655274, 0.01039686393737793, 0.01032192039489746, 0.01022163200378418, 0.010274815559387206, 0.010340224266052246, 0.010618176460266113, 0.010920255661010743, 0.010893376350402832, 0.010772352218627929, 0.010641599655151367, 0.010688032150268555, 0.010664799690246581, 0.010559488296508789, 0.010528191566467286, 0.010451519966125488, 0.010606880187988282, 0.010845919609069825, 0.011370368003845215, 0.01092416000366211, 0.013450783729553223, 0.011151840209960937, 0.010439807891845704, 
0.010486656188964844, 0.010284704208374023, 0.01028326416015625, 0.010338624000549317, 0.010235487937927246, 0.010211359977722169, 0.010193056106567382, 0.010424063682556153, 0.010368895530700684, 0.012419455528259278, 0.012228608131408691, 0.01057151985168457, 0.01073305606842041, 0.011157440185546876, 0.010847040176391601, 0.01073971176147461, 0.010694656372070312, 0.01061888027191162, 0.010748991966247559, 0.01073971176147461, 0.010601344108581543, 0.010803263664245605, 0.010754048347473144, 0.010724639892578125, 0.010717920303344727, 0.01066921615600586, 0.010721983909606934, 0.010490015983581543, 0.010389504432678222, 0.010297344207763673, 0.01030288028717041, 0.01024403190612793, 0.010357151985168457, 0.010244352340698242, 0.010182656288146973, 0.010291199684143066, 0.010338303565979003, 0.01028006362915039, 0.010279808044433594, 0.010235424041748048, 0.010203616142272949, 0.010436415672302246, 0.010874943733215332, 0.010575743675231933, 0.010669407844543457, 0.010572671890258789, 0.01062502384185791, 0.010559488296508789, 0.010422271728515625, 0.01053273582458496, 0.010489184379577637, 0.010504639625549317, 0.010652000427246094, 0.010757535934448241, 0.010534496307373046, 0.010414560317993165, 0.010295136451721192, 0.010309408187866211, 0.010205280303955079, 0.010306015968322754, 0.010328415870666505, 0.010364928245544434, 0.01040998363494873, 0.01042636775970459, 0.010424320220947265, 0.010362879753112793, 0.010315296173095703, 0.010355199813842773, 0.010469344139099121, 0.01095411205291748, 0.010895359992980956, 0.010807935714721679, 0.010749888420104981, 0.010729408264160157, 0.010608768463134766, 0.011374591827392578, 0.011780096054077148, 0.011974143981933593, 0.011467007637023926, 0.010658047676086425, 0.010682271957397462, 0.010715231895446778, 0.010476608276367188, 0.010235936164855956, 0.010195872306823731, 0.010436448097229004, 0.010171584129333496, 0.01013974380493164, 0.010244992256164551, 0.010225664138793946, 0.010465279579162597, 0.010340352058410645, 0.01023369598388672, 0.01022480010986328, 0.010238975524902343, 0.010270912170410157, 0.010480480194091797, 0.010795904159545898, 0.010854496002197265, 0.010772480010986327, 0.010704895973205567, 0.010529952049255371, 0.010611552238464356, 0.010553343772888184, 0.01067244815826416, 0.010739392280578613]",tokens/s,95.46495044110252,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.616,6174.998528,0.0,5779.750912,5773.960192,s,1,7.478220703125,7.478220703125,0.0,7.478220703125,7.478220703125,7.478220703125,7.478220703125,[7.478220703125],,kWh,1.0453589358333676e-05,1.1408213825045796e-06,3.244724817996758e-06,1.4839135558835014e-05,,MB,1107.259392,6491.66848,0.0,6085.935104,6038.345728,s,10,2.3825533142089843,0.23825533142089844,0.014006455705070683,0.24009297180175782,0.2507077835083008,0.25461110916137697,0.2577337696838379,"[0.20144749450683594, 0.23908642578125, 0.25851443481445313, 
0.23458921813964845, 0.2365148468017578, 0.23665673828125, 0.24212879943847657, 0.24109951782226563, 0.24267546081542968, 0.2498403778076172]",tokens/s,1074.4775299393157,kWh,6.354509738043421e-06,7.007880555138613e-07,4.220492506826212e-06,1.1275790300383495e-05,tokens/kWh,22703508.417613383,MB,1112.174592,6512.64,0.0,6106.906624,6086.544896,s,10,18.115085937499998,1.8115085937500002,0.005597813361466063,1.812888427734375,1.8164951049804687,1.817097100830078,1.8175786975097656,"[1.81152734375, 1.8122493896484375, 1.81234716796875, 1.79936083984375, 1.8026717529296874, 1.816361328125, 1.8176990966796875, 1.8134296875, 1.8143582763671875, 1.8150810546875]",tokens/s,34.7776434610138,kWh,5.342013054278995e-05,5.892050214093037e-06,3.532256690537347e-05,9.463474766225645e-05,tokens/kWh,665717.4194075285,,s,630,18.111963876724243,0.028749149010673394,0.00032102976876612315,0.028713279724121094,0.028936253356933592,0.02910829677581787,0.030612848529815676,"[0.030595104217529298, 0.029626304626464844, 0.02889936065673828, 0.028631040573120117, 0.028559232711791994, 0.02851024055480957, 0.028401599884033204, 0.028483039855957033, 0.028536991119384767, 0.028654111862182616, 0.028512063980102538, 0.028549312591552734, 0.028524351119995118, 0.0285861759185791, 0.028477439880371092, 0.028545024871826172, 0.02865679931640625, 0.02864121627807617, 0.028562335968017577, 0.02863420867919922, 0.028593055725097655, 0.028645376205444335, 0.02865558433532715, 0.0285098876953125, 0.02873583984375, 0.02873958396911621, 0.028579839706420897, 0.028545024871826172, 0.028762304306030273, 0.02869638442993164, 0.02867193603515625, 0.02862598419189453, 0.028791135787963867, 0.02885446357727051, 0.029012672424316405, 0.029253408432006835, 0.02896076774597168, 0.02882361602783203, 0.02870582389831543, 0.028725599288940428, 0.02872991943359375, 0.02873311996459961, 0.028721471786499024, 0.02878384017944336, 0.029506336212158202, 0.028700672149658202, 0.02862051200866699, 0.028732831954956056, 0.028803232192993165, 0.02866044807434082, 0.028728992462158202, 0.02878044891357422, 0.0287891845703125, 0.028665855407714845, 0.02880512046813965, 0.028827648162841796, 0.028753919601440428, 0.028787776947021483, 0.02870163154602051, 0.02872319984436035, 0.02877804756164551, 0.02876051139831543, 0.028659711837768553, 0.03077734375, 0.029468576431274415, 0.02901545524597168, 0.028737695693969726, 0.028594720840454103, 0.02845462417602539, 0.028565792083740233, 0.028653568267822265, 0.028620800018310546, 0.02851020812988281, 0.028553279876708984, 0.028450752258300783, 0.02874163246154785, 0.028549375534057616, 0.028566272735595703, 0.02852454376220703, 0.028439552307128906, 0.028645376205444335, 0.02858393669128418, 0.028635135650634767, 0.029360128402709962, 0.028440704345703127, 0.02865510368347168, 0.028489728927612305, 0.028670335769653322, 0.028756128311157227, 0.028819135665893555, 0.02862710380554199, 0.028683712005615234, 0.028764736175537108, 0.02868364715576172, 0.028713600158691406, 0.02884105682373047, 0.028831647872924804, 0.02899660873413086, 0.028845695495605467, 0.028809600830078125, 0.028833791732788085, 0.028876863479614257, 0.0287455997467041, 0.02876131248474121, 0.028876991271972657, 0.028716896057128908, 0.02872812843322754, 0.02876825523376465, 0.02879078483581543, 0.028712959289550782, 0.028704191207885744, 0.028803712844848634, 0.028663423538208006, 0.029017728805541994, 0.028786687850952147, 0.028726112365722655, 0.028719167709350586, 0.02877622413635254, 0.02875587272644043, 0.02867625617980957, 
0.028847936630249024, 0.028702367782592775, 0.028706304550170897, 0.028779327392578127, 0.028672224044799806, 0.028718624114990234, 0.030757152557373046, 0.029655008316040038, 0.0289168643951416, 0.0286965446472168, 0.028613536834716798, 0.02853232002258301, 0.028477439880371092, 0.02859663963317871, 0.028487871170043946, 0.028628608703613282, 0.028555391311645507, 0.02861369514465332, 0.02871603202819824, 0.02860851287841797, 0.028497919082641602, 0.02874982452392578, 0.028564992904663085, 0.02863088035583496, 0.02856230354309082, 0.028636255264282227, 0.0286276798248291, 0.028499935150146486, 0.028649471282958985, 0.028495872497558594, 0.02877628707885742, 0.02866796875, 0.02862499237060547, 0.02854300880432129, 0.028601823806762697, 0.02864348793029785, 0.028832096099853516, 0.0289619197845459, 0.02867852783203125, 0.028746240615844725, 0.028991615295410156, 0.02888431930541992, 0.028897216796875, 0.028938848495483397, 0.028901567459106447, 0.02872096061706543, 0.028695552825927735, 0.02874060821533203, 0.02873958396911621, 0.02872643280029297, 0.028750688552856445, 0.02880496025085449, 0.028778175354003906, 0.028952255249023437, 0.028695327758789062, 0.028696575164794923, 0.028775903701782228, 0.02884048080444336, 0.028911615371704103, 0.028761695861816407, 0.028723615646362305, 0.028788288116455077, 0.028758144378662108, 0.028752031326293944, 0.028839647293090822, 0.02888035202026367, 0.0287938232421875, 0.02877644729614258, 0.028685375213623045, 0.03062009620666504, 0.029390335083007812, 0.028874847412109376, 0.0286110725402832, 0.028839807510375976, 0.028520448684692383, 0.028624832153320314, 0.02842425537109375, 0.02831974411010742, 0.028358047485351562, 0.028416608810424803, 0.028362720489501954, 0.028734560012817382, 0.028719680786132813, 0.028440959930419923, 0.028411903381347657, 0.028321727752685547, 0.028325727462768555, 0.028401887893676758, 0.028424192428588867, 0.028334079742431642, 0.02836070442199707, 0.028493791580200194, 0.028436288833618165, 0.028451040267944337, 0.02838755226135254, 0.02844179153442383, 0.028400224685668947, 0.02844879913330078, 0.02843235206604004, 0.028380767822265625, 0.028420511245727538, 0.02853273582458496, 0.02852604866027832, 0.02861520004272461, 0.0286167049407959, 0.028645376205444335, 0.028661184310913086, 0.028543039321899413, 0.028561920166015626, 0.02850115203857422, 0.02863395118713379, 0.02860633659362793, 0.028502143859863282, 0.028516063690185545, 0.02846134376525879, 0.02853455924987793, 0.02849184036254883, 0.028581504821777345, 0.028512575149536132, 0.028503456115722657, 0.028537696838378906, 0.028534751892089844, 0.028508159637451173, 0.02856550407409668, 0.02852659225463867, 0.028559392929077148, 0.02858540725708008, 0.028515871047973634, 0.028503040313720703, 0.0285614070892334, 0.028506111145019532, 0.02854092788696289, 0.03032268714904785, 0.029396991729736328, 0.028836992263793944, 0.02857865524291992, 0.028452896118164064, 0.028420095443725587, 0.02842624092102051, 0.028428287506103517, 0.028382463455200194, 0.028475263595581054, 0.028424543380737306, 0.028367391586303713, 0.028331104278564452, 0.0283472957611084, 0.028293184280395508, 0.0283155517578125, 0.028356639862060547, 0.02840166473388672, 0.028411712646484375, 0.028553407669067384, 0.028516351699829103, 0.028442367553710938, 0.02852275276184082, 0.028422143936157225, 0.028516351699829103, 0.028512256622314453, 0.028411359786987306, 0.02845052719116211, 0.028619039535522462, 0.028447263717651366, 0.028416000366210937, 0.02846723175048828, 0.028601696014404297, 
0.028617216110229493, 0.028594303131103515, 0.028669279098510744, 0.028667680740356444, 0.028697471618652343, 0.028639232635498047, 0.028688640594482423, 0.02860972785949707, 0.028648000717163086, 0.028620351791381837, 0.02852396774291992, 0.02857472038269043, 0.028747615814208986, 0.028518272399902345, 0.028549407958984373, 0.028530048370361327, 0.028576160430908205, 0.02873776054382324, 0.02886182403564453, 0.028750463485717772, 0.02871500778198242, 0.02880512046813965, 0.028848031997680663, 0.028772447586059572, 0.028794879913330077, 0.028682239532470705, 0.02875801658630371, 0.028778495788574218, 0.028780256271362305, 0.028712575912475585, 0.030640703201293945, 0.029507104873657226, 0.029047264099121093, 0.028697599411010744, 0.028621824264526367, 0.028603904724121092, 0.028588672637939454, 0.02862054443359375, 0.02874380874633789, 0.02859519958496094, 0.028601343154907227, 0.028528640747070313, 0.028720703125, 0.028700992584228514, 0.028668031692504883, 0.02855731201171875, 0.028589920043945314, 0.0286474552154541, 0.02854310417175293, 0.028639488220214844, 0.028702720642089844, 0.028691680908203124, 0.028705535888671876, 0.0285565128326416, 0.02870044708251953, 0.02874473571777344, 0.028661407470703126, 0.028905567169189454, 0.028627967834472655, 0.028678720474243163, 0.028684352874755858, 0.02891200065612793, 0.028886560440063477, 0.028807647705078127, 0.028837631225585938, 0.030287519454956054, 0.029019744873046874, 0.028933120727539063, 0.029001728057861328, 0.028932096481323243, 0.028825599670410155, 0.028853952407836916, 0.02880031967163086, 0.028937376022338868, 0.028833887100219727, 0.028753919601440428, 0.02876323127746582, 0.028742591857910157, 0.02886627197265625, 0.028816928863525392, 0.028818111419677734, 0.028763935089111327, 0.028709056854248047, 0.028763711929321287, 0.028954879760742187, 0.028862464904785157, 0.028717056274414062, 0.02894233512878418, 0.02876006317138672, 0.02875596809387207, 0.02880732727050781, 0.02886783981323242, 0.028951135635375977, 0.030656063079833984, 0.029681791305541994, 0.02915564727783203, 0.028720191955566406, 0.02862710380554199, 0.028588991165161132, 0.028495712280273436, 0.02861392021179199, 0.028531423568725588, 0.028665855407714845, 0.028633087158203126, 0.028606464385986328, 0.029478912353515626, 0.02856550407409668, 0.028663808822631837, 0.028633216857910156, 0.028682111740112304, 0.028923904418945313, 0.028708864212036132, 0.028672000885009766, 0.02879859161376953, 0.028688928604125977, 0.028673887252807617, 0.02857094383239746, 0.028655519485473634, 0.029012767791748047, 0.028733440399169922, 0.028733503341674804, 0.028714527130126954, 0.028709280014038087, 0.0287825927734375, 0.028733440399169922, 0.02881331253051758, 0.02879692840576172, 0.02893414306640625, 0.028964864730834962, 0.029110111236572266, 0.029060991287231445, 0.02896473693847656, 0.029018527984619142, 0.028778495788574218, 0.02893337631225586, 0.02881203269958496, 0.028753919601440428, 0.02876825523376465, 0.028857471466064454, 0.02878758430480957, 0.028738912582397462, 0.02880169677734375, 0.028739072799682616, 0.028825824737548827, 0.028934431076049805, 0.028850175857543944, 0.02872684860229492, 0.028784576416015624, 0.028750335693359375, 0.02878054428100586, 0.02921881675720215, 0.029187103271484376, 0.029010879516601563, 0.02887068748474121, 0.028901344299316407, 0.028798847198486327, 0.031053024291992186, 0.029621023178100586, 0.02892361640930176, 0.028692768096923827, 0.028693824768066405, 0.028622623443603515, 0.028660640716552735, 0.02848988723754883, 
0.02865340805053711, 0.028633087158203126, 0.02853887939453125, 0.028680192947387696, 0.02871603202819824, 0.028683263778686522, 0.028594175338745118, 0.02853856086730957, 0.02858425521850586, 0.028565216064453124, 0.028623424530029296, 0.028616512298583984, 0.028611936569213868, 0.028652095794677736, 0.028704767227172853, 0.028737535476684572, 0.02875116729736328, 0.028801727294921874, 0.028692384719848633, 0.02873103904724121, 0.028690784454345704, 0.028818815231323243, 0.028666591644287108, 0.02874367904663086, 0.02874367904663086, 0.028719104766845704, 0.02885171127319336, 0.028936128616333007, 0.028833984375, 0.028780927658081056, 0.029017087936401367, 0.028976127624511717, 0.028754240036010743, 0.028755136489868164, 0.028692447662353515, 0.02868262481689453, 0.02879280090332031, 0.028709056854248047, 0.02872319984436035, 0.02879078483581543, 0.028716543197631835, 0.02869487953186035, 0.028782655715942383, 0.028711008071899413, 0.02874163246154785, 0.02870681571960449, 0.02874928092956543, 0.0287805118560791, 0.028797183990478516, 0.02879017639160156, 0.02885932731628418, 0.028712928771972655, 0.0288719367980957, 0.02888547134399414, 0.028766496658325195, 0.030526592254638673, 0.029479808807373047, 0.029003231048583985, 0.028942880630493165, 0.029142047882080076, 0.028761056900024413, 0.028556800842285155, 0.028531200408935548, 0.028622528076171876, 0.028804927825927733, 0.028688320159912108, 0.028561983108520508, 0.028618751525878908, 0.028593887329101564, 0.028575807571411132, 0.028621248245239258, 0.028642208099365234, 0.02862387275695801, 0.02857088088989258, 0.02860915184020996, 0.02855116844177246, 0.028674047470092775, 0.028682016372680665, 0.028688608169555666, 0.028802976608276368, 0.02873049545288086, 0.028707807540893554, 0.02872438430786133, 0.028696767807006834, 0.028701343536376954, 0.028680192947387696, 0.028675680160522462, 0.02886697578430176, 0.028886272430419923, 0.029305791854858397, 0.02901740837097168, 0.02906675148010254, 0.02893926429748535, 0.028710079193115235, 0.02879756736755371, 0.02877440071105957, 0.02882374382019043, 0.02879897689819336, 0.028757440567016603, 0.028681888580322265, 0.028758943557739256, 0.02877644729614258, 0.02872319984436035, 0.028716064453125, 0.028703104019165038, 0.028754528045654298, 0.028711999893188477, 0.028713920593261718, 0.028762111663818358, 0.02878463935852051, 0.028856447219848633, 0.02884752082824707, 0.028783071517944337, 0.02873244857788086, 0.0288306884765625, 0.02888243293762207, 0.02881177520751953, 0.028704927444458007, 0.030879743576049806, 0.029616128921508788, 0.028861919403076173, 0.028768415451049804, 0.02875430488586426, 0.028737312316894532, 0.028647647857666016, 0.02851353645324707, 0.02863795280456543, 0.028633087158203126, 0.028542335510253907, 0.028621440887451173, 0.028727296829223634, 0.028667903900146483, 0.02856345558166504, 0.028555200576782225, 0.028687551498413087, 0.028683135986328125, 0.02856982421875, 0.02890118408203125, 0.028624319076538087, 0.028623615264892578, 0.028785760879516602, 0.028652128219604493, 0.028747167587280274, 0.028667808532714844, 0.028625024795532226, 0.02852681541442871, 0.028748224258422852, 0.028729343414306642, 0.02873075294494629, 0.028727903366088867, 0.028852256774902343, 0.028917024612426758, 0.029137632369995118, 0.0291060791015625, 0.028868703842163085, 0.028955839157104493, 0.028883935928344727, 0.0288623046875, 0.028753919601440428, 0.028792224884033202, 0.02872368049621582, 0.028729215621948242, 0.02879283142089844, 0.028700159072875975, 0.028875520706176758, 
0.028786304473876954, 0.0288221435546875, 0.028912384033203124, 0.028869632720947266, 0.028778495788574218, 0.0287825927734375, 0.028800287246704102, 0.028885631561279296, 0.028855808258056642, 0.028774463653564453, 0.02878873634338379, 0.028858688354492186, 0.028833919525146485, 0.02874991989135742, 0.02874163246154785, 0.028747264862060546]",tokens/s,34.783638278431845,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,839.241728,8760.786944,0.0,8365.539328,8230.228992,s,1,7.50484423828125,7.50484423828125,0.0,7.50484423828125,7.50484423828125,7.50484423828125,7.50484423828125,[7.50484423828125],,kWh,1.1066832612497515e-05,1.2130818759605153e-06,5.226393070004165e-06,1.7506307558462196e-05,,MB,1169.158144,8951.627776,0.0,8545.8944,8499.295232,s,10,2.6764699096679685,0.26764699096679684,0.010224015400804483,0.27015660095214844,0.2749579895019531,0.2763633331298828,0.2774876080322266,"[0.2387668762207031, 0.2687913208007813, 0.2687487487792969, 0.264208251953125, 0.26977490234375, 0.2708685607910156, 0.27053829956054687, 0.27235858154296877, 0.27464569091796875, 0.2777686767578125]",tokens/s,956.4837589814648,kWh,7.34776420145863e-06,8.100666222931695e-07,4.890156689900046e-06,1.3047987513651845e-05,tokens/kWh,19619883.8887723,MB,1174.2208,8953.724928,0.0,8547.991552,8499.297792,s,10,18.966721679687502,1.8966721679687502,0.004236313017153842,1.8971524658203125,1.8996663452148437,1.9014992492675782,1.9029655725097656,"[1.89731689453125, 1.8964061279296875, 1.8975816650390624, 1.895955078125, 1.8856767578125, 1.896988037109375, 1.89871826171875, 1.8954876708984374, 1.9033321533203125, 1.899259033203125]",tokens/s,33.216072373471974,kWh,5.602011279520714e-05,6.179156081395586e-06,3.704018240989933e-05,9.923945128650207e-05,tokens/kWh,634828.1775371814,,s,630,18.963937675476082,0.030101488373771548,0.0003737273369058032,0.030036399841308594,0.03032941131591797,0.030539182472229004,0.03224072696685791,"[0.03222934341430664, 0.03057254409790039, 0.030236448287963867, 0.03005414390563965, 0.029918912887573243, 0.02982304000854492, 0.02989302444458008, 0.029762943267822264, 0.029848512649536134, 0.02996944046020508, 0.02985241508483887, 0.029823200225830078, 0.0300316162109375, 0.030032127380371094, 0.030001279830932617, 0.030010976791381837, 0.030052223205566407, 0.029941375732421876, 0.029847583770751952, 0.029767711639404296, 0.029906656265258787, 0.030077951431274414, 0.030069856643676757, 0.029860767364501953, 0.029891775131225585, 0.029958976745605468, 0.029800447463989257, 0.030143583297729492, 0.029955135345458985, 0.02990652847290039, 0.030183231353759766, 0.030195775985717772, 0.03033251190185547, 0.030488927841186522, 0.030249120712280274, 0.030761247634887696, 0.030257152557373046, 0.029973600387573244, 0.030194591522216797, 0.030045215606689452, 0.030088159561157228, 0.030010400772094728, 0.030073823928833007, 0.03017728042602539, 0.030138111114501952, 0.029980224609375, 
0.030036575317382814, 0.030038112640380858, 0.03025017547607422, 0.03026924705505371, 0.030079679489135744, 0.030007680892944335, 0.030168256759643554, 0.030157535552978516, 0.030029855728149413, 0.03024105644226074, 0.030117599487304688, 0.030121280670166017, 0.03013088035583496, 0.030013439178466796, 0.030076223373413084, 0.030819007873535156, 0.030054399490356445, 0.032245376586914065, 0.030806367874145507, 0.030271488189697264, 0.029884416580200194, 0.02975062370300293, 0.029945663452148438, 0.030065311431884765, 0.02992918395996094, 0.029901472091674805, 0.02979596710205078, 0.02995327949523926, 0.029847904205322264, 0.029911167144775392, 0.029845279693603517, 0.0299051513671875, 0.030036224365234374, 0.02981011199951172, 0.029905664443969728, 0.03004966354370117, 0.030089088439941406, 0.03018582344055176, 0.0300032958984375, 0.029774080276489256, 0.02984351921081543, 0.030021120071411132, 0.030105344772338866, 0.02984217643737793, 0.029924863815307616, 0.029918943405151367, 0.029879072189331054, 0.030027135848999024, 0.030081151962280273, 0.03037164878845215, 0.0305097599029541, 0.03033888053894043, 0.03023993682861328, 0.03011862373352051, 0.0300731201171875, 0.030074880599975585, 0.03031449508666992, 0.03052694320678711, 0.030027711868286133, 0.030146656036376954, 0.029943552017211914, 0.029921279907226563, 0.03026940727233887, 0.030245664596557618, 0.029914495468139648, 0.030026464462280272, 0.029986719131469726, 0.030087167739868165, 0.030101152420043947, 0.030090816497802736, 0.02998137664794922, 0.03020400047302246, 0.02997987174987793, 0.03002556800842285, 0.029948320388793945, 0.03027203178405762, 0.030449663162231445, 0.030055776596069336, 0.030122655868530274, 0.030150592803955076, 0.032194526672363284, 0.030664703369140626, 0.030345216751098632, 0.02997622489929199, 0.029835615158081055, 0.029705631256103517, 0.029993024826049805, 0.029749248504638674, 0.03002217674255371, 0.030031328201293946, 0.029822784423828123, 0.02985443115234375, 0.02998886489868164, 0.029845504760742186, 0.030131231307983397, 0.03000419235229492, 0.029748607635498046, 0.029851295471191405, 0.029796672821044923, 0.029825696945190428, 0.029880319595336914, 0.030119935989379884, 0.029913087844848633, 0.029871488571166994, 0.029944448471069335, 0.029805696487426758, 0.0298353271484375, 0.030026559829711915, 0.03024684715270996, 0.03012339210510254, 0.030099264144897463, 0.03016691207885742, 0.030694400787353516, 0.030279680252075194, 0.030284896850585937, 0.030086048126220705, 0.030341119766235353, 0.030081024169921877, 0.03016294479370117, 0.03003392028808594, 0.029996192932128907, 0.029944671630859374, 0.030280832290649415, 0.03020684814453125, 0.03008230400085449, 0.03001590347290039, 0.030984384536743164, 0.029928607940673826, 0.03000831985473633, 0.03010767936706543, 0.03020716857910156, 0.03013916778564453, 0.03002572822570801, 0.03001753616333008, 0.03041231918334961, 0.03026540756225586, 0.030105920791625978, 0.03006854438781738, 0.03018351936340332, 0.030224576950073242, 0.030084800720214844, 0.030255104064941408, 0.030326528549194334, 0.03230550384521484, 0.03094432067871094, 0.030393280029296876, 0.02995609664916992, 0.030096576690673827, 0.029818975448608398, 0.02974550437927246, 0.029871648788452148, 0.03005084800720215, 0.030023231506347656, 0.029780736923217775, 0.029804191589355468, 0.030009695053100586, 0.030072832107543947, 0.029980127334594726, 0.02989302444458008, 0.030006687164306642, 0.02986240005493164, 0.029888128280639647, 0.029819488525390625, 0.02991923141479492, 
0.030133344650268554, 0.02994473648071289, 0.02994175910949707, 0.029841407775878907, 0.02983526420593262, 0.02985958480834961, 0.029858047485351563, 0.030095359802246095, 0.029910144805908204, 0.03023551940917969, 0.030087167739868165, 0.030093311309814453, 0.030523391723632814, 0.030308351516723633, 0.030062591552734375, 0.030066688537597655, 0.03007427215576172, 0.029997663497924806, 0.03028531265258789, 0.029975040435791016, 0.03001510429382324, 0.030140159606933593, 0.03006502342224121, 0.030185728073120116, 0.02996019172668457, 0.030320512771606446, 0.030172351837158204, 0.03004300880432129, 0.030001216888427735, 0.029997055053710937, 0.030315967559814454, 0.030028352737426756, 0.02998681640625, 0.030385440826416015, 0.02999123191833496, 0.030050207138061523, 0.030114303588867186, 0.030000896453857423, 0.030015743255615235, 0.03041279983520508, 0.030035871505737305, 0.03011782455444336, 0.032194561004638675, 0.03055961608886719, 0.03025974464416504, 0.0302061767578125, 0.029746400833129884, 0.029642784118652343, 0.029637344360351564, 0.029662879943847656, 0.029663616180419922, 0.029749248504638674, 0.029702335357666015, 0.029714239120483397, 0.02971980857849121, 0.029775808334350586, 0.029761344909667968, 0.029825216293334962, 0.029788991928100587, 0.02980575942993164, 0.029799232482910155, 0.029742687225341798, 0.029790271759033204, 0.029730527877807618, 0.029702783584594727, 0.02976335906982422, 0.029720800399780273, 0.02975334358215332, 0.02978201675415039, 0.029722623825073242, 0.0297325439453125, 0.02982943916320801, 0.030025760650634767, 0.02995622444152832, 0.03012915229797363, 0.030204063415527345, 0.030160640716552733, 0.03006355285644531, 0.030044160842895507, 0.02999091148376465, 0.029911039352416992, 0.029886463165283202, 0.029906944274902345, 0.029959936141967774, 0.02993382453918457, 0.02999728012084961, 0.0298855037689209, 0.02983750343322754, 0.02981942367553711, 0.02979430389404297, 0.029836479187011718, 0.029874975204467774, 0.029922943115234375, 0.029850015640258788, 0.029879840850830078, 0.029921728134155272, 0.029949983596801757, 0.029882368087768556, 0.0299233283996582, 0.029998336791992188, 0.030115711212158204, 0.030094207763671874, 0.0300437126159668, 0.03003343963623047, 0.03013500785827637, 0.03227036666870117, 0.03070774459838867, 0.030063167572021484, 0.0298221435546875, 0.029780799865722657, 0.029741056442260744, 0.02984342384338379, 0.029814815521240233, 0.02975129508972168, 0.02976563262939453, 0.029744768142700197, 0.029804927825927734, 0.029714431762695313, 0.02973695945739746, 0.029788223266601563, 0.029734367370605468, 0.02970204734802246, 0.029680192947387694, 0.02979635238647461, 0.029798080444335937, 0.03004419136047363, 0.030015775680541992, 0.03017523193359375, 0.030109695434570313, 0.030031871795654298, 0.03002524757385254, 0.030126367568969727, 0.03027987289428711, 0.02996441650390625, 0.03011737632751465, 0.030230911254882812, 0.030235967636108398, 0.030366399765014648, 0.030535680770874023, 0.03032035255432129, 0.03023209571838379, 0.030370559692382813, 0.030275583267211914, 0.030246912002563478, 0.030215904235839842, 0.030181663513183594, 0.030312320709228516, 0.030072959899902343, 0.030189376831054687, 0.030119871139526366, 0.030040319442749024, 0.030205856323242186, 0.030229856491088867, 0.03002579116821289, 0.030175935745239257, 0.030336736679077148, 0.03011008071899414, 0.030156351089477538, 0.030322368621826173, 0.030352031707763672, 0.030119935989379884, 0.029997312545776367, 0.02999679946899414, 0.030227935791015625, 
0.03001807975769043, 0.030109695434570313, 0.030212032318115235, 0.030224447250366212, 0.03246089553833008, 0.030785535812377928, 0.030296064376831053, 0.030003200531005858, 0.029838399887084963, 0.02984441566467285, 0.03003385543823242, 0.029978687286376954, 0.02977177619934082, 0.030101503372192383, 0.030007295608520508, 0.029728511810302734, 0.02976924705505371, 0.029909727096557617, 0.029968351364135743, 0.0299434871673584, 0.029846944808959962, 0.029975488662719728, 0.029908992767333983, 0.02994175910949707, 0.029962112426757812, 0.03015388870239258, 0.02987513542175293, 0.030248735427856447, 0.02988057518005371, 0.0299683837890625, 0.02990675163269043, 0.030146751403808594, 0.02997657585144043, 0.03017919921875, 0.030275039672851563, 0.03054204750061035, 0.030611648559570312, 0.0303372802734375, 0.03058016014099121, 0.030196287155151366, 0.030342720031738282, 0.030116287231445313, 0.03018121528625488, 0.030054559707641603, 0.03018956756591797, 0.030346687316894532, 0.030284351348876953, 0.029919071197509764, 0.02993078422546387, 0.0300184326171875, 0.030264511108398437, 0.030042848587036132, 0.030166624069213867, 0.030046720504760743, 0.02996220779418945, 0.029890592575073243, 0.030304256439208983, 0.03020150375366211, 0.02998512077331543, 0.03005232048034668, 0.03001683235168457, 0.030007999420166017, 0.030062623977661133, 0.030327871322631837, 0.03016729545593262, 0.030329376220703124, 0.03024470329284668, 0.033325214385986325, 0.031747360229492184, 0.030630399703979492, 0.030443231582641603, 0.03010201644897461, 0.029898752212524415, 0.029747200012207032, 0.029849599838256836, 0.029787263870239257, 0.029743999481201173, 0.029798015594482422, 0.029914527893066405, 0.029762527465820313, 0.030080608367919922, 0.03008348846435547, 0.029916511535644532, 0.03004425621032715, 0.0298417911529541, 0.03005459213256836, 0.029999103546142578, 0.029865503311157227, 0.030129919052124022, 0.02990358352661133, 0.029900543212890623, 0.029943712234497072, 0.029884096145629882, 0.029882816314697264, 0.030289119720458984, 0.03001651191711426, 0.029937664031982423, 0.030094783782958986, 0.03002217674255371, 0.030293439865112303, 0.030196607589721678, 0.030329727172851564, 0.030202720642089845, 0.030250751495361328, 0.030138208389282228, 0.030110111236572267, 0.030038015365600586, 0.029962175369262694, 0.029919296264648437, 0.02990438461303711, 0.02988310432434082, 0.029844671249389648, 0.02991574478149414, 0.029870080947875976, 0.029865983963012696, 0.029877471923828124, 0.02999171257019043, 0.02994700813293457, 0.029911584854125976, 0.029886816024780275, 0.029911039352416992, 0.02993152046203613, 0.0299597110748291, 0.030132768630981445, 0.030000991821289062, 0.029981983184814452, 0.030069568634033202, 0.030105600357055663, 0.030066688537597655, 0.030100576400756834, 0.03231948852539063, 0.030898143768310547, 0.030517248153686522, 0.0302073917388916, 0.029964895248413087, 0.029888416290283205, 0.030077024459838866, 0.02993715286254883, 0.030192031860351562, 0.02982512092590332, 0.029755392074584962, 0.030082176208496094, 0.03023551940917969, 0.030211103439331054, 0.030012351989746094, 0.029923360824584962, 0.030080223083496095, 0.03039516830444336, 0.03018956756591797, 0.030070783615112305, 0.030017023086547853, 0.02989926338195801, 0.030019584655761718, 0.02995609664916992, 0.030053855895996094, 0.030200319290161134, 0.030001119613647462, 0.03005638313293457, 0.030107776641845704, 0.030286048889160155, 0.030365472793579103, 0.03043231964111328, 0.030298336029052735, 0.030472671508789063, 
0.030595327377319338, 0.030195711135864257, 0.030294015884399415, 0.031135744094848632, 0.030269439697265626, 0.030029312133789062, 0.030003679275512694, 0.029959615707397462, 0.030313056945800783, 0.02992905616760254, 0.030305856704711913, 0.030077280044555663, 0.030072959899902343, 0.03001363182067871, 0.029970624923706054, 0.030193119049072265, 0.030144960403442382, 0.029935264587402345, 0.02997907257080078, 0.03078348731994629, 0.03033087921142578, 0.030126079559326172, 0.03022233581542969, 0.03003392028808594, 0.030023008346557616, 0.030261568069458008, 0.030318143844604493, 0.030155168533325196, 0.030433664321899413, 0.03231110382080078, 0.03096182441711426, 0.03017932891845703, 0.0299532470703125, 0.030118688583374024, 0.03018137550354004, 0.02980454444885254, 0.029800447463989257, 0.02979151916503906, 0.030052959442138674, 0.029849536895751955, 0.030277183532714844, 0.029979263305664063, 0.029915136337280275, 0.030113792419433592, 0.0298570556640625, 0.030149343490600587, 0.030085119247436523, 0.029816831588745117, 0.030126079559326172, 0.030058496475219725, 0.029834943771362303, 0.029780288696289063, 0.029863744735717773, 0.029990463256835936, 0.029991552352905272, 0.029853599548339844, 0.029851743698120117, 0.030260223388671875, 0.03029875183105469, 0.030181760787963866, 0.030283775329589844, 0.03032268714904785, 0.030320640563964843, 0.030232576370239257, 0.030253055572509766, 0.030107648849487304, 0.030191295623779296, 0.03000966453552246, 0.03007187271118164, 0.03032294464111328, 0.03018207931518555, 0.029961919784545897, 0.030002496719360353, 0.030135295867919923, 0.030277631759643556, 0.030091264724731445, 0.030021312713623047, 0.029951583862304686, 0.03130646324157715, 0.030169343948364256, 0.029867776870727537, 0.030291007995605468, 0.030268352508544923, 0.030053728103637694, 0.03010371208190918, 0.030046783447265624, 0.03003308868408203, 0.030317312240600587, 0.030013439178466796, 0.030104639053344727, 0.030155712127685547, 0.030236064910888674]",tokens/s,33.22094866482862,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run 
self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 582, in __init__ self.transformer = CodeGenModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 400, in __init__ self.h = nn.ModuleList([CodeGenBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 400, in self.h = nn.ModuleList([CodeGenBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 258, in __init__ self.mlp = CodeGenMLP(inner_dim, config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 236, in __init__ self.fc_in = nn.Linear(embed_dim, intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,738.77504,1148.125184,0.0,752.877568,713.93792,s,1,7.34036962890625,7.34036962890625,0.0,7.34036962890625,7.34036962890625,7.34036962890625,7.34036962890625,[7.34036962890625],,kWh,6.353637062502078e-06,6.932980891981484e-07,1.972779356002735e-06,9.019714507702962e-06,,MB,1052.758016,1190.068224,0.0,784.334848,638.877696,s,18,0.33536745643615723,0.01863152535756429,0.0004424272807504113,0.01849524784088135,0.018742918586730956,0.01913710832595825,0.020132650852203365,"[0.02038153648376465, 0.01852057647705078, 0.018917503356933593, 0.01836672019958496, 0.018438175201416017, 0.018667007446289064, 0.018447296142578125, 0.018466527938842774, 0.0186680965423584, 0.0185446720123291, 0.018389856338500977, 0.018467391967773437, 0.01844771194458008, 0.01845568084716797, 0.018569536209106445, 0.018547935485839842, 0.018469919204711915, 0.0186013126373291]",tokens/s,13740.152514998752,kWh,6.889110478183946e-07,7.597375514928903e-08,4.5554740322169947e-07,1.220432206189383e-06,tokens/kWh,209761753.8292616,MB,1063.030784,1215.234048,0.0,809.500672,638.880256,s,18,9.91459100341797,0.5508106113009983,0.0024000763314876027,0.5507142639160156,0.5530488464355469,0.554253955078125,0.556790341796875,"[0.5496165161132812, 0.5463087768554687, 0.5512454223632812, 0.5495008544921876, 0.5516490478515625, 0.5506685180664063, 0.5485482177734375, 0.5524202270507812, 0.5505003051757813, 0.5536944580078125, 0.5495416259765625, 0.5477716674804688, 0.550760009765625, 0.5574244384765625, 0.5520377807617187, 0.55111181640625, 0.5490191650390625, 0.5527721557617188]",tokens/s,114.37688146783496,kWh,1.5814841655654132e-05,1.744113168838617e-06,7.741326438222773e-06,2.5300281262715512e-05,tokens/kWh,2490090.8944771993,,s,1134,9.909957987785342,0.008738940024502064,0.00017457159636791476,0.008700208187103271,0.008826022624969482,0.008934936332702637,0.009401976852416994,"[0.008698080062866211, 0.008691583633422852, 0.00869820785522461, 0.008882080078125, 0.008676639556884766, 0.008678208351135254, 0.008724384307861328, 0.008626079559326172, 0.008737183570861817, 0.008652288436889649, 0.008652928352355958, 0.008659008026123047, 0.008684543609619141, 0.01020201587677002, 0.008724896430969239, 0.008742591857910157, 0.00905446434020996, 0.008732416152954102, 0.008682815551757813, 0.008698559761047364, 0.0086364164352417, 0.00865449619293213, 0.008669343948364258, 0.008659135818481446, 0.008666848182678223, 0.008692000389099121, 0.008702272415161133, 0.008726207733154297, 0.008689472198486328, 0.008659135818481446, 0.008650752067565918, 0.008712191581726075, 0.008689663887023925, 0.008630271911621093, 0.00865824031829834, 0.008696767807006836, 0.008634367942810058, 0.008652544021606446, 0.008642560005187988, 
0.008699040412902832, 0.00867801570892334, 0.008753376007080078, 0.008646656036376953, 0.008667455673217774, 0.00867625617980957, 0.008809599876403808, 0.008865440368652344, 0.008675328254699707, 0.008622079849243165, 0.008738816261291504, 0.008706048011779785, 0.008652159690856934, 0.008657535552978516, 0.008615936279296875, 0.008647775650024414, 0.008620960235595703, 0.008732192039489746, 0.008659423828125, 0.00866870403289795, 0.008726207733154297, 0.008808256149291992, 0.008696767807006836, 0.008715680122375488, 0.008609663963317872, 0.008662079811096192, 0.008642784118652344, 0.008667872428894044, 0.008633824348449707, 0.00865123176574707, 0.008599616050720215, 0.00871833610534668, 0.008601344108581543, 0.008675583839416504, 0.008651040077209472, 0.00862169647216797, 0.008681568145751953, 0.008665087699890137, 0.008603872299194336, 0.0087957124710083, 0.008638655662536621, 0.008653887748718262, 0.008728575706481934, 0.008774335861206056, 0.008697343826293944, 0.008635168075561523, 0.00862003231048584, 0.008642848014831544, 0.00863372802734375, 0.00868342399597168, 0.008702400207519532, 0.008648256301879882, 0.008607295989990235, 0.008640992164611816, 0.008669088363647461, 0.00866543960571289, 0.00863212776184082, 0.008634495735168456, 0.008662464141845704, 0.008659520149230958, 0.008668479919433593, 0.008686495780944823, 0.008627264022827149, 0.008707008361816405, 0.00872163200378418, 0.008699711799621581, 0.00865817642211914, 0.00868937587738037, 0.008644864082336426, 0.008660736083984375, 0.008640512466430664, 0.008648703575134278, 0.008685055732727051, 0.008741087913513183, 0.008677536010742188, 0.008675456047058105, 0.008609919548034669, 0.00873574447631836, 0.00874294376373291, 0.0086843843460083, 0.00869711971282959, 0.008676063537597657, 0.008671232223510742, 0.008640768051147461, 0.0086812162399292, 0.00870969581604004, 0.008673727989196777, 0.008753472328186036, 0.008741184234619141, 0.008696288108825683, 0.00866703987121582, 0.008685759544372559, 0.008642368316650391, 0.008667136192321777, 0.008686816215515137, 0.008765343666076661, 0.008622976303100586, 0.008666560173034668, 0.00866323184967041, 0.008681856155395507, 0.008630335807800293, 0.008644543647766113, 0.008699071884155274, 0.008700736045837402, 0.00862003231048584, 0.008673279762268067, 0.009494112014770508, 0.009800095558166504, 0.00884489631652832, 0.008710528373718262, 0.00870809555053711, 0.008605728149414063, 0.008642560005187988, 0.008629983901977539, 0.00878211212158203, 0.00865609645843506, 0.008667584419250488, 0.00862831974029541, 0.008659296035766602, 0.008830880165100098, 0.008689663887023925, 0.008662783622741699, 0.008651007652282714, 0.00865510368347168, 0.008740480422973634, 0.008760512351989746, 0.008694720268249512, 0.008656319618225097, 0.008684096336364746, 0.00864633560180664, 0.008671551704406739, 0.008724703788757324, 0.010385184288024903, 0.008725855827331543, 0.00872659206390381, 0.008747808456420899, 0.008634176254272461, 0.008720383644104004, 0.008636287689208984, 0.008752287864685059, 0.00866806411743164, 0.008665151596069336, 0.008656031608581543, 0.00886569595336914, 0.008694399833679199, 0.008632991790771484, 0.00869331169128418, 0.00864470386505127, 0.008679360389709472, 0.00864249610900879, 0.00875276756286621, 0.00867414379119873, 0.008666496276855468, 0.008718976020812988, 0.008902239799499511, 0.008694175720214845, 0.008732735633850098, 0.008734656333923339, 0.008696096420288086, 0.008684864044189453, 0.008642368316650391, 0.008657504081726074, 0.00867948818206787, 
0.008652735710144042, 0.008668864250183105, 0.008835071563720704, 0.008702143669128418, 0.008659071922302246, 0.008625696182250977, 0.008686047554016113, 0.008634367942810058, 0.008699904441833496, 0.008650752067565918, 0.008647711753845214, 0.008690655708312988, 0.008704000473022461, 0.008652704238891602, 0.008661087989807128, 0.008671520233154296, 0.008670944213867188, 0.008675328254699707, 0.008675328254699707, 0.008648320198059082, 0.00865932846069336, 0.008644736289978028, 0.008820608139038085, 0.008935040473937988, 0.008777088165283203, 0.008713088035583495, 0.00883456039428711, 0.008708736419677735, 0.00869375991821289, 0.008703871726989746, 0.008654175758361816, 0.008674079895019532, 0.00874067211151123, 0.00866323184967041, 0.009287232398986817, 0.008870335578918457, 0.008737792015075683, 0.009143296241760255, 0.008714048385620118, 0.008720576286315918, 0.00871628761291504, 0.008715744018554688, 0.008659487724304199, 0.008679424285888672, 0.008673088073730469, 0.008613408088684082, 0.008657055854797363, 0.00873475170135498, 0.008648832321166993, 0.008685695648193359, 0.008648672103881837, 0.00860598373413086, 0.008652192115783691, 0.008644063949584961, 0.008736672401428223, 0.008721376419067383, 0.008652576446533203, 0.008675552368164063, 0.008694080352783204, 0.008680928230285644, 0.008893664360046388, 0.008812864303588868, 0.008886367797851562, 0.008795040130615234, 0.008746368408203125, 0.008751423835754394, 0.008688768386840821, 0.0087193603515625, 0.008697728157043456, 0.008607744216918945, 0.008626175880432128, 0.008650943756103516, 0.008595264434814454, 0.008658080101013184, 0.008618687629699707, 0.008630016326904296, 0.008603167533874512, 0.008622976303100586, 0.008671232223510742, 0.008697855949401855, 0.00880025577545166, 0.008697855949401855, 0.008666272163391114, 0.008687520027160644, 0.008754112243652344, 0.00870809555053711, 0.008707167625427246, 0.00911248016357422, 0.00913590431213379, 0.009410783767700196, 0.008752703666687012, 0.009255007743835449, 0.009028160095214843, 0.008700991630554199, 0.008677184104919433, 0.008731552124023437, 0.008716480255126953, 0.008653696060180665, 0.008883135795593262, 0.008728384017944336, 0.008720576286315918, 0.008689727783203125, 0.008714591979980469, 0.0088635835647583, 0.00866431999206543, 0.008649215698242188, 0.008687007904052734, 0.008632767677307128, 0.008691616058349609, 0.008930624008178711, 0.00931935977935791, 0.008648287773132325, 0.00869212818145752, 0.008721376419067383, 0.008748543739318848, 0.008776191711425782, 0.008681568145751953, 0.008664383888244628, 0.008671839714050293, 0.008796159744262694, 0.008671296119689942, 0.009133855819702148, 0.009384096145629883, 0.008789183616638184, 0.008675264358520509, 0.008706784248352051, 0.008691871643066406, 0.008704000473022461, 0.008773504257202148, 0.00873635196685791, 0.00868188762664795, 0.008755007743835449, 0.008630271911621093, 0.008755616188049316, 0.00883193588256836, 0.008815232276916505, 0.008640864372253417, 0.008683520317077637, 0.008707136154174804, 0.008681471824645997, 0.008689696311950684, 0.0086495361328125, 0.008681535720825195, 0.008681119918823242, 0.009169280052185058, 0.00870195198059082, 0.008738847732543945, 0.008724448204040527, 0.008710432052612304, 0.008768671989440919, 0.008714495658874511, 0.008708415985107422, 0.008691007614135743, 0.008769280433654785, 0.008684479713439941, 0.008664095878601074, 0.008647647857666016, 0.008705375671386718, 0.008643232345581054, 0.008740863800048827, 0.008797344207763671, 0.008686431884765625, 
0.008689663887023925, 0.008665087699890137, 0.008697504043579102, 0.008742783546447754, 0.00869219207763672, 0.008742400169372559, 0.008665599822998048, 0.008764800071716308, 0.008716927528381348, 0.008697919845581055, 0.008728575706481934, 0.00869164752960205, 0.00870911979675293, 0.008741184234619141, 0.008748703956604004, 0.008663776397705079, 0.008716416358947755, 0.008709280014038086, 0.008657088279724121, 0.008667872428894044, 0.008675519943237304, 0.008654591560363769, 0.008670944213867188, 0.00870246410369873, 0.008672287940979004, 0.008679295539855957, 0.008751999855041505, 0.00876255989074707, 0.008733504295349121, 0.008691712379455567, 0.008697792053222657, 0.008619839668273926, 0.008723999977111816, 0.008642463684082032, 0.008661824226379395, 0.008646976470947266, 0.008711872100830078, 0.008720383644104004, 0.00870025634765625, 0.008941216468811035, 0.008806400299072266, 0.008678624153137207, 0.008639264106750488, 0.008660991668701172, 0.008652799606323243, 0.00863759994506836, 0.008694175720214845, 0.00872697639465332, 0.00867363166809082, 0.008660639762878418, 0.008664799690246581, 0.008762751579284668, 0.0089017915725708, 0.00872815990447998, 0.008644767761230469, 0.00863167953491211, 0.008661631584167481, 0.008653887748718262, 0.008648832321166993, 0.008678208351135254, 0.008697855949401855, 0.00871628761291504, 0.008667136192321777, 0.008742303848266601, 0.00871235179901123, 0.008658368110656739, 0.008653823852539062, 0.008671232223510742, 0.008684543609619141, 0.008710304260253906, 0.008747679710388184, 0.008715776443481446, 0.00883296012878418, 0.008755904197692871, 0.008796223640441895, 0.008785280227661132, 0.008718976020812988, 0.008659456253051758, 0.008712160110473632, 0.008642463684082032, 0.00867369556427002, 0.008699999809265137, 0.008798239707946777, 0.009000831604003907, 0.008699904441833496, 0.00870304012298584, 0.008713151931762695, 0.008767487525939942, 0.008724224090576172, 0.00869753646850586, 0.00861676788330078, 0.008732416152954102, 0.008691167831420899, 0.009269791603088379, 0.0088985595703125, 0.008934880256652832, 0.00874300765991211, 0.009200160026550292, 0.008714143753051757, 0.008722304344177246, 0.008681247711181641, 0.008741151809692383, 0.00867743968963623, 0.0090316801071167, 0.00883619213104248, 0.008673600196838379, 0.008826656341552734, 0.008817472457885743, 0.008742912292480469, 0.008709759712219238, 0.008759519577026368, 0.008824543952941894, 0.008744735717773438, 0.008778656005859375, 0.00875875186920166, 0.008847647666931153, 0.008689984321594239, 0.008806079864501952, 0.008881759643554688, 0.008724864006042481, 0.008747039794921875, 0.008780063629150391, 0.008802016258239747, 0.008742912292480469, 0.00871395206451416, 0.008726719856262208, 0.008757087707519531, 0.00878559970855713, 0.00868614387512207, 0.008861311912536621, 0.008755616188049316, 0.008732352256774902, 0.008701631546020508, 0.008633952140808105, 0.008737664222717285, 0.008646783828735351, 0.008611519813537598, 0.008708415985107422, 0.00869375991821289, 0.008730624198913574, 0.008639871597290039, 0.008695839881896972, 0.008638400077819824, 0.008661664009094239, 0.008689951896667481, 0.008695520401000977, 0.00868556785583496, 0.00877558422088623, 0.008662943840026855, 0.008679519653320313, 0.00868883228302002, 0.008747039794921875, 0.008684415817260743, 0.008667136192321777, 0.008648703575134278, 0.008683103561401367, 0.008687168121337891, 0.008694399833679199, 0.00881481647491455, 0.008728128433227539, 0.00866547203063965, 0.008769599914550782, 0.009103360176086426, 
0.009074687957763672, 0.00872009563446045, 0.008709728240966797, 0.008729280471801758, 0.008700991630554199, 0.00870851230621338, 0.00870639991760254, 0.008650943756103516, 0.00869375991821289, 0.00872447967529297, 0.008669183731079102, 0.008629759788513184, 0.00870851230621338, 0.008652864456176758, 0.00864412784576416, 0.008675840377807617, 0.008761183738708496, 0.008701248168945313, 0.00884928035736084, 0.00873788833618164, 0.008722304344177246, 0.008804351806640624, 0.008790016174316406, 0.008814623832702637, 0.008734687805175782, 0.00870195198059082, 0.008742912292480469, 0.008761343955993652, 0.008734720230102539, 0.008642560005187988, 0.008723999977111816, 0.009145088195800781, 0.00908463954925537, 0.008781248092651368, 0.008755776405334472, 0.00872447967529297, 0.008740768432617188, 0.008677472114562988, 0.008670880317687988, 0.00865056037902832, 0.009156607627868652, 0.008736767768859864, 0.008677696228027344, 0.008808128356933594, 0.008667263984680176, 0.008900480270385742, 0.009539584159851074, 0.009084927558898925, 0.00992255973815918, 0.008873984336853028, 0.008998335838317871, 0.008981056213378906, 0.008668767929077148, 0.009142687797546387, 0.008814047813415528, 0.00870412826538086, 0.008677791595458985, 0.008761343955993652, 0.008679424285888672, 0.008642080307006837, 0.008675104141235351, 0.008632479667663574, 0.008738752365112304, 0.008659711837768555, 0.009184703826904297, 0.008749823570251464, 0.008816287994384766, 0.008741024017333985, 0.008648544311523438, 0.008730624198913574, 0.008666943550109864, 0.008648896217346191, 0.008736031532287598, 0.008680159568786622, 0.008642560005187988, 0.008699904441833496, 0.008650239944458007, 0.008671039581298829, 0.008685407638549804, 0.00869871997833252, 0.008683327674865723, 0.008720767974853516, 0.008716032028198243, 0.008682911872863769, 0.00871836757659912, 0.008786463737487793, 0.008996959686279296, 0.008769536018371582, 0.008845312118530273, 0.008841216087341308, 0.008728575706481934, 0.008709343910217285, 0.008629023551940918, 0.00871401596069336, 0.008657119750976562, 0.008722016334533692, 0.008675935745239258, 0.008691807746887208, 0.008656736373901366, 0.008678367614746094, 0.00865167999267578, 0.008695520401000977, 0.008687040328979493, 0.008715968132019043, 0.008660863876342774, 0.008664671897888183, 0.008655200004577637, 0.008671744346618653, 0.008660991668701172, 0.008673279762268067, 0.008646656036376953, 0.008648703575134278, 0.008699071884155274, 0.008710975646972657, 0.008700096130371093, 0.008674367904663086, 0.00865775966644287, 0.00866643238067627, 0.008688223838806153, 0.008707903861999511, 0.008644800186157226, 0.008652959823608398, 0.008641599655151367, 0.008626976013183595, 0.008634367942810058, 0.008648703575134278, 0.008663040161132812, 0.008737919807434082, 0.008663488388061523, 0.008630080223083496, 0.008664735794067383, 0.008757216453552246, 0.008723039627075196, 0.008679967880249023, 0.008689472198486328, 0.00866425609588623, 0.00862502384185791, 0.008693280220031739, 0.008685376167297363, 0.008698464393615723, 0.008667488098144532, 0.00996224021911621, 0.008827872276306152, 0.008762463569641114, 0.008686495780944823, 0.008676608085632325, 0.008757599830627441, 0.008694111824035644, 0.008728832244873046, 0.008697759628295899, 0.008720288276672363, 0.008777728080749512, 0.008711423873901367, 0.008687423706054688, 0.008738911628723145, 0.008921055793762208, 0.00872332763671875, 0.00876307201385498, 0.008667455673217774, 0.008653887748718262, 0.008671903610229492, 0.008671839714050293, 
0.008699584007263183, 0.008658944129943847, 0.009164799690246582, 0.008681119918823242, 0.008656319618225097, 0.008641087532043457, 0.008650655746459962, 0.008650848388671875, 0.008652799606323243, 0.00867734432220459, 0.008826911926269531, 0.008736319541931152, 0.008686016082763672, 0.008773152351379394, 0.008690143585205078, 0.008804351806640624, 0.008812543869018554, 0.008732447624206543, 0.008687840461730958, 0.00872652816772461, 0.008689663887023925, 0.008668959617614746, 0.00863696002960205, 0.008649824142456054, 0.00864089584350586, 0.008638688087463379, 0.008638463973999023, 0.008639967918395997, 0.008644255638122558, 0.008642815589904785, 0.008663071632385253, 0.008704863548278808, 0.008678912162780762, 0.008671487808227539, 0.008779680252075196, 0.008693856239318848, 0.008683103561401367, 0.008630463600158692, 0.008669407844543457, 0.00865446376800537, 0.00862758445739746, 0.008608768463134766, 0.0086364164352417, 0.008703712463378907, 0.008679712295532226, 0.008742527961730957, 0.008713791847229004, 0.008697664260864258, 0.00862217617034912, 0.008717311859130859, 0.008677280426025391, 0.008634367942810058, 0.008680831909179688, 0.008702400207519532, 0.008765151977539062, 0.008722911834716797, 0.00863424015045166, 0.008650879859924316, 0.008690688133239746, 0.00872755241394043, 0.008785920143127441, 0.008750144004821778, 0.008723135948181152, 0.008705344200134277, 0.008743583679199219, 0.008720255851745606, 0.008722880363464355, 0.008681471824645997, 0.008765439987182617, 0.008658944129943847, 0.008689023971557618, 0.008698592185974121, 0.00868956756591797, 0.008722271919250488, 0.008648863792419434, 0.00875868797302246, 0.008708703994750976, 0.008660191535949708, 0.008678175926208497, 0.008796159744262694, 0.008812543869018554, 0.008685407638549804, 0.008699392318725586, 0.008691967964172364, 0.008704416275024414, 0.008714240074157715, 0.008680512428283691, 0.008671584129333497, 0.008667743682861329, 0.00870524787902832, 0.008790271759033203, 0.008933024406433105, 0.008696127891540527, 0.008714816093444825, 0.00874227237701416, 0.008771552085876464, 0.008739487648010254, 0.008787967681884766, 0.008747008323669434, 0.008721792221069335, 0.008648544311523438, 0.00915724754333496, 0.008734880447387695, 0.00878489589691162, 0.00879923152923584, 0.008749055862426757, 0.008689056396484375, 0.008704192161560058, 0.00867574405670166, 0.008703328132629395, 0.008698528289794921, 0.008706048011779785, 0.008763392448425293, 0.008671232223510742, 0.008767487525939942, 0.00873686408996582, 0.008830880165100098, 0.008761343955993652, 0.008727968215942383, 0.008690048217773437, 0.008669407844543457, 0.008677056312561035, 0.008741184234619141, 0.008757247924804687, 0.008802304267883301, 0.008796031951904298, 0.008814720153808594, 0.00894156837463379, 0.008722432136535644, 0.008757247924804687, 0.008699904441833496, 0.008785920143127441, 0.008763392448425293, 0.008773344039916992, 0.008734496116638183, 0.008761024475097657, 0.008696640014648437, 0.00870195198059082, 0.008675680160522461, 0.00872208023071289, 0.0087259521484375, 0.008700480461120605, 0.008695136070251465, 0.008639007568359376, 0.009274944305419922, 0.008983103752136231, 0.008775679588317872, 0.00874931240081787, 0.008703871726989746, 0.008734272003173829, 0.008701760292053223, 0.008728960037231445, 0.009248895645141602, 0.008755200386047364, 0.008740863800048827, 0.008785056114196778, 0.008864607810974121, 0.008822208404541015, 0.008833600044250487, 0.008851072311401368, 0.008782591819763184, 0.00876095962524414, 
0.008680992126464844, 0.008655327796936035, 0.00870582389831543, 0.008786144256591797, 0.008744895935058594, 0.00865657615661621, 0.008702336311340332, 0.008768832206726074, 0.009179840087890625, 0.008752991676330566, 0.0087041597366333, 0.008759296417236329, 0.008664480209350586, 0.00868000030517578, 0.008674464225769042, 0.008713088035583495, 0.008699392318725586, 0.008768320083618164, 0.008701631546020508, 0.008654879570007325, 0.00868553638458252, 0.008654848098754882, 0.00862003231048584, 0.008652799606323243, 0.008660127639770508, 0.008702239990234374, 0.011135295867919922, 0.010840448379516601, 0.009742207527160644, 0.008853504180908203, 0.008776000022888183, 0.008764512062072754, 0.00890991973876953, 0.008865504264831544, 0.008788064002990722, 0.008849504470825196, 0.00884175968170166, 0.00892956829071045, 0.009129376411437988, 0.008984928131103515, 0.008810400009155273, 0.008923199653625489, 0.008782048225402832, 0.008818400382995606, 0.008751040458679199, 0.008735136032104492, 0.008787487983703613, 0.008739295959472657, 0.008899616241455079, 0.009001952171325683, 0.009338208198547363, 0.008702624320983886, 0.00867363166809082, 0.00871718406677246, 0.00870684814453125, 0.008773632049560547, 0.008690912246704102, 0.008710944175720214, 0.008669535636901855, 0.00865449619293213, 0.008728704452514648, 0.008757311820983887, 0.008621888160705566, 0.008665184020996093, 0.008668831825256348, 0.008653056144714356, 0.008662464141845704, 0.008671808242797851, 0.008687616348266602, 0.008673279762268067, 0.008763360023498536, 0.008656448364257812, 0.008682208061218262, 0.00873036766052246, 0.008697855949401855, 0.00868131160736084, 0.008771648406982422, 0.00872047996520996, 0.008707615852355957, 0.008739295959472657, 0.008755552291870116, 0.008741888046264648, 0.008678048133850098, 0.008719967842102052, 0.00869212818145752, 0.008695808410644532, 0.008695808410644532, 0.008704000473022461, 0.00866425609588623, 0.008694016456604003, 0.0086626558303833, 0.008659903526306151, 0.00869983959197998, 0.008804415702819824, 0.008710016250610351, 0.00876195240020752, 0.008700032234191895, 0.008693535804748536, 0.008710463523864747, 0.008685759544372559, 0.00877344036102295, 0.008837120056152344, 0.008814592361450196, 0.008820735931396484, 0.008764639854431152, 0.008843296051025391, 0.008741632461547851, 0.008661151885986328, 0.008692959785461425, 0.008855744361877442, 0.008849856376647949, 0.008804320335388183, 0.008713824272155762, 0.008706496238708497, 0.008683520317077637, 0.008714240074157715, 0.008849344253540038, 0.008749119758605957, 0.008742719650268556, 0.008776960372924804, 0.0087192964553833, 0.008790111541748047, 0.00880787181854248, 0.008720767974853516, 0.008728320121765137, 0.008800607681274414, 0.008744959831237792, 0.008706048011779785, 0.00877558422088623, 0.008708191871643066, 0.008677120208740235, 0.00866329574584961, 0.008699904441833496, 0.00869375991821289, 0.008704000473022461, 0.008733823776245117, 0.008712832450866698, 0.008757216453552246, 0.008779775619506837, 0.008734304428100586, 0.008667712211608886, 0.008683520317077637, 0.008768863677978515, 0.008782719612121582, 0.008766528129577636, 0.008754015922546387, 0.00870025634765625, 0.00872822380065918, 0.008785920143127441, 0.008747008323669434, 0.00903104019165039, 0.008726688385009765, 0.008694016456604003, 0.008666367530822753, 0.008696800231933593, 0.008701087951660157, 0.008657055854797363, 0.008656607627868653, 0.008722432136535644, 0.008699135780334473, 0.00865766429901123, 0.008726207733154297, 0.00867465591430664, 
0.008717280387878418, 0.008759263992309571, 0.008703519821166992, 0.008710368156433106, 0.008736800193786622, 0.008720895767211915, 0.008700927734375, 0.008780384063720703, 0.008732288360595704, 0.00877622413635254, 0.008730527877807617, 0.008713695526123047, 0.008661631584167481, 0.008658495903015137, 0.008632287979125977, 0.008645248413085938, 0.008641535758972169, 0.008712127685546875, 0.008642911911010742, 0.00868188762664795, 0.008749152183532715, 0.008713919639587403, 0.00864627170562744, 0.008704832077026367, 0.008669343948364258, 0.00866425609588623, 0.00869167995452881, 0.008653440475463868, 0.008696063995361328, 0.008643360137939453, 0.008644991874694825, 0.008732512474060058, 0.008759455680847168, 0.008757344245910645, 0.008767999649047852, 0.00870195198059082, 0.008763392448425293, 0.008687199592590332, 0.008701760292053223, 0.008680031776428223, 0.008695584297180176, 0.008773088455200195, 0.008917023658752441, 0.008832927703857421, 0.008719136238098144, 0.00872441577911377, 0.008675200462341309, 0.008699711799621581, 0.008628640174865723, 0.00870195198059082, 0.008649951934814453, 0.008632831573486328, 0.008692000389099121, 0.008679424285888672, 0.008769536018371582, 0.00893337631225586, 0.008797504425048829, 0.00875984001159668, 0.00874512004852295, 0.008780832290649415, 0.00867420768737793, 0.008654303550720215, 0.008691871643066406, 0.00866528034210205, 0.008700160026550293, 0.008738783836364747, 0.008714271545410155, 0.008692735671997071, 0.00870297622680664, 0.00867734432220459, 0.008642208099365235, 0.008868224143981933, 0.009127903938293457, 0.008665120124816895, 0.00869702434539795, 0.008713343620300292, 0.008679360389709472, 0.008670975685119629, 0.008644831657409669, 0.008664863586425781, 0.008652799606323243, 0.008673536300659179, 0.008965888023376466, 0.009224191665649414, 0.009723360061645508, 0.008761759757995605, 0.008790143966674805, 0.008863455772399902, 0.008805695533752442, 0.00877462387084961, 0.008822688102722168, 0.008876128196716309, 0.008775391578674317, 0.00880668830871582, 0.008660991668701172, 0.00868934440612793, 0.008677696228027344, 0.009142271995544434, 0.008873984336853028, 0.008713215827941894, 0.008743840217590332, 0.008795647621154786, 0.008777664184570313, 0.008776351928710938, 0.008728575706481934, 0.00871833610534668, 0.008730208396911621, 0.008808863639831544, 0.008779775619506837, 0.00899071979522705, 0.008815936088562012, 0.008655072212219238, 0.00874953556060791, 0.008668383598327637, 0.008608736038208008, 0.008633184432983398, 0.008602527618408204, 0.00867081642150879, 0.008732704162597657, 0.008681728363037109]",tokens/s,114.43035393265318,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, 
in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1068, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 634, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 230, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.316416,14274.199552,0.0,13878.951936,13865.632768,s,1,7.819302734375,7.819302734375,0.0,7.819302734375,7.819302734375,7.819302734375,7.819302734375,[7.819302734375],,kWh,1.4235044483336877e-05,1.5627023503701438e-06,7.607783864008177e-06,2.3405530697715198e-05,,MB,1107.124224,14695.727104,0.0,14289.993728,14237.628416,s,10,13.329096923828125,1.3329096923828128,0.003811522174652249,1.3340584716796875,1.3368260620117187,1.336930487060547,1.3370140270996094,"[1.3257540283203124, 1.330975341796875, 1.328086669921875, 1.3329853515625, 1.3299989013671876, 1.335131591796875, 1.3368028564453125, 1.3357734375, 1.337034912109375, 1.3365538330078126]",tokens/s,192.06102368597422,kWh,3.8972799550415024e-05,4.298243802833858e-06,2.587240958680098e-05,6.914345294004986e-05,tokens/kWh,3702447.435217941,MB,1128.259584,14863.499264,0.0,14457.765888,14416.301056,s,10,39.33003002929687,3.933003002929688,0.003964859828145915,3.931811279296875,3.937063427734375,3.9396120361328126,3.9416509228515624,"[3.9292900390625, 3.932463134765625, 3.9364970703125, 3.936447265625, 3.930498779296875, 3.931708984375, 3.93191357421875, 3.928558837890625, 3.93049169921875, 3.94216064453125]",tokens/s,16.018294405844948,kWh,0.0001149224127012515,1.2676791999453186e-05,7.6365227758797e-05,0.00020396443245950175,tokens/kWh,308877.3823961145,,s,630,39.32551675796513,0.06242145517137316,0.00037507737989959206,0.06243038368225098,0.06289647064208984,0.0630232105255127,0.06322790199279785,"[0.06253164672851562, 0.06211529541015625, 0.06166787338256836, 0.06163679885864258, 0.06149305725097656, 0.06185779190063476, 0.061792255401611325, 0.062056190490722654, 0.062023937225341795, 0.062064319610595706, 0.06189433670043945, 0.06226803207397461, 0.062131263732910155, 0.06236236953735352, 0.0621278076171875, 0.06222438430786133, 0.062076545715332034, 0.06209628677368164, 0.06172243118286133, 0.062066783905029295, 0.06194182586669922, 0.06223664093017578, 0.06215663909912109, 0.06221382522583008, 0.06214912033081055, 0.06258281707763672, 0.06241686248779297, 0.062388225555419924, 0.06253567886352539, 0.062476192474365234, 0.06224700927734375, 0.06213836669921875, 0.06251113510131837, 0.06244121551513672, 0.06227347183227539, 0.06229840087890625, 0.06210332870483398, 0.06232291030883789, 0.06219728088378906, 0.06231216049194336, 0.0623480339050293, 0.06235548782348633, 0.06235337448120117, 0.06272576141357422, 0.06250739288330077, 0.06287753677368164, 0.06268739318847656, 0.0626237449645996, 0.06252339172363282, 0.06269747161865234, 0.06260121536254883, 0.06268918228149414, 0.06289212799072266, 0.06300262451171874, 0.0626954231262207, 0.06273023986816406, 0.06294118499755859, 0.06286342239379883, 0.06265350341796876, 0.06269744110107422, 0.0626956787109375, 
0.06331052780151367, 0.06317254257202148, 0.0627589454650879, 0.06217107009887695, 0.06175132751464844, 0.061875743865966795, 0.06171855926513672, 0.06202188873291015, 0.06194764709472656, 0.06188899230957031, 0.062097312927246094, 0.06196620941162109, 0.061835582733154294, 0.061994976043701175, 0.06201068878173828, 0.0622086067199707, 0.06210150527954102, 0.0624005126953125, 0.06249430465698242, 0.06249123382568359, 0.062534912109375, 0.06223516845703125, 0.06198668670654297, 0.06207299041748047, 0.062096607208251955, 0.061991710662841794, 0.062134273529052736, 0.06238534545898437, 0.062284095764160156, 0.0627061767578125, 0.06261721420288086, 0.062495105743408205, 0.06240201568603516, 0.06247481536865234, 0.062342624664306644, 0.06245347213745117, 0.062323486328125, 0.06242303848266602, 0.06231654357910156, 0.06257449722290039, 0.06260131072998047, 0.06250291061401367, 0.062443424224853515, 0.06255814361572265, 0.062476062774658205, 0.06268352127075195, 0.06265568161010743, 0.06262815856933594, 0.06277785491943359, 0.06261324691772462, 0.06281027221679687, 0.06295356750488282, 0.06262924957275391, 0.06260758590698243, 0.062424766540527345, 0.06257123184204101, 0.06254163360595703, 0.062810302734375, 0.06270361709594727, 0.06291177749633789, 0.06300336074829102, 0.06306406402587891, 0.0628427848815918, 0.06287369537353515, 0.06279987335205078, 0.06265711975097656, 0.062024097442626956, 0.061591552734375, 0.06162636947631836, 0.06166883087158203, 0.06176432037353516, 0.06216819381713867, 0.06200697708129883, 0.06205542373657227, 0.062089214324951174, 0.0619716796875, 0.06223676681518555, 0.06245040130615234, 0.062324447631835936, 0.062220542907714844, 0.06241254425048828, 0.06230230331420898, 0.06220611190795899, 0.06226534271240235, 0.062268672943115236, 0.06210790252685547, 0.06226947021484375, 0.06221049499511719, 0.062283008575439454, 0.06230265426635742, 0.062427486419677734, 0.062330848693847654, 0.062443038940429685, 0.06251366424560546, 0.06286288070678711, 0.0627770881652832, 0.0626613426208496, 0.06235340881347656, 0.0629452781677246, 0.06268678283691406, 0.06251142501831054, 0.062255233764648435, 0.06233283233642578, 0.06226953506469726, 0.06270566558837891, 0.062371841430664064, 0.06268710327148437, 0.06260953521728516, 0.06267897415161133, 0.06282451248168945, 0.06287760162353516, 0.0626583366394043, 0.06265884780883789, 0.06318086242675781, 0.06301004791259765, 0.06292144012451172, 0.06283849716186524, 0.06267932891845703, 0.06270975875854493, 0.06251520156860352, 0.06253567886352539, 0.06259097671508788, 0.06289926528930664, 0.06281926345825195, 0.06308169555664063, 0.06313225555419921, 0.06302329635620117, 0.06314384078979492, 0.06244761657714844, 0.06200729751586914, 0.06171984100341797, 0.06178441619873047, 0.06207936096191406, 0.061927425384521485, 0.061908992767333984, 0.061808639526367185, 0.06221593475341797, 0.06214271926879883, 0.062203903198242184, 0.06217523193359375, 0.06214656066894531, 0.06234726333618164, 0.0621030387878418, 0.062280193328857425, 0.06217475128173828, 0.06256684875488282, 0.06223791885375977, 0.062134815216064454, 0.062144577026367186, 0.06213808059692383, 0.062292030334472656, 0.062211872100830075, 0.06230492782592773, 0.06229734420776367, 0.06241321563720703, 0.06245366287231445, 0.06277983856201172, 0.06272617721557618, 0.06254735946655274, 0.062363777160644535, 0.06255212783813477, 0.06247644805908203, 0.06233871841430664, 0.06235168075561524, 0.06225513458251953, 0.06242531204223633, 0.0626237449645996, 0.06254991912841797, 
0.06274265670776367, 0.06308822250366211, 0.06275462341308594, 0.06289465713500976, 0.06258483123779297, 0.0627317771911621, 0.0626726417541504, 0.06296243286132812, 0.06285110473632813, 0.06291651153564454, 0.062476192474365234, 0.06287580871582031, 0.062574462890625, 0.06265024185180663, 0.06247862243652344, 0.06241001510620117, 0.06279238510131836, 0.06308031845092774, 0.06308236694335938, 0.06344729614257813, 0.06327267074584961, 0.06302953720092773, 0.06293830490112305, 0.06248716735839844, 0.061853919982910156, 0.061574176788330076, 0.06148342514038086, 0.061434432983398436, 0.06179008102416992, 0.06187417602539062, 0.06204428863525391, 0.06187007904052735, 0.06229811096191406, 0.0620871696472168, 0.06187417602539062, 0.06187606430053711, 0.06210323333740234, 0.062032352447509764, 0.06216198348999023, 0.062073089599609374, 0.06199363327026367, 0.06193532943725586, 0.06197484970092773, 0.06188032150268555, 0.062203903198242184, 0.062161121368408206, 0.0623733139038086, 0.06217536163330078, 0.06244169616699219, 0.062259040832519534, 0.0622573127746582, 0.06250495910644531, 0.06226067352294922, 0.0626099853515625, 0.06249062347412109, 0.0624189453125, 0.0625539207458496, 0.0624554557800293, 0.062497089385986325, 0.06244512176513672, 0.06227769470214844, 0.06232684707641602, 0.06250508880615234, 0.06261183929443359, 0.06266473770141602, 0.06286131286621094, 0.06283417510986328, 0.062687744140625, 0.06282649612426758, 0.06247423934936523, 0.06270156860351563, 0.062382080078125, 0.0626319351196289, 0.0628408317565918, 0.0626698226928711, 0.06259814453125, 0.0630231056213379, 0.06273027038574219, 0.06274003219604492, 0.06276723098754883, 0.0628834571838379, 0.06295804977416992, 0.06299423980712891, 0.06307068634033203, 0.06311312103271484, 0.0630804786682129, 0.06248534393310547, 0.06198006439208984, 0.06156681442260742, 0.06146255874633789, 0.061655040740966796, 0.06171526336669922, 0.061884414672851565, 0.06184755325317383, 0.06218547058105469, 0.06233292770385742, 0.0620052490234375, 0.06206259155273437, 0.06204620742797851, 0.061966335296630856, 0.062081024169921874, 0.06198601531982422, 0.06205721664428711, 0.06218889617919922, 0.061838016510009766, 0.06216640090942383, 0.062163551330566405, 0.06217119979858399, 0.062338943481445315, 0.06232281494140625, 0.0623185920715332, 0.06250492858886719, 0.062322689056396485, 0.06236972808837891, 0.06220751953125, 0.06234163284301758, 0.06273225784301757, 0.06248659133911133, 0.062457088470458985, 0.06250576019287109, 0.0623076171875, 0.06235420989990234, 0.06225017547607422, 0.06233065414428711, 0.062258113861083986, 0.06239849472045898, 0.062416446685791015, 0.06316057586669922, 0.06256454467773437, 0.06282032012939454, 0.06268518447875976, 0.06288179016113281, 0.06274867248535156, 0.06254147338867187, 0.06270985412597656, 0.06280755233764648, 0.06273875045776367, 0.0627388801574707, 0.06266060638427734, 0.06291024017333985, 0.06271958541870117, 0.06270012664794922, 0.06280787277221679, 0.0629065933227539, 0.06285107040405273, 0.06300467300415039, 0.06296371078491211, 0.0632463035583496, 0.06306800079345704, 0.06254748916625977, 0.061964767456054684, 0.06156902313232422, 0.0615997428894043, 0.06167552185058594, 0.06205440139770508, 0.06185574340820312, 0.06188851165771484, 0.06178201675415039, 0.06207692718505859, 0.06224281692504883, 0.06206054306030274, 0.0617696647644043, 0.06210671997070313, 0.06205952072143555, 0.06206870269775391, 0.062099071502685545, 0.06209302520751953, 0.06188304138183594, 0.06235340881347656, 0.062027488708496094, 
0.062269729614257814, 0.062115009307861326, 0.06212239837646484, 0.06215107345581055, 0.06235744094848633, 0.062359615325927736, 0.06250495910644531, 0.06226313781738281, 0.06227164840698242, 0.06256435012817382, 0.06257209777832032, 0.06260940933227539, 0.06245833587646484, 0.06230422210693359, 0.06246809768676758, 0.06239187240600586, 0.06272998428344727, 0.062274368286132815, 0.06239779281616211, 0.06267734527587891, 0.06276496124267578, 0.06262607955932617, 0.06299142456054688, 0.06270425415039063, 0.06280838394165039, 0.06264422225952149, 0.06259916687011718, 0.06253158569335937, 0.06265024185180663, 0.06278566360473632, 0.06279782485961914, 0.06291251373291015, 0.06284288024902343, 0.06266831970214844, 0.06286998367309571, 0.06282444763183594, 0.06261967849731445, 0.0628996810913086, 0.06307481765747071, 0.06302931213378907, 0.06315615844726563, 0.0630374412536621, 0.06262937545776368, 0.06205283355712891, 0.06150761413574219, 0.061582847595214846, 0.061667839050292966, 0.061739009857177736, 0.06194796752929688, 0.0619150390625, 0.06198278427124024, 0.06221206283569336, 0.06199926376342774, 0.061978431701660154, 0.061830753326416014, 0.06192377471923828, 0.062089248657226564, 0.06226736068725586, 0.062279232025146486, 0.06219820785522461, 0.0620951042175293, 0.06233321762084961, 0.06200419235229492, 0.06215315246582031, 0.06205209732055664, 0.06231273651123047, 0.06213056182861328, 0.06240480041503906, 0.06223641586303711, 0.06218569564819336, 0.06225449752807617, 0.0626712303161621, 0.06251062393188477, 0.062438079833984375, 0.06248857498168945, 0.06258659362792969, 0.06239056015014648, 0.06226739120483398, 0.06217932891845703, 0.06223257446289063, 0.0621893424987793, 0.06226268768310547, 0.06253855895996094, 0.06280934524536133, 0.0626429443359375, 0.06243328094482422, 0.062814208984375, 0.06268246459960937, 0.06254659271240234, 0.0626237449645996, 0.0626297607421875, 0.06292051315307617, 0.06262406539916993, 0.06265404891967774, 0.06265488052368164, 0.06255523300170898, 0.06289289474487304, 0.06258678436279297, 0.06257676696777344, 0.06269136047363282, 0.06274867248535156, 0.06275276947021484, 0.06283468627929688, 0.06286540985107422, 0.06285830307006836, 0.0626032943725586, 0.062011360168457035, 0.06158940887451172, 0.061611198425292966, 0.06154678344726563, 0.061792896270751956, 0.06193971252441406, 0.06185776138305664, 0.06219782257080078, 0.06217315292358398, 0.062133663177490236, 0.062075328826904294, 0.06202703857421875, 0.062118335723876955, 0.062271041870117186, 0.062171520233154295, 0.062435264587402346, 0.0622023696899414, 0.06203193664550781, 0.062134273529052736, 0.062117889404296876, 0.06203801727294922, 0.06238956832885742, 0.062180030822753904, 0.06218137741088867, 0.062271488189697265, 0.06240995025634766, 0.062187999725341794, 0.062173534393310546, 0.06236569595336914, 0.06268684768676758, 0.06252169418334962, 0.06251625442504882, 0.06269846343994141, 0.06237596893310547, 0.06245782470703125, 0.062182910919189455, 0.062271617889404295, 0.06233945465087891, 0.06215388870239258, 0.06239443206787109, 0.062438175201416014, 0.06271180725097657, 0.0626954231262207, 0.06249676895141602, 0.06252134323120118, 0.06259711837768554, 0.06259097671508788, 0.06256841659545899, 0.06262377548217773, 0.06255007934570313, 0.06274809646606445, 0.06265087890625, 0.06268518447875976, 0.06278553771972656, 0.06267903900146485, 0.06273843383789063, 0.06275686264038086, 0.0629678077697754, 0.06298390579223632, 0.06294742584228516, 0.06331209564208984, 0.06311727905273437, 
0.06260208129882812, 0.062222431182861325, 0.06179638290405273, 0.062175201416015624, 0.0620145263671875, 0.06187868881225586, 0.06187593460083008, 0.061975360870361325, 0.062107521057128905, 0.06224860763549805, 0.06231475067138672, 0.06227289581298828, 0.06224163055419922, 0.062320640563964844, 0.06227478408813476, 0.06262156677246093, 0.06252159881591797, 0.062466815948486326, 0.06228518295288086, 0.06224076843261719, 0.06211638259887695, 0.06216022491455078, 0.06221072006225586, 0.062348926544189456, 0.06224307250976562, 0.06250041580200195, 0.06257516860961915, 0.06287360000610352, 0.0626828498840332, 0.06289616012573242, 0.06279180908203125, 0.06262080001831055, 0.06258790588378907, 0.062438465118408205, 0.06229814529418945, 0.06256483078002929, 0.06267129516601562, 0.06263558578491212, 0.06262623977661133, 0.06263759994506836, 0.06262015914916992, 0.06260118484497071, 0.06275683212280274, 0.0628818244934082, 0.06258262252807617, 0.06284243011474609, 0.06312201690673828, 0.0629678077697754, 0.06282793426513672, 0.06294384002685546, 0.06269327926635743, 0.0626707534790039, 0.06270790481567383, 0.0627691535949707, 0.062963134765625, 0.06307020950317382, 0.06292537689208984, 0.0630231056213379, 0.06338150405883788, 0.06318284988403321, 0.0628592643737793, 0.0631009292602539, 0.06325775909423828]",tokens/s,16.020132777337203,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,836.345856,1903.099904,0.0,1507.852288,1469.840384,s,1,7.574193359375,7.574193359375,0.0,7.574193359375,7.574193359375,7.574193359375,7.574193359375,[7.574193359375],,kWh,9.962895483340617e-06,1.0918203991698297e-06,4.525003619984536e-06,1.5579719502494982e-05,,MB,1136.771072,1945.042944,0.0,1539.309568,1426.272256,s,10,0.8855196151733398,0.08855196151733398,0.0016090846363628177,0.08803173065185546,0.08919951705932616,0.09123092689514159,0.09285605476379395,"[0.09326233673095703, 0.08814620971679688, 0.08739705657958985, 0.08817842864990234, 0.08787843322753906, 0.08791725158691406, 0.0877531509399414, 0.08840086364746094, 0.08874809265136718, 0.0878377914428711]",tokens/s,2890.9579823354698,kWh,2.9496639118685594e-06,3.252961394549608e-07,1.9088932218383453e-06,5.183853273161865e-06,tokens/kWh,49384113.80688136,MB,1157.967872,1953.431552,0.0,1547.698176,1426.274816,s,10,14.043275268554689,1.404327526855469,0.012428507222118449,1.4076810913085938,1.4179456909179688,1.4183011535644532,1.4185855236816407,"[1.393094970703125, 1.39147119140625, 1.38444921875, 1.4186566162109375, 1.41786669921875, 1.41556591796875, 1.408018310546875, 1.4160279541015626, 1.4073438720703124, 1.390780517578125]",tokens/s,44.86132956538127,kWh,4.052118441813273e-05,4.469144923210777e-06,2.0030318773961722e-05,6.502064811530524e-05,tokens/kWh,968922.9779481451,,s,630,14.040806724548343,0.02228699480087038,0.00039382153057359665,0.022294223785400392,0.022652210426330564,0.022793475914001465,0.023854936161041264,"[0.02276905632019043, 
0.022347488403320313, 0.022135679244995116, 0.02208131217956543, 0.021897472381591798, 0.022273632049560548, 0.022090112686157227, 0.02188697624206543, 0.02225766372680664, 0.022239231109619142, 0.022173696517944336, 0.021958656311035156, 0.02175103950500488, 0.021725631713867186, 0.021708959579467772, 0.021919904708862306, 0.022388736724853517, 0.022035743713378905, 0.021906400680541994, 0.0221910400390625, 0.021979135513305666, 0.022092607498168944, 0.022089279174804688, 0.02203487968444824, 0.022585344314575196, 0.021990560531616212, 0.02237696075439453, 0.022356063842773437, 0.022300928115844727, 0.022046720504760742, 0.02191708755493164, 0.022030208587646486, 0.02173103904724121, 0.021926912307739257, 0.02194755172729492, 0.021863264083862306, 0.021915456771850587, 0.02210361671447754, 0.02218422317504883, 0.02212284851074219, 0.022164928436279298, 0.02228486442565918, 0.022417407989501953, 0.022269952774047853, 0.02232729530334473, 0.02270412826538086, 0.02224947166442871, 0.022744287490844728, 0.02202908706665039, 0.02183932876586914, 0.022123039245605467, 0.022337440490722657, 0.02191574478149414, 0.022032384872436524, 0.02206924819946289, 0.021914655685424805, 0.021926816940307618, 0.022188095092773436, 0.021849119186401367, 0.021767135620117187, 0.02208358383178711, 0.022188032150268554, 0.022099967956542968, 0.02285206413269043, 0.022132736206054687, 0.02199510383605957, 0.022054399490356445, 0.02241219139099121, 0.023459487915039063, 0.02306083106994629, 0.022795808792114257, 0.022185951232910155, 0.022089759826660157, 0.0220164794921875, 0.021870559692382812, 0.022064735412597656, 0.02198512077331543, 0.021995391845703125, 0.022073663711547852, 0.022385055541992188, 0.02206924819946289, 0.021943935394287108, 0.021918079376220704, 0.021815296173095702, 0.02389606475830078, 0.022386688232421875, 0.021947872161865233, 0.022007904052734374, 0.021940671920776367, 0.02218943977355957, 0.02193471908569336, 0.022420608520507812, 0.022192640304565428, 0.021930368423461914, 0.021805055618286134, 0.02172313690185547, 0.02189107131958008, 0.022336736679077148, 0.02225027275085449, 0.02271353530883789, 0.02220524787902832, 0.022034431457519533, 0.0221276798248291, 0.021735647201538085, 0.021674463272094727, 0.021745759963989256, 0.02195804786682129, 0.021891359329223634, 0.02196726417541504, 0.021741632461547852, 0.021600255966186522, 0.022179840087890625, 0.02168828773498535, 0.021672128677368164, 0.022060895919799806, 0.022136831283569337, 0.021991424560546875, 0.02185215950012207, 0.02182963180541992, 0.02215116882324219, 0.02192915153503418, 0.021862432479858397, 0.021592031478881835, 0.02157651138305664, 0.02167807960510254, 0.02163711929321289, 0.022790624618530275, 0.021755903244018555, 0.021681184768676757, 0.02170159912109375, 0.021684415817260744, 0.02221558380126953, 0.021738399505615236, 0.022054912567138672, 0.021784000396728516, 0.0218670711517334, 0.021940223693847655, 0.021851680755615235, 0.02163145637512207, 0.02167398452758789, 0.02165551948547363, 0.021503679275512694, 0.02154924774169922, 0.021536928176879883, 0.021546464920043945, 0.021635488510131837, 0.02168025588989258, 0.021616544723510742, 0.021865760803222656, 0.021982303619384767, 0.02193142318725586, 0.02173129653930664, 0.021635168075561522, 0.021795360565185547, 0.021614240646362304, 0.021717023849487305, 0.021874208450317383, 0.02236400032043457, 0.022362783432006837, 0.022111583709716796, 0.021967519760131837, 0.022132736206054687, 0.02184601593017578, 0.021690336227416993, 0.021731359481811523, 
0.02166281509399414, 0.021746591567993166, 0.021621824264526367, 0.021638015747070312, 0.02162486457824707, 0.021712703704833983, 0.021663808822631837, 0.02174991989135742, 0.021768192291259765, 0.021819263458251952, 0.02205299186706543, 0.02209382438659668, 0.022302047729492187, 0.022604448318481445, 0.02250102424621582, 0.023230815887451173, 0.02395132827758789, 0.022502559661865235, 0.02249817657470703, 0.022576223373413085, 0.022393280029296875, 0.022394336700439454, 0.022252544403076172, 0.022376447677612304, 0.023058080673217775, 0.022436511993408202, 0.022483104705810546, 0.022625600814819336, 0.022198368072509765, 0.022518367767333985, 0.022394880294799805, 0.022455808639526367, 0.022469120025634767, 0.022617984771728515, 0.022524032592773437, 0.022269952774047853, 0.02224127960205078, 0.02234163284301758, 0.022216512680053712, 0.022230239868164064, 0.022352863311767578, 0.022339584350585938, 0.02227609634399414, 0.02229862403869629, 0.02225152015686035, 0.02217532730102539, 0.022220544815063477, 0.022163103103637696, 0.02253926467895508, 0.022458368301391602, 0.023175071716308594, 0.023226335525512697, 0.022761152267456054, 0.022294111251831054, 0.02233942413330078, 0.02265190315246582, 0.02249728012084961, 0.022640159606933594, 0.022714847564697264, 0.025616384506225585, 0.02323420715332031, 0.022290239334106444, 0.02211484718322754, 0.022394880294799805, 0.022388736724853517, 0.022412960052490234, 0.022149471282958983, 0.022260896682739256, 0.022500160217285157, 0.02245430374145508, 0.022490432739257812, 0.023119647979736327, 0.022823423385620118, 0.022329727172851564, 0.02251094436645508, 0.022325952529907225, 0.022376447677612304, 0.02256889533996582, 0.02236422348022461, 0.022237184524536133, 0.02232294464111328, 0.022538496017456056, 0.02231648063659668, 0.0224835205078125, 0.02234761619567871, 0.022263967514038085, 0.022730752944946288, 0.022818559646606444, 0.022527711868286133, 0.022376991271972655, 0.022614015579223632, 0.022361471176147462, 0.022319616317749022, 0.02242777633666992, 0.024313343048095702, 0.024013311386108398, 0.02243315124511719, 0.0225798397064209, 0.022568960189819336, 0.022339040756225587, 0.022268447875976562, 0.022709760665893555, 0.022370336532592773, 0.022385120391845703, 0.022476383209228516, 0.022256032943725586, 0.022621728897094726, 0.022294336318969727, 0.022172319412231446, 0.022203744888305663, 0.022608543395996095, 0.022318304061889647, 0.022270751953125, 0.022501375198364256, 0.022623584747314452, 0.02252457618713379, 0.022332704544067383, 0.02242972755432129, 0.02259424018859863, 0.02265497589111328, 0.022255903244018556, 0.02257891273498535, 0.02246451187133789, 0.022420896530151366, 0.022642463684082032, 0.022350175857543945, 0.022372800827026366, 0.022321056365966797, 0.02235331153869629, 0.022323776245117187, 0.022300479888916015, 0.022298976898193358, 0.022301984786987306, 0.022430431365966796, 0.02242870330810547, 0.023782367706298827, 0.022384639739990234, 0.022468608856201173, 0.022405120849609376, 0.022693727493286135, 0.02232048034667969, 0.022401599884033202, 0.022456575393676757, 0.02232953643798828, 0.022341407775878907, 0.0222761287689209, 0.022481983184814452, 0.0222379207611084, 0.022356191635131837, 0.022468128204345704, 0.02289449691772461, 0.022697216033935548, 0.022674272537231446, 0.02261724853515625, 0.022661983489990236, 0.02253824043273926, 0.022392704010009767, 0.022351200103759766, 0.022364959716796876, 0.02268569564819336, 0.022662559509277345, 0.022364160537719727, 0.022413375854492188, 0.022536447525024414, 
0.022421728134155272, 0.022339136123657226, 0.022247936248779295, 0.022237184524536133, 0.022495231628417968, 0.022433792114257813, 0.022585344314575196, 0.023002111434936523, 0.022559743881225586, 0.022480863571166992, 0.022325279235839844, 0.022558719635009765, 0.022511615753173828, 0.02247270393371582, 0.02236947250366211, 0.02255135917663574, 0.022509567260742186, 0.022329696655273436, 0.0224880313873291, 0.022534847259521484, 0.022196224212646484, 0.022179840087890625, 0.0224399356842041, 0.02272051239013672, 0.022430816650390626, 0.022321279525756837, 0.022450624465942384, 0.022301023483276367, 0.022478143692016603, 0.0224385929107666, 0.02243174362182617, 0.02243174362182617, 0.022579200744628908, 0.022388256072998047, 0.022551008224487305, 0.02246784019470215, 0.022472991943359374, 0.022351551055908202, 0.022077600479125978, 0.02275596809387207, 0.022779903411865234, 0.023166015625, 0.022340543746948244, 0.022200511932373046, 0.022136640548706055, 0.022357440948486327, 0.02224799919128418, 0.022161312103271484, 0.022155359268188478, 0.023228960037231447, 0.02249772834777832, 0.02221776008605957, 0.022274208068847657, 0.022690624237060548, 0.022362112045288086, 0.02231430435180664, 0.022206207275390626, 0.02275833511352539, 0.0224682559967041, 0.0221146240234375, 0.022351743698120118, 0.02217795181274414, 0.02223209571838379, 0.022152191162109376, 0.022208480834960936, 0.022726655960083008, 0.02239481544494629, 0.022226848602294923, 0.022134944915771483, 0.022402624130249023, 0.022571456909179687, 0.022457759857177736, 0.022146656036376954, 0.0221246395111084, 0.022731327056884767, 0.022401376724243163, 0.02246985626220703, 0.02228630447387695, 0.022487871170043944, 0.02231500816345215, 0.02240246391296387, 0.022377056121826173, 0.02216534423828125, 0.022248640060424804, 0.02211859130859375, 0.021860511779785156, 0.02221939277648926, 0.022689184188842772, 0.02243235206604004, 0.022142303466796874, 0.021970720291137696, 0.022243392944335937, 0.022143808364868164, 0.02215705680847168, 0.022161376953125, 0.022280479431152345, 0.02239897537231445, 0.022326656341552734, 0.02226598358154297, 0.022306880950927734, 0.02246905517578125, 0.02248908805847168, 0.02245631980895996, 0.02255820846557617, 0.022430208206176756, 0.022372352600097657, 0.022390783309936522, 0.022220800399780274, 0.022231039047241212, 0.022228031158447265, 0.022412223815917967, 0.02248089599609375, 0.0228985595703125, 0.022280736923217772, 0.022221183776855467, 0.022495231628417968, 0.022648832321166993, 0.02262835121154785, 0.022403072357177735, 0.022374399185180666, 0.022321151733398437, 0.02251753616333008, 0.0225118408203125, 0.022304479598999023, 0.022325088500976562, 0.02233184051513672, 0.02218707275390625, 0.022277055740356447, 0.022450176239013672, 0.022352991104125978, 0.02260188865661621, 0.02215769577026367, 0.02207974433898926, 0.02229055976867676, 0.022681600570678712, 0.022431232452392577, 0.02226777648925781, 0.022497215270996095, 0.022332096099853517, 0.024602624893188478, 0.022839296340942384, 0.02269932746887207, 0.022942399978637694, 0.022514944076538087, 0.02251238441467285, 0.02243174362182617, 0.02228223991394043, 0.022262815475463868, 0.022369247436523437, 0.02216134452819824, 0.022052928924560546, 0.022062431335449217, 0.02266339111328125, 0.02261235237121582, 0.022491199493408203, 0.0221265926361084, 0.022196224212646484, 0.022079488754272462, 0.02225971221923828, 0.02238198471069336, 0.022532928466796876, 0.022451583862304687, 0.022587263107299804, 0.023884576797485353, 0.02358246421813965, 
0.022570016860961915, 0.022388927459716795, 0.02206800079345703, 0.022353536605834962, 0.02200739288330078, 0.022249568939208986, 0.02269046401977539, 0.022493215560913087, 0.022248992919921873, 0.022251264572143555, 0.022806144714355468, 0.022405664443969728, 0.022208063125610352, 0.022696224212646485, 0.022165216445922852, 0.022367807388305665, 0.022266592025756836, 0.022443296432495118, 0.022090463638305663, 0.022334943771362303, 0.022641023635864257, 0.022451616287231444, 0.02231068801879883, 0.022376895904541016, 0.02237289619445801, 0.022471904754638672, 0.022299423217773437, 0.022335487365722655, 0.022421503067016603, 0.022495391845703126, 0.022363391876220703, 0.022194784164428712, 0.022278144836425783, 0.022220800399780274, 0.02221820831298828, 0.022380224227905275, 0.02215407943725586, 0.02246806335449219, 0.022080032348632813, 0.022640640258789063, 0.022515071868896484, 0.02261225509643555, 0.022210912704467775, 0.022328575134277343, 0.02219843292236328, 0.022366815567016602, 0.022392831802368163, 0.022273887634277345, 0.02215888023376465, 0.02235228729248047, 0.022188255310058594, 0.02225971221923828, 0.02200134468078613, 0.022090047836303712, 0.022314399719238282, 0.022200927734375, 0.022147071838378905, 0.022050111770629884, 0.02229097557067871, 0.022304256439208983, 0.022566688537597655, 0.02243673515319824, 0.02241472053527832, 0.022432384490966798, 0.022421375274658203, 0.02226380729675293, 0.02237225532531738, 0.022184160232543944, 0.022525056838989258, 0.02232963180541992, 0.022243040084838867, 0.0224899845123291, 0.022179840087890625, 0.02281046485900879, 0.022216512680053712, 0.02213055992126465, 0.02256697654724121, 0.022438304901123047, 0.022362112045288086, 0.022208511352539064, 0.02233263969421387, 0.022172447204589843, 0.02232048034667969, 0.02208835220336914, 0.022054399490356445, 0.022006240844726563, 0.022118431091308594, 0.02178483200073242, 0.021734495162963868, 0.022206655502319338, 0.022274431228637696, 0.02213692855834961, 0.022119775772094726, 0.02189788818359375, 0.02198080062866211, 0.022495359420776368, 0.02198758316040039, 0.022261728286743165, 0.022109472274780273, 0.02275542449951172, 0.021857152938842772, 0.02172496032714844, 0.021753408432006835, 0.022812255859375, 0.02186262321472168, 0.02197977638244629, 0.023086336135864256, 0.021996288299560546, 0.02190745544433594, 0.021820703506469728, 0.02175424003601074, 0.022784351348876953, 0.02182931137084961, 0.021760095596313478, 0.021586143493652343, 0.021700607299804688, 0.0216944637298584, 0.021934080123901366, 0.02190336036682129, 0.021998783111572266, 0.02204345512390137, 0.021993471145629884, 0.02205081558227539, 0.022046367645263673, 0.021946016311645507, 0.021914304733276366, 0.02186614418029785, 0.02183612823486328, 0.02255023956298828, 0.02195484733581543, 0.021850208282470703, 0.021946144104003907, 0.021823616027832032, 0.021792768478393554, 0.021712896347045898, 0.021914976119995117]",tokens/s,44.869216730868835,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = 
DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.427008,6223.233024,0.0,5827.985408,5712.718848,s,1,7.35060791015625,7.35060791015625,0.0,7.35060791015625,7.35060791015625,7.35060791015625,7.35060791015625,[7.35060791015625],,kWh,1.1335553629161647e-05,1.2427354602444235e-06,4.540559187998783e-06,1.7118848277404853e-05,,MB,1105.03936,6430.851072,0.0,6025.117696,5988.31104,s,10,5.266182556152343,0.5266182556152343,0.0033533696184772214,0.5274917602539062,0.5288304748535156,0.5293293914794922,0.5297285247802734,"[0.5171857299804687, 0.5271314697265626, 0.5255582275390625, 0.5278660278320313, 0.52785205078125, 0.5267911376953125, 0.5298283081054688, 0.5287196044921875, 0.52656787109375, 0.52868212890625]",tokens/s,486.12063343858426,kWh,1.5356614839999643e-05,1.6935863205025556e-06,1.0231910963300061e-05,2.7282112123802258e-05,tokens/kWh,9383437.72059543,MB,1126.424576,6514.737152,0.0,6109.003776,6092.423168,s,10,18.62453515625,1.8624535156250002,0.0026194074933142417,1.8625668945312501,1.8650244995117187,1.8663476379394532,1.8674061486816407,"[1.8676707763671876, 1.86459619140625, 1.859385498046875, 1.8589068603515626, 1.860238037109375, 1.8620662841796876, 1.86473046875, 1.8632889404296875, 1.8605845947265625, 1.8630675048828125]",tokens/s,33.82634759550416,kWh,5.470010627666928e-05,6.032732448828807e-06,3.625929289629987e-05,9.699213162179796e-05,tokens/kWh,649537.224789082,,s,630,18.621239629745485,0.029557523221818226,0.0002972871238932349,0.029545472145080565,0.029829120635986327,0.029975936222076418,0.03106435123443604,"[0.03123980712890625, 0.030093664169311522, 0.029759103775024415, 0.02931350326538086, 0.029312448501586916, 0.02931052780151367, 0.029238176345825196, 0.0293021125793457, 0.02966793632507324, 0.029489120483398436, 0.029528160095214844, 0.029437952041625977, 0.02928438377380371, 0.029335519790649415, 0.029214719772338867, 0.029234560012817382, 0.029180543899536133, 0.02935603141784668, 0.029238847732543944, 0.029530559539794922, 0.029437952041625977, 0.029650943756103516, 0.030299264907836913, 0.029453184127807615, 0.02954649543762207, 0.029711648941040038, 0.029430496215820313, 0.02941043281555176, 0.029365119934082032, 0.029321216583251954, 0.029754560470581056, 0.02950227165222168, 0.029672639846801758, 0.029654848098754884, 0.02979532814025879, 0.029829120635986327, 
0.02959116744995117, 0.029962623596191406, 0.029722528457641603, 0.02966102409362793, 0.0296646728515625, 0.02979702377319336, 0.029825183868408205, 0.02982048034667969, 0.02979478454589844, 0.029757312774658203, 0.029504928588867187, 0.029434207916259766, 0.029677919387817383, 0.029751327514648436, 0.029405183792114258, 0.029521791458129883, 0.029572544097900392, 0.029698751449584962, 0.029912288665771485, 0.029696352005004884, 0.029651039123535155, 0.030071136474609374, 0.030113792419433592, 0.029861888885498046, 0.03012403106689453, 0.029782112121582032, 0.030015487670898438, 0.031291391372680666, 0.0302838077545166, 0.029650911331176758, 0.02939289665222168, 0.029255680084228516, 0.029275264739990235, 0.029143936157226564, 0.029147136688232423, 0.029216768264770508, 0.029492639541625978, 0.02929315185546875, 0.029251071929931642, 0.02932793617248535, 0.029304447174072264, 0.029268287658691407, 0.02928963279724121, 0.029251840591430663, 0.029573728561401367, 0.02946832084655762, 0.029384384155273436, 0.029442720413208008, 0.029511295318603515, 0.02957548713684082, 0.029345855712890626, 0.029420576095581054, 0.02939798355102539, 0.029380640029907226, 0.02952739143371582, 0.029395360946655274, 0.02963587188720703, 0.029402048110961913, 0.02954982376098633, 0.029772544860839845, 0.030074880599975585, 0.02993312072753906, 0.029774080276489256, 0.029765312194824218, 0.029778144836425782, 0.029794591903686524, 0.02962985610961914, 0.029843488693237306, 0.029688095092773436, 0.029591840744018556, 0.029642751693725586, 0.029638656616210936, 0.0297205753326416, 0.029634559631347656, 0.02953798484802246, 0.029428031921386717, 0.02968780708312988, 0.029706239700317383, 0.02970419120788574, 0.029681024551391603, 0.02964134407043457, 0.029714431762695313, 0.02998886489868164, 0.02958745574951172, 0.029742975234985352, 0.029710464477539063, 0.029814687728881836, 0.02962441635131836, 0.029609983444213867, 0.029659135818481445, 0.031364992141723634, 0.030175071716308594, 0.029565088272094725, 0.029237119674682618, 0.029116416931152345, 0.02909401512145996, 0.029038591384887694, 0.029128704071044922, 0.02931711959838867, 0.029304800033569337, 0.02952400016784668, 0.029266176223754884, 0.02921651268005371, 0.029190143585205077, 0.029216768264770508, 0.029212671279907225, 0.0291778564453125, 0.029427711486816405, 0.029310976028442383, 0.029279392242431642, 0.029344608306884765, 0.029425664901733397, 0.02947465515136719, 0.029384864807128906, 0.029349279403686524, 0.02930134391784668, 0.02930073547363281, 0.029294591903686523, 0.029398752212524415, 0.029543872833251952, 0.02957542419433594, 0.029645248413085936, 0.029735071182250977, 0.029857791900634766, 0.029857696533203124, 0.02988607978820801, 0.029718175888061523, 0.029610815048217772, 0.02951795196533203, 0.029519359588623048, 0.029373823165893556, 0.029500640869140626, 0.02950918388366699, 0.0295218563079834, 0.02955606460571289, 0.029524927139282228, 0.029665279388427734, 0.0294946231842041, 0.02952668762207031, 0.029593599319458007, 0.029605024337768553, 0.029560928344726563, 0.02965376091003418, 0.02953011131286621, 0.029550592422485353, 0.029478368759155275, 0.02959414482116699, 0.02960588836669922, 0.029568960189819336, 0.02961414337158203, 0.029752960205078127, 0.029723007202148436, 0.02967331123352051, 0.03100652885437012, 0.029876415252685546, 0.029519744873046875, 0.02930086326599121, 0.029236831665039063, 0.029284448623657228, 0.029288768768310547, 0.029230464935302736, 0.029237184524536133, 0.02929475212097168, 0.029342239379882812, 
0.029247488021850586, 0.029351936340332032, 0.02918956756591797, 0.0291777286529541, 0.029239999771118165, 0.029250783920288084, 0.029338399887084962, 0.029269983291625976, 0.029337631225585938, 0.02918364715576172, 0.029255647659301758, 0.029260160446166993, 0.029254976272583007, 0.0292872314453125, 0.02935385513305664, 0.029417152404785155, 0.02935753631591797, 0.029416288375854492, 0.0294168643951416, 0.02959823989868164, 0.030027999877929687, 0.029759328842163087, 0.029698047637939453, 0.029797792434692383, 0.029878879547119142, 0.029638656616210936, 0.029663232803344725, 0.02955264091491699, 0.02958950424194336, 0.029490976333618163, 0.029546911239624024, 0.02951968002319336, 0.029634176254272462, 0.029423999786376952, 0.029462528228759766, 0.029378559112548826, 0.029365951538085938, 0.029335872650146484, 0.029408607482910156, 0.029678239822387695, 0.029817951202392577, 0.029723072052001955, 0.029770111083984373, 0.029704288482666017, 0.029566719055175782, 0.029573312759399416, 0.02957948875427246, 0.029576448440551757, 0.02954204750061035, 0.02964371109008789, 0.02959974479675293, 0.029824607849121092, 0.030799072265625, 0.02980944061279297, 0.029378400802612305, 0.02924995231628418, 0.02919808006286621, 0.029224960327148438, 0.029270015716552734, 0.0293703670501709, 0.0293253116607666, 0.029347328186035155, 0.02927462387084961, 0.029212671279907225, 0.02940835189819336, 0.029295520782470705, 0.029220767974853516, 0.029173856735229493, 0.029187999725341796, 0.02925168037414551, 0.029181951522827147, 0.02933977508544922, 0.029207904815673827, 0.029323808670043944, 0.029344768524169923, 0.029274560928344726, 0.02928611183166504, 0.029478944778442383, 0.029307104110717772, 0.02960223960876465, 0.029628576278686522, 0.029541824340820314, 0.029673120498657227, 0.02952272033691406, 0.02971558380126953, 0.029692928314208986, 0.02971238327026367, 0.029676992416381835, 0.029681440353393554, 0.029755807876586913, 0.029501823425292967, 0.029607936859130858, 0.02959062385559082, 0.029547168731689454, 0.029645055770874024, 0.029792255401611328, 0.029620223999023438, 0.029585535049438477, 0.029525888442993163, 0.029518848419189454, 0.029502464294433595, 0.029687007904052733, 0.02963462448120117, 0.029596000671386718, 0.029440383911132812, 0.029632448196411133, 0.029568544387817385, 0.029694528579711915, 0.029650911331176758, 0.02976723289489746, 0.029716447830200197, 0.029723104476928712, 0.029822975158691405, 0.02978611183166504, 0.029779136657714842, 0.03121183967590332, 0.03016694450378418, 0.029497440338134766, 0.02939388847351074, 0.02922300720214844, 0.02927299118041992, 0.02936832046508789, 0.029469919204711915, 0.029307680130004884, 0.029218368530273438, 0.02916806411743164, 0.0291409912109375, 0.029237247467041014, 0.029373472213745117, 0.02939523124694824, 0.029214719772338867, 0.02924799919128418, 0.029243167877197267, 0.029301151275634766, 0.029332735061645507, 0.029295295715332032, 0.029194303512573242, 0.029262943267822264, 0.029403968811035155, 0.029406848907470702, 0.029581632614135742, 0.029376640319824218, 0.029392927169799805, 0.029533599853515623, 0.029549152374267577, 0.029582815170288088, 0.02957776069641113, 0.029577215194702147, 0.029904064178466798, 0.02993235206604004, 0.029859071731567384, 0.02971468734741211, 0.02971820831298828, 0.02969385528564453, 0.029622623443603516, 0.029655616760253908, 0.029677536010742186, 0.029589536666870118, 0.02955673599243164, 0.029671424865722655, 0.02967046356201172, 0.029623231887817382, 0.029676959991455077, 0.029589151382446287, 
0.029673759460449218, 0.02955740737915039, 0.029548479080200196, 0.029523359298706055, 0.02970419120788574, 0.02959552001953125, 0.02959974479675293, 0.029591487884521483, 0.029707103729248046, 0.029650943756103516, 0.029663232803344725, 0.029640703201293944, 0.029665279388427734, 0.02994790458679199, 0.031087968826293947, 0.03007535934448242, 0.029534591674804687, 0.02938012886047363, 0.02923356819152832, 0.029173824310302736, 0.029220224380493164, 0.02917849540710449, 0.029082944869995117, 0.029181663513183593, 0.029395744323730467, 0.029335264205932618, 0.029310976028442383, 0.029612096786499023, 0.029304479598999022, 0.029360895156860353, 0.029370431900024415, 0.029519807815551757, 0.029470720291137696, 0.02940880012512207, 0.029353696823120116, 0.02947052764892578, 0.02948601531982422, 0.029476863861083984, 0.02946227264404297, 0.02934982490539551, 0.02941276741027832, 0.029491167068481445, 0.029496063232421876, 0.029532384872436524, 0.02962784004211426, 0.02964739227294922, 0.02977984046936035, 0.029792383193969728, 0.029949951171875, 0.030045824050903322, 0.029994464874267577, 0.029977088928222657, 0.029831584930419923, 0.029569055557250975, 0.029677536010742186, 0.029697439193725587, 0.029565311431884764, 0.02966927909851074, 0.029509248733520507, 0.02956972885131836, 0.029517824172973633, 0.029587039947509764, 0.029468095779418946, 0.029522911071777343, 0.029605728149414062, 0.02965315246582031, 0.02958896064758301, 0.029678112030029298, 0.029775808334350586, 0.029788127899169924, 0.02968502426147461, 0.029637439727783203, 0.029717632293701172, 0.029637504577636718, 0.02994790458679199, 0.02997452735900879, 0.029928831100463866, 0.031112192153930664, 0.02998159980773926, 0.029485151290893553, 0.02937651252746582, 0.029343744277954102, 0.029326944351196288, 0.029262208938598634, 0.029322496414184572, 0.029220863342285155, 0.029161663055419923, 0.02926838493347168, 0.029343936920166017, 0.029327199935913085, 0.0292475528717041, 0.029377759933471678, 0.029299455642700194, 0.029251712799072266, 0.02929840087890625, 0.029220863342285155, 0.029360416412353516, 0.030070783615112305, 0.0293703670501709, 0.02963852882385254, 0.029519775390625, 0.0296080322265625, 0.02950752067565918, 0.029534400939941405, 0.029593088150024413, 0.029485504150390626, 0.029362239837646485, 0.029406368255615236, 0.02982793617248535, 0.029873695373535156, 0.02983500862121582, 0.02966912078857422, 0.029721567153930664, 0.029515104293823244, 0.02961193656921387, 0.02956159973144531, 0.02953011131286621, 0.029519872665405275, 0.029568864822387696, 0.029436063766479493, 0.029437856674194338, 0.02967955207824707, 0.029578592300415037, 0.02971321678161621, 0.029633983612060547, 0.029636703491210937, 0.029701663970947267, 0.029619136810302735, 0.029679616928100585, 0.02962403106689453, 0.029741344451904298, 0.029716480255126954, 0.029670495986938477, 0.029696800231933593, 0.029702207565307618, 0.029652223587036133, 0.029772480010986327, 0.029691520690917968, 0.029858047485351563, 0.029806848526000976, 0.031124223709106447, 0.030045759201049804, 0.02960633659362793, 0.0293621768951416, 0.029328895568847657, 0.029204992294311522, 0.029104320526123047, 0.029138751983642578, 0.029261823654174804, 0.029239295959472656, 0.02934377670288086, 0.02923491287231445, 0.029149440765380858, 0.029267967224121092, 0.029310400009155274, 0.029254207611083983, 0.02921062469482422, 0.02933964729309082, 0.029272064208984375, 0.029214208602905273, 0.02922694396972656, 0.029364479064941405, 0.02941574478149414, 0.02934988784790039, 
0.0294071044921875, 0.029491327285766603, 0.02954444885253906, 0.02952351951599121, 0.029676095962524414, 0.029495168685913085, 0.02955459213256836, 0.029669471740722656, 0.029718528747558592, 0.029829120635986327, 0.029702144622802733, 0.029898719787597658, 0.02986358451843262, 0.029700128555297852, 0.02950793647766113, 0.029493247985839844, 0.029435808181762696, 0.029511775970458985, 0.029441696166992187, 0.029618528366088866, 0.029526016235351563, 0.029620031356811523, 0.029521663665771483, 0.029565376281738283, 0.029572544097900392, 0.029684127807617186, 0.02950364875793457, 0.02961750411987305, 0.029524639129638673, 0.02953763198852539, 0.029554847717285157, 0.029608448028564452, 0.029613567352294923, 0.02961609649658203, 0.029682207107543945, 0.029634559631347656, 0.029569023132324217, 0.02993561553955078, 0.02993270492553711, 0.030819936752319334, 0.02989481544494629, 0.029397247314453125, 0.029251552581787108, 0.02933660888671875, 0.02938982391357422, 0.029271263122558594, 0.029170272827148437, 0.02920822334289551, 0.02923980712890625, 0.02923936080932617, 0.029282272338867186, 0.029395040512084962, 0.029267871856689453, 0.029279935836791993, 0.02921504020690918, 0.02913443183898926, 0.029301151275634766, 0.02936835289001465, 0.029393983840942384, 0.029416351318359374, 0.029451583862304686, 0.029457088470458984, 0.029353599548339843, 0.029342079162597658, 0.029433088302612306, 0.029470624923706053, 0.029588287353515624, 0.029618207931518554, 0.029620223999023438, 0.029693151473999025, 0.02974799919128418, 0.029954048156738283, 0.030062591552734375, 0.030099456787109374, 0.029988447189331056, 0.0298721923828125, 0.029661184310913087, 0.02955094337463379, 0.029414432525634766, 0.02942460823059082, 0.029503328323364258, 0.029670751571655274, 0.029608768463134767, 0.029612031936645508, 0.029589056015014648, 0.029685983657836913, 0.029621631622314452, 0.029530975341796876, 0.029726240158081056, 0.029606367111206056, 0.02965456008911133, 0.02953878402709961, 0.02976095962524414, 0.029674047470092772, 0.02977382469177246, 0.029826271057128907, 0.02976028823852539, 0.02957107162475586, 0.029609983444213867, 0.029665279388427734, 0.029892608642578124, 0.02977382469177246]",tokens/s,33.832334072627525,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = 
worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1153, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 918, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 918, in <listcomp> [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 691, in __init__ self.self_attn = QWEN2MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 294, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in <listcomp> self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 634, in __init__ self.attention = GPT_NEOX_ATTENTION_CLASSES[config._attn_implementation](config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 402, in __init__ super().__init__(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 102, in __init__ self.query_key_value = nn.Linear(config.hidden_size, 3 * config.hidden_size, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in <listcomp> [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.370816,569.311232,0.0,174.063616,172.57984,s,1,7.39778955078125,7.39778955078125,0.0,7.39778955078125,7.39778955078125,7.39778955078125,7.39778955078125,[7.39778955078125],,kWh,4.7222717499986785e-06,5.062745464826148e-07,2.0172238360122163e-06,7.245770132493509e-06,,MB,1102.393344,640.6144,0.0,234.881024,215.589888,s,28,0.2848372488021851,0.010172758885792324,0.00013117854918736572,0.010129647731781006,0.010273417854309084,0.010319713973999023,0.010636543121337891,"[0.010750176429748536, 0.010159199714660644, 0.010094880104064941, 0.010211104393005372, 0.010301888465881348, 0.010120927810668945, 0.010138175964355468, 0.010130271911621094, 0.010129023551940917, 0.010106335639953614, 0.010083200454711914, 0.010213151931762695, 0.01010598373413086, 0.010100319862365722, 0.010065631866455078, 0.010045984268188476, 0.01007094383239746, 0.010248096466064453, 0.010171072006225586, 0.010164031982421875, 0.010261216163635255, 0.010215295791625976, 0.010104384422302246, 0.010108320236206055, 0.010329312324523925, 0.010110719680786133, 0.010171232223510742, 0.010126367568969726]",tokens/s,25165.247979831674,kWh,3.3376456913332124e-07,3.6808116085848184e-08,2.205903352022912e-07,5.911630204214606e-07,tokens/kWh,433044678.29785556,MB,1124.192256,644.808704,0.0,239.075328,215.592448,s,28,9.718639923095703,0.3470942829677037,0.026224458851421606,0.3441082611083984,0.34980752868652343,0.35327261810302735,0.44566918975830083,"[0.35386810302734373, 0.4796230163574219, 0.34065325927734375, 0.3382584533691406, 0.34457293701171876, 0.3441163024902344, 0.34413015747070314, 0.34559478759765627, 0.340366455078125, 0.3450955505371094, 0.34164190673828126, 0.3381908264160156, 0.3309373474121094, 0.3296451416015625, 0.32842724609375, 0.33047039794921873, 0.346295166015625, 0.3521667175292969, 0.34715267944335937, 0.3487964477539062, 
0.3454385986328125, 0.3441002197265625, 0.3431165466308594, 0.3409849548339844, 0.3409042663574219, 0.34522457885742186, 0.3434397583007813, 0.3454281005859375]",tokens/s,181.50687894177156,kWh,9.682858715171649e-06,1.0678501069536257e-06,4.092357559597695e-06,1.4843066381722967e-05,tokens/kWh,4244405.999394785,,s,1764,9.704967070102688,0.005501682012529869,0.0031853742987004437,0.005389279842376709,0.0056641695499420166,0.005749015855789184,0.00590723310947418,"[0.005378176212310791, 0.005633984088897705, 0.0066518402099609375, 0.0055623679161071774, 0.005505152225494385, 0.005490752220153808, 0.0054225921630859375, 0.005332320213317871, 0.00535427188873291, 0.005353663921356201, 0.005321023941040039, 0.005414271831512451, 0.005429056167602539, 0.0056276159286499025, 0.005731103897094726, 0.005851136207580566, 0.005887936115264893, 0.005803359985351563, 0.00578223991394043, 0.005713823795318603, 0.005698751926422119, 0.0057448320388793946, 0.005591968059539795, 0.006127327919006348, 0.007182432174682617, 0.007171648025512695, 0.007236159801483154, 0.006718624114990235, 0.005348320007324219, 0.0054720001220703125, 0.005443456172943115, 0.005308703899383545, 0.0053290238380432125, 0.005260447978973389, 0.005302879810333252, 0.00533296012878418, 0.005325024127960205, 0.005318496227264404, 0.005530687808990478, 0.005355519771575928, 0.005427328109741211, 0.005481215953826904, 0.005541855812072754, 0.0054354238510131835, 0.0054534077644348145, 0.005450143814086914, 0.005568511962890625, 0.0054570879936218265, 0.0053821120262146, 0.0054026880264282226, 0.005423903942108154, 0.005339136123657226, 0.0054778561592102055, 0.005446176052093506, 0.005436768054962158, 0.005599135875701904, 0.005528319835662842, 0.005575967788696289, 0.0053277120590209965, 0.005435135841369629, 0.005412992000579834, 0.005360799789428711, 0.005388224124908447, 0.00535811185836792, 0.00546230411529541, 0.005336927890777588, 0.005271071910858154, 0.005275712013244629, 0.005300543785095215, 0.005441535949707031, 0.005554080009460449, 0.0056622719764709475, 0.005759039878845215, 0.005617152214050293, 0.005502175807952881, 0.0057853121757507325, 0.0055066561698913575, 0.13898793029785156, 0.005687424182891846, 0.005420032024383545, 0.0054050559997558595, 0.005675519943237305, 0.005770559787750244, 0.0055764479637145994, 0.005662784099578857, 0.00575167989730835, 0.0056128320693969724, 0.005519999980926514, 0.005432767868041992, 0.005298175811767578, 0.005251743793487549, 0.005273791790008545, 0.005321824073791504, 0.005331679821014405, 0.005369855880737305, 0.005406720161437988, 0.005362688064575195, 0.005276735782623291, 0.005328703880310059, 0.005359744071960449, 0.005459775924682617, 0.005511360168457031, 0.005332863807678222, 0.005263423919677735, 0.005430560111999511, 0.0052623038291931155, 0.005242688179016113, 0.0053309440612792965, 0.005343232154846191, 0.005413055896759033, 0.005608575820922852, 0.005612095832824707, 0.005635551929473877, 0.0054811201095581056, 0.0055623679161071774, 0.005621024131774902, 0.005639039993286133, 0.005795072078704834, 0.005729119777679443, 0.005610527992248535, 0.005790624141693115, 0.005574463844299316, 0.005572832107543945, 0.005576191902160644, 0.005474336147308349, 0.00535920000076294, 0.005068895816802979, 0.005327104091644287, 0.005312128067016602, 0.005338848114013672, 0.005344064235687256, 0.005338624000549317, 0.005392288208007813, 0.005374239921569824, 0.005339263916015625, 0.005425087928771973, 0.005422336101531982, 0.005490975856781006, 0.005361567974090576, 
0.005292543888092041, 0.005378208160400391, 0.005658207893371582, 0.005756735801696777, 0.005593855857849121, 0.005472095966339111, 0.00532480001449585, 0.0053433279991149905, 0.005289408206939697, 0.0054074878692626956, 0.005340896129608154, 0.005329055786132813, 0.005309855937957764, 0.005351071834564209, 0.005429183959960938, 0.005285920143127442, 0.005305151939392089, 0.005309919834136963, 0.005290527820587158, 0.005320576190948487, 0.005277535915374756, 0.005269792079925537, 0.0053920321464538574, 0.005368159770965576, 0.0054148478507995606, 0.005303423881530761, 0.0052973442077636716, 0.005244671821594238, 0.005277696132659912, 0.005253344058990478, 0.005240608215332032, 0.005270592212677002, 0.0052952318191528324, 0.005242688179016113, 0.005309887886047363, 0.0052271361351013184, 0.005231935977935791, 0.005292672157287598, 0.005343232154846191, 0.005462016105651855, 0.005594336032867432, 0.005765791893005371, 0.005807360172271728, 0.005733248233795166, 0.005727776050567627, 0.005752480030059815, 0.005575104236602783, 0.005610911846160888, 0.005672959804534912, 0.005542367935180664, 0.00521830415725708, 0.005402624130249023, 0.005409088134765625, 0.005375679969787598, 0.005371903896331787, 0.005281792163848877, 0.005269152164459228, 0.005302624225616455, 0.005264927864074707, 0.005231167793273926, 0.005488160133361816, 0.005222784042358398, 0.0053383359909057616, 0.005546207904815674, 0.005372032165527344, 0.005406400203704834, 0.005461984157562256, 0.005421855926513672, 0.005380064010620117, 0.0053080000877380375, 0.005230944156646728, 0.00530953598022461, 0.005556896209716797, 0.005435743808746338, 0.005816383838653565, 0.005511104106903076, 0.005392608165740967, 0.005389791965484619, 0.005269408226013184, 0.005357984066009521, 0.005326144218444824, 0.005290688037872315, 0.005296351909637451, 0.005381919860839844, 0.005318655967712403, 0.005351424217224121, 0.0055519680976867675, 0.00541212797164917, 0.005440095901489258, 0.0053619518280029295, 0.00532480001449585, 0.0053309440612792965, 0.005302271842956543, 0.005310463905334473, 0.005289472103118896, 0.005282303810119629, 0.005263199806213379, 0.005265567779541016, 0.005281599998474121, 0.005303616046905517, 0.005230815887451172, 0.0052681279182434084, 0.005255392074584961, 0.005246751785278321, 0.005390336036682129, 0.005337088108062744, 0.005269408226013184, 0.005267744064331055, 0.005246784210205078, 0.0052449598312377926, 0.005396480083465576, 0.005713888168334961, 0.005781504154205322, 0.005538527965545654, 0.005690752029418945, 0.005656288146972656, 0.005675360202789307, 0.005472832202911377, 0.005832704067230224, 0.0057588801383972165, 0.005496479988098144, 0.006107200145721436, 0.006009215831756592, 0.005462016105651855, 0.005294079780578613, 0.005320703983306885, 0.005304671764373779, 0.005467552185058594, 0.00540496015548706, 0.005365952014923096, 0.005340544223785401, 0.005360032081604004, 0.005434847831726074, 0.00538643217086792, 0.0054848318099975585, 0.005491903781890869, 0.0054908480644226074, 0.005515232086181641, 0.005409471988677979, 0.005442624092102051, 0.005352223873138428, 0.0053086400032043455, 0.005387584209442139, 0.005307040214538574, 0.0054570560455322265, 0.005337344169616699, 0.005367584228515625, 0.005270080089569092, 0.005333216190338135, 0.005369855880737305, 0.005388351917266845, 0.00551091194152832, 0.005633503913879395, 0.005518112182617187, 0.005492735862731934, 0.0053678078651428224, 0.005439616203308105, 0.005547904014587403, 0.005434912204742431, 0.005564896106719971, 0.005505023956298828, 
0.005347583770751953, 0.005451519966125488, 0.005514463901519775, 0.005446303844451905, 0.005367936134338379, 0.005392608165740967, 0.005594912052154541, 0.00555344009399414, 0.005490784168243408, 0.0053909759521484375, 0.005305791854858398, 0.005332896232604981, 0.005329567909240723, 0.005256703853607178, 0.00524124813079834, 0.005526368141174316, 0.005921951770782471, 0.005810688018798828, 0.005886112213134766, 0.005806975841522217, 0.005666048049926758, 0.005688159942626953, 0.005590911865234375, 0.005550687789916992, 0.005392799854278565, 0.005404672145843506, 0.005369855880737305, 0.005322751998901368, 0.005306111812591553, 0.005367712020874023, 0.005265888214111328, 0.005279327869415283, 0.005340767860412598, 0.005349599838256836, 0.005403103828430175, 0.005331264019012451, 0.005340864181518554, 0.0053944320678710935, 0.005309887886047363, 0.0055277438163757325, 0.005498784065246582, 0.005364192008972168, 0.005250688076019287, 0.005273536205291748, 0.005276095867156982, 0.005295328140258789, 0.005274400234222412, 0.005273215770721436, 0.005263199806213379, 0.005396671772003174, 0.005368159770965576, 0.005261312007904053, 0.005273600101470947, 0.005285920143127442, 0.005311872005462646, 0.005298783779144287, 0.005586944103240967, 0.005760704040527344, 0.005683008193969726, 0.005568416118621826, 0.005773920059204102, 0.005560319900512695, 0.005451488018035889, 0.005357312202453613, 0.005304863929748535, 0.005320703983306885, 0.005483615875244141, 0.005403264045715332, 0.005568672180175782, 0.005611936092376709, 0.005553855895996094, 0.005567840099334717, 0.005594079971313477, 0.005526591777801513, 0.005443327903747559, 0.005384640216827392, 0.005511199951171875, 0.0055016641616821285, 0.005202239990234375, 0.005316160202026367, 0.005249824047088623, 0.005286911964416504, 0.005236671924591064, 0.005290719985961914, 0.0055268478393554685, 0.0057413439750671385, 0.005802976131439209, 0.005730624198913574, 0.005795519828796386, 0.005804512023925781, 0.005767648220062256, 0.005739776134490967, 0.005603295803070068, 0.005574624061584473, 0.0055642881393432615, 0.005499423980712891, 0.005550496101379395, 0.005480447769165039, 0.005539423942565918, 0.005405407905578613, 0.00534281587600708, 0.00537011194229126, 0.005443424224853516, 0.005476319789886474, 0.005423136234283447, 0.005400000095367431, 0.005296703815460205, 0.005311520099639893, 0.005340288162231446, 0.00534665584564209, 0.005382656097412109, 0.00536678409576416, 0.005385216236114502, 0.00548796796798706, 0.0054913277626037595, 0.005451807975769043, 0.005383264064788818, 0.005282688140869141, 0.005387872219085693, 0.005474751949310303, 0.005594399929046631, 0.00563705587387085, 0.005541855812072754, 0.0055001602172851565, 0.00539244794845581, 0.0053396477699279785, 0.005317728042602539, 0.005352352142333984, 0.005346496105194092, 0.005307199954986572, 0.005366943836212158, 0.005296512126922608, 0.005286399841308594, 0.005304287910461425, 0.005299583911895752, 0.005614463806152344, 0.005578656196594239, 0.005472383975982666, 0.0054633598327636715, 0.0054150080680847165, 0.005630496025085449, 0.005165440082550049, 0.005356575965881348, 0.00526639986038208, 0.0052527360916137696, 0.005282112121582031, 0.005341119766235351, 0.00539247989654541, 0.005685535907745362, 0.005811903953552246, 0.005803264141082763, 0.005734367847442627, 0.00583897590637207, 0.005826816082000732, 0.005702047824859619, 0.005752831935882568, 0.00571289587020874, 0.005614143848419189, 0.0055812478065490724, 0.005599232196807862, 0.005539455890655518, 
0.005427584171295166, 0.00545798397064209, 0.005330880165100097, 0.005279232025146485, 0.005310976028442383, 0.005449567794799805, 0.0053517122268676755, 0.005365344047546386, 0.005306848049163818, 0.0053812160491943355, 0.005521471977233887, 0.0054217281341552735, 0.005291840076446533, 0.005421120166778565, 0.0054148159027099605, 0.005351647853851318, 0.005601344108581543, 0.005294015884399414, 0.005290272235870361, 0.005371615886688232, 0.005323935985565185, 0.005325407981872559, 0.005435200214385987, 0.005605823993682861, 0.005917920112609863, 0.005901088237762451, 0.0056761598587036135, 0.005596223831176758, 0.005486400127410889, 0.005455872058868408, 0.005433343887329102, 0.005492032051086426, 0.0055569281578063965, 0.005410816192626953, 0.005443776130676269, 0.005422912120819092, 0.005347583770751953, 0.005426943778991699, 0.005484543800354004, 0.00540499210357666, 0.005387968063354493, 0.005355519771575928, 0.0053350400924682614, 0.005181151866912842, 0.005290463924407959, 0.005240447998046875, 0.005209216117858887, 0.005266496181488037, 0.005335008144378662, 0.005355423927307129, 0.005326720237731933, 0.005258016109466553, 0.005236832141876221, 0.005240928173065185, 0.005281792163848877, 0.005209856033325196, 0.005236800193786621, 0.005252895832061767, 0.005208479881286621, 0.005242400169372559, 0.005273824214935303, 0.005395872116088867, 0.005614431858062744, 0.005890304088592529, 0.00583244800567627, 0.005791679859161377, 0.005686528205871582, 0.005611936092376709, 0.005642623901367188, 0.005592639923095703, 0.005513696193695068, 0.005351776123046875, 0.005400224208831787, 0.005304448127746582, 0.005351295948028564, 0.0052899842262268066, 0.005302527904510498, 0.005271615982055664, 0.005275167942047119, 0.005331103801727295, 0.005273600101470947, 0.005269504070281982, 0.005316800117492676, 0.005283648014068603, 0.005376319885253906, 0.0055047359466552735, 0.005543776035308838, 0.005496640205383301, 0.0054513921737670895, 0.005371808052062988, 0.005398719787597656, 0.005612480163574219, 0.005496479988098144, 0.005425151824951172, 0.005402624130249023, 0.005320703983306885, 0.005591040134429932, 0.005627711772918701, 0.0056274237632751465, 0.005378047943115235, 0.005319327831268311, 0.005281824111938477, 0.005272543907165527, 0.005268479824066162, 0.00543068790435791, 0.005451712131500244, 0.004999551773071289, 0.005257376194000244, 0.005280992031097412, 0.005440288066864014, 0.005332992076873779, 0.005283840179443359, 0.005238143920898437, 0.005415584087371826, 0.005429215908050537, 0.005433343887329102, 0.005259263992309571, 0.005292031764984131, 0.005340896129608154, 0.005253344058990478, 0.005330687999725342, 0.0053656001091003415, 0.005489120006561279, 0.005781311988830567, 0.0057079682350158695, 0.005606688022613525, 0.005571296215057373, 0.00565228796005249, 0.005763264179229737, 0.005723199844360352, 0.005710783958435058, 0.005598400115966797, 0.005547071933746338, 0.0056911039352416995, 0.005503007888793945, 0.005470208168029785, 0.005885216236114502, 0.005559008121490478, 0.00556166410446167, 0.005391039848327637, 0.005361663818359375, 0.0053309440612792965, 0.0052856321334838864, 0.005337344169616699, 0.005285855770111084, 0.005253151893615722, 0.005318655967712403, 0.005468255996704101, 0.005403679847717285, 0.005370304107666015, 0.00535920000076294, 0.0055409598350524905, 0.005597951889038086, 0.005573631763458252, 0.005423359870910645, 0.005391935825347901, 0.0053396477699279785, 0.0057560958862304685, 0.005466239929199219, 0.005472640037536621, 0.0055808000564575196, 
0.005496096134185791, 0.005374112129211426, 0.005650752067565918, 0.006821248054504394, 0.005309247970581054, 0.0053023362159729005, 0.005296127796173096, 0.0053043198585510255, 0.005077375888824463, 0.005404319763183594, 0.005408768177032471, 0.005445600032806397, 0.005482880115509034, 0.005502655982971192, 0.005460256099700927, 0.00537824010848999, 0.0054898238182067875, 0.005333695888519287, 0.005311967849731446, 0.0054132800102233885, 0.005439743995666504, 0.005568416118621826, 0.005824480056762695, 0.005603456020355225, 0.005533408164978027, 0.005464352130889892, 0.005431392192840576, 0.005539391994476318, 0.005552351951599121, 0.00555622386932373, 0.0055582718849182125, 0.005433343887329102, 0.005575776100158691, 0.005616543769836425, 0.005609439849853516, 0.005599071979522705, 0.005853343963623047, 0.0055636482238769535, 0.0054477438926696774, 0.005331295967102051, 0.005398655891418457, 0.005279551982879638, 0.005256703853607178, 0.005305344104766845, 0.005315936088562011, 0.005304831981658936, 0.0053348479270935055, 0.005341087818145752, 0.00529856014251709, 0.005310463905334473, 0.005315616130828858, 0.005297376155853272, 0.005270400047302246, 0.005494944095611572, 0.0052453441619873045, 0.005327167987823486, 0.005253215789794922, 0.005355423927307129, 0.005453375816345215, 0.005355775833129883, 0.0052978239059448245, 0.0054198079109191895, 0.005316351890563965, 0.005314559936523438, 0.005344607830047607, 0.005275775909423828, 0.005343776226043701, 0.0054332160949707034, 0.005309855937957764, 0.005280543804168701, 0.005466047763824463, 0.00512937593460083, 0.005375616073608399, 0.005290976047515869, 0.005322080135345459, 0.005329567909240723, 0.005303775787353516, 0.005351359844207764, 0.005382688045501709, 0.005617248058319092, 0.005663424015045166, 0.005533184051513672, 0.0055418238639831545, 0.005617472171783447, 0.0055559039115905765, 0.005392352104187012, 0.005286303997039795, 0.005228576183319092, 0.00521670389175415, 0.005298431873321533, 0.0052139520645141605, 0.005230591773986816, 0.0052139520645141605, 0.005252863883972168, 0.005229023933410645, 0.005230751991271973, 0.005202112197875977, 0.005278687953948974, 0.005236544132232666, 0.005208992004394532, 0.005277696132659912, 0.005279263973236084, 0.005273888111114502, 0.0052778878211975095, 0.005283840179443359, 0.005268479824066162, 0.005398943901062012, 0.005375999927520752, 0.005277760028839111, 0.005212704181671143, 0.005308127880096435, 0.0052206401824951175, 0.005232480049133301, 0.005238944053649902, 0.005228544235229492, 0.0052408638000488285, 0.005246943950653076, 0.005304224014282226, 0.005230368137359619, 0.005599552154541015, 0.005486591815948487, 0.005773503780364991, 0.005395648002624512, 0.005311103820800781, 0.00522649621963501, 0.005406720161437988, 0.007766016006469726, 0.0053096961975097655, 0.005245728015899658, 0.00527180814743042, 0.005295839786529541, 0.005220384120941162, 0.005299903869628906, 0.005226655960083008, 0.004976640224456787, 0.0051970877647399905, 0.0052128958702087405, 0.005285888195037842, 0.005249023914337158, 0.005248000144958496, 0.0052715520858764645, 0.005233312129974365, 0.005207647800445556, 0.005250879764556885, 0.005237696170806885, 0.005224448204040527, 0.005181439876556396, 0.005240543842315674, 0.005212448120117188, 0.005224063873291016, 0.005310783863067627, 0.005218368053436279, 0.005232287883758545, 0.005248447895050049, 0.005247903823852539, 0.005236832141876221, 0.005330848217010498, 0.005312704086303711, 0.005246784210205078, 0.005274655818939209, 
0.005253536224365234, 0.0052147841453552245, 0.005240384101867676, 0.00526310396194458, 0.005227200031280518, 0.005252255916595459, 0.005317599773406983, 0.005262400150299072, 0.005246880054473877, 0.005263936042785644, 0.005259488105773926, 0.005234816074371338, 0.005251071929931641, 0.005224480152130127, 0.0052326078414916995, 0.005250432014465332, 0.0052397122383117676, 0.005256703853607178, 0.0052144317626953126, 0.005251071929931641, 0.005197824001312256, 0.005221824169158936, 0.005327263832092285, 0.005251232147216797, 0.005228544235229492, 0.005254752159118652, 0.005242623805999756, 0.0052202558517456055, 0.005233151912689209, 0.0053127679824829105, 0.005238783836364746, 0.0053916797637939455, 0.005241536140441895, 0.005244607925415039, 0.005259583950042725, 0.0052796158790588375, 0.00527510404586792, 0.005088511943817139, 0.00527350378036499, 0.005271999835968017, 0.005260896205902099, 0.005222400188446045, 0.005268159866333008, 0.00525324821472168, 0.005214528083801269, 0.005248703956604004, 0.005226784229278565, 0.005215968132019043, 0.005230080127716064, 0.005251584053039551, 0.005238783836364746, 0.005220352172851562, 0.0052899842262268066, 0.005228384017944336, 0.005189792156219482, 0.005332992076873779, 0.005212160110473632, 0.005212160110473632, 0.00520911979675293, 0.005231584072113037, 0.005212160110473632, 0.005197824001312256, 0.005222400188446045, 0.005191967964172363, 0.005207776069641113, 0.005218495845794677, 0.005210080146789551, 0.005203999996185303, 0.0052260799407958984, 0.005193471908569336, 0.005199391841888428, 0.005278816223144531, 0.005220191955566406, 0.005212287902832031, 0.005203904151916504, 0.0053268160820007324, 0.0052013759613037105, 0.005208384037017822, 0.005271743774414062, 0.005228064060211182, 0.005199520111083984, 0.005235680103302002, 0.005223840236663818, 0.005200191974639893, 0.005218431949615478, 0.005210112094879151, 0.005195648193359375, 0.005214335918426513, 0.0052302079200744625, 0.005251455783843994, 0.005222623825073242, 0.0052631359100341795, 0.005242879867553711, 0.005185823917388916, 0.005249760150909424, 0.005186560153961181, 0.005183487892150879, 0.005264992237091064, 0.005220767974853516, 0.005183072090148926, 0.004925631999969483, 0.005192895889282226, 0.005238751888275147, 0.00518828821182251, 0.005187744140625, 0.0051866240501403805, 0.00522054386138916, 0.005200640201568603, 0.0052193598747253414, 0.005250016212463379, 0.0051948800086975095, 0.005206912040710449, 0.005230591773986816, 0.005180704116821289, 0.0051799359321594235, 0.005159327983856201, 0.00520579195022583, 0.005179391860961914, 0.005162720203399658, 0.005210400104522705, 0.005210112094879151, 0.005208000183105469, 0.005234752178192139, 0.005232480049133301, 0.005197984218597412, 0.005183680057525635, 0.005275455951690674, 0.005227871894836426, 0.005252863883972168, 0.005263743877410888, 0.005194399833679199, 0.005214079856872559, 0.0052408318519592285, 0.005236415863037109, 0.005179711818695068, 0.0052204480171203615, 0.005197343826293946, 0.0051849279403686525, 0.005196288108825684, 0.00520854377746582, 0.005205152034759522, 0.00517795181274414, 0.005218560218811035, 0.005205215930938721, 0.005191584110260009, 0.005239456176757812, 0.005232863903045655, 0.005223680019378662, 0.005216383934020996, 0.005259903907775879, 0.005177152156829834, 0.005197728157043457, 0.005248640060424805, 0.0052269759178161625, 0.005204031944274903, 0.0051979517936706545, 0.0051996479034423825, 0.0051833920478820805, 0.005202239990234375, 0.005214208126068115, 0.005197824001312256, 
0.005217599868774414, 0.005253119945526123, 0.005162752151489257, 0.0052247681617736816, 0.005201695919036865, 0.005257472038269043, 0.0052427840232849125, 0.005235968112945556, 0.005248127937316894, 0.005237823963165284, 0.00520249605178833, 0.005218624114990234, 0.005228544235229492, 0.005193535804748535, 0.005195871829986572, 0.005207839965820312, 0.005232704162597656, 0.005203904151916504, 0.0052573118209838865, 0.005221536159515381, 0.0052269439697265625, 0.005212224006652832, 0.0052962880134582516, 0.005265471935272217, 0.0052432317733764645, 0.0052854719161987305, 0.005242208003997803, 0.0052473278045654295, 0.0052555837631225585, 0.005256383895874023, 0.005216447830200195, 0.0052271361351013184, 0.005259168148040771, 0.00520966386795044, 0.005237343788146972, 0.0052501440048217775, 0.005225279808044434, 0.005222432136535645, 0.00531660795211792, 0.005214208126068115, 0.005232639789581299, 0.005254655838012695, 0.005243199825286865, 0.005255167961120606, 0.005254816055297851, 0.005220928192138672, 0.0052399678230285645, 0.005241663932800293, 0.005252768039703369, 0.005216447830200195, 0.0052000322341918944, 0.0052408318519592285, 0.005221856117248535, 0.005253664016723633, 0.005216256141662597, 0.005240575790405273, 0.005226111888885498, 0.005245567798614502, 0.0052420802116394044, 0.005227295875549316, 0.005305535793304443, 0.005294911861419678, 0.005216256141662597, 0.005251232147216797, 0.005348896026611328, 0.005604864120483399, 0.005728767871856689, 0.005339136123657226, 0.006174719810485839, 0.005289599895477295, 0.005269599914550781, 0.005248799800872803, 0.005241343975067139, 0.005298175811767578, 0.005226624011993408, 0.005250944137573242, 0.005310719966888428, 0.005293248176574707, 0.005388031959533692, 0.0053209919929504395, 0.005282112121582031, 0.005320032119750976, 0.005798783779144287, 0.005392672061920166, 0.0054759359359741215, 0.005408063888549804, 0.0054421119689941405, 0.005431295871734619, 0.005436736106872559, 0.005401567935943603, 0.00540169620513916, 0.0054198079109191895, 0.005435488224029541, 0.00550707197189331, 0.00556387186050415, 0.005749087810516357, 0.00565993595123291, 0.0055708479881286625, 0.005675903797149658, 0.005707520008087158, 0.005700704097747803, 0.0055981121063232425, 0.005539999961853027, 0.005560383796691894, 0.005593952178955078, 0.005702591896057129, 0.005627007961273194, 0.005606272220611572, 0.005533696174621582, 0.0055380802154541015, 0.005522367954254151, 0.005548831939697266, 0.005569536209106446, 0.005645311832427978, 0.00566374397277832, 0.005610847949981689, 0.005437376022338868, 0.0053736639022827146, 0.005426591873168946, 0.005395040035247802, 0.005326848030090332, 0.00537395191192627, 0.005383840084075928, 0.005468480110168457, 0.005496863842010498, 0.005484543800354004, 0.0054906878471374515, 0.005484543800354004, 0.005518367767333984, 0.005724991798400879, 0.00569155216217041, 0.005462207794189453, 0.005486400127410889, 0.005445375919342041, 0.005543488025665283, 0.00569209623336792, 0.005556511878967285, 0.005652192115783691, 0.005482048034667969, 0.005448128223419189, 0.005453504085540771, 0.005513023853302002, 0.0056232957839965824, 0.005665760040283203, 0.0056044158935546875, 0.005661952018737793, 0.00562556791305542, 0.005561984062194824, 0.005500448226928711, 0.005627744197845459, 0.005917695999145508, 0.005674560070037842, 0.005596640110015869, 0.005516255855560303, 0.0053814082145690915, 0.005462751865386963, 0.005355584144592285, 0.0053450241088867185, 0.005416160106658936, 0.005639135837554932, 
0.0057402877807617185, 0.0055567359924316405, 0.005443615913391113, 0.005477791786193848, 0.00555244779586792, 0.0056277761459350584, 0.0056423678398132325, 0.005641439914703369, 0.0056228160858154295, 0.0055577921867370605, 0.005554399967193604, 0.0057497601509094234, 0.005626880168914795, 0.005640543937683105, 0.005664351940155029, 0.005789023876190185, 0.005722911834716797, 0.0056191678047180175, 0.005606080055236816, 0.005684288024902344, 0.005619999885559082, 0.005630271911621094, 0.005586143970489502, 0.005583360195159912, 0.0055668802261352535, 0.005541888236999512, 0.005482336044311523, 0.005463488101959228, 0.005474944114685059, 0.005499008178710937, 0.005830944061279297, 0.0053056960105896, 0.005496479988098144, 0.005422080039978027, 0.005404672145843506, 0.005392064094543457, 0.005337408065795898, 0.005377439975738525, 0.005362271785736084, 0.005446720123291016, 0.005681375980377198, 0.005630688190460205, 0.00568342399597168, 0.005532608032226562, 0.005475168228149414, 0.005434976100921631, 0.005424863815307617, 0.005419936180114746, 0.0054878082275390625, 0.005521599769592285, 0.0054952001571655276, 0.005430335998535156, 0.005434304237365723, 0.005449728012084961, 0.005422880172729492, 0.005365119934082031, 0.005412000179290772, 0.005649824142456055, 0.005703328132629394, 0.0056267518997192385, 0.00551529598236084, 0.005459936141967774, 0.005442463874816895, 0.005399456024169922, 0.005410463809967041, 0.005502687931060791, 0.005513792037963867, 0.005478047847747803, 0.00540499210357666, 0.00555625581741333, 0.005732351779937744, 0.005436704158782959, 0.005614048004150391, 0.0055032958984375, 0.005623744010925293, 0.005784992218017578, 0.005710432052612305, 0.005737792015075684, 0.00557535982131958, 0.005709824085235596, 0.005525440216064453, 0.005477920055389405, 0.005519904136657715, 0.005490176200866699, 0.005538144111633301, 0.005533823966979981, 0.005490880012512207, 0.0055103998184204105, 0.005476831912994385, 0.005520927906036377, 0.005498784065246582, 0.005364511966705322, 0.0054107198715209965, 0.005371903896331787, 0.005285151958465576, 0.005688096046447754, 0.005588992118835449, 0.005592639923095703, 0.005468607902526856, 0.005525440216064453, 0.005451839923858642, 0.005455872058868408, 0.00571398401260376, 0.005529088020324707, 0.005572480201721191, 0.005821280002593994, 0.0060403838157653805, 0.00577785587310791, 0.005677536010742188, 0.005631392002105713, 0.005624127864837646, 0.005697824001312256, 0.005711775779724121, 0.005779551982879639, 0.00578547191619873, 0.005885600090026855, 0.005789152145385742, 0.005808703899383545, 0.005624256134033203, 0.005570559978485107, 0.005687295913696289, 0.005611199855804444, 0.005517632007598877, 0.0054243202209472655, 0.0053703680038452144, 0.005337247848510742, 0.005327072143554688, 0.0053861761093139645, 0.00555622386932373, 0.005566463947296142, 0.005538847923278809, 0.005415296077728272, 0.005361695766448974, 0.005362239837646485, 0.005447360038757324, 0.005349696159362793, 0.005309760093688965, 0.00531657600402832, 0.005264095783233643, 0.0052408318519592285, 0.005450816154479981, 0.005534656047821045, 0.0054941439628601076, 0.0053684477806091304, 0.005308671951293946, 0.005352640151977539, 0.005372479915618896, 0.005410367965698242, 0.005562816143035889, 0.005493792057037353, 0.005399519920349121, 0.005451807975769043, 0.005361184120178223, 0.005347775936126709, 0.005390143871307373, 0.0055809922218322755, 0.005825952053070068, 0.005263904094696045, 0.005511168003082275, 0.0054988799095153805, 0.005541888236999512, 
0.005471360206604004, 0.0054767999649047855, 0.005482175827026367, 0.0055016961097717285, 0.005541888236999512, 0.005490719795227051, 0.005435008049011231, 0.005509471893310547, 0.005458943843841553, 0.005501440048217773, 0.0054563841819763184, 0.00538431978225708, 0.005452864170074463, 0.0053870720863342285, 0.0053283519744873045, 0.005314112186431885, 0.005314911842346191, 0.0052865281105041505, 0.00542080020904541, 0.005564671993255615, 0.005592576026916504, 0.00558735990524292, 0.005447711944580078, 0.005423423767089844, 0.0054204797744750975, 0.005386559963226318, 0.005389535903930664, 0.005389023780822754, 0.005519423961639404, 0.005634272098541259, 0.005537568092346191, 0.005502336025238037, 0.005610112190246582, 0.005505311965942383, 0.005487520217895508, 0.005463871955871582, 0.005438464164733887, 0.005418591976165772, 0.005404416084289551, 0.005333631992340088, 0.0053719358444213865, 0.005339136123657226, 0.00533894395828247, 0.005411007881164551, 0.005482272148132324, 0.005494368076324463, 0.005536384105682373, 0.005531775951385498, 0.005558144092559814, 0.005553567886352539, 0.0054462399482727054, 0.005535776138305664, 0.005467552185058594, 0.0054332160949707034, 0.005562719821929932, 0.005671103954315185, 0.0056934719085693355, 0.005641759872436524, 0.005732031822204589, 0.005318304061889648, 0.005591296195983887, 0.005571839809417725, 0.005614431858062744, 0.0055400958061218265, 0.005603072166442871, 0.005578752040863037, 0.005601024150848389, 0.00554531192779541, 0.005465216159820556, 0.005381919860839844, 0.005421055793762207, 0.005400415897369385, 0.005387455940246582, 0.0055075201988220215, 0.00529257583618164, 0.005347583770751953, 0.005404384136199951, 0.005566239833831787, 0.005705344200134277, 0.005734111785888672, 0.005567391872406006, 0.0053678078651428224, 0.0053137922286987304, 0.005336095809936523, 0.005276800155639649, 0.005247776031494141, 0.005303679943084717, 0.005304768085479737, 0.005286240100860596, 0.00538588809967041, 0.0053658242225646975, 0.005375264167785644, 0.005402560234069824, 0.005441472053527832, 0.005563519954681397, 0.005666463851928711, 0.00543120002746582, 0.005387968063354493, 0.0053844799995422365, 0.005400703907012939, 0.005537792205810547, 0.005482495784759522, 0.005416959762573242, 0.005445087909698486, 0.005423744201660156, 0.005482272148132324, 0.00556982421875, 0.005486688137054443, 0.005471007823944092, 0.005639135837554932, 0.005635072231292724, 0.0056113600730896, 0.005513567924499512, 0.005424960136413574, 0.005619967937469482, 0.0053942399024963375, 0.005428768157958984, 0.005386367797851562, 0.005331232070922852, 0.005334943771362305, 0.005324895858764648, 0.005301951885223389, 0.005069920063018799, 0.005314879894256592, 0.005540703773498535, 0.005608607769012451, 0.005528384208679199, 0.005474080085754394, 0.00542310380935669, 0.005393695831298828, 0.005314367771148682, 0.005271679878234863, 0.0052947840690612796, 0.0052952318191528324, 0.005307360172271729, 0.005419072151184082, 0.0055008001327514645, 0.005521471977233887, 0.005597184181213379, 0.0055328960418701174, 0.005595935821533203, 0.005476480007171631, 0.005515359878540039, 0.00556828784942627, 0.005486591815948487, 0.005426432132720947, 0.0054709758758544925, 0.005359615802764893, 0.005302239894866943, 0.005374239921569824, 0.005324543952941895, 0.0053637118339538575, 0.005394144058227539, 0.005372032165527344, 0.005407040119171143, 0.005287295818328858, 0.005272031784057617, 0.005322751998901368, 0.005285888195037842, 0.005283616065979004, 0.005280255794525146, 
0.005348576068878174, 0.005296639919281006, 0.0053034558296203614, 0.00539737606048584, 0.00536572790145874, 0.0054291200637817385, 0.005400191783905029, 0.005394944190979004, 0.005560319900512695, 0.005602496147155762, 0.00560211181640625, 0.005695680141448975, 0.005558080196380615, 0.005596799850463867, 0.0056590080261230465, 0.00559497594833374, 0.005595295906066894, 0.005424352169036866, 0.005425951957702637, 0.005554175853729248, 0.0056501121520996095, 0.005589119911193848, 0.005474495887756348, 0.00555017614364624, 0.00501475191116333, 0.005307007789611817, 0.005322944164276123, 0.005328864097595215, 0.0053043198585510255, 0.005266687870025635, 0.005282432079315186, 0.00527782392501831, 0.0053203201293945315, 0.005292416095733643, 0.0053012480735778805, 0.005286911964416504, 0.005327936172485352, 0.005403327941894531, 0.005370272159576416, 0.005329792022705078, 0.00532374382019043, 0.00532480001449585, 0.0054028801918029785, 0.005366752147674561, 0.0053194561004638674, 0.005334752082824707, 0.005406847953796387, 0.0054150080680847165, 0.005520864009857178, 0.005522016048431396, 0.0053647680282592775, 0.005336351871490478, 0.0053060479164123535, 0.0053678078651428224, 0.005488639831542969, 0.005432511806488037, 0.005348512172698975, 0.005375648021697998, 0.005293375968933105, 0.005268159866333008, 0.005359936237335205, 0.00544326400756836, 0.005414912223815918, 0.005441120147705078, 0.005458335876464843, 0.005660128116607666, 0.0056977920532226565, 0.005727903842926025, 0.005786240100860596, 0.0057077760696411135, 0.005668352127075196, 0.00548038387298584, 0.005464191913604736, 0.005527999877929688, 0.005474239826202393, 0.005443583965301513, 0.005468416213989258, 0.005414720058441162, 0.005347487926483155, 0.005322207927703857, 0.0052986559867858884, 0.0053144640922546385, 0.0053678078651428224, 0.005310175895690918, 0.00537663984298706, 0.005631775856018067, 0.005568384170532226, 0.0055214080810546875, 0.005724319934844971, 0.005629792213439942, 0.005629216194152832, 0.0057494077682495115, 0.0055929598808288575, 0.005554368019104004, 0.005402624130249023, 0.005398528099060058, 0.005337088108062744, 0.0054448962211608885, 0.00552623987197876, 0.005389696121215821, 0.005328735828399658, 0.005276480197906494, 0.005248991966247558, 0.005264544010162354, 0.0053277120590209965, 0.005354976177215576, 0.005306335926055908, 0.005352000236511231, 0.005308351993560791, 0.005287487983703613, 0.005326560020446777, 0.005333087921142578, 0.005500703811645508, 0.005460256099700927, 0.0053536958694458, 0.005320223808288574, 0.005356416225433349, 0.005326496124267578, 0.0053005762100219725, 0.0053077759742736815, 0.005330624103546143, 0.005253056049346924, 0.005263904094696045, 0.005267199993133545, 0.005374752044677735, 0.005581952095031738, 0.005599552154541015, 0.005429247856140137, 0.0055157761573791505, 0.005314752101898194, 0.0052856321334838864, 0.005307839870452881, 0.0054358081817626955, 0.005390560150146485, 0.005498303890228272, 0.005425024032592774, 0.005444223880767822, 0.005435103893280029, 0.005507423877716065, 0.005478432178497314, 0.005367455959320068, 0.005327167987823486, 0.005320191860198975, 0.005449632167816162, 0.005322336196899414, 0.005485856056213379, 0.0053901119232177734, 0.005326720237731933, 0.005349696159362793, 0.005432479858398438, 0.0050178241729736325, 0.005277696132659912, 0.005289663791656494, 0.0053517441749572755, 0.00536575984954834, 0.005322815895080567, 0.005273344039916993, 0.005247168064117432, 0.0052899842262268066, 0.005337088108062744, 
0.005449440002441406, 0.005685535907745362, 0.0059985918998718265, 0.005867519855499268, 0.005826560020446778, 0.00578172779083252, 0.0057710399627685545, 0.005748608112335205, 0.005746816158294678, 0.0056835198402404785, 0.0056761598587036135, 0.005699584007263184, 0.005796031951904297, 0.005676928043365478, 0.005626336097717285, 0.005672863960266113, 0.005572383880615235, 0.005527999877929688, 0.00545577621459961, 0.0055214080810546875, 0.005361663818359375, 0.005310400009155274, 0.005254816055297851, 0.005291808128356934, 0.005278048038482666, 0.0052165441513061525, 0.005492512226104736, 0.00551142406463623, 0.005343167781829834, 0.00535964822769165, 0.005297408103942871, 0.005296895980834961, 0.005325119972229004, 0.005265183925628662, 0.0052854399681091305, 0.005297535896301269, 0.005495935916900635, 0.005672575950622559, 0.005738272190093994, 0.005566336154937744, 0.005630527973175049, 0.005587135791778565, 0.005408448219299316, 0.005414559841156006, 0.005397151947021484, 0.0053860158920288085, 0.005349408149719239, 0.005315648078918457, 0.00528275203704834, 0.005418399810791016, 0.005460031986236572, 0.005444191932678223, 0.005426623821258545, 0.005340256214141846, 0.00556387186050415, 0.005488863945007324, 0.005500927925109863, 0.005345280170440674, 0.005418752193450928, 0.00542464017868042, 0.005534048080444336, 0.0057358717918396, 0.005736800193786621, 0.005816095829010009, 0.005698560237884521, 0.005691232204437256, 0.0056193599700927735, 0.00564086389541626, 0.005473983764648437, 0.005380095958709717, 0.0053268160820007324, 0.005341216087341309, 0.005326687812805176, 0.0053597760200500486, 0.005506400108337402, 0.005333663940429688, 0.0053455038070678714, 0.005314176082611084, 0.005314720153808594, 0.0053143038749694825, 0.005275904178619385, 0.005253024101257324, 0.005275296211242676, 0.005398975849151611, 0.005367008209228516, 0.005306784152984619, 0.005275296211242676, 0.005241856098175048, 0.005261023998260498, 0.005282911777496338, 0.005302207946777344, 0.0053155522346496585, 0.005453504085540771, 0.005680863857269287, 0.005891776084899903, 0.0058520641326904295, 0.005741727828979492, 0.005641056060791016, 0.005574656009674072, 0.005638144016265869, 0.005578495979309082, 0.00550870418548584, 0.005355552196502685, 0.00528659200668335, 0.005281727790832519, 0.005322751998901368, 0.005342656135559082, 0.005333568096160889, 0.005344768047332763, 0.005302432060241699, 0.005351967811584472, 0.005465919971466064, 0.005303743839263916, 0.005349760055541992, 0.005454304218292236, 0.0054802241325378415, 0.005073184013366699, 0.005455743789672851, 0.005568640232086182, 0.005574431896209717, 0.005541600227355957, 0.005525951862335205, 0.005571936130523682, 0.00550601577758789, 0.005465439796447754, 0.005462431907653808, 0.005544032096862793, 0.005397984027862549, 0.00543993616104126, 0.00539024019241333, 0.005515007972717285, 0.005378399848937988, 0.0052973442077636716, 0.005288767814636231, 0.005285888195037842, 0.0052715520858764645, 0.0052367358207702636, 0.005336927890777588, 0.005451295852661133, 0.005819136142730713, 0.005812255859375, 0.005937280178070069, 0.005826623916625977, 0.005701280117034912, 0.00569536018371582, 0.00558406400680542, 0.00560591983795166, 0.0055586881637573245, 0.00541868782043457, 0.005742623805999756, 0.005367904186248779, 0.005406911849975586, 0.005342720031738281, 0.00530847978591919, 0.005339583873748779, 0.0053136320114135745, 0.005258143901824951, 0.00525497579574585, 0.005311999797821045, 0.005245471954345703, 0.005255328178405762, 
0.005348896026611328, 0.0052731199264526366, 0.005337791919708252, 0.005435328006744385, 0.005411136150360108, 0.005629119873046875, 0.005626688003540039, 0.005475999832153321, 0.0054926080703735355, 0.0054635519981384275, 0.005616511821746826, 0.0058635520935058595, 0.00572822380065918, 0.00564415979385376, 0.005487071990966797, 0.005461664199829102, 0.005510496139526367, 0.005443935871124268]",tokens/s,181.76259509774255,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,837.615616,9637.39648,0.0,9242.148864,8603.568128,s,1,7.7355849609375,7.7355849609375,0.0,7.7355849609375,7.7355849609375,7.7355849609375,7.7355849609375,[7.7355849609375],,kWh,1.466184626665381e-05,1.4242632171994635e-06,7.069727877992427e-06,2.31558373618457e-05,,MB,1139.339264,9886.957568,0.0,9481.224192,8972.090368,s,10,6.982478088378905,0.6982478088378905,0.0036517305287179706,0.6994198913574219,0.7011476623535157,0.7023103729248047,0.7032405413818359,"[0.69003857421875, 0.697864501953125, 0.6987088012695313, 0.6942640380859375, 0.6964507446289062, 0.700400634765625, 0.7002574462890625, 0.7034730834960937, 0.7001309814453125, 0.7008892822265625]",tokens/s,366.6320133908713,kWh,2.0463702589164413e-05,2.256788519096522e-06,1.354058490653324e-05,3.6261076014794174e-05,tokens/kWh,7059911.843089114,MB,1161.076736,9891.151872,0.0,9485.418496,8972.092928,s,10,24.507166992187496,2.45071669921875,0.0028596785383194555,2.45123291015625,2.453633666992187,2.4545537719726562,2.455289855957031,"[2.450625244140625, 2.447038330078125, 2.445943115234375, 2.45255517578125, 2.448777099609375, 2.44886376953125, 2.451840576171875, 2.45342919921875, 2.45262060546875, 2.455473876953125]",tokens/s,25.706765706572046,kWh,7.17809645554217e-05,7.917776365660735e-06,4.777901970466671e-05,0.00012747776062574915,tokens/kWh,494203.84928910236,,s,630,24.50396546554564,0.03889518327864389,0.0005780801186598636,0.03882415962219238,0.039298535919189456,0.03955597248077392,0.0418481328201294,"[0.04240982437133789, 0.03919449615478516, 0.038448894500732425, 0.03843920135498047, 0.038400318145751955, 0.03821561431884766, 0.038188255310058594, 0.03821200180053711, 0.038166912078857425, 0.03850604629516602, 0.03846115112304688, 0.0384453125, 0.038309761047363285, 0.03864358520507812, 0.03840892791748047, 0.03843600082397461, 0.03828412628173828, 0.03838771057128906, 0.03832371139526367, 0.038511104583740234, 0.041850879669189454, 0.03854441452026367, 0.038468257904052734, 0.03840646362304687, 0.03894681549072266, 0.039301185607910155, 0.03922323226928711, 0.03886297607421875, 0.03875212860107422, 0.03869465637207031, 0.03862963104248047, 0.03875859069824219, 0.038529983520507814, 0.038953857421875, 0.03902822494506836, 0.03879542541503906, 0.03858003234863281, 0.03874256134033203, 0.03874611282348633, 0.03896115112304688, 0.03889126586914062, 0.03896275329589844, 0.03880422210693359, 0.03891299057006836, 0.03892937469482422, 0.03910041427612305, 0.038809600830078124, 0.038768638610839845, 0.039041023254394534, 0.03917004776000976, 0.03905331039428711, 0.039137279510498044, 0.03910041427612305, 0.039375873565673826, 0.03939430236816406, 0.03921820831298828, 0.03953142547607422, 
0.03900831985473633, 0.03985203170776367, 0.038983680725097655, 0.03911398315429687, 0.03894553756713867, 0.039024063110351566, 0.04228028869628906, 0.039467681884765626, 0.03859199905395508, 0.03863798522949219, 0.038174110412597655, 0.03832048034667969, 0.0382081298828125, 0.0384983024597168, 0.038449153900146485, 0.03842067337036133, 0.03831788635253906, 0.03850239944458008, 0.03843699264526367, 0.038504318237304686, 0.03838137435913086, 0.038328033447265625, 0.03826736068725586, 0.03842639923095703, 0.04023945617675781, 0.03832819366455078, 0.03829945755004883, 0.03852921676635742, 0.03851059341430664, 0.03851676940917969, 0.03917820739746094, 0.039392799377441404, 0.039156192779541014, 0.038991870880126955, 0.03890176010131836, 0.03886489486694336, 0.03877814483642578, 0.03855228805541992, 0.03853094482421875, 0.038678657531738284, 0.038649406433105465, 0.03873427200317383, 0.03861420822143555, 0.03869164657592773, 0.03888947296142578, 0.03896934509277344, 0.038778881072998046, 0.03892950439453125, 0.03887401580810547, 0.03883974456787109, 0.038812095642089844, 0.03884431838989258, 0.0386992301940918, 0.03865190505981445, 0.03917004776000976, 0.03892428970336914, 0.039000064849853515, 0.03933388900756836, 0.03938508987426758, 0.03971072006225586, 0.038909217834472654, 0.03890454483032227, 0.038979167938232424, 0.03910083389282227, 0.03894236755371094, 0.03895449447631836, 0.03881260681152344, 0.038983585357666016, 0.03899955368041992, 0.04168294525146484, 0.03928044891357422, 0.03848211288452148, 0.038397823333740234, 0.03837145614624023, 0.03829900741577148, 0.03815078353881836, 0.03844230270385742, 0.038287487030029294, 0.038359264373779296, 0.03845119857788086, 0.038693248748779295, 0.03854131317138672, 0.03895817565917969, 0.03849305725097656, 0.038330368041992184, 0.03844707107543945, 0.03850841522216797, 0.03833414459228516, 0.03849264144897461, 0.03854950332641602, 0.03890892791748047, 0.03855683135986328, 0.03847971343994141, 0.03858432006835937, 0.03910438537597656, 0.03901401519775391, 0.03887363052368164, 0.0387806396484375, 0.038785057067871095, 0.03871120071411133, 0.038760353088378906, 0.03867075347900391, 0.03887011337280273, 0.038924320220947266, 0.03873891067504883, 0.03874332809448242, 0.03887776184082031, 0.03867654418945313, 0.038662143707275394, 0.038950912475585936, 0.03869900894165039, 0.03893155288696289, 0.039447456359863284, 0.03983321762084961, 0.038766975402832034, 0.03877478408813476, 0.038715328216552734, 0.0390423698425293, 0.03913324737548828, 0.03901830291748047, 0.03903084945678711, 0.038886207580566406, 0.03893068695068359, 0.038843711853027346, 0.03899951934814453, 0.03906835174560547, 0.039080223083496096, 0.03912908935546875, 0.03902873611450195, 0.03894428634643555, 0.0389964485168457, 0.03914547348022461, 0.04183116912841797, 0.03911676788330078, 0.03839340972900391, 0.03831145477294922, 0.03812035369873047, 0.03829350280761719, 0.03823782348632813, 0.03829983901977539, 0.03849849700927734, 0.03841212844848633, 0.038434814453125, 0.03860617446899414, 0.03827590560913086, 0.04026163101196289, 0.03870719909667969, 0.03842876815795898, 0.03836710357666016, 0.038593727111816405, 0.03867939376831055, 0.03879731369018555, 0.03861836624145508, 0.03873276901245117, 0.0387968635559082, 0.03864303970336914, 0.03889849472045898, 0.03930492782592773, 0.039372638702392576, 0.03893503952026367, 0.038752254486083985, 0.04029439926147461, 0.038621185302734375, 0.0385986557006836, 0.038757633209228516, 0.03874819183349609, 0.03879600143432617, 
0.04150476837158203, 0.038416385650634766, 0.0385269775390625, 0.038637569427490234, 0.038823486328125, 0.03871334457397461, 0.03867078399658203, 0.039019519805908204, 0.038816959381103515, 0.03877225494384766, 0.038856990814208986, 0.03897919845581055, 0.03913356781005859, 0.039144927978515626, 0.039119392395019534, 0.03903692626953125, 0.03902246475219726, 0.03912511825561524, 0.03929449462890625, 0.03948191833496094, 0.039147422790527346, 0.03908758544921875, 0.03906000137329101, 0.03908403015136719, 0.03907139205932617, 0.039184703826904296, 0.03899955368041992, 0.03895286560058594, 0.04184140777587891, 0.039497695922851565, 0.03907993698120117, 0.03835811233520508, 0.03825551986694336, 0.03832831954956055, 0.038371326446533204, 0.03828700637817383, 0.038516670227050784, 0.03856835174560547, 0.03844255828857422, 0.03854380798339844, 0.03834864044189453, 0.03848195266723633, 0.03841443252563476, 0.03829558563232422, 0.03884425735473633, 0.03888336181640625, 0.03840134429931641, 0.03855238342285156, 0.03867776107788086, 0.038615806579589844, 0.03846758270263672, 0.03863935852050781, 0.039032161712646486, 0.039371681213378903, 0.039139328002929685, 0.03901440048217773, 0.03904092788696289, 0.03867043304443359, 0.038752254486083985, 0.03890585708618164, 0.03881369781494141, 0.038803455352783206, 0.03864371109008789, 0.0388455696105957, 0.038817790985107424, 0.038806400299072265, 0.03878092956542969, 0.03868832015991211, 0.03870550537109375, 0.03929916763305664, 0.03864780807495117, 0.038788223266601564, 0.038824832916259766, 0.03894268798828125, 0.039077919006347654, 0.03896105575561523, 0.039172191619873044, 0.03911884689331055, 0.03901235198974609, 0.03905535888671875, 0.03905535888671875, 0.03911475372314453, 0.039180286407470705, 0.03903078460693359, 0.03909632110595703, 0.03928387069702149, 0.039023456573486326, 0.038905406951904295, 0.03894931030273437, 0.0391657600402832, 0.03923747253417969, 0.04187583923339844, 0.039359935760498045, 0.03847574234008789, 0.038427169799804685, 0.038367584228515626, 0.03836928176879883, 0.038255615234375, 0.038691841125488284, 0.03841999816894531, 0.03863619232177734, 0.03843462371826172, 0.03851878356933594, 0.03831193542480469, 0.03847568130493164, 0.038506591796875, 0.038338207244873045, 0.03842287826538086, 0.038561790466308594, 0.038413665771484376, 0.03881337738037109, 0.038652671813964846, 0.03866236877441406, 0.0385167350769043, 0.03869081497192383, 0.038834175109863284, 0.038940673828125, 0.0388485107421875, 0.03907583999633789, 0.03894883346557617, 0.039006240844726564, 0.03888483047485351, 0.038725536346435545, 0.038574718475341795, 0.03904512023925781, 0.03900201416015625, 0.03889564895629883, 0.038723777770996094, 0.03876236724853516, 0.03863347244262695, 0.03871091079711914, 0.03869529724121094, 0.039021568298339845, 0.03878806304931641, 0.038819103240966796, 0.038808319091796876, 0.039018497467041016, 0.03906355285644531, 0.03907174301147461, 0.039257377624511716, 0.039254753112792966, 0.039005470275878903, 0.03901103973388672, 0.03900572967529297, 0.03913679885864258, 0.039078208923339845, 0.03920550537109375, 0.03904716873168945, 0.03918048095703125, 0.03927366256713867, 0.03907648086547852, 0.03912089538574219, 0.039569408416748046, 0.039122943878173826, 0.041544288635253904, 0.03916595077514649, 0.03852313613891602, 0.03836883163452148, 0.03838969421386719, 0.03837974548339844, 0.038141632080078126, 0.0385043830871582, 0.0383944320678711, 0.03843609619140625, 0.03854217529296875, 0.03851590347290039, 0.03836521530151367, 
0.03840252685546875, 0.03826921463012695, 0.03835644912719727, 0.03848988723754883, 0.03864451217651367, 0.03867824172973633, 0.038508033752441405, 0.03879919815063477, 0.03901740646362305, 0.03867232131958008, 0.03867388916015625, 0.039014209747314454, 0.03933795166015625, 0.03914956665039063, 0.04085228729248047, 0.03866009521484375, 0.038813793182373046, 0.03863951873779297, 0.038596607208251955, 0.03867427062988281, 0.03875446319580078, 0.03879683303833008, 0.03904710388183594, 0.038707744598388674, 0.038742015838623044, 0.03867443084716797, 0.03867567825317383, 0.03883087921142578, 0.03879116821289062, 0.03914547348022461, 0.03884236907958984, 0.038793216705322264, 0.03902054214477539, 0.03889273452758789, 0.038832416534423826, 0.03905795288085938, 0.0396308479309082, 0.042305057525634765, 0.039413951873779295, 0.03916009521484375, 0.039174144744873046, 0.0390709114074707, 0.039008544921875, 0.039019039154052734, 0.03905875015258789, 0.038918846130371096, 0.03890924835205078, 0.038892032623291016, 0.03895856094360352, 0.03892707061767578, 0.042289119720458984, 0.039257984161376956, 0.038289535522460935, 0.03848396682739258, 0.038345951080322266, 0.03889641571044922, 0.038098846435546875, 0.03801094436645508, 0.0381495361328125, 0.038375518798828126, 0.03826742553710937, 0.03844095993041992, 0.03830579376220703, 0.03846553421020508, 0.038539134979248046, 0.038324352264404296, 0.03925196838378906, 0.03841024017333984, 0.03835299301147461, 0.038743968963623046, 0.0384983024597168, 0.03860070419311523, 0.03845487976074219, 0.038650272369384765, 0.04048691177368164, 0.03898323059082031, 0.03893088150024414, 0.03907993698120117, 0.039239646911621094, 0.03926339340209961, 0.038836544036865234, 0.038591041564941406, 0.03861913681030273, 0.038733631134033206, 0.03861318588256836, 0.03873382568359375, 0.03885670471191406, 0.038760448455810545, 0.038916095733642575, 0.03890995025634766, 0.038874336242675785, 0.03903116989135742, 0.0387977294921875, 0.03885055923461914, 0.03893990325927734, 0.039088897705078125, 0.03906889724731445, 0.03907254409790039, 0.039448673248291016, 0.03914947128295899, 0.03933139038085937, 0.03974803161621094, 0.0392806396484375, 0.039298465728759766, 0.039199329376220705, 0.03995340728759766, 0.039610721588134765, 0.039182815551757816, 0.03916204833984375, 0.03927040100097656, 0.03917571258544922, 0.03916233444213867, 0.03933763122558594, 0.04176278305053711, 0.03926428985595703, 0.03845248031616211, 0.038432960510253904, 0.03846611022949219, 0.03868057632446289, 0.03832831954956055, 0.038388832092285156, 0.03837948989868164, 0.03873174285888672, 0.03843376159667969, 0.038569984436035154, 0.03841203308105469, 0.03857209777832031, 0.03845465469360351, 0.03844384002685547, 0.03842832183837891, 0.03875875091552734, 0.038645759582519534, 0.03867647933959961, 0.03896115112304688, 0.038943870544433594, 0.03871587371826172, 0.03870966339111328, 0.03891404724121094, 0.03907174301147461, 0.03901808166503906, 0.039010719299316404, 0.038828033447265625, 0.03888127899169922, 0.03886016082763672, 0.03882611083984375, 0.03859711837768555, 0.038950912475585936, 0.039024639129638675, 0.03890998458862305, 0.038760257720947267, 0.03876422500610351, 0.038902240753173827, 0.03875743865966797, 0.03901126480102539, 0.03870719909667969, 0.03909222412109375, 0.03882150268554688, 0.03886262512207031, 0.03905187225341797, 0.03906150436401367, 0.03932070541381836, 0.03965574264526367, 0.039745471954345704, 0.039712928771972654, 0.039465152740478515, 0.039112991333007815, 0.03900774383544922, 
0.03898121643066406, 0.03892496109008789, 0.03925404739379883, 0.03916003036499023, 0.0391736946105957, 0.039389537811279296, 0.038991966247558595, 0.039061054229736325, 0.03904739379882813, 0.042189983367919924, 0.03953955078125, 0.03873708724975586, 0.03855974578857422, 0.03835481643676758, 0.03852134323120117, 0.038801151275634764, 0.038230270385742185, 0.03829542541503906, 0.03843260955810547, 0.038470497131347654, 0.038594017028808596, 0.03839215850830078, 0.038856769561767576, 0.038499679565429684, 0.03836937713623047, 0.03883875274658203, 0.03846358489990234, 0.03857408142089844, 0.03860595321655273, 0.038614974975585935, 0.03986928176879883, 0.0385081901550293, 0.03874246215820312, 0.03907353591918945, 0.03920111846923828, 0.03945257568359375, 0.039479263305664064, 0.03925747299194336, 0.03923830413818359, 0.03870505523681641, 0.038638847351074215, 0.03867324829101562, 0.038727294921875, 0.03866457748413086, 0.03866419219970703, 0.0389939193725586, 0.03876051330566406, 0.0386682243347168, 0.03870719909667969, 0.03889152145385742, 0.04039475250244141, 0.0388853759765625, 0.03886284637451172, 0.038776641845703126, 0.03885689544677735, 0.038950912475585936, 0.03937497711181641, 0.03889753723144531, 0.03926015853881836, 0.03914080047607422, 0.03937068939208985, 0.0393939208984375, 0.03947315216064453, 0.039103488922119144, 0.039136257171630856, 0.03924991989135742, 0.03964313507080078, 0.03922534561157227, 0.03917619323730469, 0.038957054138183594, 0.03916799926757813, 0.038960289001464844]",tokens/s,25.710124383166697,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.054848,1326.383104,0.0,931.135488,917.648384,s,1,7.24853369140625,7.24853369140625,0.0,7.24853369140625,7.24853369140625,7.24853369140625,7.24853369140625,[7.24853369140625],,kWh,9.548473929161597e-06,1.0427212493425665e-06,4.293058989995879e-06,1.4884254168500042e-05,,MB,1105.32608,1456.406528,0.0,1050.673152,1018.330112,s,10,0.6607934722900392,0.06607934722900391,0.001316728408823133,0.06577188873291015,0.06745689849853516,0.06821476974487305,0.06882106674194335,"[0.06897264099121093, 0.0657655029296875, 0.06409379577636719, 0.06572882843017579, 0.06587734222412109, 0.06715283203125, 0.06728848266601563, 0.064994140625, 0.06514163208007813, 0.06577827453613282]",tokens/s,3874.1302802645587,kWh,2.2091043958334123e-06,2.4347515192221887e-07,1.460139012721776e-06,3.912718560477407e-06,tokens/kWh,65427654.97776165,MB,1127.124992,1473.183744,0.0,1067.450368,1033.282048,s,10,11.373588623046876,1.1373588623046875,0.018928872592497294,1.143686767578125,1.158686865234375,1.1600309326171876,1.1611061865234376,"[1.161375, 1.15838818359375, 1.14709521484375, 1.1516285400390625, 1.153206298828125, 1.1402783203125, 1.10854150390625, 1.1196385498046875, 1.11889013671875, 
1.114546875]",tokens/s,55.391488199546735,kWh,3.215545112375109e-05,3.5464884643910856e-06,1.5936513793477674e-05,5.163845338161984e-05,tokens/kWh,1220021.0477725924,,s,630,11.3675133228302,0.01804367194100032,0.0004886498748275712,0.018087935447692872,0.018502759170532224,0.01859877986907959,0.01939240159988404,"[0.018602048873901367, 0.01836595153808594, 0.018436063766479494, 0.018344032287597657, 0.018258527755737306, 0.01824291229248047, 0.01822313690185547, 0.01821571159362793, 0.018474176406860353, 0.01845030403137207, 0.018693023681640625, 0.018503679275512695, 0.01834156799316406, 0.018278112411499025, 0.018426464080810546, 0.018339136123657226, 0.018713279724121092, 0.018345951080322265, 0.018307167053222655, 0.01839052772521973, 0.01839689636230469, 0.018373600006103517, 0.01833679962158203, 0.01827299118041992, 0.01840127944946289, 0.018490720748901367, 0.018332319259643556, 0.018313472747802734, 0.01859584045410156, 0.018231039047241212, 0.018224512100219727, 0.018334335327148437, 0.018231296539306642, 0.018925695419311522, 0.018544511795043947, 0.01851571273803711, 0.01840563201904297, 0.018528127670288087, 0.01841574478149414, 0.01842585563659668, 0.018329599380493163, 0.018364416122436524, 0.019517696380615235, 0.01843120002746582, 0.018443967819213865, 0.0187544002532959, 0.018328960418701173, 0.018360960006713868, 0.018145280838012694, 0.018983104705810546, 0.018695775985717773, 0.018256095886230467, 0.018345983505249023, 0.018284543991088868, 0.01828659248352051, 0.018501632690429686, 0.0181711368560791, 0.018123519897460937, 0.018309120178222657, 0.018300928115844727, 0.018783584594726562, 0.018365087509155272, 0.018380416870117187, 0.018537567138671874, 0.018504159927368164, 0.018549280166625978, 0.018623712539672852, 0.01839583969116211, 0.019228128433227538, 0.020404767990112305, 0.018491392135620118, 0.018616031646728516, 0.018491680145263673, 0.018509952545166016, 0.01833888053894043, 0.018274112701416014, 0.01834623908996582, 0.01821891212463379, 0.01858236885070801, 0.01846681594848633, 0.01826201629638672, 0.018351776123046875, 0.018395456314086914, 0.018472991943359374, 0.018345504760742187, 0.018198335647583008, 0.018132768630981445, 0.017873056411743166, 0.018178335189819338, 0.01828316879272461, 0.018507551193237305, 0.018448640823364258, 0.01831500816345215, 0.0190928955078125, 0.018149215698242186, 0.018524448394775392, 0.018030656814575195, 0.018165151596069337, 0.018405567169189452, 0.018248512268066407, 0.018253311157226563, 0.018196672439575196, 0.01806982421875, 0.01813043212890625, 0.017992191314697266, 0.0184237117767334, 0.018450527191162108, 0.01842492866516113, 0.01839606475830078, 0.018404832839965822, 0.018065471649169922, 0.018008544921875, 0.018055200576782227, 0.01806035232543945, 0.01815750312805176, 0.018205280303955077, 0.01822774314880371, 0.01811622428894043, 0.01825404739379883, 0.018325504302978517, 0.018348031997680665, 0.018665184020996095, 0.018236928939819336, 0.01837148857116699, 0.018499456405639648, 0.018391040802001952, 0.01832476806640625, 0.018405664443969728, 0.018299232482910155, 0.01825391960144043, 0.01830611228942871, 0.018119232177734375, 0.017961536407470703, 0.01797715187072754, 0.01803264045715332, 0.01816761589050293, 0.018270399093627928, 0.01835811233520508, 0.01821126365661621, 0.018275360107421874, 0.01820128059387207, 0.018135040283203126, 0.018426048278808595, 0.0184237117767334, 0.019324832916259766, 0.018457599639892578, 0.018535423278808593, 0.018289920806884765, 0.017920896530151366, 0.018113920211791992, 
0.01800595283508301, 0.0184102725982666, 0.018552608489990234, 0.018280448913574218, 0.01841971206665039, 0.018200128555297852, 0.018384992599487306, 0.018127199172973632, 0.01810371208190918, 0.017928319931030272, 0.018102975845336915, 0.018106271743774414, 0.017757728576660158, 0.017502080917358397, 0.018143711090087892, 0.01843731117248535, 0.018502656936645507, 0.018238271713256836, 0.018144287109375, 0.018056991577148438, 0.01793667221069336, 0.018529535293579102, 0.017931135177612304, 0.017847808837890625, 0.018087711334228516, 0.017950687408447265, 0.018184736251831056, 0.01821286392211914, 0.01818828773498535, 0.018397184371948243, 0.018085887908935547, 0.0179814395904541, 0.01852822494506836, 0.018233375549316408, 0.018313215255737304, 0.017970848083496093, 0.018266815185546875, 0.0178449592590332, 0.017841184616088867, 0.018251775741577148, 0.018103647232055663, 0.018148000717163087, 0.017978944778442384, 0.018205120086669923, 0.018142847061157225, 0.0183855037689209, 0.019420000076293947, 0.01832441520690918, 0.018442176818847657, 0.018222463607788085, 0.018020832061767578, 0.018223072052001955, 0.018020511627197266, 0.018534496307373048, 0.018228607177734377, 0.01834409523010254, 0.018298847198486328, 0.018164319992065428, 0.018437536239624023, 0.01828096008300781, 0.018295936584472657, 0.01809702491760254, 0.018120447158813478, 0.018286144256591797, 0.018243839263916015, 0.018163839340209962, 0.01805958366394043, 0.01799734306335449, 0.01784009552001953, 0.018194944381713866, 0.01821392059326172, 0.018659872055053713, 0.018347904205322264, 0.018702911376953124, 0.018316640853881835, 0.01809270477294922, 0.018299104690551758, 0.018269983291625977, 0.018524160385131837, 0.018271392822265625, 0.018383808135986328, 0.018132896423339845, 0.017944639205932617, 0.01784419250488281, 0.018128543853759765, 0.018186208724975585, 0.01803228759765625, 0.018426080703735352, 0.01831164741516113, 0.018203935623168944, 0.018208511352539064, 0.018209184646606445, 0.018217536926269533, 0.017903615951538086, 0.01846428871154785, 0.01873673629760742, 0.018449407577514648, 0.018370431900024416, 0.018280031204223633, 0.018104736328125, 0.018309120178222657, 0.018927104949951173, 0.0182457275390625, 0.018450464248657227, 0.018698400497436523, 0.018311071395874023, 0.01810188865661621, 0.017799360275268555, 0.017690303802490235, 0.018329919815063475, 0.018507680892944335, 0.01839321517944336, 0.018307039260864258, 0.018308448791503906, 0.018172576904296876, 0.018300384521484376, 0.018313760757446288, 0.018118656158447266, 0.018096128463745118, 0.018062976837158202, 0.018320991516113282, 0.018471616744995117, 0.018317407608032226, 0.018436031341552736, 0.018286048889160158, 0.01812950325012207, 0.018257919311523436, 0.018544544219970704, 0.018512256622314455, 0.018417375564575195, 0.018251136779785158, 0.018045215606689452, 0.018035295486450196, 0.01819241523742676, 0.018130176544189452, 0.01831545639038086, 0.018499872207641602, 0.018318912506103516, 0.018329599380493163, 0.018298431396484374, 0.01827315139770508, 0.018126848220825196, 0.018591360092163087, 0.018057760238647462, 0.017848031997680664, 0.018159263610839842, 0.01857174491882324, 0.018435903549194336, 0.018415456771850587, 0.01839344024658203, 0.018255168914794923, 0.01822585678100586, 0.01922649574279785, 0.018708608627319337, 0.018305023193359374, 0.018187711715698242, 0.01836031913757324, 0.018277952194213867, 0.018109439849853515, 0.018284799575805664, 0.018140224456787108, 0.01817865562438965, 0.018294591903686524, 
0.01814556884765625, 0.018601184844970704, 0.01856492805480957, 0.018177824020385744, 0.018069856643676756, 0.01821273612976074, 0.018489343643188477, 0.018249631881713867, 0.01846895980834961, 0.018286720275878906, 0.018195743560791015, 0.018244192123413085, 0.01839923286437988, 0.018274303436279296, 0.018341888427734376, 0.018507360458374023, 0.018532960891723634, 0.01878611183166504, 0.02018284797668457, 0.018112703323364256, 0.01820057678222656, 0.01817190361022949, 0.018309343338012696, 0.01808380889892578, 0.018263872146606446, 0.01829043197631836, 0.01812879943847656, 0.018049375534057617, 0.018296831130981444, 0.018077695846557617, 0.017874399185180665, 0.018088159561157228, 0.018441984176635742, 0.01851375961303711, 0.023320735931396483, 0.01804319953918457, 0.01761859130859375, 0.017739616394042968, 0.017676544189453126, 0.01774608039855957, 0.018483072280883788, 0.017612800598144532, 0.017564352035522462, 0.01760220718383789, 0.017729663848876955, 0.017915903091430666, 0.018087167739868164, 0.017988351821899413, 0.01759436798095703, 0.01763759994506836, 0.017624927520751954, 0.01747551918029785, 0.017548799514770508, 0.01741423988342285, 0.017392032623291014, 0.01751795196533203, 0.017545087814331055, 0.017449728012084963, 0.01739072036743164, 0.01744985580444336, 0.017315200805664063, 0.0174881591796875, 0.017439071655273437, 0.01792745590209961, 0.017510143280029297, 0.018091936111450196, 0.017770015716552734, 0.01772172737121582, 0.01758969688415527, 0.017731679916381835, 0.01771404838562012, 0.01783488082885742, 0.017795711517333983, 0.017840415954589843, 0.018069759368896484, 0.017423744201660155, 0.017448448181152345, 0.01730191993713379, 0.017428064346313478, 0.017398591995239257, 0.01769683265686035, 0.017327871322631836, 0.017377536773681642, 0.017440479278564455, 0.01744540786743164, 0.017411840438842772, 0.017630912780761718, 0.017655136108398438, 0.017818592071533204, 0.017680383682250975, 0.017537023544311522, 0.01749545669555664, 0.017599071502685547, 0.018300832748413084, 0.017415456771850586, 0.01753152084350586, 0.017335903167724608, 0.017446624755859376, 0.0174553279876709, 0.01749260711669922, 0.01756777572631836, 0.018521663665771484, 0.017494112014770507, 0.01761110305786133, 0.017811456680297853, 0.01792201614379883, 0.01772265625, 0.017702816009521484, 0.017543327331542968, 0.017402559280395507, 0.01736000061035156, 0.01736342430114746, 0.017349023818969727, 0.017287168502807617, 0.017297407150268555, 0.017494016647338868, 0.01728339195251465, 0.017450815200805665, 0.017456480026245117, 0.01728156852722168, 0.01740595245361328, 0.017708959579467772, 0.017415584564208983, 0.01854879951477051, 0.017461824417114257, 0.01737481689453125, 0.017507904052734374, 0.017447872161865233, 0.017247871398925783, 0.01726268768310547, 0.017420576095581054, 0.01731279945373535, 0.017401056289672853, 0.017981184005737304, 0.0175710391998291, 0.017330976486206056, 0.017487871170043946, 0.017476640701293945, 0.017689119338989256, 0.01747603225708008, 0.017484960556030275, 0.01746614456176758, 0.017461280822753906, 0.017868831634521486, 0.017969120025634767, 0.02079350471496582, 0.018648895263671875, 0.017985599517822266, 0.017680383682250975, 0.017411775588989258, 0.017582399368286133, 0.01802239990234375, 0.017630495071411133, 0.017949344635009766, 0.017629247665405273, 0.017514432907104492, 0.017762208938598634, 0.01782707214355469, 0.017906591415405272, 0.017696767807006835, 0.017616287231445312, 0.017547168731689454, 0.018108383178710937, 0.01999331283569336, 
0.01765376091003418, 0.017557504653930665, 0.01755945587158203, 0.017801023483276366, 0.017637088775634767, 0.017638015747070312, 0.01749100875854492, 0.017578367233276368, 0.017654144287109375, 0.017601728439331055, 0.017603519439697266, 0.017477344512939454, 0.017469728469848633, 0.01746086311340332, 0.017596960067749023, 0.01790755271911621, 0.01789743995666504, 0.01787718391418457, 0.017915903091430666, 0.017829727172851563, 0.017753503799438478, 0.017830751419067384, 0.017673471450805663, 0.017689088821411132, 0.017702911376953127, 0.017903167724609374, 0.018074047088623046, 0.017911104202270507, 0.017998559951782227, 0.017673696517944336, 0.017389184951782228, 0.017359552383422853, 0.01735055923461914, 0.01751219177246094, 0.017731456756591796, 0.018528959274291993, 0.01789673614501953, 0.017691360473632813, 0.01770086479187012, 0.01821468734741211, 0.017905439376831055, 0.017641183853149413, 0.01768726348876953, 0.017543167114257813, 0.017561279296875, 0.017733407974243165, 0.017960704803466798, 0.017903743743896486, 0.017914079666137697, 0.017827648162841797, 0.01804147148132324, 0.01772115135192871, 0.017637727737426757, 0.01756991958618164, 0.017615840911865233, 0.01762326431274414, 0.017822240829467772, 0.017873184204101562, 0.01782086372375488, 0.017714879989624024, 0.017674495697021484, 0.01761075210571289, 0.018047584533691406, 0.01782508850097656, 0.01776710319519043, 0.017827520370483397, 0.01782406425476074, 0.01798684883117676, 0.017783424377441407, 0.017854560852050783, 0.017834047317504882, 0.0176661434173584, 0.01764726448059082, 0.017645055770874024, 0.017521631240844725, 0.01752444839477539, 0.01758950424194336, 0.017572608947753907, 0.017613983154296874, 0.01789014434814453, 0.01771660804748535, 0.017483488082885742, 0.017656736373901367, 0.017628543853759764, 0.017766496658325196, 0.017629728317260743, 0.017671455383300783, 0.017918399810791016, 0.017917535781860353, 0.01805564880371094, 0.018364864349365233, 0.018130943298339842, 0.01801625633239746, 0.017974592208862303, 0.017605056762695314, 0.017854719161987304, 0.01759846305847168, 0.017757728576660158, 0.01793276786804199, 0.01787254333496094, 0.017768064498901368, 0.0176790714263916, 0.017541120529174805, 0.01760870361328125, 0.01790732765197754, 0.017666528701782227, 0.01757379150390625, 0.017600223541259764, 0.017612991333007814, 0.017760351181030275, 0.017522079467773437, 0.017424991607666016, 0.01745305633544922, 0.017502208709716797, 0.01778483200073242, 0.01775619125366211, 0.017782272338867186, 0.01877244758605957, 0.018358272552490236, 0.017922048568725587, 0.017746143341064453, 0.017565216064453125, 0.01750383949279785, 0.017511072158813475, 0.01744076728820801, 0.017622432708740234, 0.017857280731201172, 0.017608543395996094, 0.01753001594543457, 0.01749206352233887, 0.017695487976074217, 0.017648704528808595, 0.017670719146728516, 0.017641183853149413, 0.017678112030029298, 0.01777324867248535, 0.017604799270629884, 0.017543167114257813, 0.01736832046508789, 0.017274784088134765, 0.01750511932373047, 0.017573888778686524, 0.017704959869384765, 0.017737728118896484, 0.018524160385131837, 0.01763737678527832, 0.017358175277709963, 0.017361568450927733, 0.01739731216430664, 0.017336736679077147, 0.017293312072753905, 0.01738140869140625, 0.017378623962402345]",tokens/s,55.42109185258005,, 
bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.2304,3354.329088,0.0,2959.081472,2942.567424,s,1,7.491158203125,7.491158203125,0.0,7.491158203125,7.491158203125,7.491158203125,7.491158203125,[7.491158203125],,kWh,1.0103033966659798e-05,1.1042954326628756e-06,3.3333359999998535e-06,1.4540665399322528e-05,,MB,1107.894272,3547.267072,0.0,3141.533696,3105.830912,s,10,2.592166152954102,0.2592166152954101,0.002362535045213905,0.26002906799316405,0.2611373199462891,0.26149653167724607,0.2617839010620117,"[0.25391856384277345, 0.25702288818359376, 0.2618557434082031, 0.2610574951171875, 0.2597601623535156, 0.259385498046875, 0.2569664611816406, 0.26088134765625, 0.26102001953125, 0.2602979736328125]",tokens/s,987.5910142112442,kWh,7.643231509936003e-06,8.42910826341281e-07,5.049056745794699e-06,1.353519908207198e-05,tokens/kWh,18913648.661369473,MB,1129.177088,3589.210112,0.0,3183.476736,3163.057152,s,10,11.573644653320313,1.1573644653320314,0.012999661095040372,1.1591688232421875,1.171823876953125,1.1718432128906249,1.171858681640625,"[1.1349251708984376, 1.157430419921875, 1.171819580078125, 1.16227685546875, 1.170641357421875, 1.1583616943359376, 1.1599759521484374, 1.1343333740234376, 1.171862548828125, 1.1520177001953125]",tokens/s,54.43401960844391,kWh,3.340594418672954e-05,3.684344683805461e-06,2.217671503480601e-05,5.926700390534102e-05,tokens/kWh,1062986.077390063,,s,630,11.570615110397329,0.018366055730789425,0.00036591905731029006,0.01838521671295166,0.01866156406402588,0.018822656536102295,0.019628646717071544,"[0.019042463302612306, 0.01836031913757324, 0.01830499267578125, 0.0184421443939209, 0.018424192428588868, 0.01829449653625488, 0.018257120132446288, 0.018156320571899413, 0.018214912414550782, 0.018104320526123048, 0.01807910346984863, 0.018102432250976564, 0.018143711090087892, 0.01790755271911621, 0.01811043167114258, 0.018022592544555665, 0.017938432693481447, 0.01787446403503418, 0.017916000366210938, 0.017910144805908204, 0.01790540885925293, 0.01791756820678711, 0.017947263717651367, 0.01801935958862305, 0.01811324882507324, 0.01823904037475586, 0.018235424041748046, 0.018119071960449217, 0.017932640075683594, 0.01790483283996582, 0.017777376174926758, 0.018077695846557617, 0.01789548873901367, 0.017817535400390626, 0.018032447814941406, 0.01786092758178711, 0.018283775329589844, 0.018236032485961916, 0.017978975296020508, 0.017807775497436524, 0.01796505546569824, 0.018069055557250975, 0.017938880920410155, 0.017820991516113282, 0.01771779251098633, 0.01772764778137207, 0.017755903244018555, 0.017724832534790038, 0.017830560684204102, 0.01794476890563965, 0.017924095153808595, 0.0180633602142334, 0.01803398323059082, 0.018307775497436524, 0.017885183334350584, 0.017766271591186523, 0.017846399307250977, 0.017735679626464843, 0.017821695327758787, 0.017844224929809572, 0.017846271514892577, 0.017692256927490234, 0.017715616226196287, 
0.018753568649291993, 0.01827987289428711, 0.01861894416809082, 0.01860767936706543, 0.01799622344970703, 0.01802239990234375, 0.017897472381591797, 0.018244735717773436, 0.018377599716186525, 0.018298784255981446, 0.018563167572021484, 0.01846067237854004, 0.01862841606140137, 0.01852191925048828, 0.018616479873657228, 0.019132640838623045, 0.01857472038269043, 0.018590047836303712, 0.018362655639648437, 0.018466623306274414, 0.01822972869873047, 0.018202335357666015, 0.018193536758422852, 0.018361215591430665, 0.018420799255371094, 0.018432607650756837, 0.018398719787597655, 0.018540992736816406, 0.018415935516357423, 0.018357471466064455, 0.018239904403686523, 0.01835212707519531, 0.01821129608154297, 0.018447679519653322, 0.018277055740356447, 0.01850531196594238, 0.018547103881835936, 0.018513568878173826, 0.01842620849609375, 0.018391040802001952, 0.018096128463745118, 0.018147327423095702, 0.018128448486328125, 0.017983488082885742, 0.017975839614868164, 0.018061216354370118, 0.01816985511779785, 0.018534400939941405, 0.01827030372619629, 0.018267967224121093, 0.018118751525878905, 0.018316703796386717, 0.01836911964416504, 0.018263168334960937, 0.018295679092407226, 0.018195743560791015, 0.01836310386657715, 0.018679584503173828, 0.018495296478271483, 0.01849920082092285, 0.01835296058654785, 0.018407392501831054, 0.018617664337158203, 0.018977664947509767, 0.01842585563659668, 0.01863199996948242, 0.01872550392150879, 0.018660959243774415, 0.018445856094360353, 0.018371519088745118, 0.01845814323425293, 0.01851644706726074, 0.01852592086791992, 0.018432287216186522, 0.018499584197998048, 0.01827020835876465, 0.01845417594909668, 0.018589408874511718, 0.020296319961547852, 0.018468288421630858, 0.018520639419555663, 0.018448383331298827, 0.0188723201751709, 0.01883942413330078, 0.01887808036804199, 0.018550527572631835, 0.018292800903320312, 0.018526912689208985, 0.01870751953125, 0.018760639190673827, 0.01858355140686035, 0.01848320007324219, 0.01841152000427246, 0.01862041664123535, 0.018668991088867187, 0.018606655120849608, 0.018442176818847657, 0.01848531150817871, 0.018354175567626953, 0.018464799880981445, 0.018673631668090822, 0.01845020866394043, 0.018643167495727538, 0.018517311096191407, 0.018490047454833985, 0.018589696884155273, 0.018746912002563478, 0.018659807205200194, 0.018564895629882814, 0.019021472930908202, 0.01849158477783203, 0.01832383918762207, 0.019398656845092774, 0.018646240234375, 0.018524959564208986, 0.018452159881591795, 0.018476863861083985, 0.01843667221069336, 0.018437055587768553, 0.01850060844421387, 0.018656959533691408, 0.018554719924926757, 0.018518495559692382, 0.01842134475708008, 0.018454559326171877, 0.01854502487182617, 0.01898748779296875, 0.01831465530395508, 0.01843084716796875, 0.01834569549560547, 0.018262304306030274, 0.018667520523071288, 0.018528255462646484, 0.018464767456054687, 0.018549983978271484, 0.018475296020507813, 0.018487039566040038, 0.018509727478027344, 0.018385759353637697, 0.018321407318115233, 0.018323455810546875, 0.018282400131225587, 0.018275840759277344, 0.01862883186340332, 0.01838528060913086, 0.01845849609375, 0.018361600875854492, 0.01828748893737793, 0.01862041664123535, 0.018540191650390624, 0.018477407455444336, 0.018470624923706054, 0.018391328811645506, 0.0185031681060791, 0.01838515281677246, 0.018352575302124023, 0.018351295471191405, 0.018362079620361328, 0.018502527236938477, 0.01837059211730957, 0.01831078338623047, 0.0184036808013916, 0.018307104110717773, 0.018464767456054687, 
0.018485248565673826, 0.018613344192504884, 0.018480031967163087, 0.01839468765258789, 0.018385343551635742, 0.018267711639404296, 0.018248191833496095, 0.01829875183105469, 0.01852422332763672, 0.018501216888427735, 0.018561279296875, 0.01858780860900879, 0.018509952545166016, 0.01857472038269043, 0.018446975708007813, 0.01824732780456543, 0.018339456558227538, 0.018562847137451172, 0.018637279510498046, 0.018485599517822266, 0.018296096801757814, 0.018459392547607423, 0.018411487579345704, 0.018601984024047852, 0.018497535705566406, 0.019444000244140624, 0.01851798439025879, 0.018350080490112306, 0.018497535705566406, 0.01854182434082031, 0.018313983917236328, 0.018036319732666017, 0.018487615585327147, 0.018700351715087892, 0.0186265926361084, 0.018485248565673826, 0.018339839935302735, 0.018298879623413086, 0.018343936920166014, 0.01884320068359375, 0.021878528594970702, 0.01910223960876465, 0.018391199111938476, 0.018306175231933595, 0.018385440826416015, 0.018406848907470703, 0.018444448471069335, 0.018238208770751954, 0.01816160011291504, 0.018354240417480468, 0.01863862419128418, 0.0186144962310791, 0.018505727767944336, 0.018526111602783203, 0.018530559539794923, 0.018302175521850587, 0.01843222427368164, 0.018288415908813478, 0.018571264266967775, 0.018363008499145506, 0.018339839935302735, 0.01839427185058594, 0.01830179214477539, 0.01836358451843262, 0.01827734375, 0.018232799530029296, 0.01799942398071289, 0.018305856704711913, 0.018720640182495116, 0.01849888038635254, 0.018596511840820312, 0.01831747245788574, 0.018255872726440428, 0.018374656677246092, 0.018593631744384765, 0.019454111099243165, 0.018333311080932616, 0.018205055236816405, 0.01838809585571289, 0.018322303771972658, 0.018411104202270507, 0.02052751922607422, 0.020413984298706056, 0.01836079978942871, 0.018224672317504884, 0.01853228759765625, 0.018788896560668945, 0.018505727767944336, 0.01930905532836914, 0.018534175872802733, 0.01869238471984863, 0.018627904891967775, 0.018438848495483398, 0.01846067237854004, 0.018415615081787108, 0.018350048065185545, 0.01837171173095703, 0.018489343643188477, 0.018232000350952147, 0.01805948829650879, 0.018159616470336915, 0.018274303436279296, 0.01821059226989746, 0.01802579116821289, 0.0181844482421875, 0.01802511978149414, 0.01799577522277832, 0.018100223541259765, 0.0188272647857666, 0.01862403106689453, 0.01832803153991699, 0.018466144561767577, 0.01845305633544922, 0.018346080780029295, 0.01827769660949707, 0.018459327697753908, 0.01829680061340332, 0.01822313690185547, 0.018208736419677733, 0.018706464767456056, 0.018149375915527344, 0.017920000076293945, 0.018251775741577148, 0.018324575424194335, 0.018092960357666017, 0.01799081611633301, 0.01868067169189453, 0.018679359436035155, 0.01841596794128418, 0.018301023483276366, 0.01823315238952637, 0.018159807205200194, 0.01814873504638672, 0.01828438377380371, 0.01860585594177246, 0.018397184371948243, 0.018389055252075195, 0.018400127410888673, 0.018439807891845704, 0.018366783142089844, 0.018437503814697265, 0.018399744033813475, 0.018290943145751953, 0.01842790412902832, 0.018538463592529298, 0.018685983657836913, 0.018671615600585938, 0.018511199951171876, 0.01880950355529785, 0.01843596839904785, 0.0184586238861084, 0.01918976020812988, 0.018634752273559572, 0.018579456329345705, 0.018605535507202148, 0.018591999053955078, 0.018409631729125978, 0.018386592864990236, 0.018292671203613282, 0.01850339126586914, 0.018397216796875, 0.018489952087402343, 0.01849718475341797, 0.018430496215820314, 
0.018323392868041993, 0.018409408569335938, 0.018434175491333006, 0.018298336029052734, 0.018311616897583007, 0.018255392074584962, 0.018534400939941405, 0.01845510482788086, 0.018315263748168945, 0.018487295150756835, 0.01845452880859375, 0.01826767921447754, 0.018452384948730468, 0.018260543823242187, 0.018546367645263673, 0.018382783889770507, 0.018680160522460937, 0.018450464248657227, 0.01835612869262695, 0.018667007446289064, 0.018256128311157225, 0.01845180892944336, 0.018276704788208007, 0.018393760681152345, 0.01830019187927246, 0.018598623275756836, 0.01845625686645508, 0.018633024215698242, 0.01845846366882324, 0.018409151077270508, 0.018391519546508788, 0.018288639068603514, 0.01816166305541992, 0.018440000534057616, 0.01828883171081543, 0.018359487533569335, 0.018494144439697265, 0.01823139190673828, 0.018257951736450194, 0.018069503784179687, 0.01821900749206543, 0.018501119613647463, 0.018438432693481447, 0.018378976821899415, 0.018468320846557616, 0.01806153678894043, 0.018140832901000978, 0.018532991409301758, 0.01826806449890137, 0.018147455215454102, 0.018881759643554687, 0.018193183898925783, 0.018026208877563475, 0.017942815780639648, 0.01794767951965332, 0.017914623260498048, 0.01784649658203125, 0.017958911895751953, 0.01791328048706055, 0.01780588722229004, 0.017952127456665037, 0.017985439300537108, 0.017879776000976563, 0.017870847702026366, 0.017757568359375, 0.017737855911254884, 0.017850879669189454, 0.017756160736083985, 0.017698816299438477, 0.017752128601074217, 0.017880159378051756, 0.017840991973876952, 0.017987583160400392, 0.01795686340332031, 0.018095392227172852, 0.017965791702270507, 0.01801603126525879, 0.017860416412353516, 0.01787487983703613, 0.01782831954956055, 0.017874752044677734, 0.017833759307861328, 0.01780940818786621, 0.017811168670654298, 0.017853120803833007, 0.018271615982055664, 0.017862783432006837, 0.017779199600219727, 0.017893375396728514, 0.017838048934936523, 0.018616352081298828, 0.017977344512939454, 0.017778688430786133, 0.017880607604980468, 0.018086368560791016, 0.018020191192626954, 0.017946752548217773, 0.018068960189819336, 0.017898048400878906, 0.017837503433227538, 0.017797760009765625, 0.017874591827392577, 0.017844224929809572, 0.0179931526184082, 0.017858528137207032, 0.018099071502685547, 0.01788876724243164, 0.01828096008300781, 0.01885798454284668, 0.018491167068481446, 0.018548959732055663, 0.019050495147705078, 0.018513343811035157, 0.0191362247467041, 0.01850192070007324, 0.018522111892700196, 0.01847500801086426, 0.018773792266845703, 0.01969993591308594, 0.020068351745605468, 0.01838057518005371, 0.018460895538330076, 0.01841766357421875, 0.018722623825073243, 0.018651296615600586, 0.018711904525756835, 0.018463424682617188, 0.018406496047973633, 0.01837148857116699, 0.018655231475830078, 0.018558048248291017, 0.018688896179199218, 0.01840480041503906, 0.018535007476806642, 0.018667327880859376, 0.018558464050292968, 0.018342592239379882, 0.018448383331298827, 0.01858121681213379, 0.018474720001220704, 0.01864147186279297, 0.018817024230957033, 0.018400415420532227, 0.018483999252319337, 0.018851903915405272, 0.01901705551147461, 0.01849616050720215, 0.018757631301879883, 0.01883456039428711, 0.018652032852172852, 0.018683904647827147, 0.01869331169128418, 0.018473472595214844, 0.018624832153320312, 0.018469087600708006, 0.0186345272064209, 0.018871871948242188, 0.01842118453979492, 0.01838387107849121, 0.01846886444091797, 0.01839923286437988, 0.018547775268554688, 0.01848201560974121, 0.018597984313964845, 
0.018364032745361327, 0.018212287902832032, 0.01813190460205078, 0.018522111892700196, 0.01869593620300293, 0.018658784866333007, 0.018496288299560546, 0.01846272087097168, 0.018476800918579103, 0.018425952911376952, 0.018315391540527345, 0.018413183212280273, 0.019150848388671874, 0.01862041664123535, 0.018515968322753908, 0.018480703353881835, 0.018506175994873048, 0.01822425651550293, 0.018183040618896484, 0.018092031478881835, 0.01803638458251953, 0.01822960090637207, 0.018288639068603514, 0.01832111930847168, 0.018357984542846678, 0.018182527542114257, 0.018198720932006834, 0.018592992782592774, 0.01856105613708496, 0.01827302360534668, 0.019986431121826173, 0.018534400939941405, 0.018386943817138672, 0.01846067237854004, 0.018449951171875, 0.018217439651489257, 0.018054239273071288, 0.018031520843505858, 0.018033727645874024, 0.018011072158813476, 0.018120704650878908, 0.018182144165039063, 0.018096128463745118, 0.018128896713256838, 0.017919647216796876, 0.01848512077331543, 0.018495071411132814, 0.01829158401489258, 0.018335744857788085, 0.01845180892944336, 0.018164384841918946, 0.018201759338378906, 0.01823174476623535, 0.018198623657226562, 0.017982879638671876, 0.01789139175415039, 0.01803664016723633, 0.017882047653198244, 0.01792527961730957, 0.01786556816101074, 0.018120704650878908, 0.018124799728393554, 0.017960960388183594, 0.01781273651123047, 0.018192352294921874, 0.01858639907836914, 0.018356224060058594, 0.01845043182373047, 0.018343936920166014, 0.018388992309570314, 0.018276031494140626, 0.018383167266845704, 0.018312416076660155, 0.0182906551361084, 0.018258752822875975]",tokens/s,54.448272109050016,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 955, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 707, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 707, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 504, in __init__ self.self_attn = MISTRAL_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 196, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 
557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 162, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not 
found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 
557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 164, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,812.05248,12523.077632,0.0,12127.830016,12122.08896,s,1,7.1470244140625,7.1470244140625,0.0,7.1470244140625,7.1470244140625,7.1470244140625,7.1470244140625,[7.1470244140625],,kWh,1.1793927454169288e-05,1.2933164151426684e-06,5.384726529999995e-06,1.847197039931195e-05,,MB,1128.370176,12695.044096,0.0,12289.31072,12248.586752,s,10,11.387053466796877,1.1387053466796877,0.0040679235285528866,1.1404854736328125,1.1433168212890623,1.14371474609375,1.1440330859375,"[1.13221044921875, 1.1353138427734375, 1.133013427734375, 1.13561669921875, 1.140848876953125, 1.1403653564453125, 1.1417381591796876, 1.1406055908203125, 1.1432283935546874, 1.1441126708984375]",tokens/s,224.81671904541568,kWh,3.331983561374988e-05,3.6729775271002274e-06,2.205385097639994e-05,5.904666411725005e-05,tokens/kWh,4335553.986448009,MB,1172.037632,12701.335552,0.0,12295.602176,12248.589312,s,10,33.83045361328125,3.383045361328125,0.0016382533517189726,3.382980712890625,3.38504169921875,3.38539853515625,3.38568400390625,"[3.38241748046875, 3.381180908203125, 3.38253857421875, 3.38005712890625, 3.3834228515625, 3.38220703125, 3.383595947265625, 3.38575537109375, 3.38431591796875, 3.38496240234375]",tokens/s,18.62227468781775,kWh,9.886225469458334e-05,1.0904857549761307e-05,6.581610820840001e-05,0.0001755832204527447,tokens/kWh,358804.21738223784,,s,630,33.827737373352036,0.05369482122754294,0.0002572514064457032,0.053686574935913084,0.053942374420166014,0.054014467430114746,0.054938772735595706,"[0.05515267181396484, 0.05363302230834961, 0.05338665771484375, 0.05337926483154297, 0.053221630096435546, 0.05333417510986328, 0.053190654754638675, 0.05322137451171875, 0.05331545639038086, 0.053556961059570314, 0.053712512969970705, 0.05363792037963867, 0.05347516632080078, 0.05363235092163086, 0.053555423736572266, 0.05354550552368164, 0.05347651290893555, 
0.053695392608642575, 0.05385340881347656, 0.05386076736450195, 0.05361452865600586, 0.05363654327392578, 0.05363328170776367, 0.0534310417175293, 0.053303295135498044, 0.053651454925537106, 0.05348966217041016, 0.05359619140625, 0.05344662475585937, 0.05350400161743164, 0.053713920593261716, 0.05366400146484375, 0.053623550415039065, 0.053698558807373044, 0.05363302230834961, 0.05375503921508789, 0.053668704986572266, 0.05385420989990235, 0.05379072189331055, 0.05373270416259766, 0.05391222381591797, 0.05372854232788086, 0.0536030387878418, 0.053768192291259766, 0.05369241714477539, 0.05376803207397461, 0.053626209259033206, 0.053635902404785156, 0.05377590560913086, 0.053817825317382814, 0.053821376800537106, 0.05400515365600586, 0.05385257720947265, 0.05389132690429688, 0.0539156494140625, 0.053890625, 0.05383612823486328, 0.05412054443359375, 0.054038047790527344, 0.053801441192626955, 0.053766143798828124, 0.05377228927612305, 0.05382495880126953, 0.054830944061279294, 0.05366732788085937, 0.053269153594970704, 0.053364479064941406, 0.05316198348999023, 0.05334041595458985, 0.05325008010864258, 0.053236736297607425, 0.05365244674682617, 0.053596160888671876, 0.053466239929199216, 0.05362163162231445, 0.05352447891235351, 0.053405696868896485, 0.05346051025390625, 0.05355712127685547, 0.05342435073852539, 0.05375734329223633, 0.05364828872680664, 0.05354297637939453, 0.05347532653808594, 0.053512191772460936, 0.0533985595703125, 0.053434398651123045, 0.053315937042236326, 0.053451519012451175, 0.053664768218994144, 0.05358678436279297, 0.05346495819091797, 0.053723262786865233, 0.05363056182861328, 0.053588382720947264, 0.05350934219360352, 0.05367071914672852, 0.05369852828979492, 0.053903392791748043, 0.053778400421142576, 0.05393395233154297, 0.05378060913085937, 0.05378662490844727, 0.053653247833251955, 0.05369987106323242, 0.05363811111450195, 0.05378656005859375, 0.05376387023925781, 0.05377795028686523, 0.05366198348999023, 0.05378841781616211, 0.05356777572631836, 0.05365804672241211, 0.053663105010986326, 0.05391782379150391, 0.05375654220581055, 0.05382131195068359, 0.05379276657104492, 0.05383782577514649, 0.05430006408691406, 0.054153472900390624, 0.053868896484375, 0.05399552154541016, 0.05380204772949219, 0.05395552062988281, 0.05390457534790039, 0.05504355239868164, 0.05392438507080078, 0.053386463165283206, 0.05330614471435547, 0.05312102508544922, 0.05327462387084961, 0.053212383270263675, 0.053334815979003906, 0.053313278198242185, 0.05349811172485352, 0.05349299240112305, 0.05353657531738281, 0.05383468627929688, 0.05346889495849609, 0.053448352813720706, 0.05355916976928711, 0.05354095840454102, 0.05367055892944336, 0.05369651031494141, 0.05378211212158203, 0.05359001541137695, 0.05372560119628906, 0.053569534301757815, 0.05339129638671875, 0.05331155014038086, 0.05362483215332031, 0.05365887832641601, 0.0541124496459961, 0.05382374572753906, 0.05361056137084961, 0.05350016021728515, 0.05361395263671875, 0.05353740692138672, 0.05360435104370117, 0.053767326354980466, 0.05416159820556641, 0.053854881286621095, 0.053997566223144534, 0.0537393913269043, 0.05376422500610351, 0.05371903991699219, 0.053709854125976564, 0.05367907333374024, 0.05364326477050781, 0.053544960021972655, 0.053662750244140626, 0.05378761672973633, 0.05373747253417969, 0.05369369506835937, 0.053771007537841795, 0.053677471160888675, 0.05379945755004883, 0.053704769134521484, 0.0539422721862793, 0.0538869743347168, 0.05385942459106445, 0.05387139129638672, 0.05390742492675781, 
0.05372940826416016, 0.053856288909912106, 0.05398323059082031, 0.053856254577636715, 0.053866497039794924, 0.05495568084716797, 0.053563743591308596, 0.05327462387084961, 0.05324390411376953, 0.053340160369873046, 0.053411838531494144, 0.053362144470214846, 0.05336886215209961, 0.05327718353271484, 0.05348556900024414, 0.05338521575927734, 0.05354086303710937, 0.0535530891418457, 0.05355116653442383, 0.053540542602539064, 0.05355756759643555, 0.05343027114868164, 0.05395865631103516, 0.05373747253417969, 0.05372927856445313, 0.05363507080078125, 0.053579776763916016, 0.05333353424072266, 0.05348803329467773, 0.05355023956298828, 0.05362575912475586, 0.053499454498291014, 0.05354691314697266, 0.05346563339233398, 0.05354684829711914, 0.053432289123535155, 0.05355440139770508, 0.05358252716064453, 0.053528190612792965, 0.05346985626220703, 0.05387174224853516, 0.053934974670410155, 0.053855327606201174, 0.05375596618652344, 0.053691104888916014, 0.053536895751953126, 0.05367193603515625, 0.05368627166748047, 0.053788257598876954, 0.05366620635986328, 0.053688030242919925, 0.05355753707885742, 0.05369161605834961, 0.05381324768066406, 0.053828384399414064, 0.05367193603515625, 0.05380422210693359, 0.05370758438110351, 0.05379276657104492, 0.05396889495849609, 0.05383891296386719, 0.053814208984375, 0.05387059020996094, 0.05378224182128906, 0.05392790222167969, 0.05373574447631836, 0.05393561553955078, 0.053800609588623045, 0.05472249603271485, 0.05370383834838867, 0.05342879867553711, 0.05328726577758789, 0.05326847839355469, 0.0533438720703125, 0.053288959503173826, 0.05341632080078125, 0.05352035140991211, 0.053633056640625, 0.05364070510864258, 0.05361305618286133, 0.05340972900390625, 0.05344467163085938, 0.0533988151550293, 0.053365310668945315, 0.05351628875732422, 0.05376015853881836, 0.05374092864990234, 0.053631359100341794, 0.05342972946166992, 0.05356614303588867, 0.05361667251586914, 0.05355718231201172, 0.05362700653076172, 0.053663745880126956, 0.05352243041992188, 0.05367113494873047, 0.053639968872070315, 0.05372108840942383, 0.053856510162353516, 0.05358975982666016, 0.053491519927978515, 0.05353286361694336, 0.05349484634399414, 0.053609375, 0.05357904052734375, 0.05375052642822266, 0.053755489349365235, 0.053921951293945315, 0.05372134399414062, 0.053817344665527345, 0.053733375549316405, 0.05393203353881836, 0.05361996841430664, 0.0544117431640625, 0.053644798278808595, 0.05386483383178711, 0.05403408050537109, 0.05413065719604492, 0.05381814575195312, 0.05401337432861328, 0.05377900695800781, 0.05392176055908203, 0.05382896041870117, 0.05379756927490234, 0.05387059020996094, 0.05397452926635742, 0.05380966567993164, 0.05389926528930664, 0.053997310638427734, 0.053946495056152344, 0.053858432769775394, 0.05504214477539063, 0.0534984016418457, 0.05328630447387695, 0.053279327392578124, 0.053354305267333986, 0.05372537612915039, 0.053381153106689457, 0.05333193588256836, 0.053515743255615235, 0.05364284896850586, 0.053416160583496096, 0.05359872055053711, 0.053637344360351565, 0.05349763107299805, 0.053446880340576174, 0.05352767944335937, 0.053418880462646486, 0.053814624786376955, 0.05389126586914063, 0.05381558227539063, 0.05355868911743164, 0.05365430450439453, 0.05334630584716797, 0.053376575469970704, 0.053457344055175784, 0.05368832015991211, 0.053601375579833986, 0.05368924713134766, 0.0535551986694336, 0.05363097763061524, 0.0535551986694336, 0.05371696090698242, 0.05371292877197266, 0.05360153579711914, 0.05359199905395508, 0.05381808090209961, 
0.05381539154052734, 0.05374771118164062, 0.05369241714477539, 0.05376409530639648, 0.053689823150634766, 0.05365151977539063, 0.05355507278442383, 0.0536868782043457, 0.05394636917114258, 0.05384172821044922, 0.05379897689819336, 0.0536736946105957, 0.05358633422851562, 0.05374566268920898, 0.053752864837646484, 0.053856254577636715, 0.05371798324584961, 0.053824928283691405, 0.05386908721923828, 0.05405036926269531, 0.05374003219604492, 0.054086753845214844, 0.053701534271240234, 0.05386751937866211, 0.053836799621582034, 0.05390335845947265, 0.05388006210327148, 0.05498470306396484, 0.05384755325317383, 0.053370784759521485, 0.05333769607543945, 0.05325449752807617, 0.053340576171875, 0.053348190307617185, 0.053344673156738284, 0.05343231964111328, 0.053438465118408204, 0.053605537414550784, 0.053773151397705075, 0.05350400161743164, 0.05340774536132813, 0.053351585388183596, 0.05336147308349609, 0.053558334350585934, 0.05362992095947266, 0.05366294479370117, 0.05380585479736328, 0.053628929138183595, 0.053710849761962894, 0.05365264129638672, 0.053437278747558596, 0.053433345794677733, 0.053648384094238284, 0.05347078323364258, 0.053580223083496095, 0.05365964889526367, 0.05372915267944336, 0.05373759841918945, 0.0536346549987793, 0.05358428955078125, 0.05364227294921875, 0.05361059188842773, 0.05376895904541015, 0.053864574432373045, 0.05367724609375, 0.05371769714355469, 0.05389516830444336, 0.05359580612182617, 0.05361865615844726, 0.05363750457763672, 0.05379904174804687, 0.05375696182250977, 0.053832672119140626, 0.05384185409545898, 0.05378396987915039, 0.05390607833862305, 0.053822559356689455, 0.05375683212280274, 0.054079521179199216, 0.05371030426025391, 0.05385782241821289, 0.05399856185913086, 0.05411635208129883, 0.05387673568725586, 0.05408134460449219, 0.05403871917724609, 0.05389644622802734, 0.05400243377685547, 0.05387468719482422, 0.053972991943359375, 0.055091358184814455, 0.0538603515625, 0.053319679260253904, 0.05336064147949219, 0.05342588806152344, 0.05346128082275391, 0.0534835205078125, 0.05346918487548828, 0.05361004638671875, 0.05365395355224609, 0.05355641555786133, 0.05358675384521484, 0.0535551986694336, 0.05348112106323242, 0.053373279571533205, 0.05346239852905273, 0.053569278717041015, 0.05376633453369141, 0.0538221435546875, 0.053792736053466794, 0.05369244766235352, 0.05380662536621094, 0.05358540725708008, 0.053682239532470706, 0.0537097282409668, 0.053733184814453126, 0.05364550399780273, 0.053732799530029296, 0.053750049591064455, 0.05370399856567383, 0.053701824188232425, 0.05364313507080078, 0.05367798233032227, 0.05377206420898437, 0.05358335876464844, 0.05376073455810547, 0.05401536178588867, 0.05391219329833984, 0.05394432067871094, 0.05384806442260742, 0.053594112396240234, 0.053792415618896486, 0.053671295166015626, 0.05372630310058594, 0.05384342575073242, 0.05381980895996094, 0.05377964782714844, 0.05381817626953125, 0.053682174682617184, 0.05384601593017578, 0.0538419189453125, 0.053755615234375, 0.053672222137451174, 0.053952129364013675, 0.05386240005493164, 0.05400950241088867, 0.053938911437988284, 0.05388224029541016, 0.05387731170654297, 0.05396207809448242, 0.05379699325561523, 0.053952224731445314, 0.05381824111938477, 0.05496422576904297, 0.05350604629516602, 0.05325423812866211, 0.05340467071533203, 0.05343734359741211, 0.05338422393798828, 0.05338211059570312, 0.0534466552734375, 0.05345235061645508, 0.05341843032836914, 0.05334220886230469, 0.05342617416381836, 0.05359795379638672, 0.05356943893432617, 0.05342448043823242, 
0.05353472137451172, 0.05351955032348633, 0.053889057159423825, 0.053814048767089846, 0.053866497039794924, 0.05359001541137695, 0.05362483215332031, 0.05354848098754883, 0.053596702575683594, 0.05369244766235352, 0.05371244812011719, 0.053622848510742185, 0.05362521743774414, 0.05360835266113281, 0.05369865417480469, 0.05364940643310547, 0.05361628723144531, 0.05358601760864258, 0.05365526580810547, 0.05360591888427734, 0.053943294525146485, 0.05380300903320313, 0.05382928085327148, 0.053981311798095705, 0.05374780654907227, 0.053758079528808594, 0.05401769638061524, 0.05383980941772461, 0.05369843292236328, 0.053678398132324216, 0.053741695404052735, 0.05386399841308594, 0.0538135986328125, 0.05367417526245117, 0.053916961669921874, 0.05371567916870117, 0.053927745819091794, 0.05371718215942383, 0.053868385314941404, 0.05385555267333984, 0.0538427848815918, 0.05399484634399414, 0.053927745819091794, 0.053989761352539065, 0.054047199249267576, 0.05381907272338867, 0.053948734283447264, 0.054001247406005856, 0.054897377014160156, 0.05348947143554687, 0.05324425506591797, 0.05351174545288086, 0.053403167724609374, 0.053421825408935544, 0.05336716842651367, 0.05370479965209961, 0.05359260940551758, 0.05365305709838867, 0.0538298568725586, 0.053673919677734376, 0.0535513916015625, 0.05349692916870117, 0.05357231903076172, 0.053548641204833984, 0.05343907165527344, 0.05374771118164062, 0.053823486328125, 0.05362073516845703, 0.05345894241333008, 0.053526527404785154, 0.053495807647705076, 0.05351804733276367, 0.053467296600341795, 0.05353279876708984, 0.05353881454467774, 0.05361635208129883, 0.053698848724365235, 0.053902816772460935, 0.05378102493286133, 0.05376204681396484, 0.05370169448852539, 0.053727680206298825, 0.0536165771484375, 0.05388345718383789, 0.05390665435791016, 0.05403510284423828, 0.053685630798339844, 0.05365945434570313, 0.05373139190673828, 0.05383257675170899, 0.05373132705688476, 0.05384729766845703, 0.05365619277954101, 0.05359628677368164, 0.053690174102783206, 0.05394364929199219, 0.053742431640625, 0.05382099151611328, 0.054042945861816405, 0.05396902465820313, 0.053819393157958986, 0.05385526275634766, 0.054051361083984374, 0.05399321746826172, 0.05380940628051758, 0.0538218879699707, 0.05397452926635742, 0.054067745208740234, 0.05378249740600586, 0.05385980987548828, 0.05396089553833008]",tokens/s,18.62376998635106,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most 
recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 162, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 982, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 555, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 219, in __init__ self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 
98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.706688,806.289408,0.0,411.041792,391.374848,s,1,7.4192998046875,7.4192998046875,0.0,7.4192998046875,7.4192998046875,7.4192998046875,7.4192998046875,[7.4192998046875],,kWh,5.054497087508025e-06,5.503927119673102e-07,2.0302794020016224e-06,7.635169201476957e-06,,MB,1098.924032,879.689728,0.0,473.956352,454.832128,s,16,0.37611609649658206,0.02350725603103638,0.0003895896859630637,0.0234224796295166,0.02354478359222412,0.023951807975769044,0.024774489402770997,"[0.024980159759521486, 0.023404767990112305, 0.023301408767700194, 0.02340281677246094, 0.023271360397338868, 0.023257152557373047, 0.02345427131652832, 0.023463552474975585, 0.023609024047851562, 0.023480543136596678, 0.023447519302368165, 0.02347452735900879, 0.023395904541015623, 0.02337260818481445, 0.023360288619995118, 0.0234401912689209]",tokens/s,10890.254467046512,kWh,8.355347409166522e-07,9.214411109693993e-08,5.548179041714452e-07,1.4824967561850374e-06,tokens/kWh,172681659.45858395,MB,1120.452608,906.952704,0.0,501.219328,454.834688,s,16,9.710575134277343,0.606910945892334,0.012089048943782812,0.6111069030761719,0.6191018981933594,0.6193922882080078,0.6196740142822266,"[0.6129658203125, 0.6042952880859375, 0.5861619262695312, 0.5873612060546874, 0.59046728515625, 0.5870277709960937, 0.6189288940429688, 0.6136454467773438, 0.61927490234375, 0.6182135620117187, 0.6197444458007813, 0.610319091796875, 0.6175580444335937, 0.6102855224609375, 0.6118947143554687, 0.6024312133789063]",tokens/s,103.80435618502786,kWh,1.734154936766225e-05,1.912482562088808e-06,7.800629956652067e-06,2.7054661886403127e-05,tokens/kWh,2328619.010820532,,s,1008,9.702061608314505,0.009625061119359637,0.0002930521366950281,0.009635200023651123,0.009933778953552247,0.009996521377563477,0.010454401950836181,"[0.009595935821533202, 0.009617247581481934, 0.009658559799194336, 0.00968832015991211, 0.009486175537109376, 0.009703424453735352, 0.009759455680847169, 0.0095, 0.009595552444458007, 0.009773056030273437, 0.009921759605407714, 0.010906399726867676, 0.009795583724975587, 0.009715200424194336, 0.009690752029418945, 0.009709823608398437, 0.009646719932556152, 0.00959488010406494, 0.009988096237182617, 0.009772576332092286, 0.009902560234069824, 0.009748895645141602, 0.00976956844329834, 0.00978006362915039, 0.00966051197052002, 0.009490495681762695, 0.009498335838317872, 0.009705471992492675, 0.01026857566833496, 0.00993727970123291, 0.009926655769348144, 0.009846783638000489, 
0.0104017915725708, 0.009987808227539063, 0.0098155517578125, 0.009814335823059081, 0.009681568145751954, 0.009592160224914551, 0.00963811206817627, 0.009708864212036133, 0.009450464248657227, 0.009678815841674805, 0.009659744262695313, 0.00963651180267334, 0.009541824340820312, 0.009458623886108398, 0.009355456352233886, 0.009453472137451171, 0.009627519607543945, 0.009857952117919922, 0.009644351959228515, 0.009650079727172852, 0.009551199913024902, 0.009541728019714356, 0.009773407936096191, 0.009838144302368164, 0.009813983917236329, 0.009648608207702638, 0.009670656204223632, 0.00979270362854004, 0.009621855735778808, 0.009470432281494141, 0.009422975540161133, 0.010043392181396485, 0.010067392349243164, 0.011911935806274414, 0.010933183670043945, 0.009993120193481446, 0.009969823837280274, 0.009827263832092285, 0.00975171184539795, 0.009776288032531739, 0.00984547233581543, 0.010139488220214843, 0.009764415740966797, 0.009422944068908692, 0.009446880340576173, 0.009726847648620605, 0.009952832221984863, 0.009535807609558106, 0.009451647758483887, 0.009485312461853027, 0.009582719802856444, 0.009597824096679688, 0.009531295776367188, 0.009828096389770507, 0.009483712196350098, 0.009544608116149902, 0.009797504425048828, 0.010219967842102051, 0.009595711708068848, 0.009472895622253417, 0.009584799766540527, 0.009500384330749512, 0.009672351837158203, 0.009814687728881836, 0.009842592239379883, 0.009693375587463379, 0.00961731243133545, 0.009320608139038086, 0.009229920387268066, 0.009223872184753418, 0.009292160034179688, 0.009307456016540528, 0.009231040000915527, 0.00930406379699707, 0.009259008407592773, 0.009224287986755371, 0.009274911880493165, 0.009378175735473632, 0.00924403190612793, 0.009240703582763672, 0.009250880241394043, 0.009322943687438965, 0.009514816284179687, 0.009324735641479492, 0.009257216453552247, 0.009189151763916015, 0.009220064163208008, 0.00919753646850586, 0.00925209617614746, 0.009265952110290528, 0.009208928108215332, 0.009225055694580079, 0.009318016052246093, 0.00932476806640625, 0.009152511596679687, 0.00943887996673584, 0.009678879737854003, 0.009345439910888672, 0.009829888343811035, 0.009355584144592284, 0.009468031883239745, 0.009420160293579102, 0.00928553581237793, 0.009267328262329101, 0.009265376091003417, 0.009291423797607421, 0.009310912132263184, 0.009414112091064453, 0.009246560096740722, 0.009224896430969238, 0.00928767967224121, 0.009243935585021972, 0.009230719566345216, 0.00921225643157959, 0.009205375671386719, 0.009164575576782227, 0.009260640144348145, 0.009251775741577149, 0.009193023681640626, 0.009236639976501464, 0.009228416442871094, 0.009269472122192382, 0.009305343627929688, 0.00928223991394043, 0.009220319747924805, 0.009203136444091797, 0.009217568397521973, 0.009234335899353028, 0.009264191627502442, 0.009242591857910156, 0.009243616104125977, 0.009192416191101074, 0.009349087715148925, 0.009387392044067383, 0.009248671531677246, 0.00938595199584961, 0.009239583969116212, 0.00923209571838379, 0.009193120002746582, 0.009314432144165039, 0.009232640266418457, 0.009236767768859863, 0.009205471992492676, 0.009342144012451172, 0.009321311950683593, 0.009342368125915528, 0.009325119972229004, 0.009268320083618165, 0.00927836799621582, 0.009270912170410156, 0.009334976196289063, 0.00920524787902832, 0.009288415908813477, 0.009228256225585938, 0.009314432144165039, 0.009615232467651367, 0.009349151611328125, 0.00907529640197754, 0.00935321617126465, 0.00931430435180664, 0.00943824005126953, 0.009460543632507324, 
0.009474047660827637, 0.009586784362792969, 0.009400383949279785, 0.009326592445373535, 0.00935529613494873, 0.009220352172851562, 0.009365216255187988, 0.009666560173034668, 0.009269536018371582, 0.009219807624816894, 0.00932249641418457, 0.009393407821655273, 0.009364224433898926, 0.009260607719421386, 0.009214400291442871, 0.00923363208770752, 0.009272095680236816, 0.009271295547485351, 0.009254560470581055, 0.009283935546875, 0.009248767852783203, 0.00923472023010254, 0.009413536071777345, 0.009356096267700195, 0.009383328437805176, 0.009321056365966796, 0.009390080451965332, 0.00929587173461914, 0.009464960098266601, 0.009462656021118164, 0.009324543952941895, 0.009312255859375, 0.009346847534179687, 0.009400511741638183, 0.009407999992370606, 0.009204256057739257, 0.00920684814453125, 0.009233344078063964, 0.009265279769897461, 0.009559103965759277, 0.009300800323486328, 0.009259008407592773, 0.009242624282836913, 0.009342880249023437, 0.00933897590637207, 0.00926028823852539, 0.009255680084228515, 0.009183232307434081, 0.009153792381286621, 0.00925158405303955, 0.009328767776489258, 0.009185248374938965, 0.009264960289001464, 0.009277536392211913, 0.009252863883972168, 0.009274944305419922, 0.009251423835754394, 0.009246560096740722, 0.00908675193786621, 0.009285887718200684, 0.009311679840087891, 0.009428576469421386, 0.009465888023376464, 0.009413311958312989, 0.009361408233642577, 0.009326432228088379, 0.009369759559631348, 0.009272704124450684, 0.009273152351379394, 0.009345536231994628, 0.009443648338317871, 0.009682304382324218, 0.00957913589477539, 0.010866687774658204, 0.009437184333801269, 0.009347200393676758, 0.009295519828796386, 0.009285856246948242, 0.009312447547912597, 0.009291487693786621, 0.009273440361022948, 0.009254816055297852, 0.009268671989440917, 0.009275808334350585, 0.009394432067871094, 0.009377792358398437, 0.00941004753112793, 0.009274080276489257, 0.009271072387695313, 0.009348575592041015, 0.009357119560241699, 0.009322976112365723, 0.009248640060424805, 0.009269856452941894, 0.009375519752502441, 0.009332991600036622, 0.009227999687194824, 0.009578495979309083, 0.009266528129577636, 0.009265312194824219, 0.00928159999847412, 0.009498944282531738, 0.009390239715576171, 0.009215999603271484, 0.009289728164672852, 0.009399616241455078, 0.009382335662841797, 0.00927667236328125, 0.009245408058166505, 0.009297696113586426, 0.009263615608215332, 0.00931222438812256, 0.009436927795410156, 0.009330400466918945, 0.009327168464660645, 0.00943446445465088, 0.009468255996704101, 0.009410016059875488, 0.009308671951293946, 0.009249247550964356, 0.009307200431823731, 0.00907852840423584, 0.009291775703430176, 0.009310272216796875, 0.009390015602111817, 0.009408415794372559, 0.009343071937561035, 0.009314240455627442, 0.009257023811340331, 0.009233951568603516, 0.009255647659301758, 0.009381728172302246, 0.009220000267028808, 0.009281536102294922, 0.009381631851196288, 0.00947430419921875, 0.009414752006530762, 0.009379712104797364, 0.009289759635925293, 0.009265215873718261, 0.009271231651306153, 0.009453568458557129, 0.00934716796875, 0.009275296211242675, 0.009248767852783203, 0.009281215667724609, 0.009312576293945312, 0.009227392196655273, 0.009233344078063964, 0.00923641586303711, 0.009242624282836913, 0.009367168426513672, 0.009244832038879394, 0.009305855751037598, 0.009247200012207031, 0.009320575714111328, 0.009295743942260742, 0.009340928077697755, 0.009285568237304687, 0.009265215873718261, 0.009234560012817384, 0.0092542724609375, 0.009236991882324219, 
0.009303487777709962, 0.009245247840881347, 0.009240575790405273, 0.009344351768493653, 0.009362015724182129, 0.009381695747375489, 0.0092674560546875, 0.009273344039916993, 0.009262432098388671, 0.009212575912475585, 0.009254912376403808, 0.009352383613586425, 0.009234880447387695, 0.009349504470825195, 0.009393407821655273, 0.009487104415893555, 0.00940771198272705, 0.00938649559020996, 0.009382143974304199, 0.00945132827758789, 0.00941267204284668, 0.009422719955444336, 0.009874688148498536, 0.00984233570098877, 0.009905055999755859, 0.010100799560546875, 0.009820159912109374, 0.009754624366760254, 0.009803808212280274, 0.009934720039367675, 0.00997590446472168, 0.009852928161621094, 0.00994099235534668, 0.009751872062683105, 0.010789567947387696, 0.00984607982635498, 0.009918656349182129, 0.00986128044128418, 0.009813952445983886, 0.00966697597503662, 0.009576671600341797, 0.009762656211853028, 0.009985343933105469, 0.00990998363494873, 0.0099334716796875, 0.009839872360229492, 0.009741120338439942, 0.009863360404968262, 0.009777152061462402, 0.009538911819458007, 0.009591456413269042, 0.009760800361633301, 0.010008543968200683, 0.009789440155029297, 0.009641119956970215, 0.009715968132019043, 0.009969728469848633, 0.009904671669006347, 0.009881407737731933, 0.009738528251647949, 0.009743616104125977, 0.009793984413146972, 0.009873439788818359, 0.00992240047454834, 0.00973964786529541, 0.009845120429992676, 0.009763423919677734, 0.009775103569030762, 0.009598591804504394, 0.009484671592712402, 0.009584799766540527, 0.009719776153564454, 0.009797504425048828, 0.009814016342163086, 0.009918463706970216, 0.010140735626220703, 0.009993151664733886, 0.009967231750488281, 0.009863264083862304, 0.009818400382995605, 0.009803744316101074, 0.009582624435424805, 0.009481760025024414, 0.009559647560119629, 0.01043827247619629, 0.010006143569946288, 0.009634559631347656, 0.009580863952636718, 0.009482943534851074, 0.009728223800659179, 0.00961616039276123, 0.009576448440551758, 0.009748576164245605, 0.009754528045654296, 0.00963321590423584, 0.009611840248107911, 0.009512639999389649, 0.009462240219116212, 0.009446368217468262, 0.009718655586242676, 0.009745951652526855, 0.009664511680603028, 0.009598464012145995, 0.009546719551086425, 0.009498016357421875, 0.009451583862304687, 0.009390368461608886, 0.009633440017700196, 0.009744768142700195, 0.009699040412902833, 0.00971622371673584, 0.009573504447937012, 0.009616448402404785, 0.009702336311340333, 0.009892736434936523, 0.009828319549560547, 0.009772928237915039, 0.009695455551147461, 0.009557791709899902, 0.009494848251342773, 0.0095283203125, 0.009503583908081054, 0.009992192268371582, 0.010205183982849121, 0.009803775787353516, 0.009901247978210449, 0.009673184394836425, 0.009715776443481445, 0.00973142433166504, 0.009672991752624512, 0.009713727951049805, 0.00971014404296875, 0.009891712188720703, 0.009902432441711425, 0.009868255615234375, 0.009923328399658203, 0.010332256317138673, 0.009916095733642579, 0.009941087722778321, 0.009920255661010742, 0.009767583847045899, 0.009804736137390137, 0.00978384017944336, 0.009766752243041993, 0.009777664184570312, 0.009646080017089843, 0.009854975700378419, 0.009486047744750977, 0.009804575920104981, 0.009672703742980958, 0.009702912330627441, 0.009640064239501953, 0.009677184104919434, 0.009594911575317383, 0.009574496269226074, 0.009545599937438965, 0.009637344360351562, 0.009893471717834473, 0.009973888397216796, 0.010060832023620606, 0.009762751579284667, 0.00976041603088379, 
0.009698559761047363, 0.009523743629455567, 0.009604736328125, 0.009975584030151367, 0.00992972755432129, 0.009787391662597657, 0.009901760101318359, 0.009875776290893555, 0.009764863967895507, 0.009877504348754883, 0.00976416015625, 0.009962176322937012, 0.009623295783996582, 0.009592991828918457, 0.009551551818847656, 0.009655839920043945, 0.010060223579406738, 0.010168191909790038, 0.010455615997314453, 0.010293184280395508, 0.009933216094970703, 0.011004896163940429, 0.010186944007873535, 0.009890303611755372, 0.009822208404541016, 0.009773216247558593, 0.009983839988708497, 0.00994649600982666, 0.009958271980285644, 0.010020223617553711, 0.00994326400756836, 0.00986672019958496, 0.009668831825256347, 0.009726431846618653, 0.00963584041595459, 0.009600031852722167, 0.009851263999938966, 0.009757280349731445, 0.009805824279785156, 0.009811327934265137, 0.009731840133666993, 0.009710399627685547, 0.009834560394287109, 0.009859071731567384, 0.009672703742980958, 0.009719807624816895, 0.009620800018310546, 0.009536479949951172, 0.009924927711486816, 0.00989132785797119, 0.009902239799499512, 0.00986736011505127, 0.00986956787109375, 0.009840640068054199, 0.009651616096496582, 0.009636704444885253, 0.009799424171447755, 0.009844736099243164, 0.009897664070129394, 0.009847040176391602, 0.009802016258239746, 0.009680864334106445, 0.009557248115539551, 0.009607744216918946, 0.009569791793823243, 0.009705984115600585, 0.009799679756164551, 0.009885696411132813, 0.009969663619995118, 0.01028502368927002, 0.009883968353271485, 0.00982755184173584, 0.00987609577178955, 0.009803647994995118, 0.009815520286560058, 0.009794079780578613, 0.009692768096923828, 0.009879263877868652, 0.009917375564575195, 0.009786463737487794, 0.009730367660522462, 0.009704031944274903, 0.009760607719421386, 0.009733375549316406, 0.009673376083374024, 0.00970137596130371, 0.009671680450439453, 0.009750911712646484, 0.009882240295410157, 0.009891839981079101, 0.009926655769348144, 0.009930751800537109, 0.009780799865722656, 0.009773504257202149, 0.009940896034240723, 0.009931967735290528, 0.010032032012939453, 0.009950976371765136, 0.009988351821899414, 0.009924192428588868, 0.009750304222106933, 0.00972662353515625, 0.00976252841949463, 0.009756192207336426, 0.009783295631408692, 0.009796319961547851, 0.0097260160446167, 0.009672479629516601, 0.009625151634216308, 0.009663071632385254, 0.0096278076171875, 0.009656864166259766, 0.00974396800994873, 0.009816415786743164, 0.009833951950073243, 0.009826592445373535, 0.010085856437683106, 0.01001683235168457, 0.01003600025177002, 0.009943231582641602, 0.009804896354675293, 0.009706208229064942, 0.009964768409729003, 0.009718560218811035, 0.009781248092651367, 0.009865216255187988, 0.010059935569763184, 0.00986511993408203, 0.009869248390197753, 0.009765983581542969, 0.00969820785522461, 0.009807871818542481, 0.009645407676696777, 0.009732768058776856, 0.009911616325378418, 0.009956031799316407, 0.009838591575622559, 0.009805824279785156, 0.009906175613403321, 0.009829631805419922, 0.009919327735900879, 0.009916319847106933, 0.009902400016784668, 0.00982323169708252, 0.010006239891052246, 0.00984499168395996, 0.009917344093322754, 0.010024352073669434, 0.010055839538574219, 0.009873824119567871, 0.009860992431640625, 0.009678815841674805, 0.00971782398223877, 0.009768992424011231, 0.009739263534545899, 0.009640128135681152, 0.009645888328552246, 0.00961622428894043, 0.009850784301757813, 0.009716992378234863, 0.009522080421447754, 0.009629695892333985, 0.009953503608703613, 
0.009828415870666504, 0.009950559616088867, 0.009920895576477051, 0.009965567588806153, 0.009808992385864258, 0.00967356777191162, 0.00958620834350586, 0.009799391746520995, 0.00973087978363037, 0.009928383827209473, 0.009846847534179687, 0.009527296066284179, 0.009790656089782714, 0.009670559883117675, 0.00948691177368164, 0.009461088180541992, 0.009458527565002442, 0.009591967582702636, 0.009903103828430175, 0.00997920036315918, 0.00986736011505127, 0.009792384147644042, 0.009727456092834473, 0.009768704414367675, 0.009619487762451171, 0.009566720008850099, 0.009587776184082032, 0.009583519935607911, 0.009502655982971191, 0.009680959701538086, 0.009758720397949219, 0.009752415657043456, 0.009748127937316895, 0.009568767547607422, 0.00955388832092285, 0.009469440460205078, 0.009428768157958985, 0.009650943756103515, 0.00972544002532959, 0.009794048309326172, 0.009953280448913575, 0.00999833583831787, 0.009938943862915038, 0.009938336372375489, 0.009729824066162109, 0.009919296264648438, 0.009755680084228516, 0.009671648025512696, 0.009631232261657715, 0.00961996841430664, 0.009662464141845703, 0.009583776473999023, 0.009472064018249512, 0.00951968002319336, 0.009677023887634277, 0.00996771240234375, 0.009801631927490234, 0.009822400093078613, 0.009930527687072754, 0.009678879737854003, 0.009709024429321289, 0.00978764820098877, 0.00963817596435547, 0.009778656005859376, 0.009871904373168944, 0.009703424453735352, 0.009547776222229003, 0.00951523208618164, 0.009572064399719239, 0.009545536041259765, 0.009475520133972168, 0.009401151657104492, 0.00942841625213623, 0.009470399856567382, 0.009504704475402833, 0.009680191993713378, 0.009623807907104492, 0.00953331184387207, 0.009396415710449219, 0.009484671592712402, 0.009615360260009765, 0.009778656005859376, 0.009747039794921876, 0.009885631561279296, 0.00993449592590332, 0.009886207580566407, 0.00998969554901123, 0.00991875171661377, 0.00992204761505127, 0.00982476806640625, 0.00984438419342041, 0.009713919639587402, 0.00964031982421875, 0.009664223670959473, 0.009707103729248047, 0.009703200340270996, 0.009585472106933593, 0.009570112228393554, 0.00941875171661377, 0.009395936012268066, 0.009357600212097169, 0.0097259521484375, 0.009965567588806153, 0.009844127655029298, 0.009667167663574219, 0.009570143699645995, 0.009584223747253418, 0.009840224266052246, 0.011205632209777832, 0.009805215835571288, 0.011686464309692383, 0.009758720397949219, 0.009872384071350097, 0.009802111625671387, 0.009834527969360352, 0.009747039794921876, 0.009660415649414063, 0.009620896339416504, 0.009603967666625976, 0.009686752319335938, 0.009595295906066895, 0.009614687919616699, 0.00988595199584961, 0.009885919570922852, 0.009971487998962402, 0.010090496063232422, 0.009975808143615723, 0.010016768455505372, 0.009874848365783692, 0.009917023658752442, 0.010260479927062988, 0.00986678409576416, 0.009841312408447265, 0.009613056182861329, 0.009680031776428222, 0.009558943748474122, 0.009566240310668946, 0.009944640159606933, 0.009962719917297363, 0.009788928031921386, 0.009909407615661621, 0.009777536392211914, 0.010347071647644044, 0.00962342357635498, 0.009500800132751465, 0.009459487915039063, 0.009586784362792969, 0.00988806438446045, 0.009510368347167968, 0.009646400451660157, 0.010149920463562011, 0.009768768310546875, 0.009682432174682617, 0.009644736289978027, 0.009652223587036133, 0.009596896171569825, 0.009519136428833008, 0.009637887954711915, 0.009711615562438965, 0.00940886402130127, 0.009385631561279298, 0.009439455986022948, 0.009330207824707032, 
0.009674592018127441, 0.01010934352874756, 0.01005894374847412, 0.009902912139892578, 0.009905535697937012, 0.009790080070495605, 0.009715104103088379, 0.00973862361907959, 0.009885408401489259, 0.009767104148864746, 0.00968735980987549, 0.009541631698608399, 0.009695072174072266, 0.009814175605773925, 0.009621503829956055, 0.009669983863830566, 0.009956000328063964, 0.0097892484664917, 0.00976095962524414, 0.009565183639526367, 0.009593855857849122, 0.009506272315979005, 0.009421343803405762, 0.009439359664916993, 0.009432064056396485, 0.009431936264038087, 0.009459263801574707, 0.009688544273376464, 0.009898943901062012, 0.009838624000549316, 0.009644031524658203, 0.009453536033630371, 0.00957033634185791, 0.009560288429260254, 0.009435071945190429, 0.009424736022949218, 0.009457759857177735, 0.009454079627990723, 0.009742688179016114, 0.009935968399047852, 0.010724287986755371, 0.009986240386962891, 0.010014495849609374, 0.009878815650939942, 0.009883808135986329, 0.009866175651550292, 0.009821824073791504, 0.009651424407958985, 0.009535296440124512, 0.009546719551086425, 0.009489855766296386, 0.009504735946655274, 0.009435744285583495, 0.009771103858947755, 0.009936800003051758, 0.00972390365600586, 0.00961740779876709, 0.009775103569030762, 0.00970137596130371, 0.009811712265014648, 0.009906432151794434, 0.009761055946350098, 0.009721920013427735, 0.009657088279724122, 0.009710623741149902, 0.009738304138183594, 0.00953286361694336, 0.010322367668151856, 0.010501919746398925, 0.009730400085449219, 0.009576224327087402, 0.009659711837768554, 0.009697919845581054, 0.009689184188842773, 0.00962281608581543, 0.009476832389831543, 0.009308223724365234, 0.009374688148498535, 0.009348064422607423, 0.00959488010406494, 0.009645503997802734, 0.009455679893493652, 0.009574399948120118, 0.00942131233215332, 0.009461503982543945, 0.009394432067871094, 0.009488384246826171, 0.009897983551025391, 0.01001471996307373, 0.010024959564208985, 0.009777376174926758, 0.009917471885681152, 0.009753279685974121, 0.00965180778503418, 0.009503328323364257, 0.009557184219360351, 0.009501376152038575, 0.009440704345703125, 0.00946233558654785, 0.009644031524658203, 0.009865280151367187, 0.009760767936706542, 0.00949836826324463, 0.009394432067871094, 0.009359519958496093, 0.009307871818542481, 0.009514880180358887, 0.009690943717956544, 0.009639424324035644, 0.00956281566619873, 0.009689599990844726, 0.009675935745239258, 0.009586496353149414, 0.009454367637634277, 0.009360639572143555, 0.009288448333740235, 0.00930406379699707, 0.009449472427368164, 0.00929587173461914, 0.009260319709777831, 0.009263808250427247, 0.009195008277893067, 0.009209568023681641, 0.009429823875427247, 0.009452735900878906, 0.009394847869873046, 0.009441439628601073, 0.009596927642822266, 0.00962559986114502, 0.009453696250915527, 0.00934825611114502, 0.00930844783782959, 0.009316576004028321, 0.009537376403808594, 0.009916319847106933, 0.00980016040802002, 0.009843839645385741, 0.009824959754943848, 0.009953472137451172, 0.009904128074645996, 0.009957375526428223, 0.009776224136352539, 0.009560928344726563, 0.009496640205383301, 0.009430399894714355, 0.009455615997314454, 0.009442079544067382, 0.009514847755432128, 0.009721952438354492, 0.009608192443847656, 0.00938486385345459, 0.009518624305725098, 0.00965231990814209, 0.009873791694641114, 0.009743616104125977, 0.00960588836669922, 0.009569952011108399, 0.009538047790527344, 0.009674592018127441, 0.009702848434448243, 0.009605695724487304, 0.009518912315368652, 
0.00969536018371582]",tokens/s,103.89544415345289,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.177728,3354.329088,0.0,2959.081472,2942.567424,s,1,7.613796875,7.613796875,0.0,7.613796875,7.613796875,7.613796875,7.613796875,[7.613796875],,kWh,1.060672593750193e-05,1.1526767979209956e-06,4.308059002006148e-06,1.6067461737429074e-05,,MB,1100.869632,3547.267072,0.0,3141.533696,3105.830912,s,10,2.598595458984375,0.2598595458984375,0.0010426096901951518,0.25929754638671876,0.2613603698730469,0.2614039855957031,0.2614388781738281,"[0.2589762268066406, 0.25927383422851563, 0.2591113586425781, 0.2612887878417969, 0.2588882141113281, 0.2593212585449219, 0.2588128356933594, 0.26144760131835937, 0.2601246643066406, 0.2613506774902344]",tokens/s,985.1475692951993,kWh,7.642937999786426e-06,8.428794512388161e-07,5.078194945743549e-06,1.3564012396768791e-05,tokens/kWh,18873471.397075996,MB,1122.267136,3589.210112,0.0,3183.476736,3163.057152,s,10,11.634897216796876,1.1634897216796873,0.01150969353260174,1.1660227661132812,1.177038916015625,1.1774024536132812,1.1776932836914062,"[1.1777659912109375, 1.1690953369140624, 1.1769581298828125, 1.1456146240234375, 1.141520751953125, 1.158644775390625, 1.1683118896484375, 1.163733642578125, 1.1710692138671874, 1.162182861328125]",tokens/s,54.147448684848904,kWh,3.4141401808964265e-05,3.763897673904306e-06,2.2511104903456173e-05,6.041640438632475e-05,tokens/kWh,1042763.1475245495,,s,630,11.63173612594604,0.018463073215787374,0.0004140495528841139,0.018481151580810547,0.01874390411376953,0.018877248573303224,0.020176229000091556,"[0.01915817642211914, 0.018645503997802734, 0.01864534378051758, 0.018683904647827147, 0.01863065528869629, 0.01859174346923828, 0.018743263244628907, 0.01859177589416504, 0.01863680076599121, 0.01860323143005371, 0.01851798439025879, 0.018459455490112305, 0.018545984268188476, 0.018516672134399413, 0.018518016815185546, 0.018530303955078126, 0.018343936920166014, 0.018617599487304688, 0.01888128089904785, 0.019720191955566405, 0.018764991760253907, 0.018629440307617186, 0.018517120361328125, 0.01843903923034668, 0.018597888946533202, 0.018564512252807617, 0.01841551971435547, 0.021264991760253905, 0.01870857620239258, 0.018677759170532226, 0.0184597110748291, 0.018441152572631837, 0.01844432067871094, 0.018483072280883788, 0.018518047332763674, 0.01846665573120117, 0.01838243293762207, 0.018802623748779296, 0.018848447799682616, 0.018628639221191408, 0.018695423126220703, 0.01856175994873047, 0.018448352813720703, 0.018704032897949217, 0.018551168441772462, 0.018622175216674804, 0.01847324752807617, 0.018397184371948243, 0.018577407836914063, 0.018581504821777343, 0.018597440719604494, 0.018526655197143554, 0.01868720054626465, 0.018505727767944336, 0.01845737648010254, 0.01865100860595703, 0.018757888793945313, 0.018592767715454102, 0.018566015243530274, 0.018669567108154296, 0.0188272647857666, 
0.02052412796020508, 0.018760608673095702, 0.019173887252807616, 0.01856515121459961, 0.018584800720214845, 0.018772735595703124, 0.018481151580810547, 0.01867923164367676, 0.018670143127441405, 0.018517183303833007, 0.01865609550476074, 0.018343711853027345, 0.018409664154052735, 0.018737152099609376, 0.018492992401123048, 0.01833011245727539, 0.018454368591308595, 0.018607616424560547, 0.01874390411376953, 0.0188723201751709, 0.01862860870361328, 0.018597728729248048, 0.0183338565826416, 0.018350080490112306, 0.018353471755981444, 0.01835897636413574, 0.018587648391723634, 0.018322528839111327, 0.018250656127929688, 0.018601984024047852, 0.018689056396484376, 0.018640863418579103, 0.01863987159729004, 0.01855824089050293, 0.01849212837219238, 0.01848518371582031, 0.018372671127319336, 0.018610176086425782, 0.018583200454711915, 0.018686304092407225, 0.018759359359741212, 0.018594079971313477, 0.01859177589416504, 0.018491071701049806, 0.01811030387878418, 0.018466272354125977, 0.018508800506591795, 0.018609312057495116, 0.018506591796875, 0.018573312759399413, 0.018521600723266602, 0.01877452850341797, 0.018448383331298827, 0.018290176391601562, 0.01842243194580078, 0.019279712677001952, 0.01880825614929199, 0.018588224411010743, 0.018382623672485353, 0.018372352600097657, 0.018700767517089843, 0.018443391799926757, 0.018590591430664063, 0.018300928115844727, 0.018391040802001952, 0.01912214469909668, 0.018615968704223634, 0.018544288635253908, 0.01843846321105957, 0.01849081611633301, 0.01859270477294922, 0.0184770565032959, 0.018661376953125, 0.018464767456054687, 0.01859286308288574, 0.018313215255737304, 0.018231935501098633, 0.018313280105590822, 0.01870038414001465, 0.01880281639099121, 0.01875712013244629, 0.01857155227661133, 0.0184117431640625, 0.01826201629638672, 0.018356224060058594, 0.0184586238861084, 0.018534400939941405, 0.01839308738708496, 0.018525279998779298, 0.018576383590698242, 0.018540096282958985, 0.018446783065795898, 0.01880054473876953, 0.018499839782714845, 0.01836796760559082, 0.018321695327758788, 0.018495487213134765, 0.018573312759399413, 0.018684064865112305, 0.01864687919616699, 0.018534400939941405, 0.018464767456054687, 0.018461952209472655, 0.018471424102783202, 0.01841177558898926, 0.018421728134155272, 0.01835775947570801, 0.018788671493530272, 0.0192589111328125, 0.018747392654418944, 0.018756288528442383, 0.018497535705566406, 0.01860416030883789, 0.018743167877197264, 0.018602272033691407, 0.018791807174682616, 0.01875299263000488, 0.020423295974731446, 0.02287379264831543, 0.019982271194458008, 0.018472896575927735, 0.018618335723876955, 0.018572000503540038, 0.018523679733276368, 0.018452863693237304, 0.01832969665527344, 0.018447391510009764, 0.018688735961914064, 0.019151359558105468, 0.018565120697021483, 0.0186711368560791, 0.018683488845825196, 0.018404224395751952, 0.01847200012207031, 0.018675872802734375, 0.01859436798095703, 0.01855072021484375, 0.018579296112060547, 0.018532800674438476, 0.018745344161987306, 0.01865228843688965, 0.018468767166137694, 0.018283296585083007, 0.017977535247802736, 0.0180296630859375, 0.017764415740966797, 0.017879583358764647, 0.018177759170532225, 0.017926752090454103, 0.017930240631103517, 0.018165760040283203, 0.01825939178466797, 0.018391616821289064, 0.018549888610839844, 0.01827110481262207, 0.018323295593261717, 0.018049184799194335, 0.018135040283203126, 0.01815872001647949, 0.017953664779663085, 0.018306911468505858, 0.018048864364624023, 0.0178670711517334, 0.017969152450561524, 
0.017928064346313475, 0.017834112167358397, 0.017920000076293945, 0.017885183334350584, 0.017870847702026366, 0.018112512588500978, 0.017946624755859376, 0.017923967361450195, 0.01800614356994629, 0.017890687942504882, 0.01804729652404785, 0.017918399810791016, 0.018187551498413085, 0.01793903923034668, 0.018024511337280273, 0.017971136093139647, 0.01816927909851074, 0.018047552108764648, 0.017889280319213868, 0.018025983810424806, 0.01786684799194336, 0.01809244728088379, 0.018081792831420897, 0.018470943450927733, 0.018034656524658202, 0.01807155227661133, 0.01803878402709961, 0.019248159408569335, 0.018264415740966797, 0.018201120376586916, 0.018126880645751953, 0.0180644474029541, 0.017988544464111328, 0.018008064270019532, 0.017983104705810545, 0.01795929527282715, 0.01816307258605957, 0.017906303405761718, 0.018077695846557617, 0.018044927597045898, 0.0182476806640625, 0.018198528289794923, 0.018542591094970702, 0.01819340705871582, 0.018042911529541017, 0.017951263427734374, 0.01801875114440918, 0.01817100715637207, 0.017941375732421876, 0.01800595283508301, 0.017991743087768554, 0.01801420783996582, 0.018089984893798827, 0.018304479598999022, 0.017986080169677735, 0.017942527770996093, 0.017890464782714843, 0.018221920013427734, 0.0179866886138916, 0.017883264541625976, 0.018486143112182617, 0.017899391174316406, 0.018054336547851563, 0.018550880432128908, 0.018064096450805665, 0.0184586238861084, 0.01919385528564453, 0.018245311737060548, 0.018097631454467772, 0.01823369598388672, 0.01802668762207031, 0.018104000091552733, 0.017971839904785156, 0.01803878402709961, 0.018059263229370116, 0.01799782371520996, 0.018000160217285156, 0.017947999954223633, 0.0178383674621582, 0.018153568267822266, 0.017989183425903322, 0.017989599227905273, 0.017948703765869142, 0.017990079879760743, 0.018013408660888672, 0.01821676826477051, 0.017945056915283204, 0.01791779136657715, 0.017948543548583985, 0.018207359313964843, 0.019017791748046874, 0.018353567123413086, 0.01842367935180664, 0.01818191909790039, 0.018381599426269532, 0.018128992080688477, 0.01809596824645996, 0.017976831436157227, 0.018232032775878905, 0.019179519653320314, 0.01912575912475586, 0.01806617546081543, 0.018194175720214843, 0.018440160751342773, 0.018519872665405272, 0.01822537612915039, 0.017967103958129883, 0.01800396728515625, 0.017928192138671875, 0.017874431610107423, 0.01815123176574707, 0.017820575714111327, 0.017900447845458984, 0.017869632720947267, 0.017979167938232423, 0.01790390396118164, 0.018062431335449217, 0.01773251152038574, 0.017835039138793946, 0.017943103790283202, 0.018127264022827147, 0.018128896713256838, 0.018380800247192384, 0.018563039779663087, 0.01864297676086426, 0.018546688079833985, 0.018666847229003906, 0.01852892875671387, 0.01871254348754883, 0.018462751388549806, 0.018625696182250975, 0.018572128295898438, 0.01861631965637207, 0.018713951110839844, 0.018639520645141603, 0.018643295288085938, 0.018576799392700197, 0.018433439254760743, 0.0186243839263916, 0.01866156768798828, 0.01849193572998047, 0.018604288101196288, 0.018542591094970702, 0.0186562557220459, 0.01860304069519043, 0.018597856521606445, 0.01869158363342285, 0.018481344223022462, 0.018732799530029296, 0.018628511428833008, 0.018681568145751955, 0.018477760314941406, 0.01870275115966797, 0.019372320175170897, 0.018633216857910157, 0.018700288772583007, 0.018638111114501952, 0.018627296447753905, 0.018667520523071288, 0.018702335357666015, 0.018638496398925782, 0.018542943954467775, 0.018540544509887694, 0.018548736572265623, 
0.018481151580810547, 0.018522111892700196, 0.018528255462646484, 0.01848271942138672, 0.018483680725097658, 0.018583295822143554, 0.018630720138549803, 0.018600128173828126, 0.018509824752807616, 0.018382848739624022, 0.01844223976135254, 0.018425504684448243, 0.018441791534423827, 0.018500383377075196, 0.018597183227539064, 0.018356319427490234, 0.018641504287719726, 0.01863039970397949, 0.018530527114868165, 0.018608095169067383, 0.01853830337524414, 0.01844207954406738, 0.01869455909729004, 0.018479040145874023, 0.018626623153686524, 0.018547840118408203, 0.01845542335510254, 0.01845583915710449, 0.018614112854003908, 0.018520959854125975, 0.018308832168579103, 0.018440479278564452, 0.018630271911621095, 0.018813312530517576, 0.018699392318725586, 0.018529151916503905, 0.01860403251647949, 0.018302463531494142, 0.018612735748291014, 0.018563072204589845, 0.0186060791015625, 0.018599584579467775, 0.01863462448120117, 0.01857174491882324, 0.018333696365356447, 0.01819593620300293, 0.018275936126708983, 0.018099071502685547, 0.018028608322143556, 0.018503488540649413, 0.018728992462158204, 0.01857142448425293, 0.01909212875366211, 0.018731008529663085, 0.018849504470825194, 0.018647327423095703, 0.018636064529418947, 0.018555616378784178, 0.018554367065429688, 0.0186549129486084, 0.018440704345703125, 0.018395328521728517, 0.01872697639465332, 0.018593215942382814, 0.018815616607666015, 0.018366176605224608, 0.01814659118652344, 0.018371679306030272, 0.018470815658569336, 0.018675647735595702, 0.018245567321777345, 0.018237567901611327, 0.018718719482421875, 0.018937023162841796, 0.0185860481262207, 0.0185797119140625, 0.01857472038269043, 0.018526975631713866, 0.018696191787719727, 0.01840742492675781, 0.018647008895874024, 0.018501855850219726, 0.018554943084716797, 0.018552448272705076, 0.018368640899658204, 0.01794047927856445, 0.018092031478881835, 0.018190336227416993, 0.018163711547851562, 0.018231296539306642, 0.018569215774536133, 0.018777727127075195, 0.018545087814331056, 0.018450368881225587, 0.0186494083404541, 0.018482879638671876, 0.018374656677246092, 0.018577215194702148, 0.018309215545654296, 0.01807369613647461, 0.018247615814208983, 0.018144832611083985, 0.01804547119140625, 0.018030559539794922, 0.017956256866455078, 0.018500192642211914, 0.018366464614868162, 0.018257408142089843, 0.018102272033691406, 0.018548351287841797, 0.01874390411376953, 0.01872105598449707, 0.018513120651245118, 0.01847785568237305, 0.018448383331298827, 0.01924723243713379, 0.018618240356445312, 0.018518016815185546, 0.01878153610229492, 0.01866803169250488, 0.01847862434387207, 0.018487936019897462, 0.018589599609375, 0.018273439407348633, 0.01895315170288086, 0.018391040802001952, 0.018358272552490236, 0.018290592193603517, 0.018337888717651366, 0.018515968322753908, 0.01838489532470703, 0.01876201629638672, 0.018587360382080077, 0.01846886444091797, 0.018615743637084962, 0.018727615356445314, 0.019367040634155272, 0.02104944038391113, 0.01850828742980957, 0.01828700828552246, 0.01854745674133301, 0.01820159912109375, 0.01814246368408203, 0.018064128875732423, 0.018157567977905274, 0.018495487213134765, 0.01846790313720703, 0.018348415374755858, 0.018417823791503907, 0.018800735473632812, 0.01850809669494629, 0.018624128341674803, 0.018497087478637694, 0.018617055892944337, 0.018391136169433595, 0.018425247192382813, 0.01840140724182129, 0.01817033576965332, 0.018421695709228515, 0.018053184509277342, 0.018217023849487306, 0.018267168045043945, 0.01841417694091797, 0.018372928619384766, 
0.018933759689331055, 0.018532352447509767, 0.018565120697021483, 0.020483840942382814, 0.018521888732910156, 0.01857174491882324, 0.018485248565673826, 0.018572639465332032, 0.018796575546264647, 0.018586240768432617, 0.018568479537963867, 0.018586080551147462, 0.01866326332092285, 0.018590112686157227, 0.019146751403808594, 0.01827993583679199, 0.01836435127258301, 0.018192703247070313, 0.01819878387451172, 0.01812665557861328, 0.018135232925415037, 0.018195520401000975, 0.018186464309692382, 0.018559711456298828, 0.018771968841552734, 0.018520063400268554, 0.018551040649414062, 0.01847475242614746, 0.018280448913574218, 0.018205759048461913, 0.01812371253967285, 0.01822431945800781, 0.018244224548339842, 0.01805913543701172, 0.01802272033691406, 0.01799897575378418, 0.018103168487548827, 0.01829478454589844, 0.018497055053710937, 0.01852422332763672, 0.018418079376220704, 0.018662527084350587, 0.01840937614440918, 0.01868079948425293, 0.01878131294250488, 0.018405248641967773, 0.0182609920501709, 0.01814233589172363, 0.018178943634033204, 0.01802422332763672, 0.018173152923583985, 0.018174976348876954, 0.018218271255493163, 0.01847983932495117, 0.018452159881591795, 0.01824515151977539, 0.0182523193359375, 0.01819264030456543, 0.01821696090698242, 0.01860812759399414, 0.018609792709350585, 0.018448095321655273, 0.018330400466918945, 0.018632064819335936, 0.0185533447265625, 0.018530303955078126, 0.018224735260009766, 0.01814352035522461, 0.01824492835998535, 0.018461503982543946, 0.01945315170288086, 0.019262239456176757, 0.020195104598999022, 0.020130016326904296, 0.018365472793579102, 0.018611072540283203, 0.018466047286987305]",tokens/s,54.16216402938389,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,836.452352,4675.534848,0.0,4280.287232,4115.121152,s,1,7.65303857421875,7.65303857421875,0.0,7.65303857421875,7.65303857421875,7.65303857421875,7.65303857421875,[7.65303857421875],,kWh,1.0351801950006727e-05,1.1340980008129043e-06,4.7408371259966e-06,1.622673707681623e-05,,MB,1142.31296,4981.71904,0.0,4575.985664,4408.408064,s,10,3.030411315917968,0.3030411315917969,0.002580261041039593,0.3030240173339844,0.3054149444580078,0.3060226119995117,0.30650874603271483,"[0.2980228271484375, 0.29910711669921874, 0.30268963623046874, 0.30315701293945313, 0.3052799072265625, 0.3048192138671875, 0.3028910217285156, 0.30517254638671876, 0.30264175415039063, 0.3066302795410156]",tokens/s,844.7698127818426,kWh,9.061008412627508e-06,9.98694762992155e-07,5.996208500666343e-06,1.6055911676286006e-05,tokens/kWh,15944283.025553923,MB,1163.50976,4981.71904,0.0,4575.985664,4408.410624,s,10,14.811874389648438,1.4811874389648438,0.010192541895537985,1.4821577758789062,1.491000354003906,1.492011590576172,1.4928205798339844,"[1.458283935546875, 1.4831822509765624, 1.4930228271484376, 1.48113330078125, 1.489197265625, 1.478267822265625, 1.4705389404296876, 1.490775634765625, 1.4900311279296874, 
1.4774412841796876]",tokens/s,42.533441982217155,kWh,4.301731266653926e-05,4.745208429294184e-06,2.843334682073334e-05,7.619586791656678e-05,tokens/kWh,826816.4891695172,,s,630,14.80945721435548,0.02350707494342138,0.0003652562417825801,0.02352883243560791,0.023837715148925784,0.024032551956176755,0.024826040382385256,"[0.024179264068603514, 0.02332838439941406, 0.02325542449951172, 0.023571935653686524, 0.023126623153686524, 0.022810495376586915, 0.02295327949523926, 0.022874719619750978, 0.02343731117248535, 0.023152799606323243, 0.023086463928222656, 0.023060895919799804, 0.022964096069335936, 0.022962528228759764, 0.022966272354125978, 0.022945535659790038, 0.02320518493652344, 0.02302867126464844, 0.02311577606201172, 0.023164735794067384, 0.02349056053161621, 0.02337606430053711, 0.02351286315917969, 0.023074464797973634, 0.023984703063964843, 0.022965503692626954, 0.0229117431640625, 0.022939104080200196, 0.022780384063720703, 0.02317888069152832, 0.022918912887573244, 0.02284547233581543, 0.022947872161865234, 0.02283955192565918, 0.022862207412719725, 0.022763519287109374, 0.022816064834594727, 0.02278223991394043, 0.023113536834716796, 0.022927967071533203, 0.02285971260070801, 0.022884416580200195, 0.024978752136230468, 0.024314559936523438, 0.023011007308959962, 0.023246335983276366, 0.02304083251953125, 0.02294169616699219, 0.02297622489929199, 0.02327516746520996, 0.02326927947998047, 0.0232138557434082, 0.023081920623779298, 0.02295199966430664, 0.023014495849609375, 0.02323747253417969, 0.023005247116088867, 0.022987775802612305, 0.02320649528503418, 0.023129600524902344, 0.023249759674072265, 0.023013376235961915, 0.022957727432250975, 0.02403536033630371, 0.023434751510620116, 0.023245279312133788, 0.0235479679107666, 0.024457151412963868, 0.02391766357421875, 0.02341731262207031, 0.023945087432861327, 0.023502399444580078, 0.02369843292236328, 0.023676288604736327, 0.02342732810974121, 0.02323289680480957, 0.023358463287353515, 0.023931327819824218, 0.02350761604309082, 0.023482271194458008, 0.02353561592102051, 0.023469823837280274, 0.023475456237792968, 0.02337436866760254, 0.023605728149414064, 0.02333286476135254, 0.023357376098632813, 0.023224384307861327, 0.02347804832458496, 0.02313852882385254, 0.023324544906616212, 0.023433055877685547, 0.02327071952819824, 0.02337276840209961, 0.023993728637695312, 0.023318656921386717, 0.023343616485595704, 0.023050111770629884, 0.0231200008392334, 0.023093183517456053, 0.023076927185058594, 0.023128320693969726, 0.02336105537414551, 0.023158496856689453, 0.024087039947509766, 0.02369740867614746, 0.02366854476928711, 0.023543231964111327, 0.023788032531738282, 0.023597055435180665, 0.023731584548950194, 0.023657344818115236, 0.023812095642089845, 0.023607295989990236, 0.023692607879638672, 0.02377801513671875, 0.023883520126342775, 0.023617759704589843, 0.023711103439331055, 0.023794303894042967, 0.02367283248901367, 0.023450944900512697, 0.023521280288696288, 0.023560895919799804, 0.023623392105102538, 0.023568672180175783, 0.02430156707763672, 0.02373222351074219, 0.023830528259277343, 0.023736320495605468, 0.023558143615722657, 0.02396272087097168, 0.023570655822753906, 0.023548608779907228, 0.02373129653930664, 0.02364009666442871, 0.02370035171508789, 0.02362892723083496, 0.02355289649963379, 0.023558143615722657, 0.023578624725341796, 0.023613439559936524, 0.02361657524108887, 0.02374732780456543, 0.02379385566711426, 0.02372812843322754, 0.023512895584106446, 0.023521472930908203, 0.023777280807495117, 
0.025438207626342774, 0.023703136444091798, 0.02349507141113281, 0.02356153678894043, 0.023567039489746092, 0.023493824005126954, 0.023429759979248045, 0.023705791473388672, 0.023538976669311523, 0.02402911949157715, 0.023810527801513673, 0.023572799682617187, 0.02347417640686035, 0.02349056053161621, 0.02355948829650879, 0.02335136032104492, 0.023652416229248047, 0.0238209285736084, 0.02361529541015625, 0.02406617546081543, 0.024272159576416017, 0.024326879501342772, 0.023631872177124022, 0.023412704467773438, 0.023238687515258788, 0.023769088745117187, 0.023404544830322265, 0.02347007942199707, 0.02389401626586914, 0.023455743789672853, 0.023642112731933593, 0.02343731117248535, 0.023690591812133788, 0.02358937644958496, 0.023570816040039064, 0.023779104232788086, 0.02357596778869629, 0.024078943252563476, 0.023686336517333983, 0.023548736572265624, 0.024302879333496095, 0.0236430721282959, 0.023540767669677734, 0.023364416122436525, 0.023197696685791015, 0.023150047302246093, 0.02304368019104004, 0.023753664016723634, 0.023439359664916993, 0.02346931266784668, 0.023785280227661132, 0.02355295944213867, 0.023442880630493164, 0.023685440063476563, 0.024180992126464844, 0.02349875259399414, 0.02360425567626953, 0.023598047256469728, 0.023395999908447266, 0.02345404815673828, 0.023414783477783203, 0.023502975463867187, 0.023393728256225585, 0.02328767967224121, 0.02324127960205078, 0.02355606460571289, 0.023672447204589844, 0.023694847106933595, 0.023565216064453123, 0.023793664932250977, 0.02368694305419922, 0.023747840881347657, 0.02361356735229492, 0.023458656311035157, 0.023431167602539063, 0.023473567962646484, 0.02335804748535156, 0.023377311706542968, 0.023306848526000977, 0.023453567504882814, 0.023160959243774416, 0.023787071228027343, 0.023583168029785155, 0.023513088226318358, 0.023525375366210938, 0.023814079284667968, 0.02359225654602051, 0.023369951248168944, 0.023286304473876952, 0.023134208679199218, 0.02326323127746582, 0.023321983337402343, 0.023325311660766603, 0.02327347183227539, 0.02329395294189453, 0.023178752899169923, 0.02317568016052246, 0.023928831100463867, 0.02393907165527344, 0.023721055984497072, 0.023638015747070314, 0.02356425666809082, 0.023342016220092774, 0.025831424713134765, 0.023721887588500978, 0.0236046085357666, 0.023611839294433595, 0.023316768646240233, 0.023382015228271484, 0.023488512039184572, 0.02386124801635742, 0.023400447845458985, 0.023513088226318358, 0.023562175750732422, 0.02355558395385742, 0.023674720764160155, 0.023710399627685546, 0.02361142349243164, 0.023628992080688478, 0.023724863052368164, 0.023851007461547852, 0.02351513671875, 0.023566335678100587, 0.023569728851318358, 0.023458175659179688, 0.023562496185302734, 0.02359916877746582, 0.023357440948486328, 0.023615039825439454, 0.02359856033325195, 0.023700447082519532, 0.023582719802856447, 0.02348646354675293, 0.02349260711669922, 0.023721120834350587, 0.023495616912841796, 0.02343107223510742, 0.023459840774536132, 0.023379968643188476, 0.02353936004638672, 0.023435615539550782, 0.023619583129882812, 0.023319679260253905, 0.023900159835815428, 0.02398044776916504, 0.023576320648193358, 0.02442313575744629, 0.02351628875732422, 0.023556991577148436, 0.023638015747070314, 0.023642112731933593, 0.02405116844177246, 0.023553823471069334, 0.02335001564025879, 0.023465120315551757, 0.023391103744506835, 0.02346544075012207, 0.023545503616333008, 0.023515775680541993, 0.023990495681762695, 0.02370150375366211, 0.02369254493713379, 0.023599872589111327, 0.023695232391357422, 
0.02352681541442871, 0.023613216400146485, 0.02434454345703125, 0.023666336059570314, 0.023396703720092775, 0.023341056823730468, 0.023412736892700195, 0.023330816268920897, 0.023379968643188476, 0.02332467269897461, 0.023430624008178712, 0.023685663223266602, 0.023452991485595702, 0.023552032470703126, 0.02374518394470215, 0.023538848876953126, 0.023513023376464843, 0.023753440856933594, 0.023427263259887695, 0.023392255783081056, 0.023533344268798828, 0.02328335952758789, 0.02318547248840332, 0.023097856521606445, 0.023435327529907228, 0.02374239921569824, 0.023386112213134767, 0.02349260711669922, 0.02345952033996582, 0.023737855911254883, 0.023669567108154297, 0.023530879974365235, 0.02348451232910156, 0.023444000244140624, 0.02351923179626465, 0.02361100769042969, 0.023677312850952148, 0.023586816787719726, 0.023395519256591796, 0.023919424057006835, 0.023500320434570312, 0.023755231857299806, 0.023317792892456054, 0.02369340705871582, 0.025008031845092774, 0.023364320755004882, 0.023582719802856447, 0.02351103973388672, 0.023239967346191406, 0.023140703201293945, 0.023365535736083985, 0.024134111404418946, 0.023357440948486328, 0.023257087707519532, 0.023230464935302734, 0.023150592803955077, 0.023173120498657225, 0.023085216522216796, 0.023066463470458983, 0.022986751556396484, 0.02304204750061035, 0.023078399658203123, 0.02300979232788086, 0.02308095932006836, 0.023035903930664063, 0.024086143493652342, 0.02361587142944336, 0.02334867286682129, 0.023151168823242186, 0.02297225570678711, 0.022929567337036133, 0.022986560821533202, 0.02316716766357422, 0.023166976928710937, 0.022994047164916993, 0.022838144302368163, 0.023007232666015624, 0.023027711868286133, 0.02325299263000488, 0.024681631088256835, 0.022922079086303712, 0.02310553550720215, 0.02289651107788086, 0.02310544013977051, 0.02300499153137207, 0.02368889617919922, 0.023000799179077148, 0.02292799949645996, 0.02284172821044922, 0.022769472122192384, 0.022839679718017578, 0.02281667137145996, 0.02285763168334961, 0.02290483283996582, 0.02292857551574707, 0.02302239990234375, 0.02296118354797363, 0.022874143600463866, 0.022854591369628908, 0.022844831466674806, 0.02282966423034668, 0.022916479110717772, 0.022880064010620118, 0.02298476791381836, 0.023325439453125, 0.023514528274536133, 0.023757024765014647, 0.02361382484436035, 0.02384022331237793, 0.023582752227783204, 0.023685632705688478, 0.023541759490966797, 0.02352742385864258, 0.023564287185668945, 0.025159616470336914, 0.024561376571655275, 0.02389027214050293, 0.02356617546081543, 0.02382035255432129, 0.02366454315185547, 0.023980224609375, 0.0235284481048584, 0.02371686363220215, 0.023527584075927734, 0.023736127853393553, 0.02367695999145508, 0.023696895599365234, 0.023783071517944336, 0.024363231658935548, 0.023821823120117186, 0.02373023986816406, 0.023821023941040038, 0.02371753692626953, 0.02376406478881836, 0.023622207641601563, 0.02389139175415039, 0.023638111114501953, 0.023595903396606447, 0.02348646354675293, 0.023637088775634765, 0.023548416137695313, 0.023691680908203124, 0.02383635139465332, 0.023651775360107423, 0.02378432083129883, 0.023602848052978517, 0.023853471755981445, 0.02371945571899414, 0.02358844757080078, 0.023946048736572266, 0.023836288452148437, 0.02375433540344238, 0.02369820785522461, 0.02404159927368164, 0.023909631729125976, 0.023734912872314454, 0.023791616439819335, 0.02362739181518555, 0.023517568588256835, 0.023578624725341796, 0.023517183303833008, 0.023740415573120118, 0.023608608245849608, 0.02367148780822754, 
0.023701536178588868, 0.023754751205444336, 0.023582719802856447, 0.02360099220275879, 0.023549823760986327, 0.023707935333251953, 0.023563840866088866, 0.02372243118286133, 0.023609344482421874, 0.02352921676635742, 0.023581247329711914, 0.023511903762817383, 0.023356256484985353, 0.023278656005859374, 0.02328428840637207, 0.0238022403717041, 0.02360438346862793, 0.02350166320800781, 0.023476224899291992, 0.023459840774536132, 0.023385087966918947, 0.023770111083984375, 0.02351420783996582, 0.023833471298217772, 0.023539743423461913, 0.023586240768432618, 0.023394271850585936, 0.02433932876586914, 0.02374358367919922, 0.02349910354614258, 0.023532096862792968, 0.023459840774536132, 0.02337295913696289, 0.023837535858154298, 0.02351513671875, 0.023503936767578126, 0.023350208282470704, 0.023502559661865235, 0.024280416488647462, 0.024840192794799806, 0.024897823333740233, 0.023560863494873047, 0.02346700859069824, 0.023286367416381838, 0.023815967559814452, 0.023839328765869142, 0.023596832275390625, 0.02350105667114258, 0.023760255813598634, 0.02357062339782715, 0.023536064147949218, 0.023443456649780273, 0.023582719802856447, 0.023640064239501952, 0.023649503707885742, 0.02366761589050293, 0.02366041564941406, 0.02368297576904297, 0.023451648712158202, 0.023681119918823244, 0.024008319854736327, 0.02358310317993164, 0.023379648208618164, 0.023437631607055663, 0.023465375900268554, 0.023538272857666017, 0.023570432662963867, 0.023584800720214842, 0.02341036796569824, 0.023509151458740236, 0.02374790382385254, 0.023690048217773436, 0.02360051155090332, 0.0235784969329834, 0.02354979133605957, 0.023499616622924803, 0.023737951278686522, 0.023480800628662108, 0.02360121536254883, 0.02350796890258789, 0.02357734489440918, 0.023474367141723632, 0.023621631622314454, 0.023601152420043944, 0.023551616668701172, 0.02374835205078125, 0.023620223999023436, 0.02377654457092285, 0.023673599243164062, 0.023571807861328124, 0.0242587833404541, 0.02387353515625, 0.02352742385864258, 0.02353984069824219, 0.02375433540344238, 0.02374684715270996, 0.02353971290588379, 0.023556095123291015, 0.023582176208496095, 0.023566047668457032, 0.02340108871459961, 0.02341231918334961, 0.02359328079223633, 0.023549280166625976, 0.023749696731567384, 0.02352921676635742, 0.023634048461914064, 0.023614463806152345, 0.023577600479125976, 0.023496511459350587, 0.023660127639770507, 0.023634271621704103, 0.023431615829467775, 0.023521087646484376, 0.02326937675476074, 0.023581823348999022, 0.02360204887390137, 0.023846912384033202, 0.023967744827270508, 0.023348608016967774, 0.02336319923400879, 0.023318656921386717, 0.022993183135986327, 0.022948448181152343, 0.023076864242553712, 0.023250944137573244, 0.023510208129882814, 0.023992671966552734, 0.023560672760009765, 0.023645503997802735, 0.023485088348388673, 0.023425056457519532, 0.02351820755004883, 0.02350979232788086, 0.02392460823059082, 0.024791391372680664, 0.02327756881713867, 0.02331222343444824, 0.023272991180419922, 0.023020160675048827, 0.022982656478881838, 0.022880159378051757, 0.022990943908691407, 0.022978143692016603, 0.023241119384765627, 0.02336128044128418, 0.023355648040771483, 0.023216127395629883, 0.023003135681152344, 0.022994623184204102, 0.022875808715820314, 0.02285430335998535, 0.022939647674560547]",tokens/s,42.54038422078784,, 
bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1068, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 634, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 230, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.320512,14274.199552,0.0,13878.951936,13865.632768,s,1,7.65110205078125,7.65110205078125,0.0,7.65110205078125,7.65110205078125,7.65110205078125,7.65110205078125,[7.65110205078125],,kWh,1.2706350270832445e-05,1.3818572771536117e-06,5.095281853995104e-06,1.9183489401981163e-05,,MB,1148.06784,14697.824256,0.0,14289.993728,14237.628416,s,10,1.8708953247070312,0.18708953247070312,0.005850038407180689,0.1897191162109375,0.1903833023071289,0.1907439353942871,0.19103244186401366,"[0.18225616455078125, 0.18965721130371094, 0.19030316162109376, 0.1911045684814453, 0.1879140167236328, 0.1710326385498047, 0.18994551086425782, 0.18882829284667968, 0.19007273864746094, 0.18978102111816406]",tokens/s,1368.328824276087,kWh,5.581673963128834e-06,6.1531401767901e-07,3.7278593764151706e-06,9.924847357223015e-06,tokens/kWh,25793847.581312235,MB,1165.975552,14865.596416,0.0,14457.765888,14415.235584,s,10,38.91846997070313,3.8918469970703127,0.00808245757850383,3.8932133789062497,3.9007560302734374,3.9016928344726565,3.9024422778320313,"[3.875691162109375, 3.883259033203125, 3.88573486328125, 3.88935595703125, 3.8903212890625, 3.89610546875, 3.9005478515625, 3.8968857421875, 3.89793896484375, 3.902629638671875]",tokens/s,16.187686732655436,kWh,0.0001137893467947874,1.2551477776801096e-05,7.54338983973839e-05,0.00020177472296897236,tokens/kWh,312229.3965914043,,s,630,38.91394614791871,0.06176816848875984,0.0006070965176817643,0.06171547126770019,0.062132289123535155,0.06224803886413574,0.0650584912109375,"[0.0646937255859375, 0.0624031982421875, 0.06150543975830078, 0.06130284881591797, 0.06101833724975586, 0.06089932632446289, 0.06093407821655274, 0.060762016296386716, 0.060932254791259764, 0.06090502548217774, 0.06112303924560547, 0.061316287994384766, 0.061047584533691406, 0.061317119598388675, 0.06105452728271484, 0.0614504623413086, 0.061891937255859376, 0.06169484710693359, 0.061427295684814455, 0.06129296112060547, 0.06113846588134766, 0.061043167114257814, 0.06099148941040039, 0.061329086303710936, 0.061448577880859376, 0.061362110137939456, 0.061216766357421876, 0.06121881484985352, 0.061271617889404294, 0.06127228927612305, 0.061626625061035153, 0.061781566619873045, 0.061843902587890624, 0.06176124954223633, 0.061617919921875, 0.06153993606567383, 0.061585662841796875, 0.06148982238769531, 0.061190174102783206, 0.06135804748535156, 0.06122659301757812, 
0.06129296112060547, 0.06137855911254883, 0.06160793685913086, 0.06156425476074219, 0.06146524810791015, 0.06177177429199219, 0.06187011337280274, 0.06203577423095703, 0.06179759979248047, 0.06181369781494141, 0.06175324630737305, 0.061698143005371096, 0.06172652816772461, 0.061793472290039064, 0.06177862548828125, 0.0614956169128418, 0.06142483139038086, 0.06145516967773437, 0.06183116912841797, 0.061687808990478515, 0.06188851165771484, 0.06187007904052735, 0.06510774230957031, 0.06277423858642578, 0.061744895935058594, 0.061610240936279294, 0.06134991836547852, 0.061220832824707035, 0.061061023712158206, 0.06116470336914062, 0.061149921417236325, 0.0611596794128418, 0.06125337600708008, 0.06122723388671875, 0.06121267318725586, 0.061292545318603515, 0.061720577239990235, 0.061767711639404296, 0.06203744125366211, 0.061886398315429685, 0.061567264556884764, 0.06138857650756836, 0.06137459182739258, 0.06119260787963867, 0.06127206420898437, 0.061590816497802736, 0.06147760009765625, 0.061501502990722656, 0.06129452896118164, 0.06161407852172852, 0.06150348663330078, 0.061558719635009765, 0.06174319839477539, 0.06176559829711914, 0.06188982391357422, 0.061950687408447266, 0.06192947387695313, 0.061712383270263675, 0.06147404861450195, 0.06155945587158203, 0.06144979095458984, 0.06134592056274414, 0.06136259078979492, 0.06136627197265625, 0.06128643035888672, 0.061505569458007815, 0.06163654327392578, 0.06184550476074219, 0.06185292816162109, 0.06183603286743164, 0.06222784042358399, 0.06204687881469727, 0.061889793395996096, 0.06170492935180664, 0.061753345489501954, 0.06161203384399414, 0.061677566528320314, 0.061529407501220705, 0.0614714241027832, 0.06137638473510742, 0.06146879959106445, 0.0614257926940918, 0.06161743927001953, 0.06160240173339844, 0.06188851165771484, 0.06509616088867187, 0.06276697540283203, 0.06175139236450195, 0.06156288146972656, 0.0611759033203125, 0.06106240081787109, 0.06111708831787109, 0.06107340621948242, 0.061431678771972656, 0.06127833557128906, 0.06123110580444336, 0.06137651062011719, 0.06121638488769531, 0.06130080032348633, 0.06149766540527344, 0.06189056015014648, 0.06209331130981445, 0.062064640045166014, 0.06182092666625977, 0.06153955078125, 0.06138044738769531, 0.061295520782470705, 0.06125116729736328, 0.06123772811889648, 0.06133689498901367, 0.061663936614990235, 0.06138252639770508, 0.06142092895507813, 0.061534942626953124, 0.0615813102722168, 0.061859489440917965, 0.06203631973266602, 0.06212796783447266, 0.062120094299316406, 0.06199705505371094, 0.061730846405029294, 0.06166739273071289, 0.06151366424560547, 0.061378528594970706, 0.061402816772460934, 0.0615140495300293, 0.061306880950927733, 0.06150348663330078, 0.06187129592895508, 0.061487934112548825, 0.0616673583984375, 0.06174512100219726, 0.061906623840332034, 0.06183353424072266, 0.06207897567749023, 0.062037696838378904, 0.06203334426879883, 0.06194793701171875, 0.06167023849487305, 0.06170009613037109, 0.061437950134277344, 0.061423614501953126, 0.06133145523071289, 0.06156256103515625, 0.061663551330566405, 0.06152761459350586, 0.06175993728637695, 0.06204787063598633, 0.06505612945556641, 0.06332454299926758, 0.06217452621459961, 0.06167644882202149, 0.061261089324951175, 0.061147361755371096, 0.0611835823059082, 0.06109686279296875, 0.0611693115234375, 0.061326847076416016, 0.06121353530883789, 0.06126300811767578, 0.06133615875244141, 0.0612059211730957, 0.06143475341796875, 0.06171852874755859, 0.06199251174926758, 0.0618639030456543, 0.06191558456420899, 
0.06158950424194336, 0.06133935928344727, 0.061484512329101564, 0.061367103576660156, 0.06151919937133789, 0.0614304313659668, 0.061609375, 0.06133411026000977, 0.06133145523071289, 0.061483009338378906, 0.06155059051513672, 0.06176496124267578, 0.06196284866333008, 0.062182945251464845, 0.06204678344726562, 0.06185776138305664, 0.061685760498046874, 0.061878273010253906, 0.06148198318481445, 0.061484031677246094, 0.061603839874267576, 0.061506847381591796, 0.06176227188110352, 0.06177791976928711, 0.06157894515991211, 0.061532478332519534, 0.06181820678710938, 0.0621308479309082, 0.061955329895019534, 0.06190361785888672, 0.061900798797607424, 0.06195337677001953, 0.06208784103393555, 0.06187417602539062, 0.061624126434326174, 0.06173510360717773, 0.0617347526550293, 0.06185385513305664, 0.06156902313232422, 0.06182297515869141, 0.06172585678100586, 0.06195644760131836, 0.061896766662597656, 0.061868606567382814, 0.0655218276977539, 0.06336307144165039, 0.06227478408813476, 0.06163286590576172, 0.0613092155456543, 0.06114284896850586, 0.06121088027954102, 0.0612391357421875, 0.06120265579223633, 0.06114300918579101, 0.061206558227539065, 0.06127926254272461, 0.06138159942626953, 0.0613438720703125, 0.06142310333251953, 0.061721057891845704, 0.061926559448242186, 0.06205724716186523, 0.06176969528198242, 0.06167728042602539, 0.06167372894287109, 0.061560863494873046, 0.06152550506591797, 0.0614155502319336, 0.061478816986083984, 0.06144825744628906, 0.061278656005859376, 0.06166291046142578, 0.06173519897460938, 0.06177328109741211, 0.06163711929321289, 0.061716510772705076, 0.06216025543212891, 0.06213286590576172, 0.061857505798339846, 0.06177536010742188, 0.06148988723754883, 0.061619297027587894, 0.06175638580322266, 0.061726753234863284, 0.06156259155273437, 0.061407489776611326, 0.06160521697998047, 0.061639358520507816, 0.06166934585571289, 0.06188652801513672, 0.06169724655151367, 0.061905311584472655, 0.062093631744384765, 0.062029407501220706, 0.06221865463256836, 0.0619683837890625, 0.061712383270263675, 0.061652992248535154, 0.061693950653076174, 0.061654209136962894, 0.06165151977539062, 0.06158975982666016, 0.0615813102722168, 0.06171443176269531, 0.06167552185058594, 0.06195814514160156, 0.062133758544921876, 0.06830694580078125, 0.0645693130493164, 0.06316835021972657, 0.06209145736694336, 0.061423583984375, 0.06153548812866211, 0.061290271759033205, 0.06118048095703125, 0.06107791900634765, 0.06109584045410156, 0.06122063827514648, 0.0612825927734375, 0.06115740966796875, 0.06109596633911133, 0.06120640182495117, 0.06130905532836914, 0.06159561538696289, 0.06180422210693359, 0.06207110214233398, 0.061886463165283206, 0.061992961883544924, 0.061652992248535154, 0.061521919250488284, 0.061392383575439455, 0.06133145523071289, 0.06146297454833984, 0.061289886474609374, 0.06158748626708985, 0.06156895828247071, 0.06157587051391602, 0.06155059051513672, 0.06150457763671875, 0.061844287872314455, 0.06183747100830078, 0.06189456176757813, 0.06199699020385742, 0.06207104110717773, 0.06223795318603516, 0.06207302474975586, 0.06178246307373047, 0.06176678466796875, 0.061535102844238285, 0.06157721710205078, 0.06175859069824219, 0.06150377655029297, 0.06151023864746094, 0.06165462493896484, 0.06170399856567383, 0.06185539245605469, 0.0618935661315918, 0.06198028945922852, 0.06188278579711914, 0.06190217590332031, 0.062021728515625, 0.06196688079833984, 0.06186393737792969, 0.061790271759033205, 0.06172256088256836, 0.06181798553466797, 0.06159193420410156, 0.06190950393676758, 
0.06188556671142578, 0.061960639953613283, 0.06541251373291015, 0.0634823989868164, 0.062201343536376956, 0.06179894256591797, 0.061388671875, 0.061294689178466796, 0.061347007751464844, 0.061366401672363284, 0.06124771118164062, 0.06134835052490235, 0.06133103942871094, 0.06141584014892578, 0.061389984130859374, 0.06152019119262695, 0.06146310424804687, 0.06182601547241211, 0.062241790771484375, 0.062211872100830075, 0.06215497589111328, 0.062015487670898435, 0.06179779052734375, 0.061751903533935545, 0.061620223999023435, 0.06160793685913086, 0.06150688171386719, 0.061534912109375, 0.06154367828369141, 0.06159846496582031, 0.06186598587036133, 0.06181071853637695, 0.06178531265258789, 0.06195657730102539, 0.06191132736206055, 0.062061790466308595, 0.062350112915039065, 0.062367198944091796, 0.06218310546875, 0.062139232635498046, 0.061943809509277345, 0.061876224517822265, 0.061830623626708985, 0.06166329574584961, 0.06167599868774414, 0.061599552154541014, 0.06172668838500977, 0.06194095993041992, 0.061848575592041016, 0.06193766403198242, 0.061992961883544924, 0.062297470092773435, 0.06215948867797851, 0.062238719940185545, 0.06226943969726562, 0.06216195297241211, 0.06194659042358398, 0.06189491271972656, 0.06185968017578125, 0.06187760162353516, 0.06176851272583008, 0.061982719421386716, 0.0619315185546875, 0.06185929489135742, 0.06189315032958984, 0.06505945587158203, 0.06316239929199219, 0.062101566314697265, 0.06172662353515625, 0.06140111923217773, 0.06124550247192383, 0.06145180892944336, 0.06125196838378906, 0.06123724746704102, 0.0612782096862793, 0.06137187194824219, 0.06158185577392578, 0.061417312622070314, 0.06146047973632812, 0.06167932891845703, 0.06182489776611328, 0.06208774566650391, 0.06213017654418945, 0.06203340911865234, 0.06185420989990234, 0.061788257598876954, 0.061671329498291017, 0.06145014572143555, 0.061464672088623044, 0.06147020721435547, 0.06148966217041016, 0.061618175506591794, 0.06159564971923828, 0.061661182403564455, 0.06162428665161133, 0.061767711639404296, 0.062013214111328124, 0.06209312057495117, 0.06228329467773437, 0.062233470916748045, 0.062121440887451175, 0.06218163299560547, 0.06223046493530274, 0.06191142272949219, 0.06166425704956055, 0.06168425750732422, 0.061585887908935544, 0.061599712371826175, 0.06177507019042969, 0.061788959503173826, 0.06189814376831055, 0.0620645751953125, 0.06196207809448242, 0.06231532669067383, 0.06198067092895508, 0.06194697570800781, 0.06189503860473633, 0.06200579071044922, 0.06195395278930664, 0.06176729583740234, 0.061645278930664064, 0.061656929016113284, 0.06188460922241211, 0.061954017639160155, 0.061859264373779296, 0.06188908767700195, 0.06175539016723633, 0.06185692977905274, 0.06502175903320312, 0.06303590393066406, 0.06203097534179688, 0.06165532684326172, 0.061409950256347656, 0.06140927886962891, 0.06127519989013672, 0.061294689178466796, 0.06141219329833984, 0.06123724746704102, 0.06121596908569336, 0.06140393447875977, 0.061568832397460936, 0.061367809295654295, 0.06149571228027344, 0.061773727416992184, 0.06188275146484375, 0.061960193634033205, 0.0621033935546875, 0.06176729583740234, 0.061739585876464845, 0.061513023376464845, 0.06144483184814453, 0.061633758544921875, 0.06157385635375977, 0.06157929611206055, 0.06166278457641602, 0.06167907333374024, 0.06169708633422852, 0.06174835205078125, 0.06191347122192383, 0.06192985534667969, 0.062205951690673826, 0.06210153579711914, 0.06236550521850586, 0.06216447830200195, 0.06217504119873047, 0.06208393478393555, 0.06184489440917969, 
0.0616671028137207, 0.06169068908691406, 0.06190460968017578, 0.06176943969726562, 0.06164332962036133, 0.06189404678344727, 0.06177580642700195, 0.06177654266357422, 0.061833248138427735, 0.061976577758789064, 0.062210079193115234, 0.06241068649291992, 0.06221823883056641, 0.06214246368408203, 0.062064414978027345, 0.06204191970825195, 0.062126270294189455, 0.06188054275512695, 0.06177526473999023, 0.06185219192504883, 0.06188243103027344, 0.061900798797607424, 0.061753345489501954, 0.06190697479248047, 0.06509228515625, 0.06318505477905273, 0.06206038284301758, 0.061625919342041015, 0.06138719940185547, 0.06134783935546875, 0.061357471466064455, 0.06155939102172851, 0.06150559997558594, 0.06150454330444336, 0.06197481536865234, 0.061659870147705076, 0.06163600158691406, 0.06144607925415039, 0.06168812942504883, 0.062085407257080075, 0.06222230529785156, 0.06223257446289063, 0.061956127166748046, 0.06175900650024414, 0.06163689422607422, 0.06142316818237305, 0.061534046173095706, 0.06144073486328125, 0.06175727844238281, 0.061724864959716796, 0.061876224517822265, 0.061642784118652344, 0.061873950958251954, 0.06172691345214844, 0.06201958465576172, 0.06207692718505859, 0.06225920104980469, 0.062132225036621094, 0.06217692947387695, 0.06212361526489258, 0.06193436813354492, 0.06182601547241211, 0.06174591827392578, 0.0618658561706543, 0.06192371368408203, 0.06180252838134766, 0.06187731170654297, 0.06172288131713867, 0.06205305480957031, 0.06197244644165039, 0.062004737854003907, 0.06209926223754883, 0.061902942657470705, 0.062024288177490235, 0.06212326431274414, 0.06221481704711914, 0.06225315093994141, 0.06196559906005859, 0.06208585739135742, 0.06211756896972656, 0.06221196746826172, 0.06199065780639648, 0.062065376281738284, 0.061760608673095706, 0.061784927368164065, 0.06200252914428711, 0.062021728515625]",tokens/s,16.1895685830797,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,836.358144,1903.099904,0.0,1507.852288,1469.840384,s,1,7.45740087890625,7.45740087890625,0.0,7.45740087890625,7.45740087890625,7.45740087890625,7.45740087890625,[7.45740087890625],,kWh,9.550118462531524e-06,1.0461776287751266e-06,3.1608358620066612e-06,1.3757131953313311e-05,,MB,1197.367296,1947.140096,0.0,1539.309568,1426.272256,s,10,0.24179651069641114,0.024179651069641112,0.00046095716185577874,0.024065407752990722,0.024881748008728027,0.024938137912750243,0.024983249835968016,"[0.024869216918945312, 0.024114591598510742, 0.024016223907470702, 0.023701440811157225, 0.02364329528808594, 0.02429817581176758, 0.024511583328247072, 0.023997503280639647, 0.023649951934814454, 
0.02499452781677246]",tokens/s,10587.414982237777,kWh,7.189442209051912e-07,7.92598589746777e-08,4.742566738768366e-07,1.2724607537567055e-06,tokens/kWh,201184986.8408179,MB,1225.703424,1955.528704,0.0,1547.698176,1426.274816,s,10,13.2709794921875,1.32709794921875,0.012723619353887257,1.3269437866210936,1.3424001220703126,1.3441677124023437,1.3455817846679687,"[1.345935302734375, 1.34200732421875, 1.32387158203125, 1.3224248046875, 1.32552392578125, 1.3310013427734375, 1.316973876953125, 1.2987156982421875, 1.3283636474609375, 1.3361619873046875]",tokens/s,47.472004637704025,kWh,3.917520655284144e-05,4.3206234925896856e-06,1.9435648142923713e-05,6.293147818835484e-05,tokens/kWh,1001088.8320697009,,s,630,13.268557798385622,0.021061202854580345,0.0005340236860464876,0.021051663398742676,0.02142468090057373,0.021588804149627685,0.022359155750274674,"[0.022026239395141603, 0.021432319641113282, 0.021514240264892577, 0.021436416625976562, 0.021334016799926758, 0.021228927612304688, 0.021269119262695313, 0.021651456832885742, 0.021456544876098632, 0.02138083267211914, 0.02136284828186035, 0.021461471557617188, 0.021370304107666015, 0.021455167770385742, 0.02122368049621582, 0.02122137641906738, 0.021720447540283204, 0.02149849510192871, 0.0214835205078125, 0.021383167266845703, 0.021485567092895508, 0.021538528442382812, 0.021433664321899415, 0.02170364761352539, 0.02168832015991211, 0.021344095230102538, 0.02143657684326172, 0.021575199127197266, 0.021523136138916016, 0.02148464012145996, 0.021414432525634765, 0.02134809684753418, 0.021604768753051756, 0.021387264251708983, 0.02137455940246582, 0.02182796859741211, 0.021495840072631837, 0.021428224563598632, 0.02123980712890625, 0.02110054397583008, 0.02116307258605957, 0.021313983917236327, 0.02124236869812012, 0.021227519989013673, 0.021116512298583984, 0.021259967803955077, 0.021236448287963866, 0.021784576416015625, 0.02125929641723633, 0.02153536033630371, 0.021139808654785156, 0.021413888931274414, 0.021265663146972657, 0.021144096374511718, 0.0210229434967041, 0.02096291160583496, 0.020980127334594728, 0.020965375900268556, 0.020914176940917968, 0.021102239608764648, 0.021094751358032227, 0.021104639053344726, 0.021090303421020508, 0.021079999923706055, 0.02112339210510254, 0.020805376052856445, 0.02085433578491211, 0.021340608596801758, 0.021259263992309572, 0.02145587158203125, 0.021202720642089844, 0.023353567123413087, 0.02206697654724121, 0.021454944610595703, 0.021168256759643556, 0.021327871322631836, 0.0211079044342041, 0.02113817596435547, 0.021192768096923827, 0.020912128448486327, 0.020938688278198243, 0.02079484748840332, 0.02061190414428711, 0.021284223556518555, 0.021293472290039063, 0.021104639053344726, 0.02103059196472168, 0.021040800094604493, 0.020920991897583008, 0.02093680000305176, 0.020950464248657225, 0.021010303497314455, 0.021043807983398437, 0.021337984085083007, 0.02102694320678711, 0.02079280090332031, 0.021662080764770508, 0.025331232070922853, 0.026960512161254883, 0.02122956848144531, 0.02137855911254883, 0.021315488815307617, 0.021262943267822267, 0.0210512638092041, 0.020803712844848634, 0.021209087371826172, 0.021309440612792968, 0.02103232002258301, 0.020996736526489257, 0.020922367095947265, 0.02088755226135254, 0.02093699264526367, 0.02055548858642578, 0.020700319290161133, 0.021110784530639647, 0.02058121681213379, 0.02105548858642578, 0.021198848724365234, 0.021064735412597655, 0.020975967407226563, 0.02099622344970703, 0.022031871795654297, 0.02110700798034668, 0.021010847091674806, 
0.02109644889831543, 0.021034528732299804, 0.02124835205078125, 0.020961280822753905, 0.020905376434326172, 0.021148256301879883, 0.02103891181945801, 0.02094099235534668, 0.021523584365844728, 0.021480415344238283, 0.021116256713867188, 0.020944799423217773, 0.02095580863952637, 0.020944896697998046, 0.02151628875732422, 0.02109443283081055, 0.021051359176635743, 0.021235712051391603, 0.021016576766967773, 0.02101068878173828, 0.02089049530029297, 0.02106662368774414, 0.021137344360351563, 0.020957279205322265, 0.021012447357177735, 0.021362176895141603, 0.021256704330444336, 0.02097727966308594, 0.020906368255615235, 0.020951040267944337, 0.020776256561279297, 0.020839103698730467, 0.020950559616088868, 0.02085321617126465, 0.021179840087890624, 0.021162559509277343, 0.02105465507507324, 0.02100716781616211, 0.020960704803466797, 0.02099827194213867, 0.02081167984008789, 0.020846303939819337, 0.020785087585449218, 0.02076144027709961, 0.021436288833618165, 0.02103321647644043, 0.020999807357788086, 0.021063968658447264, 0.02097385597229004, 0.020872800827026368, 0.020930688858032228, 0.02088960075378418, 0.021100191116333007, 0.02098601531982422, 0.021020063400268553, 0.020968223571777345, 0.020941951751708984, 0.021212095260620116, 0.021241792678833006, 0.020951040267944337, 0.02072985649108887, 0.020568063735961914, 0.020763904571533202, 0.020595455169677736, 0.02073798370361328, 0.02094179153442383, 0.02105548858642578, 0.02089593505859375, 0.02115155220031738, 0.020891008377075197, 0.0209619197845459, 0.021011775970458984, 0.02120979118347168, 0.020760576248168947, 0.020758367538452147, 0.021228992462158203, 0.021272512435913087, 0.021158687591552733, 0.021071231842041016, 0.02115238380432129, 0.02105548858642578, 0.021157888412475585, 0.021129215240478515, 0.021188608169555666, 0.02121014404296875, 0.021246015548706056, 0.021061824798583983, 0.021017311096191406, 0.021016576766967773, 0.02090943908691406, 0.021265024185180663, 0.020725759506225586, 0.02184828758239746, 0.021962528228759767, 0.02106883239746094, 0.021023296356201173, 0.02131395149230957, 0.021251583099365236, 0.020910591125488282, 0.020842144012451172, 0.020782943725585937, 0.020466272354125976, 0.020527008056640626, 0.020671743392944336, 0.020656896591186524, 0.020639360427856444, 0.020590976715087892, 0.02063564872741699, 0.02101862335205078, 0.02066227149963379, 0.020756479263305663, 0.020744192123413087, 0.02086499214172363, 0.0209715518951416, 0.021200895309448242, 0.021444223403930665, 0.020832704544067382, 0.021155296325683595, 0.021078496932983398, 0.021028480529785155, 0.020983776092529296, 0.020869535446166994, 0.021110784530639647, 0.0209815673828125, 0.020721855163574218, 0.02080486488342285, 0.020568704605102538, 0.020717727661132813, 0.020752992630004883, 0.020731103897094726, 0.020685600280761718, 0.020612415313720704, 0.02085139274597168, 0.020483776092529295, 0.020627775192260743, 0.020531200408935548, 0.021258207321166993, 0.021257280349731445, 0.020579296112060545, 0.020551679611206054, 0.020962848663330078, 0.02097609519958496, 0.020636991500854494, 0.02047648048400879, 0.02060710334777832, 0.020700384140014648, 0.02068355178833008, 0.020574207305908202, 0.020587648391723633, 0.020505472183227538, 0.02064588737487793, 0.020739776611328125, 0.02051513671875, 0.02065433692932129, 0.020795072555541992, 0.0211661434173584, 0.02088140869140625, 0.020647136688232422, 0.02085148811340332, 0.021153791427612305, 0.021284160614013673, 0.021045951843261718, 0.021292095184326173, 0.02120182418823242, 
0.021118175506591796, 0.02118943977355957, 0.02108415985107422, 0.021137407302856445, 0.020985855102539062, 0.021118080139160156, 0.021222015380859376, 0.021134719848632813, 0.02297702407836914, 0.023402143478393554, 0.02119548797607422, 0.021141504287719725, 0.02172438430786133, 0.021424768447875976, 0.021090463638305666, 0.02127872085571289, 0.021196512222290038, 0.021288448333740235, 0.021366880416870116, 0.02143712043762207, 0.021191743850708007, 0.02135545539855957, 0.02110873603820801, 0.02112054443359375, 0.021226976394653322, 0.021029888153076173, 0.02122137641906738, 0.021130720138549806, 0.02106422424316406, 0.020915296554565428, 0.0209366397857666, 0.021226720809936525, 0.021135007858276367, 0.021095903396606445, 0.021015167236328125, 0.021186016082763673, 0.02110518455505371, 0.021086208343505858, 0.020981279373168946, 0.021204832077026368, 0.020979711532592774, 0.021181055068969726, 0.02099567985534668, 0.021156543731689452, 0.021100255966186525, 0.020989952087402345, 0.021338111877441408, 0.021346303939819337, 0.021014303207397462, 0.020955360412597657, 0.020813695907592772, 0.020944063186645507, 0.020884288787841796, 0.020983936309814454, 0.020918272018432618, 0.021164031982421876, 0.02100169563293457, 0.021025312423706054, 0.020973567962646485, 0.022478496551513672, 0.0210883846282959, 0.02113148880004883, 0.021165887832641603, 0.02077033615112305, 0.021172895431518554, 0.021301248550415038, 0.021159936904907226, 0.02159993553161621, 0.021086208343505858, 0.021263839721679688, 0.02109014320373535, 0.021152767181396484, 0.02088707160949707, 0.02089219284057617, 0.021107776641845703, 0.021039424896240236, 0.02105196762084961, 0.020973472595214843, 0.020862335205078124, 0.02087731170654297, 0.02116387176513672, 0.021119808197021483, 0.021264032363891603, 0.021094816207885742, 0.021003744125366212, 0.021641408920288086, 0.02107145690917969, 0.021463327407836914, 0.021072063446044922, 0.02181353569030762, 0.021118976593017577, 0.02106368064880371, 0.021004287719726563, 0.02129305648803711, 0.021025920867919923, 0.020757375717163087, 0.021223424911499023, 0.021141504287719725, 0.021190656661987304, 0.02123788833618164, 0.020848512649536133, 0.020865119934082032, 0.021062944412231447, 0.02130803108215332, 0.02098726463317871, 0.020863616943359375, 0.020983808517456053, 0.02106368064880371, 0.02106368064880371, 0.021126304626464844, 0.020957504272460938, 0.02071401596069336, 0.021174272537231444, 0.02119411277770996, 0.020829919815063477, 0.020660863876342774, 0.02079977607727051, 0.020815872192382814, 0.02086911964416504, 0.020914176940917968, 0.020997472763061523, 0.020768960952758788, 0.02086960029602051, 0.020570175170898437, 0.02060076713562012, 0.02065203285217285, 0.020559648513793945, 0.020464096069335937, 0.02050534439086914, 0.0202926082611084, 0.020352575302124025, 0.02063961601257324, 0.020547264099121092, 0.020590976715087892, 0.02059913635253906, 0.020692928314208985, 0.020623231887817384, 0.021147775650024413, 0.020610464096069335, 0.02085327911376953, 0.02086742401123047, 0.022033407211303712, 0.021386175155639647, 0.020756479263305663, 0.020999231338500977, 0.020482559204101563, 0.020644512176513672, 0.020692768096923827, 0.02065344047546387, 0.021655263900756835, 0.02179574394226074, 0.02090598487854004, 0.02077516746520996, 0.02072313690185547, 0.02077948760986328, 0.020815328598022462, 0.020680351257324217, 0.020628416061401367, 0.02059267234802246, 0.020736000061035157, 0.020951040267944337, 0.020602880477905275, 0.02046976089477539, 0.02041801643371582, 
0.020910144805908203, 0.020738079071044923, 0.02066486358642578, 0.02071072006225586, 0.02058700752258301, 0.020574304580688478, 0.020512960433959962, 0.020383455276489257, 0.02050444793701172, 0.020537567138671876, 0.020669536590576174, 0.020458080291748046, 0.02045574378967285, 0.020371456146240235, 0.020916223526000977, 0.020980768203735352, 0.02090902328491211, 0.020522655487060545, 0.020424896240234375, 0.020766271591186523, 0.020770591735839845, 0.02085750389099121, 0.020574207305908202, 0.020508575439453124, 0.020788831710815428, 0.020699167251586915, 0.020494976043701173, 0.020407712936401368, 0.020338848114013673, 0.020516895294189454, 0.020468128204345702, 0.02047337532043457, 0.020463327407836913, 0.02056012725830078, 0.02071340751647949, 0.020451904296875, 0.020401567459106446, 0.02039664077758789, 0.020508672714233397, 0.020762624740600585, 0.020488191604614257, 0.020330495834350586, 0.020395296096801758, 0.02060969543457031, 0.0208590087890625, 0.020745344161987304, 0.020795743942260744, 0.020732383728027343, 0.020867071151733398, 0.020549503326416016, 0.02058457565307617, 0.02040575981140137, 0.02074425506591797, 0.020609024047851563, 0.020540672302246092, 0.020695808410644532, 0.020545183181762697, 0.020485599517822264, 0.0205402889251709, 0.020602880477905275, 0.02068252754211426, 0.020551776885986327, 0.020881311416625976, 0.02068876838684082, 0.020467136383056642, 0.02041539192199707, 0.02062745666503906, 0.020537343978881836, 0.020649311065673828, 0.020510463714599608, 0.020435903549194338, 0.020663455963134767, 0.020663103103637694, 0.020562112808227537, 0.02059654426574707, 0.020676607131958007, 0.02102272033691406, 0.02097260856628418, 0.021095359802246094, 0.021016031265258788, 0.021221920013427733, 0.02112512016296387, 0.021133312225341795, 0.021098495483398438, 0.02099190330505371, 0.02112870407104492, 0.021363296508789063, 0.021151296615600584, 0.021913055419921876, 0.020964319229125977, 0.021319679260253906, 0.021409887313842774, 0.02133795166015625, 0.021112863540649413, 0.021317087173461913, 0.021297727584838867, 0.021323776245117186, 0.021121023178100586, 0.021300960540771484, 0.021202720642089844, 0.02109491157531738, 0.02124799919128418, 0.021336063385009766, 0.021155839920043946, 0.021120351791381838, 0.021146272659301756, 0.020997343063354493, 0.02129961585998535, 0.021055871963500977, 0.021151199340820312, 0.021424671173095704, 0.021120607376098634, 0.020953088760375976, 0.021729696273803712, 0.027923967361450194, 0.021534496307373047, 0.02133577537536621, 0.021623296737670897, 0.021307008743286133, 0.02121561622619629, 0.02126006317138672, 0.021159999847412108, 0.02136284828186035, 0.021358591079711914, 0.02147737693786621, 0.021180128097534178, 0.021145376205444336, 0.021199359893798828, 0.021036575317382813, 0.021123552322387697, 0.02127667236328125, 0.021086208343505858, 0.021186176300048827, 0.02095552062988281, 0.021204992294311522, 0.021082111358642578, 0.0218787841796875, 0.021235712051391603, 0.021211135864257814, 0.02111510467529297, 0.021208032608032227, 0.021093183517456055, 0.021173664093017578, 0.021135040283203125, 0.021244831085205078, 0.021062719345092774, 0.020973600387573243, 0.02145987129211426, 0.021168127059936523, 0.021166080474853514, 0.02125801658630371, 0.021291231155395506, 0.02122547149658203, 0.021001983642578125, 0.02092460823059082, 0.021215200424194336, 0.021076095581054687, 0.02138502311706543, 0.02113580894470215, 0.021075775146484375, 0.021067487716674806, 0.021385120391845702, 0.02118684768676758, 0.02105094337463379, 
0.021217727661132814, 0.02169036865234375, 0.020946943283081054, 0.021184511184692383, 0.021247104644775392, 0.021187456130981445, 0.02109644889831543, 0.02107948875427246, 0.021115455627441406, 0.021202943801879884, 0.02109235191345215, 0.02109596824645996, 0.021063840866088868, 0.02117206382751465]",tokens/s,47.48066892971985,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = 
DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.222208,6223.233024,0.0,5827.985408,5712.718848,s,1,7.54969677734375,7.54969677734375,0.0,7.54969677734375,7.54969677734375,7.54969677734375,7.54969677734375,[7.54969677734375],,kWh,1.0976639595829358e-05,1.1910635902911466e-06,4.5283369559945275e-06,1.6696040142115032e-05,,MB,1147.817984,6432.948224,0.0,6025.117696,5988.31104,s,10,0.6711704254150391,0.0671170425415039,0.004576825575853077,0.06750219345092773,0.07148004150390626,0.07151811599731446,0.07154857559204102,"[0.06773433685302735, 0.06620070648193359, 0.05516780853271484, 0.06727005004882812, 0.06571596527099609, 0.06938191986083984, 0.06544518280029298, 0.0712266845703125, 0.0714715805053711, 0.07155619049072266]",tokens/s,3814.2324260145174,kWh,2.2953832184243813e-06,2.5303696379512485e-07,1.532210427156221e-06,4.080630609375728e-06,tokens/kWh,62735401.585188806,MB,1175.916544,6516.834304,0.0,6109.003776,6092.144128,s,10,18.60473205566406,1.860473205566406,0.004147410932867698,1.8609232177734376,1.8643675903320311,1.865044512939453,1.8655860510253905,"[1.8559962158203125, 1.863658935546875, 1.8511259765625, 1.8593104248046874, 1.8601689453125, 1.861677490234375, 1.86344873046875, 1.85940673828125, 1.865721435546875, 1.8642171630859374]",tokens/s,33.862352766762996,kWh,5.426698956574311e-05,5.985475470271603e-06,3.596873624024384e-05,9.622120127625856e-05,tokens/kWh,654741.3580830497,,s,630,18.601391460418704,0.029526018191140795,0.0008989477070408279,0.029379535675048828,0.029734858322143554,0.030054059314727783,0.0352632112121582,"[0.03342947387695312, 0.031324159622192385, 0.03038755226135254, 0.029514400482177735, 0.029248863220214843, 0.029041311264038087, 0.02899760055541992, 0.029062175750732423, 0.02906563186645508, 0.029282272338867186, 0.028957311630249023, 0.029175840377807617, 0.029105119705200196, 0.02931372833251953, 0.029274560928344726, 0.0292105598449707, 0.029188032150268554, 0.029234304428100585, 0.02914121627807617, 0.029112960815429686, 0.029093919754028322, 0.029050880432128907, 0.029243392944335936, 0.029179904937744142, 0.029288127899169923, 0.029497663497924806, 0.029398656845092772, 0.02911270332336426, 0.02915238380432129, 0.02922995185852051, 0.0291246395111084, 0.02938412857055664, 0.029391008377075194, 0.029539775848388673, 0.029840320587158204, 0.029789791107177735, 
0.02963907241821289, 0.02993721580505371, 0.029727039337158204, 0.029534271240234375, 0.029496383666992188, 0.029449216842651366, 0.02949068832397461, 0.02931353569030762, 0.029267967224121092, 0.029200096130371094, 0.029198400497436522, 0.029318975448608398, 0.029385120391845702, 0.029429759979248047, 0.02951308822631836, 0.029909631729125977, 0.02967046356201172, 0.029356992721557618, 0.029327360153198243, 0.029372415542602538, 0.02938470458984375, 0.029359840393066407, 0.02925391960144043, 0.02931056022644043, 0.029348255157470703, 0.029543872833251952, 0.029553216934204103, 0.03524521636962891, 0.0326418571472168, 0.031092735290527345, 0.02991926383972168, 0.02933897590637207, 0.029161983489990235, 0.029162784576416016, 0.029028608322143556, 0.02911408042907715, 0.029074079513549806, 0.029151456832885742, 0.029073408126831055, 0.029164575576782228, 0.02917465591430664, 0.029703807830810548, 0.0294998722076416, 0.029147136688232423, 0.029124607086181642, 0.029057024002075195, 0.02919628715515137, 0.029057024002075195, 0.029212671279907225, 0.02957107162475586, 0.029056224822998047, 0.02916348838806152, 0.029260608673095705, 0.02931439971923828, 0.029235872268676757, 0.029613183975219726, 0.02944691276550293, 0.02945449638366699, 0.02934169578552246, 0.029396223068237304, 0.029545183181762694, 0.02973695945739746, 0.02987343978881836, 0.029816736221313478, 0.02966815948486328, 0.029618015289306642, 0.02950495910644531, 0.029393632888793944, 0.029480960845947264, 0.02972256088256836, 0.029560895919799806, 0.029417472839355467, 0.029470239639282227, 0.029464832305908205, 0.029470432281494142, 0.029470399856567384, 0.029512128829956054, 0.029513952255249023, 0.0293656005859375, 0.02944220733642578, 0.029643423080444337, 0.029460479736328125, 0.0294334716796875, 0.02951603126525879, 0.029472896575927734, 0.029441280364990233, 0.029479679107666017, 0.02950553512573242, 0.029521791458129883, 0.02954457664489746, 0.029317472457885744, 0.029381792068481447, 0.030681407928466797, 0.030263519287109374, 0.02976799964904785, 0.029634559631347656, 0.029527711868286132, 0.02935638427734375, 0.029136608123779297, 0.029085599899291992, 0.029081375122070312, 0.0290382080078125, 0.029184095382690428, 0.029281152725219726, 0.02921459197998047, 0.02918822479248047, 0.02917910385131836, 0.029106559753417968, 0.029079967498779297, 0.029179904937744142, 0.029091392517089844, 0.02927155113220215, 0.029238208770751953, 0.02927577590942383, 0.02919001579284668, 0.029377056121826173, 0.029325279235839843, 0.029085695266723634, 0.029179744720458985, 0.02925788879394531, 0.029229055404663085, 0.02927414321899414, 0.0293305606842041, 0.029239391326904295, 0.029244159698486327, 0.029163520812988283, 0.029462207794189454, 0.0294136962890625, 0.02953830337524414, 0.02960383987426758, 0.029594655990600585, 0.029569503784179687, 0.029573631286621094, 0.029584991455078126, 0.0294936637878418, 0.029447296142578124, 0.02934668731689453, 0.029394943237304686, 0.029437408447265626, 0.0294487361907959, 0.02938470458984375, 0.029366207122802735, 0.029370431900024415, 0.029440031051635743, 0.02941334342956543, 0.029413375854492187, 0.029425664901733397, 0.029419519424438476, 0.02953327941894531, 0.029418399810791016, 0.029375680923461912, 0.029415231704711914, 0.029479103088378908, 0.03527056121826172, 0.03192457580566406, 0.030197792053222657, 0.029588991165161133, 0.029264352798461915, 0.02904832077026367, 0.029044448852539064, 0.029034431457519532, 0.028867424011230467, 0.029003263473510742, 0.02907935905456543, 
0.02911712074279785, 0.028964448928833007, 0.02914899253845215, 0.029272544860839845, 0.029204639434814453, 0.029183263778686522, 0.029274816513061522, 0.029351520538330077, 0.02922742462158203, 0.029216768264770508, 0.02923014450073242, 0.029178720474243164, 0.029353759765625, 0.02945465660095215, 0.029396352767944337, 0.029311616897583007, 0.029253631591796874, 0.02916147232055664, 0.02927359962463379, 0.029118976593017577, 0.029319168090820313, 0.029441375732421875, 0.029552608489990233, 0.029647552490234375, 0.02958291244506836, 0.02956284713745117, 0.029654848098754884, 0.029843839645385742, 0.029646976470947266, 0.029543615341186522, 0.0293734073638916, 0.02932080078125, 0.029305248260498046, 0.029290496826171877, 0.02935315132141113, 0.029374656677246095, 0.02943824005126953, 0.029522111892700195, 0.029569023132324217, 0.029425312042236328, 0.029485439300537108, 0.029403263092041016, 0.02940447998046875, 0.02938889694213867, 0.029641311645507814, 0.029411327362060546, 0.02949087905883789, 0.029570783615112305, 0.02961199951171875, 0.029612159729003905, 0.029568799972534178, 0.029606624603271483, 0.03591609573364258, 0.03236640167236328, 0.030769311904907226, 0.02995199966430664, 0.02938051223754883, 0.029157375335693358, 0.029098079681396483, 0.028940031051635742, 0.0290731201171875, 0.029006368637084962, 0.028976255416870118, 0.0290251522064209, 0.029132736206054687, 0.02908576011657715, 0.029171712875366212, 0.029082656860351563, 0.028955615997314454, 0.02919171142578125, 0.029227487564086913, 0.02926585578918457, 0.029283456802368164, 0.029407743453979493, 0.029299135208129882, 0.029253631591796874, 0.029278207778930664, 0.02914633560180664, 0.029260128021240235, 0.02923289680480957, 0.029407936096191405, 0.029280256271362305, 0.02928233528137207, 0.029296607971191407, 0.02936627197265625, 0.02965212821960449, 0.029792896270751955, 0.029787839889526366, 0.02965724754333496, 0.02966102409362793, 0.029483552932739257, 0.029424671173095704, 0.029493759155273438, 0.029571456909179686, 0.02938585662841797, 0.029504159927368163, 0.029401151657104493, 0.02933308792114258, 0.029402816772460937, 0.02942624092102051, 0.02941993522644043, 0.029509311676025392, 0.029245759963989256, 0.029237247467041014, 0.029208448410034178, 0.029283456802368164, 0.029377536773681642, 0.029421567916870117, 0.02953327941894531, 0.0294835205078125, 0.02958937644958496, 0.029532703399658203, 0.02947260856628418, 0.029454431533813476, 0.029536319732666017, 0.03626790237426758, 0.0325305290222168, 0.030698528289794923, 0.029940095901489258, 0.029350496292114257, 0.02916147232055664, 0.029086847305297852, 0.028969856262207033, 0.02893824005126953, 0.029144704818725584, 0.029096351623535157, 0.028970975875854493, 0.029014047622680665, 0.029353952407836913, 0.029257280349731445, 0.029337312698364256, 0.029275007247924804, 0.029128543853759764, 0.02918524742126465, 0.029270368576049803, 0.02921628761291504, 0.029244287490844727, 0.02931715202331543, 0.029336639404296875, 0.029342655181884766, 0.02929804801940918, 0.02913046455383301, 0.02920307159423828, 0.029147424697875977, 0.029255231857299804, 0.02921072006225586, 0.029419424057006836, 0.029489599227905273, 0.02970729637145996, 0.029717344284057617, 0.02992959976196289, 0.030055744171142578, 0.029827680587768555, 0.029708383560180664, 0.029620223999023438, 0.029474815368652343, 0.02939632034301758, 0.029360416412353516, 0.029501407623291016, 0.029456064224243163, 0.029294656753540038, 0.029303455352783205, 0.02939084815979004, 0.029369888305664064, 
0.029388383865356447, 0.02941632080078125, 0.029422687530517577, 0.02929142379760742, 0.02934377670288086, 0.029288415908813478, 0.0293621768951416, 0.02936422348022461, 0.02941935920715332, 0.02948726463317871, 0.029561887741088866, 0.029548608779907226, 0.029438880920410155, 0.029323135375976563, 0.03735049438476563, 0.03290796661376953, 0.030916383743286133, 0.030052000045776368, 0.0295263671875, 0.029189727783203126, 0.02896281623840332, 0.028950687408447265, 0.02914371109008789, 0.028958751678466798, 0.028917728424072267, 0.02919366455078125, 0.029168191909790038, 0.02913689613342285, 0.02918191909790039, 0.02917731285095215, 0.02903481674194336, 0.029083295822143553, 0.029106752395629883, 0.029151264190673827, 0.029159423828125, 0.029337600708007814, 0.029318464279174804, 0.029303487777709962, 0.02928755187988281, 0.029305599212646485, 0.02933919906616211, 0.029268543243408204, 0.02921660804748535, 0.029163520812988283, 0.02913865661621094, 0.029292287826538085, 0.029337791442871092, 0.02962838363647461, 0.02978006362915039, 0.03020841598510742, 0.029736991882324218, 0.02979638481140137, 0.029757312774658203, 0.029572256088256837, 0.02951468849182129, 0.029575168609619142, 0.02953830337524414, 0.0295251522064209, 0.029354496002197264, 0.029317472457885744, 0.029308576583862305, 0.029513536453247072, 0.029458976745605468, 0.029343391418457033, 0.029290847778320313, 0.029261760711669922, 0.029345855712890626, 0.02938265609741211, 0.02938265609741211, 0.0294072322845459, 0.029389919281005858, 0.029390815734863282, 0.02955708885192871, 0.02969046401977539, 0.02939449691772461, 0.029495775222778322, 0.02957923126220703, 0.0359403190612793, 0.03200185775756836, 0.030426368713378907, 0.029723615646362306, 0.029251455307006836, 0.029151327133178712, 0.02896281623840332, 0.029183359146118165, 0.029143680572509767, 0.029128416061401367, 0.029135040283203125, 0.028992639541625977, 0.029248416900634764, 0.028907583236694335, 0.028923871994018555, 0.02891779136657715, 0.028893087387084963, 0.029120576858520507, 0.029068384170532226, 0.029189056396484375, 0.02923107147216797, 0.02928748893737793, 0.029332447052001952, 0.029449344635009766, 0.0291910400390625, 0.029149183273315428, 0.029100032806396486, 0.029296640396118165, 0.029243423461914064, 0.029194047927856445, 0.029200544357299806, 0.029214719772338867, 0.02943292808532715, 0.029526784896850587, 0.029651103973388673, 0.029808639526367187, 0.02976464080810547, 0.02970899200439453, 0.029622528076171876, 0.029577247619628905, 0.029593599319458007, 0.029470367431640623, 0.029428064346313478, 0.029476863861083984, 0.029499391555786132, 0.02949660873413086, 0.02954310417175293, 0.02947385597229004, 0.029334495544433594, 0.02931920051574707, 0.02930838394165039, 0.02950809669494629, 0.02942060852050781, 0.029352127075195314, 0.029471456527709963, 0.029452159881591798, 0.029546655654907227, 0.029479936599731447, 0.029541376113891602, 0.029502496719360352, 0.029496288299560545, 0.029511327743530272, 0.029564416885375977, 0.03757638549804688, 0.032578174591064456, 0.03095756721496582, 0.03017318344116211, 0.02932905578613281, 0.029178207397460937, 0.02903615951538086, 0.028895488739013674, 0.029014144897460938, 0.02916761589050293, 0.029231103897094726, 0.02906425666809082, 0.029050912857055664, 0.029205408096313477, 0.029294591903686523, 0.029249216079711916, 0.02933955192565918, 0.029401504516601562, 0.0291778564453125, 0.029360128402709962, 0.029165567398071288, 0.029691808700561522, 0.029230623245239257, 0.02938243293762207, 0.02933635139465332, 
0.029299840927124024, 0.029264768600463866, 0.029171712875366212, 0.02926358413696289, 0.02927440071105957, 0.029378559112548826, 0.02920159912109375, 0.029227840423583985, 0.029445728302001952, 0.029682079315185548, 0.029677568435668947, 0.029825023651123047, 0.029837312698364257, 0.029705888748168947, 0.029671775817871095, 0.02946393585205078, 0.02940582466125488, 0.02929254341125488, 0.029437952041625977, 0.029387968063354492, 0.02951865577697754, 0.029435903549194335, 0.029442047119140623, 0.02953830337524414, 0.029609535217285158, 0.02962063980102539, 0.029638687133789064, 0.02950553512573242, 0.029529247283935547, 0.029520736694335938, 0.029482336044311525, 0.029598367691040038, 0.029424831390380858, 0.02948588752746582, 0.02942742347717285, 0.029514015197753905, 0.02954457664489746, 0.02955251121520996, 0.036372577667236325, 0.032198078155517576, 0.030544448852539062, 0.0298024959564209, 0.02949836730957031, 0.02915603256225586, 0.02915670394897461, 0.029078527450561522, 0.029083520889282226, 0.02898543930053711, 0.029075456619262696, 0.02927824020385742, 0.029338592529296874, 0.029367200851440428, 0.02917740821838379, 0.029170207977294922, 0.029081600189208984, 0.02924457550048828, 0.02925859260559082, 0.029347871780395506, 0.029357952117919923, 0.029431552886962892, 0.029384191513061524, 0.029286272048950197, 0.029172895431518554, 0.029178848266601564, 0.029148000717163086, 0.029144895553588866, 0.029161663055419923, 0.02921062469482422, 0.029237247467041014, 0.029373727798461913, 0.02959228706359863, 0.029837312698364257, 0.02981888008117676, 0.02981888008117676, 0.029734624862670898, 0.029739295959472656, 0.029870080947875976, 0.029513759613037108, 0.02962428855895996, 0.029466623306274413, 0.02944118309020996, 0.029561695098876954, 0.02954854393005371, 0.029605920791625977, 0.029431520462036134, 0.029397024154663085, 0.02937001609802246, 0.029376415252685546, 0.02957993507385254, 0.02944000053405762, 0.02949862480163574, 0.029641471862792968, 0.02962227249145508, 0.029576768875122072, 0.029505983352661132, 0.02949241638183594, 0.02946950340270996, 0.02959676742553711, 0.029451168060302735, 0.02951545524597168, 0.029436223983764647]",tokens/s,33.86843405454676,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1153, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 918, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 918, in [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 691, in __init__ self.self_attn = QWEN2MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 294, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 634, in __init__ self.attention = GPT_NEOX_ATTENTION_CLASSES[config._attn_implementation](config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 402, in __init__ super().__init__(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 102, in __init__ self.query_key_value = nn.Linear(config.hidden_size, 3 * config.hidden_size, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.63296,516.882432,0.0,174.063616,172.57984,s,1,7.257505859375,7.257505859375,0.0,7.257505859375,7.257505859375,7.257505859375,7.257505859375,[7.257505859375],,kWh,4.556478499978311e-06,4.953557472553107e-07,1.993334927993806e-06,7.045169175227427e-06,,MB,1146.044416,642.711552,0.0,234.881024,215.589888,s,32,0.1822226881980896,0.0056944590061903,0.00011450180687880147,0.005671855926513671,0.005745030546188355,0.0059754145860672,0.006084908156394959,"[0.0060908799171447755, 0.005681056022644043, 0.005745632171630859, 0.005715551853179931, 0.005696095943450928, 0.0056877121925354, 0.005663871765136719, 0.00567142391204834, 0.005592832088470459, 0.005616000175476074, 0.005704319953918457, 0.0056828479766845704, 0.005642687797546387, 0.005672287940979004, 0.005628384113311767, 0.005597599983215332, 0.0056349759101867675, 0.006071616172790528, 0.0056938881874084475, 0.005676320075988769, 0.00573961591720581, 0.005615551948547363, 0.005654208183288574, 0.0056230401992797855, 0.005657887935638427, 0.005622687816619873, 0.005689407825469971, 0.0056431999206542965, 0.005635424137115479, 0.005896704196929932, 0.005598720073699951, 0.005680255889892578]",tokens/s,44955.982600227515,kWh,1.6538748928571e-07,1.823924301159991e-08,8.807845922562414e-08,2.7170519152293407e-07,tokens/kWh,942197675.9630357,MB,1174.319104,646.905856,0.0,239.075328,215.592448,s,32,10.018107879638674,0.3130658712387085,0.02405860689388873,0.3079739990234375,0.31471940002441406,0.3182858184814453,0.40618933898925796,"[0.317748779296875, 0.4453873291015625, 0.31407513427734374, 0.31141497802734375, 0.3147909851074219, 0.3109840698242187, 0.31894219970703125, 0.31339761352539064, 0.3118016357421875, 0.3115425109863281, 0.30998098754882814, 0.3064769897460938, 0.30851312255859376, 0.30743487548828125, 0.3044540710449219, 0.3086247863769531, 0.3065010986328125, 
0.3053970031738281, 0.3058859558105469, 0.30894720458984376, 0.3051858215332031, 0.3060135498046875, 0.3067017517089844, 0.30892779541015625, 0.3048805541992187, 0.30572418212890623, 0.30633673095703123, 0.30638446044921874, 0.3061524353027344, 0.3095575866699219, 0.30547216796875, 0.3044695129394531]",tokens/s,201.23560498858515,kWh,8.70659662397215e-06,9.601826387654262e-07,3.6837764101080034e-06,1.3350555672845578e-05,tokens/kWh,4718904.706576306,,s,2016,10.003623904705027,0.004962115032095758,0.0029838044893107014,0.0048530719280242916,0.005034496068954468,0.005185472130775451,0.0054494496345520015,"[0.005335008144378662, 0.005156896114349365, 0.006113247871398926, 0.005140511989593506, 0.005128479957580566, 0.005082464218139649, 0.0051625919342041015, 0.005249824047088623, 0.005186944007873535, 0.005190271854400635, 0.00512175989151001, 0.005148960113525391, 0.005062975883483887, 0.0050495038032531735, 0.0050161280632019046, 0.004988255977630615, 0.0049437122344970705, 0.005042975902557373, 0.00505401611328125, 0.00548089599609375, 0.0053678078651428224, 0.005316127777099609, 0.0050973758697509764, 0.005171807765960693, 0.005079296112060547, 0.005083168029785156, 0.00507257604598999, 0.005038047790527344, 0.004941664218902588, 0.004902207851409912, 0.004965472221374512, 0.005025311946868897, 0.005043807983398437, 0.004879007816314698, 0.0049192957878112795, 0.0049146881103515625, 0.004964863777160645, 0.004937727928161621, 0.0049482879638671875, 0.005002943992614746, 0.0050003199577331545, 0.00496892786026001, 0.004914656162261963, 0.004993887901306152, 0.004955935955047608, 0.004902688026428223, 0.00486630392074585, 0.004847775936126709, 0.004883679866790771, 0.005194655895233154, 0.004882463932037354, 0.004879776000976562, 0.004886911869049072, 0.004917439937591553, 0.0049192957878112795, 0.004900864124298096, 0.004859903812408447, 0.004896192073822021, 0.004878880023956299, 0.004853631973266602, 0.004871871948242188, 0.00483292818069458, 0.004859871864318847, 0.004777056217193603, 0.004970335960388183, 0.004855584144592285, 0.0048273601531982424, 0.0048492798805236815, 0.004950751781463623, 0.004964000225067139, 0.004903103828430176, 0.004861440181732178, 0.004894303798675537, 0.00490553617477417, 0.004898975849151612, 0.004882400035858154, 0.004952095985412598, 0.1387552032470703, 0.005401472091674805, 0.005199391841888428, 0.00506060791015625, 0.005013887882232666, 0.005037856101989746, 0.00493395185470581, 0.004888576030731201, 0.004888576030731201, 0.004884223937988281, 0.004909023761749267, 0.004917535781860351, 0.004878335952758789, 0.00495747184753418, 0.005005792140960693, 0.004892928123474121, 0.004951519966125488, 0.004859871864318847, 0.004841311931610108, 0.004903488159179688, 0.00487340784072876, 0.00488044786453247, 0.004862880229949951, 0.004931583881378174, 0.004874239921569825, 0.0050787200927734375, 0.005136703968048096, 0.005147712230682373, 0.0050308480262756345, 0.004970335960388183, 0.0049153599739074705, 0.005026815891265869, 0.00498748779296875, 0.004942240238189698, 0.00493065595626831, 0.0049136638641357425, 0.004888927936553955, 0.004894271850585937, 0.004946176052093506, 0.004860000133514404, 0.004935711860656738, 0.004913280010223389, 0.004900063991546631, 0.004886943817138672, 0.004940159797668457, 0.00496230411529541, 0.004872191905975342, 0.004876287937164306, 0.004867104053497315, 0.004776415824890137, 0.0048989119529724125, 0.004866079807281494, 0.004902400016784668, 0.004902431964874268, 0.0048670082092285155, 0.004904960155487061, 
0.004833280086517334, 0.004828288078308106, 0.004885312080383301, 0.004890367984771728, 0.00489299201965332, 0.004855264186859131, 0.0048788161277771, 0.004899199962615967, 0.0048923521041870114, 0.0049716482162475584, 0.004975552082061767, 0.0049558720588684085, 0.005007391929626465, 0.004966591835021973, 0.004997119903564453, 0.0050094079971313476, 0.0049459199905395506, 0.004982912063598633, 0.004900352001190185, 0.004868480205535888, 0.004839456081390381, 0.004927455902099609, 0.004859295845031739, 0.004894911766052246, 0.004909408092498779, 0.004888288021087647, 0.00495958423614502, 0.004885727882385254, 0.0048886399269104, 0.004910816192626953, 0.004890719890594483, 0.004879744052886963, 0.004866591930389404, 0.004873856067657471, 0.004958591938018799, 0.0050516161918640135, 0.005139232158660889, 0.005449728012084961, 0.005341343879699707, 0.005386079788208008, 0.00535964822769165, 0.005425119876861572, 0.00529372787475586, 0.005206431865692139, 0.005208000183105469, 0.005060544013977051, 0.005015552043914795, 0.005081151962280274, 0.0049658560752868655, 0.0049270401000976564, 0.004922336101531983, 0.004934783935546875, 0.0049552001953125, 0.004976672172546387, 0.004896543979644776, 0.005021599769592286, 0.004775167942047119, 0.00504256010055542, 0.005314943790435791, 0.005130239963531494, 0.0049658560752868655, 0.0050795841217041015, 0.005119999885559082, 0.005021376132965088, 0.004997087955474854, 0.004919648170471191, 0.004911104202270508, 0.004874495983123779, 0.004922368049621582, 0.00489577579498291, 0.0048657598495483395, 0.005146368026733399, 0.004888544082641602, 0.004985119819641113, 0.004907040119171142, 0.004890495777130127, 0.004884575843811035, 0.004928800106048584, 0.004899392127990723, 0.005003168106079102, 0.004884736061096191, 0.004861152172088623, 0.004852511882781982, 0.0048919358253479, 0.004856895923614502, 0.004873439788818359, 0.0049064640998840335, 0.005032927989959717, 0.004906527996063233, 0.004858528137207031, 0.00491871976852417, 0.004848000049591065, 0.004907008171081543, 0.004871712207794189, 0.00488047981262207, 0.004825471878051758, 0.00491315221786499, 0.004841567993164063, 0.004867072105407715, 0.004832159996032715, 0.004810272216796875, 0.004864319801330567, 0.0048784961700439455, 0.004875775814056397, 0.004857503890991211, 0.004896607875823975, 0.004891647815704346, 0.004863423824310303, 0.005185344219207764, 0.004884384155273438, 0.004978720188140869, 0.00506544017791748, 0.004985951900482178, 0.005026463985443115, 0.005034336090087891, 0.005033984184265137, 0.004984831809997559, 0.004904096126556397, 0.004926015853881836, 0.004779232025146484, 0.004861887931823731, 0.004993887901306152, 0.004859903812408447, 0.004888576030731201, 0.004925248146057129, 0.0050833277702331545, 0.004875775814056397, 0.004973055839538575, 0.005031199932098389, 0.0049927678108215336, 0.004959487915039062, 0.004952896118164063, 0.004956223964691162, 0.004952735900878906, 0.004950399875640869, 0.004857664108276367, 0.004859903812408447, 0.00492083215713501, 0.004880735874176026, 0.004896927833557129, 0.004878560066223145, 0.004982560157775879, 0.005209504127502441, 0.005435167789459228, 0.005477183818817138, 0.0054579200744628905, 0.005336895942687988, 0.005327040195465088, 0.0053244481086730956, 0.0052820158004760745, 0.005081471920013428, 0.005063456058502197, 0.005081920146942138, 0.005050528049468994, 0.004943871974945068, 0.004941823959350586, 0.004886144161224365, 0.004868480205535888, 0.004836800098419189, 0.004899519920349121, 0.0048536000251770016, 
0.004869984149932861, 0.004867712020874024, 0.005083712100982666, 0.0050854082107543944, 0.00502291202545166, 0.005022304058074951, 0.005050687789916992, 0.005029407978057862, 0.005017792224884033, 0.00497046422958374, 0.004941215991973877, 0.004868703842163086, 0.004884479999542236, 0.004855103969573974, 0.004875135898590088, 0.0048453760147094726, 0.004888415813446045, 0.004868256092071533, 0.004853087902069092, 0.004924191951751709, 0.00484768009185791, 0.0047924799919128415, 0.0048815679550170896, 0.004907872200012207, 0.004947840213775635, 0.004896895885467529, 0.005031871795654297, 0.004893760204315186, 0.004850687980651855, 0.004871327877044678, 0.004860511779785156, 0.004903359889984131, 0.004865856170654297, 0.004829504013061523, 0.0048429441452026364, 0.004833280086517334, 0.0048287358283996586, 0.004823647975921631, 0.004849215984344482, 0.004848159790039062, 0.0048352317810058595, 0.004851263999938965, 0.004847392082214356, 0.004864768028259277, 0.00496230411529541, 0.004942016124725342, 0.004910751819610596, 0.004872352123260498, 0.00496614408493042, 0.004954495906829834, 0.004866047859191895, 0.004859583854675293, 0.004808159828186035, 0.00486630392074585, 0.004839903831481933, 0.00488259220123291, 0.004855648040771484, 0.004937568187713623, 0.005093535900115967, 0.005294079780578613, 0.0053821439743042, 0.005304255962371826, 0.005144639968872071, 0.005130239963531494, 0.005158912181854248, 0.005068992137908936, 0.005053279876708984, 0.004993087768554687, 0.005000224113464355, 0.004988800048828125, 0.004879744052886963, 0.004921664237976074, 0.004903232097625733, 0.00493990421295166, 0.004837247848510742, 0.004883999824523926, 0.004854527950286866, 0.004877312183380127, 0.004850399971008301, 0.0048455681800842285, 0.004880352020263672, 0.004895040035247803, 0.004951168060302735, 0.004911712169647217, 0.004885183811187744, 0.004841311931610108, 0.004872096061706543, 0.004892672061920166, 0.005029664039611816, 0.005042367935180664, 0.004912447929382324, 0.004960959911346436, 0.004972576141357422, 0.004853759765625, 0.004853759765625, 0.004988704204559326, 0.0049268159866333, 0.005001440048217774, 0.005194399833679199, 0.005349376201629639, 0.005293407917022705, 0.005184160232543945, 0.005146240234375, 0.0052893757820129395, 0.00502678394317627, 0.004906303882598877, 0.0049220480918884275, 0.004894720077514648, 0.004911104202270508, 0.0048323521614074706, 0.005189568042755127, 0.00492412805557251, 0.005044479846954345, 0.004865439891815185, 0.00498748779296875, 0.004904960155487061, 0.005332992076873779, 0.005281792163848877, 0.00542310380935669, 0.005691391944885254, 0.005476352214813233, 0.005134335994720459, 0.0050360321998596195, 0.005217311859130859, 0.005098080158233643, 0.004952544212341308, 0.004929567813873291, 0.004934783935546875, 0.004932608127593994, 0.004952159881591797, 0.004933631896972656, 0.00488150405883789, 0.004872767925262451, 0.00487014389038086, 0.004859903812408447, 0.004886271953582763, 0.004986911773681641, 0.005221727848052978, 0.0054997758865356445, 0.005306367874145508, 0.005302432060241699, 0.0052420802116394044, 0.005090943813323975, 0.0052070398330688475, 0.00509881591796875, 0.004974783897399903, 0.004966911792755127, 0.0047422399520874025, 0.0048278717994689946, 0.004958399772644043, 0.005199391841888428, 0.00519212818145752, 0.005202303886413575, 0.005253056049346924, 0.005142271995544434, 0.005160927772521973, 0.0052219839096069335, 0.005035871982574463, 0.004983391761779785, 0.004949440002441406, 0.004975135803222656, 0.005004543781280518, 
0.005116672039031983, 0.005691391944885254, 0.005136223793029785, 0.0050136961936950684, 0.004914368152618408, 0.00494601583480835, 0.004949984073638916, 0.004943840026855469, 0.004915967941284179, 0.00480460786819458, 0.004968448162078858, 0.005094592094421387, 0.005009759902954101, 0.004880544185638428, 0.004903232097625733, 0.0050011839866638185, 0.004976319789886475, 0.004882783889770508, 0.0048594241142272945, 0.004881120204925537, 0.00491212797164917, 0.004966879844665527, 0.004878623962402344, 0.004888576030731201, 0.004875584125518799, 0.0048789758682250975, 0.004879648208618164, 0.004901663780212402, 0.004896096229553223, 0.004868671894073486, 0.004851295948028565, 0.004833631992340088, 0.004868256092071533, 0.004896383762359619, 0.004859936237335205, 0.004866240024566651, 0.004868256092071533, 0.005447872161865234, 0.005324672222137452, 0.004810688018798828, 0.0048230400085449215, 0.004820096015930176, 0.004880288124084473, 0.0048178877830505375, 0.004824192047119141, 0.00483622407913208, 0.00481279993057251, 0.004828159809112549, 0.004894879817962647, 0.004803904056549072, 0.004807360172271728, 0.004788064002990723, 0.004824543952941894, 0.00490550422668457, 0.005205632209777832, 0.005427584171295166, 0.0053821439743042, 0.005293248176574707, 0.005235519886016846, 0.005395584106445313, 0.005395328044891358, 0.005154816150665284, 0.004943295955657959, 0.004862527847290039, 0.0049502401351928715, 0.004984608173370361, 0.004933695793151855, 0.004855743885040283, 0.004865568161010742, 0.004853439807891846, 0.004901023864746093, 0.004846208095550537, 0.004859903812408447, 0.004872128009796143, 0.004947616100311279, 0.004856224060058594, 0.004896768093109131, 0.0052286720275878905, 0.005323775768280029, 0.005218207836151123, 0.004967520236968994, 0.004932544231414795, 0.004946879863739014, 0.0048865280151367185, 0.00488431978225708, 0.004849823951721191, 0.004833280086517334, 0.004825088024139404, 0.004877376079559326, 0.004826272010803222, 0.004810656070709228, 0.004810111999511719, 0.004993152141571045, 0.004907264232635498, 0.004906784057617188, 0.004843008041381836, 0.0048382081985473635, 0.004835360050201416, 0.004833280086517334, 0.004872223854064941, 0.004896736145019532, 0.004820991992950439, 0.004845119953155517, 0.004848063945770264, 0.004820799827575684, 0.004800511837005615, 0.004806496143341064, 0.004845823764801025, 0.004829279899597168, 0.004834911823272705, 0.0048206720352172855, 0.004733503818511963, 0.004800672054290771, 0.004829184055328369, 0.004903103828430176, 0.0048822398185729985, 0.004831232070922851, 0.004796544075012207, 0.00482857608795166, 0.004834047794342041, 0.004822688102722168, 0.004811007976531983, 0.004806464195251465, 0.004959392070770264, 0.00494268798828125, 0.004925407886505127, 0.00482860803604126, 0.004813504219055176, 0.004954368114471436, 0.004824192047119141, 0.004841216087341308, 0.00495900821685791, 0.004872159957885742, 0.004839200019836426, 0.004888832092285156, 0.004988927841186524, 0.005177184104919434, 0.005365568161010742, 0.005357920169830322, 0.0053821439743042, 0.005298175811767578, 0.005251071929931641, 0.0052657599449157716, 0.005213247776031494, 0.00498748779296875, 0.004883552074432373, 0.004864927768707276, 0.005111711978912354, 0.004991231918334961, 0.005007199764251709, 0.004849823951721191, 0.0048776321411132815, 0.004899360179901123, 0.00485587215423584, 0.0048657598495483395, 0.0049151678085327145, 0.004880640029907226, 0.004898816108703613, 0.004907008171081543, 0.005060895919799805, 0.005137856006622314, 
0.004915487766265869, 0.004865183830261231, 0.004803423881530762, 0.004856895923614502, 0.004865024089813232, 0.004874176025390625, 0.004843200206756592, 0.00485203218460083, 0.004925439834594727, 0.0048865280151367185, 0.0048540477752685544, 0.004857567787170411, 0.004872191905975342, 0.004791296005249023, 0.00489024019241333, 0.0048676800727844236, 0.004862016201019287, 0.004868832111358643, 0.004947968006134033, 0.0049695358276367185, 0.004878335952758789, 0.004869056224822998, 0.004859903812408447, 0.004857855796813965, 0.004856128215789795, 0.0048453760147094726, 0.004840991973876953, 0.004832704067230225, 0.0048362560272216795, 0.004894720077514648, 0.0048148479461669925, 0.004836991786956787, 0.004810688018798828, 0.004962687969207764, 0.004868447780609131, 0.004861216068267823, 0.005134143829345703, 0.005034656047821045, 0.004894527912139892, 0.0048594560623168945, 0.0049016962051391605, 0.004902431964874268, 0.004864575862884521, 0.00493126392364502, 0.004941408157348633, 0.005024288177490234, 0.0049580798149108886, 0.004927487850189209, 0.004913055896759033, 0.004931680202484131, 0.004878335952758789, 0.00486191987991333, 0.004920928001403808, 0.004977087974548339, 0.004930848121643066, 0.004904704093933105, 0.004996064186096191, 0.005031775951385498, 0.005140160083770752, 0.005144159793853759, 0.005094207763671875, 0.005064223766326904, 0.0050507521629333495, 0.004993184089660645, 0.004871840000152588, 0.004860256195068359, 0.004863423824310303, 0.004864575862884521, 0.0049192957878112795, 0.004835328102111816, 0.004810207843780517, 0.00489731216430664, 0.0048351998329162595, 0.004856063842773438, 0.004904895782470703, 0.0048700799942016605, 0.004744991779327392, 0.004850272178649902, 0.00493065595626831, 0.004864927768707276, 0.00483948802947998, 0.004831168174743652, 0.00487014389038086, 0.004843520164489746, 0.004883840084075928, 0.0048154878616333005, 0.004827072143554687, 0.004952127933502197, 0.004839104175567627, 0.004837567806243896, 0.004816927909851074, 0.004816864013671875, 0.0048388481140136715, 0.004821695804595947, 0.0048189439773559575, 0.004911104202270508, 0.004888063907623291, 0.004927455902099609, 0.004896736145019532, 0.004862592220306396, 0.004857791900634766, 0.004970176219940186, 0.004866367816925049, 0.004851295948028565, 0.004825407981872558, 0.004843616008758545, 0.0048537921905517575, 0.004937600135803222, 0.004956223964691162, 0.004833312034606934, 0.004833280086517334, 0.00482915210723877, 0.00487446403503418, 0.004832575798034668, 0.004815328121185303, 0.004855743885040283, 0.004849152088165283, 0.004895328044891357, 0.004947968006134033, 0.004887936115264893, 0.004843520164489746, 0.004976895809173584, 0.004830719947814942, 0.0048065919876098635, 0.004856768131256103, 0.004825088024139404, 0.004834368228912354, 0.004817855834960937, 0.004810751914978028, 0.0049666881561279295, 0.004859615802764892, 0.004812384128570556, 0.004790463924407959, 0.004876480102539062, 0.004820256233215332, 0.004854527950286866, 0.004841599941253662, 0.0048167681694030766, 0.004845471858978272, 0.0047545919418334965, 0.004825952053070068, 0.004820320129394531, 0.004847328186035157, 0.0048232641220092775, 0.004845439910888672, 0.004860256195068359, 0.004853888034820557, 0.004823808193206787, 0.0048986878395080565, 0.004826879978179931, 0.004843552112579346, 0.004859871864318847, 0.004837376117706299, 0.004820991992950439, 0.004834400177001953, 0.0048650879859924315, 0.004818784236907959, 0.004831232070922851, 0.004841311931610108, 0.004852960109710694, 
0.004858816146850586, 0.004836703777313232, 0.004870463848114013, 0.004878015995025635, 0.004854432106018066, 0.004868095874786377, 0.004820991992950439, 0.004847424030303955, 0.0048215041160583495, 0.004996096134185791, 0.005114560127258301, 0.004857855796813965, 0.0048455681800842285, 0.004956352233886719, 0.004862815856933594, 0.0048752322196960445, 0.004837376117706299, 0.004843520164489746, 0.004877952098846436, 0.004841856002807617, 0.004827136039733886, 0.004845471858978272, 0.004856927871704102, 0.004837952136993408, 0.004905248165130615, 0.004864160060882568, 0.004841311931610108, 0.004935840129852295, 0.00486195182800293, 0.004861728191375732, 0.0051216320991516115, 0.00487283182144165, 0.004997375965118408, 0.0048873920440673825, 0.004862751960754395, 0.004866399765014648, 0.004968128204345703, 0.004890912055969239, 0.0048529281616210935, 0.004958847999572754, 0.005185855865478516, 0.005813024044036865, 0.004753536224365235, 0.004802432060241699, 0.004860032081604004, 0.004859776020050049, 0.0047924799919128415, 0.004882431983947754, 0.00485152006149292, 0.004867743968963623, 0.0048336639404296874, 0.004803711891174316, 0.004806848049163819, 0.0049276800155639644, 0.005423808097839355, 0.005127999782562256, 0.005007359981536865, 0.004865407943725586, 0.004967040061950684, 0.0048187518119812015, 0.004857183933258057, 0.00481932783126831, 0.00482147216796875, 0.004833343982696533, 0.004825024127960205, 0.00478985595703125, 0.004813216209411621, 0.0048230400085449215, 0.004901887893676758, 0.004936384201049805, 0.004802400112152099, 0.004796256065368652, 0.0048362560272216795, 0.00486195182800293, 0.004847712039947509, 0.00486953592300415, 0.004993472099304199, 0.004837215900421143, 0.0048427839279174805, 0.004879007816314698, 0.005195551872253418, 0.0050210561752319334, 0.0048377919197082515, 0.004868671894073486, 0.004845600128173828, 0.0048167362213134765, 0.004884479999542236, 0.004822879791259766, 0.0048041920661926266, 0.0048063678741455075, 0.004899871826171875, 0.004818816184997559, 0.004848576068878173, 0.004814943790435791, 0.00490777587890625, 0.004841824054718017, 0.004831168174743652, 0.004830912113189697, 0.0048232321739196775, 0.004838943958282471, 0.004877920150756836, 0.004806943893432617, 0.004970528125762939, 0.004903647899627685, 0.004816256046295166, 0.00468998384475708, 0.004793983936309814, 0.0048195838928222655, 0.004779935836791992, 0.004928832054138184, 0.004837887763977051, 0.004775936126708984, 0.004841472148895264, 0.004841407775878906, 0.004810815811157227, 0.004843008041381836, 0.004796351909637451, 0.0048023362159729, 0.004807487964630127, 0.004783584117889404, 0.0047940478324890136, 0.004794847965240478, 0.0048254399299621584, 0.0048512320518493655, 0.004788864135742188, 0.004819104194641113, 0.004824704170227051, 0.004817984104156494, 0.004868256092071533, 0.004798719882965088, 0.004919904232025147, 0.004802559852600098, 0.004841472148895264, 0.004860095977783203, 0.004819104194641113, 0.00484227180480957, 0.004843391895294189, 0.004784895896911621, 0.004828832149505615, 0.0049014720916748045, 0.004803808212280274, 0.004827936172485351, 0.004792031764984131, 0.004814720153808594, 0.004880959987640381, 0.0048067841529846195, 0.004855008125305176, 0.004829504013061523, 0.004777152061462403, 0.004896895885467529, 0.004795392036437988, 0.004939167976379394, 0.004818975925445557, 0.004766143798828125, 0.004794367790222168, 0.0049090561866760255, 0.004843232154846192, 0.004861343860626221, 0.004811456203460693, 0.004886176109313965, 0.004813216209411621, 
0.00478220796585083, 0.004810688018798828, 0.004900928020477295, 0.004787231922149658, 0.004835296154022217, 0.00479744005203247, 0.0047983360290527344, 0.004910624027252198, 0.004785632133483887, 0.004878399848937988, 0.0048772478103637695, 0.0051849279403686525, 0.0052436161041259765, 0.004848703861236572, 0.005105567932128906, 0.005626783847808838, 0.004949440002441406, 0.004882688045501709, 0.005509119987487793, 0.0051840639114379885, 0.004828927993774414, 0.004816895961761475, 0.004844927787780761, 0.004864640235900879, 0.004819295883178711, 0.004820608139038086, 0.004783967971801758, 0.004839615821838379, 0.004871424198150635, 0.004807104110717773, 0.00485641622543335, 0.004779744148254395, 0.004827424049377442, 0.004867519855499268, 0.004858143806457519, 0.004825088024139404, 0.004833280086517334, 0.004856128215789795, 0.004845471858978272, 0.004856991767883301, 0.0048616318702697755, 0.004864607810974121, 0.004846303939819336, 0.004838592052459717, 0.004816927909851074, 0.0048306241035461425, 0.0048642559051513675, 0.004834047794342041, 0.004849184036254883, 0.004837408065795899, 0.004835775852203369, 0.0052583680152893065, 0.004831967830657959, 0.00487340784072876, 0.0048644161224365235, 0.0048596482276916505, 0.004806655883789063, 0.0048156800270080565, 0.004826560020446778, 0.004801087856292725, 0.0048455681800842285, 0.004808832168579102, 0.004876160144805909, 0.004822751998901367, 0.004855264186859131, 0.004821824073791504, 0.004818975925445557, 0.00480998420715332, 0.004804800033569336, 0.004815392017364502, 0.004708320140838623, 0.005086495876312256, 0.004821760177612305, 0.005007455825805664, 0.004842688083648681, 0.004822847843170166, 0.0048546562194824215, 0.004832799911499023, 0.0049275197982788085, 0.004831711769104004, 0.004819071769714355, 0.004844863891601562, 0.004810400009155273, 0.004814911842346192, 0.0048607678413391114, 0.004784128189086914, 0.00484281587600708, 0.004821695804595947, 0.004816383838653565, 0.004790592193603516, 0.004966591835021973, 0.004880544185638428, 0.004829216003417969, 0.0048652801513671875, 0.0048371520042419435, 0.004877088069915772, 0.0048189439773559575, 0.004843776226043701, 0.004821760177612305, 0.005183680057525635, 0.004848512172698974, 0.004827072143554687, 0.0048429760932922365, 0.004827936172485351, 0.004840928077697754, 0.004829472064971924, 0.004857567787170411, 0.0048063998222351075, 0.004810368061065674, 0.004825056076049805, 0.00483843183517456, 0.004845471858978272, 0.004830880165100098, 0.0048150081634521485, 0.004872128009796143, 0.004853055953979492, 0.0048438720703125, 0.004805215835571289, 0.0050728960037231445, 0.004912255764007568, 0.004840320110321045, 0.004810751914978028, 0.004806943893432617, 0.004830495834350586, 0.0048540477752685544, 0.004804224014282227, 0.004807199954986572, 0.00479747200012207, 0.004830143928527832, 0.004865056037902832, 0.004809728145599365, 0.0048022718429565426, 0.00521449613571167, 0.004769792079925537, 0.0050421757698059086, 0.0048865280151367185, 0.004841472148895264, 0.004882431983947754, 0.00483513593673706, 0.00487443208694458, 0.004827136039733886, 0.004816256046295166, 0.00482367992401123, 0.004869311809539795, 0.0048525438308715824, 0.004906752109527588, 0.004856063842773438, 0.004794367790222168, 0.004808703899383545, 0.004842688083648681, 0.00487446403503418, 0.004876895904541016, 0.004833280086517334, 0.004828383922576904, 0.004856607913970947, 0.004843520164489746, 0.004831232070922851, 0.0048332161903381346, 0.004818496227264405, 0.004827583789825439, 
0.004821407794952393, 0.004832096099853515, 0.004897280216217041, 0.004836991786956787, 0.004811744213104248, 0.004810463905334472, 0.00480460786819458, 0.004847616195678711, 0.004836544036865235, 0.004796895980834961, 0.004796768188476562, 0.004769792079925537, 0.004941343784332275, 0.004855423927307129, 0.0048607678413391114, 0.0048455681800842285, 0.0048063998222351075, 0.004876543998718261, 0.00479859209060669, 0.004806528091430664, 0.004888576030731201, 0.004837376117706299, 0.004777791976928711, 0.004800479888916015, 0.004838719844818116, 0.004791200160980225, 0.004814879894256592, 0.0048793601989746095, 0.004793312072753906, 0.004775328159332275, 0.0049049282073974606, 0.00482367992401123, 0.004827136039733886, 0.004908512115478516, 0.004823391914367676, 0.004837376117706299, 0.004709824085235596, 0.004787327766418457, 0.004840735912322998, 0.004894464015960693, 0.004802591800689697, 0.004817279815673828, 0.004810336112976074, 0.004934495925903321, 0.004810656070709228, 0.004777503967285156, 0.004814943790435791, 0.004841631889343262, 0.004831456184387207, 0.004825088024139404, 0.004800511837005615, 0.0047961602210998535, 0.004819200038909912, 0.004902495861053467, 0.004808576107025147, 0.004821792125701905, 0.0048005437850952145, 0.004953695774078369, 0.004810592174530029, 0.004898560047149658, 0.004811295986175537, 0.00481279993057251, 0.004835328102111816, 0.004820127964019775, 0.0048091840744018555, 0.004809088230133056, 0.004810751914978028, 0.004874239921569825, 0.004834368228912354, 0.00486191987991333, 0.004797023773193359, 0.004798719882965088, 0.004833407878875732, 0.004810751914978028, 0.0049469761848449706, 0.0048273601531982424, 0.004794496059417725, 0.004821631908416748, 0.004833280086517334, 0.004845056056976318, 0.00490070390701294, 0.0048401918411254885, 0.004822688102722168, 0.00545743989944458, 0.004885216236114502, 0.004857855796813965, 0.004822783946990967, 0.0048063678741455075, 0.004801055908203125, 0.004800352096557617, 0.004831615924835205, 0.0048781118392944335, 0.00482038402557373, 0.004790143966674805, 0.00524502420425415, 0.0049015040397644045, 0.004833600044250488, 0.004810592174530029, 0.004839263916015625, 0.005285791873931885, 0.006253119945526123, 0.005736063957214355, 0.005308095932006836, 0.0048668160438537595, 0.004861887931823731, 0.004863999843597412, 0.004855519771575928, 0.004882143974304199, 0.004848095893859863, 0.004814432144165039, 0.00481331205368042, 0.0047923197746276855, 0.0048865280151367185, 0.004837088108062744, 0.004798751831054687, 0.005115903854370117, 0.004847904205322265, 0.004998784065246582, 0.004850880146026611, 0.0048419198989868165, 0.0049361600875854495, 0.004872352123260498, 0.004820831775665283, 0.004849088191986084, 0.004840223789215088, 0.004839200019836426, 0.0048189439773559575, 0.0048139519691467285, 0.004811647891998291, 0.00482476806640625, 0.004833600044250488, 0.004824351787567138, 0.0048278717994689946, 0.004816895961761475, 0.004783711910247803, 0.004894271850585937, 0.004860191822052002, 0.00483513593673706, 0.00482585620880127, 0.004825088024139404, 0.004841760158538818, 0.004990687847137451, 0.0048393278121948245, 0.004886623859405518, 0.004833119869232178, 0.004813183784484863, 0.004882400035858154, 0.004804416179656983, 0.004804895877838135, 0.0048198080062866215, 0.004818848133087158, 0.004834239959716797, 0.004810624122619629, 0.004806240081787109, 0.004821568012237549, 0.004820864200592041, 0.0048211197853088375, 0.004837376117706299, 0.004829184055328369, 0.004841472148895264, 0.004830751895904541, 
0.004805088043212891, 0.005019680023193359, 0.004822847843170166, 0.004798912048339844, 0.004802495956420899, 0.004826591968536377, 0.004860256195068359, 0.0048230400085449215, 0.004835487842559815, 0.004847424030303955, 0.004836415767669678, 0.0048670401573181156, 0.004806655883789063, 0.00486195182800293, 0.004874239921569825, 0.004931583881378174, 0.004832704067230225, 0.004815231800079345, 0.004874591827392578, 0.00481660795211792, 0.00482316780090332, 0.004840576171875, 0.004825215816497802, 0.004823808193206787, 0.004911104202270508, 0.004825088024139404, 0.004814271926879883, 0.004813375949859619, 0.00480460786819458, 0.004876639842987061, 0.0048165440559387205, 0.004798463821411133, 0.004819136142730713, 0.004828767776489258, 0.004869984149932861, 0.004835872173309326, 0.0048122878074646, 0.004809055805206299, 0.004836959838867188, 0.004825632095336914, 0.004867263793945312, 0.004818848133087158, 0.004791071891784668, 0.0048189439773559575, 0.004792223930358887, 0.004825471878051758, 0.004807807922363281, 0.0048249921798706056, 0.00482374382019043, 0.004802527904510498, 0.0047916479110717775, 0.004795072078704834, 0.004857855796813965, 0.004799647808074951, 0.004887296199798584, 0.004831327915191651, 0.004910560131072998, 0.004919839859008789, 0.004827104091644287, 0.004816927909851074, 0.00480460786819458, 0.004886911869049072, 0.004824704170227051, 0.004845695972442627, 0.004706783771514893, 0.004873951911926269, 0.004812640190124512, 0.004815135955810547, 0.005631552219390869, 0.004862559795379639, 0.004838912010192871, 0.004838751792907715, 0.004798431873321533, 0.004873119831085205, 0.004884736061096191, 0.004832640171051026, 0.004804704189300537, 0.004795904159545898, 0.004846367835998535, 0.0048023362159729, 0.004812960147857666, 0.004808767795562744, 0.004811071872711182, 0.004838719844818116, 0.004885119915008545, 0.004818784236907959, 0.0048065600395202634, 0.004808703899383545, 0.004833439826965332, 0.004820703983306885, 0.0048100481033325196, 0.004819424152374268, 0.00480291223526001, 0.004843200206756592, 0.004847712039947509, 0.004816415786743164, 0.004866112232208252, 0.00488047981262207, 0.0048215360641479495, 0.004839424133300781, 0.004829184055328369, 0.00481603193283081, 0.004859072208404541, 0.004808224201202392, 0.004817024230957031, 0.0048100161552429195, 0.004822944164276123, 0.0048791680335998535, 0.004881887912750244, 0.004823359966278076, 0.00483571195602417, 0.004827328205108642, 0.004852896213531494, 0.004812992095947265, 0.0048455362319946285, 0.00482751989364624, 0.0047918400764465335, 0.004819392204284668, 0.004866047859191895, 0.004835328102111816, 0.004845695972442627, 0.004861792087554931, 0.004836959838867188, 0.004816959857940674, 0.004802976131439209, 0.004808383941650391, 0.005316319942474365, 0.004695744037628174, 0.004822624206542969, 0.004883520126342774, 0.004779679775238037, 0.004794464111328125, 0.004794271945953369, 0.0048189439773559575, 0.005156864166259765, 0.00488969612121582, 0.004840352058410644, 0.004875679969787597, 0.004822847843170166, 0.004848095893859863, 0.004876607894897461, 0.004834400177001953, 0.004856224060058594, 0.004825600147247314, 0.004792255878448486, 0.004821216106414795, 0.004780032157897949, 0.004834464073181152, 0.004819168090820313, 0.004808640003204346, 0.00485430383682251, 0.004823359966278076, 0.004832863807678223, 0.004811103820800781, 0.004853119850158691, 0.004936255931854248, 0.004894527912139892, 0.004831232070922851, 0.004858176231384278, 0.005182784080505371, 0.004954495906829834, 
0.005048319816589355, 0.004918591976165771, 0.004860608100891113, 0.004833280086517334, 0.004876287937164306, 0.004845791816711425, 0.004831007957458496, 0.004831232070922851, 0.004817152023315429, 0.004858880043029785, 0.0050525121688842775, 0.004844255924224853, 0.004835264205932617, 0.004885791778564453, 0.004809440135955811, 0.0048949441909790035, 0.00483622407913208, 0.004864319801330567, 0.004901631832122803, 0.004855552196502686, 0.004851808071136474, 0.004810751914978028, 0.004945184230804443, 0.004827231884002686, 0.004893311977386475, 0.004811967849731445, 0.004796576023101806, 0.004830143928527832, 0.004888288021087647, 0.004709727764129638, 0.004865920066833496, 0.00482751989364624, 0.0048287038803100585, 0.004807007789611816, 0.004914944171905518, 0.00480950403213501, 0.004843520164489746, 0.004833280086517334, 0.00482092809677124, 0.0049845118522644045, 0.004863967895507812, 0.00482480001449585, 0.00482371187210083, 0.004818975925445557, 0.004820831775665283, 0.004862112045288086, 0.0048130879402160645, 0.004791808128356933, 0.005223775863647461, 0.0050338878631591795, 0.0053851838111877446, 0.005140192031860351, 0.006209695816040039, 0.005349696159362793, 0.004919104099273682, 0.004935679912567138, 0.004900288105010986, 0.004850560188293457, 0.00484284782409668, 0.004955967903137207, 0.0048624958992004395, 0.004834911823272705, 0.004834784030914307, 0.004862239837646484, 0.004838335990905762, 0.0048323521614074706, 0.004816832065582276, 0.004840415954589844, 0.004826655864715576, 0.004832960128784179, 0.0048009281158447265, 0.004786240100860596, 0.0048005437850952145, 0.004868256092071533, 0.004806496143341064, 0.004872191905975342, 0.004797952175140381, 0.004827136039733886, 0.004811200141906738, 0.004872255802154541, 0.004797984123229981, 0.004796224117279053, 0.004811423778533936, 0.004891871929168701, 0.004853631973266602, 0.004886879920959473, 0.004948031902313232, 0.004857376098632813, 0.004864160060882568, 0.004809535980224609, 0.004887743949890137, 0.004821375846862793, 0.004702208042144776, 0.0047923197746276855, 0.004854976177215576, 0.004903744220733643, 0.004824543952941894, 0.004817440032958985, 0.004802559852600098, 0.004838719844818116, 0.0048698558807373045, 0.00481990385055542, 0.004811903953552246, 0.004823328018188477, 0.004844160079956055, 0.004825088024139404, 0.004789504051208496, 0.004801568031311035, 0.0047983360290527344, 0.004791296005249023, 0.004892831802368164, 0.004827712059020996, 0.004855936050415039, 0.004832992076873779, 0.004902751922607422, 0.004821663856506348, 0.004886303901672363, 0.004840991973876953, 0.004847775936126709, 0.004833183765411377, 0.004899231910705566, 0.00484768009185791, 0.004822271823883057, 0.004889440059661865, 0.004839360237121582, 0.004824575901031494, 0.004833983898162842, 0.004833151817321777, 0.00483516788482666, 0.00482860803604126, 0.004832960128784179, 0.004872735977172852, 0.004808735847473144, 0.004803199768066407, 0.0048906559944152835, 0.004795231819152832, 0.004892928123474121, 0.004846144199371338, 0.004833280086517334, 0.004830880165100098, 0.004817376136779785, 0.004800032138824463, 0.004819295883178711, 0.004800672054290771, 0.004796512126922607, 0.004810495853424072, 0.004876287937164306, 0.004837376117706299, 0.004837279796600342, 0.0048026561737060545, 0.004825344085693359, 0.004798208236694336, 0.004820991992950439, 0.004802559852600098, 0.004882431983947754, 0.0047291841506958004, 0.004978271961212158, 0.004796319961547851, 0.004812928199768066, 0.00480787181854248, 0.00483619213104248, 
0.0048089919090270996, 0.004797152042388916, 0.004806943893432617, 0.004825664043426514, 0.004796576023101806, 0.004802559852600098, 0.0048570241928100584, 0.004801343917846679, 0.004839424133300781, 0.0048083200454711915, 0.004796095848083496, 0.0047909760475158695, 0.004806655883789063, 0.004838687896728516, 0.005176032066345215, 0.004835328102111816, 0.004926720142364502, 0.004932352066040039, 0.0048369278907775875, 0.0048355841636657714, 0.004838784217834472, 0.004823872089385986, 0.004848800182342529, 0.004833792209625244, 0.00480291223526001, 0.0048211197853088375, 0.004841119766235352, 0.004804927825927735, 0.004831039905548095, 0.0048434557914733885, 0.004814112186431884, 0.004823935985565186, 0.00481279993057251, 0.0048232641220092775, 0.004810431957244873, 0.004792352199554443, 0.004889984130859375, 0.004819647789001465, 0.00486300802230835, 0.004791264057159424, 0.004855519771575928, 0.00495849609375, 0.0049500160217285155, 0.004958208084106445, 0.0048269758224487305, 0.004876416206359863, 0.004888607978820801, 0.0048438401222229005, 0.004856959819793701, 0.004823488235473633, 0.004863903999328613, 0.004951295852661133, 0.004813504219055176, 0.004837024211883545, 0.00482367992401123, 0.004876319885253906, 0.00486191987991333, 0.004763775825500488, 0.004837503910064697, 0.004849535942077637, 0.004947968006134033, 0.005063007831573487, 0.004880095958709717, 0.004831168174743652, 0.004839424133300781, 0.004872096061706543, 0.0048776321411132815, 0.004823840141296386, 0.0048189439773559575, 0.004820991992950439, 0.004798463821411133, 0.0048455681800842285, 0.004829184055328369, 0.004878335952758789, 0.004818208217620849, 0.004819680213928223, 0.004890399932861328, 0.004825376033782959, 0.004830719947814942, 0.004833439826965332, 0.004899168014526367, 0.0048065919876098635, 0.004809823989868164, 0.005219232082366943, 0.004922368049621582, 0.004840447902679444, 0.004867263793945312, 0.004852128028869629, 0.004974431991577148, 0.004844096183776856, 0.004840672016143799, 0.00481279993057251, 0.004800511837005615, 0.004813600063323975, 0.004827136039733886, 0.004884672164916992, 0.004820223808288574, 0.004913472175598145, 0.004798304080963134, 0.004856224060058594, 0.004875775814056397, 0.004845183849334717, 0.00482751989364624, 0.004838175773620605, 0.004818431854248047, 0.004835103988647461, 0.004808703899383545, 0.0048685441017150876, 0.004863296031951904, 0.004840223789215088, 0.004824416160583496, 0.0048379840850830075, 0.004826816082000732, 0.004839456081390381, 0.004835519790649414, 0.004785344123840332, 0.004797311782836914, 0.004831232070922851, 0.004988287925720215, 0.004913792133331298, 0.0047820801734924315, 0.004894720077514648, 0.004829184055328369, 0.0048559679985046384, 0.004806623935699463, 0.004853024005889893, 0.004842144012451172, 0.004827072143554687, 0.004867072105407715, 0.004834080219268799, 0.004849215984344482, 0.004841856002807617, 0.004818336009979248, 0.005354432106018067, 0.004846528053283692, 0.00486297607421875, 0.004843520164489746, 0.0049081602096557615, 0.004889472007751465, 0.004935328006744385, 0.004905280113220215, 0.004861983776092529, 0.004853824138641358, 0.00486630392074585, 0.004838496208190918, 0.0048198080062866215, 0.004830592155456543, 0.004875840187072754, 0.004830111980438232, 0.0048369278907775875, 0.004827487945556641, 0.004880383968353271, 0.004905280113220215, 0.004823999881744385, 0.004810527801513672, 0.004833920001983643, 0.004808032035827637, 0.0048895998001098635, 0.004919616222381592, 0.004810175895690918, 0.004812863826751709, 
0.004827328205108642, 0.004918879985809326, 0.004838943958282471, 0.004883488178253174, 0.004806496143341064, 0.004843103885650635, 0.0048410239219665524, 0.004807168006896972, 0.0048213438987731935, 0.004829311847686767, 0.004810272216796875, 0.0047926721572875974, 0.004841567993164063, 0.004795616149902344, 0.004803552150726319, 0.004878047943115234, 0.004826464176177978, 0.00492633581161499, 0.0048412480354309085, 0.004829184055328369, 0.004868095874786377, 0.004923391819000244, 0.004818816184997559, 0.004837503910064697, 0.004840479850769043, 0.004807712078094483, 0.004860928058624267, 0.004817279815673828, 0.004856256008148194, 0.004825215816497802, 0.004821280002593994, 0.004851391792297363, 0.004822080135345459, 0.004831232070922851, 0.004886943817138672, 0.004856383800506592, 0.004848735809326172, 0.004961184024810791, 0.004856128215789795, 0.0048263039588928225, 0.004837887763977051, 0.0048496642112731934, 0.0048223681449890135, 0.004792992115020752, 0.00483241605758667, 0.004952799797058105, 0.004814976215362548, 0.004901216030120849, 0.004808351993560791, 0.004820576190948486, 0.004837471961975098, 0.004837056159973144, 0.004807104110717773, 0.004810080051422119, 0.004821856021881104, 0.004837376117706299, 0.005093311786651612, 0.004849728107452393, 0.004878464221954345, 0.004855040073394775, 0.00482367992401123, 0.004853919982910156, 0.00481990385055542, 0.004825632095336914, 0.004831999778747559, 0.004826752185821534, 0.004827040195465088, 0.004816991806030273, 0.004860191822052002, 0.004833343982696533, 0.004808191776275635, 0.004812320232391357, 0.004811391830444336, 0.004814271926879883, 0.004858431816101075, 0.004810751914978028, 0.0051363840103149415, 0.0049909758567810054, 0.004907072067260742, 0.004895008087158203, 0.004843391895294189, 0.0048364801406860355, 0.004821663856506348, 0.004851712226867676, 0.004907008171081543, 0.004811135768890381, 0.004956192016601563, 0.0048373441696166995, 0.004835360050201416, 0.004937983989715576, 0.004839136123657227, 0.004837376117706299, 0.004825088024139404, 0.004810751914978028, 0.004839424133300781, 0.004843520164489746, 0.004827136039733886, 0.004845727920532226, 0.004910943984985351, 0.004804224014282227, 0.005154304027557373, 0.0050553598403930665, 0.005570047855377197, 0.005091839790344238, 0.005136288166046142, 0.005193888187408447, 0.004902016162872314, 0.004913983821868896, 0.0050360321998596195, 0.004851583957672119, 0.005140096187591553, 0.004944543838500976, 0.004873184204101563, 0.004862847805023193, 0.004892672061920166, 0.004863296031951904, 0.004850240230560303, 0.004849760055541992, 0.004863647937774658, 0.004884191989898682, 0.00589024019241333, 0.004908927917480469, 0.004919904232025147, 0.004843520164489746, 0.0048384637832641604, 0.0048438081741333005, 0.004823135852813721, 0.004822688102722168, 0.004868864059448242, 0.004791808128356933, 0.004795040130615234, 0.004809055805206299, 0.004830239772796631, 0.004856063842773438, 0.004798848152160645, 0.004808576107025147, 0.0048100481033325196, 0.004877120018005371, 0.004820991992950439, 0.004838784217834472, 0.004815392017364502, 0.004819039821624756, 0.00481279993057251, 0.004839744091033936, 0.004822720050811768, 0.004861311912536621, 0.0048380160331726076, 0.004806816101074219, 0.004679456233978272, 0.004821407794952393, 0.00482147216796875, 0.0048496642112731934, 0.004833183765411377, 0.0048661441802978515, 0.004816895961761475, 0.004884416103363037, 0.0048429760932922365, 0.004815584182739258, 0.004863776206970215, 0.00482092809677124, 0.004826687812805176, 
0.004835487842559815, 0.004822624206542969, 0.004842336177825928, 0.0048475837707519535, 0.004818975925445557, 0.004863999843597412, 0.004845791816711425, 0.004847392082214356, 0.004839424133300781, 0.004859776020050049, 0.004810368061065674, 0.004798880100250244, 0.00483951997756958, 0.004811808109283448, 0.004815839767456055, 0.004800511837005615, 0.004845183849334717, 0.004845503807067871, 0.004839744091033936, 0.004812928199768066, 0.004818848133087158, 0.0048323521614074706, 0.004810976028442383, 0.004814655780792237, 0.004795040130615234, 0.004818655967712402, 0.004830912113189697, 0.0047578558921813965, 0.004840159893035888, 0.004796256065368652, 0.004788127899169922, 0.004816991806030273, 0.004794591903686523, 0.004818816184997559, 0.00478607988357544, 0.004780223846435547, 0.004830048084259033, 0.004852255821228027, 0.0049008002281188965, 0.004860095977783203, 0.0048269758224487305, 0.004856063842773438, 0.00482528018951416, 0.0048373441696166995, 0.004810815811157227, 0.005746304035186767, 0.004872416019439697, 0.004806303977966308, 0.00480947208404541, 0.004912896156311035, 0.004710400104522705, 0.004851712226867676, 0.00481385612487793, 0.0048273601531982424, 0.004797183990478516, 0.004810751914978028, 0.0048323521614074706, 0.004838592052459717, 0.004847328186035157, 0.004823135852813721, 0.004796319961547851, 0.004890624046325683, 0.004793824195861817, 0.00485152006149292, 0.004799488067626953, 0.004812511920928955, 0.0048371520042419435, 0.004814208030700684, 0.004807519912719727, 0.004811808109283448, 0.004828383922576904, 0.004832511901855469, 0.004895423889160156, 0.00482425594329834, 0.004809343814849853, 0.004822783946990967, 0.004853759765625, 0.004858463764190674, 0.004836319923400879, 0.0048146238327026365, 0.004789152145385742, 0.004841087818145752, 0.004837024211883545, 0.004815584182739258, 0.004873856067657471, 0.004823488235473633, 0.004827072143554687, 0.00482912015914917, 0.004812064170837402, 0.004809055805206299, 0.004832992076873779, 0.004791007995605469, 0.004832863807678223, 0.004816864013671875, 0.004876832008361817, 0.0048616318702697755, 0.004810143947601318, 0.004967232227325439, 0.004816991806030273, 0.004803616046905518, 0.004844639778137207, 0.004811679840087891, 0.004819839954376221, 0.004802432060241699, 0.004788352012634277, 0.004820991992950439, 0.004885600090026855, 0.004860703945159912, 0.004777919769287109, 0.004788288116455078, 0.004812928199768066, 0.004820543766021729, 0.004788447856903076]",tokens/s,201.52696854704882,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,838.36928,9637.39648,0.0,9242.148864,8603.568128,s,1,7.5771103515625,7.5771103515625,0.0,7.5771103515625,7.5771103515625,7.5771103515625,7.5771103515625,[7.5771103515625],,kWh,1.2212581470809407e-05,1.3398002122987354e-06,5.9325047460090374e-06,1.948488642911718e-05,,MB,1216.487424,9889.05472,0.0,9481.224192,8972.090368,s,10,1.0719497299194336,0.10719497299194336,0.008302012551692599,0.11059019088745117,0.11199024887084962,0.11226201972961426,0.11247943641662599,"[0.10413276672363281, 0.11084531402587891, 0.10690787506103516, 0.11083315277099609, 0.11034722900390626, 0.11118134307861328, 0.10990019226074219, 0.11253379058837891, 0.08333821105957032, 0.1119298553466797]",tokens/s,2388.171691775515,kWh,3.3949029875972787e-06,3.7433836348732785e-07,2.2527537401861285e-06,6.021995091270735e-06,tokens/kWh,42510828.408194534,MB,1244.884992,9893.249024,0.0,9485.418496,8972.092928,s,10,24.113931396484375,2.4113931396484376,0.0033347543767851566,2.411421142578125,2.4156163574218747,2.4166848876953124,2.4175397119140625,"[2.406288818359375, 2.40878857421875, 2.411273681640625, 2.406954345703125, 2.41148291015625, 2.412856201171875, 2.411359375, 2.41537890625, 2.411795166015625, 2.41775341796875]",tokens/s,26.12597629318333,kWh,7.044531578990048e-05,7.770005280472887e-06,4.676936881081512e-05,0.0001249846898811885,tokens/kWh,504061.7379607721,,s,630,24.110681304931674,0.038270922706240704,0.0008782322904293862,0.03810611152648926,0.03860705413818359,0.03883380527496338,0.04386638484954834,"[0.04344163131713867, 0.03990086364746094, 0.03855628967285156, 0.038123390197753904, 0.03779580688476562, 0.03776681518554687, 0.03764499282836914, 0.03774457550048828, 0.03763347244262695, 0.037787296295166015, 0.03779804611206055, 0.03770825576782227, 0.03776956939697266, 0.037895584106445314, 0.037704288482666014, 0.03771360015869141, 0.03773443222045898, 0.03780624008178711, 0.03800179290771484, 0.03775743865966797, 0.03782447814941406, 0.037833248138427734, 0.037770721435546876, 0.03780230331420899, 0.038133983612060544, 0.0386899528503418, 0.03869168090820312, 0.03866624069213867, 0.038406143188476564, 0.03820697784423828, 0.037917182922363284, 0.03773952102661133, 0.03792588806152344, 0.03813369750976563, 0.03826646423339844, 0.03801545715332031, 0.03798992156982422, 0.038090431213378906, 0.03812432098388672, 0.03811459350585938, 0.038187744140625, 0.03824025726318359, 0.03788185501098633, 0.03928473663330078, 0.03786124801635742, 0.03809084701538086, 0.03799168014526367, 0.03794585418701172, 0.038320415496826174, 0.03808870315551758, 0.038282783508300784, 0.03838000106811523, 0.03863347244262695, 0.03860889434814453, 0.03859782409667969, 0.038529857635498044, 
0.03821158218383789, 0.0379059829711914, 0.038162494659423826, 0.037953632354736325, 0.03805417633056641, 0.03805593490600586, 0.0380682258605957, 0.04390889739990234, 0.03971324920654297, 0.03872927856445312, 0.038076862335205075, 0.03775897598266602, 0.0377704963684082, 0.03778432083129883, 0.03791257476806641, 0.03793222427368164, 0.03827180862426758, 0.037850910186767575, 0.037694782257080076, 0.03772476959228516, 0.03781049728393555, 0.037781505584716796, 0.03811520004272461, 0.0379736328125, 0.03799091339111328, 0.037910526275634765, 0.037758430480957034, 0.037751327514648436, 0.03791820907592774, 0.03786393737792969, 0.03770556640625, 0.038459583282470705, 0.03836310577392578, 0.03826399993896484, 0.03842031860351563, 0.03825350570678711, 0.0383158073425293, 0.03794150543212891, 0.03809075164794922, 0.038069568634033206, 0.038202049255371094, 0.03831552124023437, 0.03814374542236328, 0.038228416442871095, 0.038031681060791016, 0.03806617736816406, 0.03789619064331055, 0.03790848159790039, 0.03806208038330078, 0.038324222564697266, 0.038340351104736325, 0.03784524917602539, 0.03795558547973633, 0.037969921112060545, 0.03797532653808594, 0.03799296188354492, 0.03820307159423828, 0.038447360992431644, 0.03876499176025391, 0.038854496002197265, 0.03846553421020508, 0.03823535919189453, 0.03832854461669922, 0.03810934448242188, 0.03809260940551758, 0.03854153442382813, 0.03883852767944336, 0.03807382583618164, 0.038261409759521484, 0.038088542938232425, 0.0468045768737793, 0.04062665557861328, 0.038690174102783206, 0.03782463836669922, 0.037985950469970706, 0.03812438583374023, 0.038061790466308594, 0.03796201705932617, 0.03796783828735351, 0.03765660858154297, 0.037697406768798826, 0.03789174270629883, 0.037742176055908204, 0.03782950210571289, 0.037904384613037106, 0.03765033721923828, 0.03793315124511719, 0.03793100738525391, 0.037720062255859374, 0.037787647247314454, 0.03810713577270508, 0.03825254440307617, 0.03788943862915039, 0.03799919891357422, 0.038076416015625, 0.03837055969238281, 0.038429439544677736, 0.03866995239257812, 0.038166175842285155, 0.03805667114257812, 0.03789158248901367, 0.03823462295532227, 0.03814153671264649, 0.03791689682006836, 0.03796604919433594, 0.03810464096069336, 0.03812598419189453, 0.03794112014770508, 0.03810508728027344, 0.03849843215942383, 0.03797401428222656, 0.03777536010742188, 0.03794729614257813, 0.03800870513916016, 0.037988319396972656, 0.0380951042175293, 0.03796713638305664, 0.03801971054077148, 0.038186496734619144, 0.03796847915649414, 0.03827302551269531, 0.03829555130004883, 0.03845939254760742, 0.03837747192382813, 0.038526912689208985, 0.038417793273925784, 0.038357505798339846, 0.03851887893676758, 0.03874211120605469, 0.038250495910644534, 0.03805593490600586, 0.03789139175415039, 0.03811196899414063, 0.04447507095336914, 0.03966265487670898, 0.03868764877319336, 0.03808585739135742, 0.03790099334716797, 0.03779593658447265, 0.03775827026367187, 0.03802758407592773, 0.03782060623168945, 0.03771104049682617, 0.0377017936706543, 0.03806499099731445, 0.037682464599609375, 0.03784777450561523, 0.03781756973266601, 0.037767967224121096, 0.03786051177978516, 0.03818083190917969, 0.038122367858886716, 0.03812102508544922, 0.03800928115844727, 0.03802659225463867, 0.03798492813110352, 0.037875614166259765, 0.0378488655090332, 0.03817824172973633, 0.038214271545410156, 0.038246654510498045, 0.03845465469360351, 0.038402687072753905, 0.0379266242980957, 0.03790415954589844, 0.037935169219970706, 0.03810070419311523, 
0.038034015655517575, 0.03796758270263672, 0.038230430603027346, 0.03840108871459961, 0.03806902313232422, 0.03790383911132812, 0.037986526489257814, 0.038088382720947264, 0.038091552734375, 0.03803327941894531, 0.03807859039306641, 0.03808870315551758, 0.03839091110229492, 0.03808345413208008, 0.03794124984741211, 0.03821353530883789, 0.03833206558227539, 0.03822022247314453, 0.038338558197021484, 0.03851651382446289, 0.03842816162109375, 0.03852975845336914, 0.03815628814697265, 0.037916576385498044, 0.03790652847290039, 0.038122943878173825, 0.03804339218139648, 0.03822041702270508, 0.038162624359130856, 0.04376230239868164, 0.03968368148803711, 0.0383656005859375, 0.0380549430847168, 0.03799043273925781, 0.03784758377075195, 0.03777753448486328, 0.03763024139404297, 0.03788780975341797, 0.03775279998779297, 0.03777763366699219, 0.03775897598266602, 0.0377262077331543, 0.037768447875976566, 0.03790719985961914, 0.03786038589477539, 0.037999584197998044, 0.03805388641357422, 0.0380682258605957, 0.03818086242675781, 0.03788595199584961, 0.037986305236816405, 0.0380948486328125, 0.038131614685058594, 0.038338558197021484, 0.03903881454467773, 0.03902899169921875, 0.038316032409667966, 0.03828902435302734, 0.038271358489990234, 0.038126655578613285, 0.038028224945068356, 0.03790777587890625, 0.038085311889648435, 0.03830108642578125, 0.03842108917236328, 0.03794739151000977, 0.038034622192382815, 0.03790111923217773, 0.037986305236816405, 0.038122718811035156, 0.03820131301879883, 0.0381407356262207, 0.03804569625854492, 0.03784662246704101, 0.03815670394897461, 0.03812761688232422, 0.038166080474853516, 0.037981697082519535, 0.038167488098144534, 0.038225887298583984, 0.03861507034301758, 0.0389769287109375, 0.0387625617980957, 0.038843936920166015, 0.03836419296264648, 0.038381534576416014, 0.03835903930664063, 0.03828531265258789, 0.0382479362487793, 0.03836886215209961, 0.03831286239624023, 0.03849132919311524, 0.044175838470458986, 0.03992620849609375, 0.03837958526611328, 0.03801417541503906, 0.03811203384399414, 0.037894142150878905, 0.03779302215576172, 0.03779865646362305, 0.0380579833984375, 0.03824156951904297, 0.037870304107666015, 0.03819472122192383, 0.03803388977050781, 0.0380682258605957, 0.038004737854003906, 0.03803340911865234, 0.0381781120300293, 0.038218433380126954, 0.038316032409667966, 0.03788595199584961, 0.03809049606323242, 0.038039806365966794, 0.038040576934814455, 0.037993183135986326, 0.03829792022705078, 0.0387454719543457, 0.03862710571289062, 0.03836191940307617, 0.038257823944091794, 0.038394718170166015, 0.038065376281738283, 0.03788438415527344, 0.038226238250732424, 0.037822463989257815, 0.03806367874145508, 0.03813011169433594, 0.038125568389892575, 0.038284481048583986, 0.03798099136352539, 0.038029312133789066, 0.0380211181640625, 0.038125568389892575, 0.0380313606262207, 0.03829759979248047, 0.03801456069946289, 0.03792057418823242, 0.038037216186523434, 0.038198143005371096, 0.038117374420166016, 0.03817062377929688, 0.03831193542480469, 0.038376670837402344, 0.03879935836791992, 0.03885862350463867, 0.038613918304443356, 0.038430721282958984, 0.038221759796142576, 0.03830585479736328, 0.038268638610839845, 0.03809462356567383, 0.03805644989013672, 0.038109184265136715, 0.038449153900146485, 0.04441084671020508, 0.03985408020019531, 0.03848134231567383, 0.037970497131347654, 0.0378342399597168, 0.03787417602539062, 0.038096736907958985, 0.03808201599121094, 0.037695648193359375, 0.037810592651367186, 0.03783283233642578, 0.037770431518554685, 
0.037826942443847655, 0.038334911346435546, 0.03800064086914062, 0.0383851203918457, 0.03796796798706055, 0.03775737762451172, 0.03802316665649414, 0.03806412887573242, 0.03793471908569336, 0.037889793395996095, 0.03784761428833008, 0.038039615631103516, 0.038141952514648435, 0.03843670272827148, 0.038469791412353516, 0.038502079010009765, 0.038326591491699216, 0.03824844741821289, 0.038182910919189454, 0.03808051300048828, 0.03824844741821289, 0.037961727142333986, 0.037918495178222655, 0.037991649627685545, 0.03834982299804687, 0.03807436752319336, 0.037904384613037106, 0.03811494445800781, 0.038349056243896486, 0.03792294311523438, 0.038299648284912106, 0.03787571334838867, 0.03802092742919922, 0.03821382522583008, 0.03803881454467774, 0.03793353652954102, 0.03806028747558594, 0.03834470367431641, 0.03837542343139649, 0.038335521697998046, 0.038585311889648435, 0.03833980941772461, 0.03839670562744141, 0.03845523071289063, 0.038413665771484376, 0.038447841644287106, 0.03828521728515625, 0.03837961578369141, 0.038563838958740236, 0.03826179122924805, 0.038388702392578126, 0.04575455856323242, 0.04010540771484375, 0.038629470825195314, 0.03806070327758789, 0.03787776184082031, 0.03807231903076172, 0.037748737335205076, 0.03782867050170898, 0.037957569122314454, 0.03783270263671875, 0.03788579177856445, 0.03774889755249024, 0.038072128295898434, 0.03791686248779297, 0.037793342590332034, 0.038154689788818356, 0.03767091369628906, 0.037867454528808596, 0.03785715103149414, 0.037873855590820314, 0.03808051300048828, 0.03801456069946289, 0.037891902923583985, 0.03821628952026367, 0.03853420639038086, 0.03838457489013672, 0.038438911437988284, 0.03874819183349609, 0.03888518524169922, 0.03860086441040039, 0.0381399040222168, 0.03827462387084961, 0.03827347183227539, 0.038352127075195315, 0.03808742523193359, 0.03794944000244141, 0.0380412483215332, 0.038037857055664065, 0.038166175842285155, 0.03801327896118164, 0.037969921112060545, 0.038133792877197266, 0.03827094268798828, 0.038250495910644534, 0.03799808120727539, 0.03804620742797851, 0.03798204803466797, 0.03801513671875, 0.038161792755126954, 0.03824294281005859, 0.03826483154296875, 0.038567615509033204, 0.038926559448242186, 0.03870115280151367, 0.03860684967041016, 0.038461505889892576, 0.038598590850830075, 0.03818038558959961, 0.038214111328125, 0.038422401428222654, 0.03829062271118164, 0.03818096160888672, 0.03869782257080078, 0.04184486389160156, 0.042041439056396485, 0.04006908798217773, 0.03896319961547851, 0.03871654510498047, 0.037931903839111325, 0.037713920593261716, 0.037705726623535156, 0.03765657424926758, 0.03774457550048828, 0.03789574432373047, 0.037943809509277344, 0.03804569625854492, 0.03800806427001953, 0.03826969528198242, 0.038258689880371094, 0.03802067184448242, 0.037939647674560546, 0.038055614471435545, 0.0380173454284668, 0.037976062774658204, 0.03828883361816406, 0.0379172477722168, 0.03796105575561524, 0.03795011138916016, 0.03790959930419922, 0.03780905532836914, 0.03790643310546875, 0.038160385131835936, 0.038238239288330075, 0.03848944091796875, 0.038179454803466795, 0.038174720764160154, 0.03806208038330078, 0.03812351989746094, 0.03814604949951172, 0.038258689880371094, 0.038063232421875, 0.03809769439697266, 0.03869091033935547, 0.0384266242980957, 0.038147167205810545, 0.03794348907470703, 0.03813040161132813, 0.03817244720458984, 0.03833391952514648, 0.03825945663452148, 0.03827916717529297, 0.038174720764160154, 0.03794944000244141, 0.038063934326171875, 0.038295616149902345, 0.03825436782836914, 
0.03814384078979492, 0.03833087921142578, 0.038319934844970704, 0.03822022247314453, 0.03812531280517578, 0.03801641464233398, 0.03783939361572266, 0.03833055877685547, 0.03829302215576172, 0.038231521606445315, 0.04493308639526367, 0.04011142349243164, 0.03869152069091797, 0.03813750457763672, 0.037970272064208985, 0.037720062255859374, 0.037774848937988284, 0.03792127990722656, 0.037891201019287106, 0.03780492782592773, 0.0379249267578125, 0.03816236877441406, 0.03813785552978516, 0.03807846450805664, 0.03804569625854492, 0.03799407958984375, 0.038080928802490234, 0.03804764938354492, 0.03792240142822265, 0.0382325439453125, 0.03834268951416016, 0.03805184173583984, 0.038093982696533205, 0.0380219841003418, 0.038365184783935545, 0.038481311798095705, 0.03863203048706055, 0.03865974426269531, 0.038702880859375, 0.03843337631225586, 0.038408161163330075, 0.038408191680908206, 0.03833446502685547, 0.03824025726318359, 0.03779135894775391, 0.0380863037109375, 0.03792300796508789, 0.03814217758178711, 0.038082111358642576, 0.03812838363647461, 0.038612991333007815, 0.038400001525878906, 0.03818905639648437, 0.038122943878173825, 0.03832889556884766, 0.03786751937866211, 0.037932289123535155, 0.037923583984375, 0.03830169677734375, 0.03853023910522461, 0.03845203018188476, 0.03862268829345703, 0.038468128204345704, 0.038828033447265625, 0.03868467330932617, 0.038542591094970706, 0.038437633514404296, 0.03843635177612305, 0.03851728057861328, 0.038481887817382814, 0.038356990814208985, 0.038288959503173826, 0.03814854431152344]",tokens/s,26.129498044136092,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.214592,1326.383104,0.0,931.135488,917.648384,s,1,7.24930126953125,7.24930126953125,0.0,7.24930126953125,7.24930126953125,7.24930126953125,7.24930126953125,[7.24930126953125],,kWh,9.300030833340618e-06,1.0187963388527686e-06,3.1194469399947744e-06,1.343827411218816e-05,,MB,1148.239872,1458.50368,0.0,1050.673152,1018.330112,s,10,0.17473472023010256,0.017473472023010257,0.00032826587233633533,0.017637968063354492,0.017802262306213377,0.017810283184051513,0.01781669988632202,"[0.01692780876159668, 0.01697609519958496, 0.017071264266967773, 0.017644447326660158, 0.017800479888916015, 0.017631488800048827, 0.017719167709350586, 0.017500288009643556, 0.01781830406188965, 0.017645376205444336]",tokens/s,14650.780317894565,kWh,5.129729545540996e-07,5.6536604880475513e-08,3.414330216842004e-07,9.109425811187756e-07,tokens/kWh,281027591.97576773,MB,1176.51456,1475.280896,0.0,1067.450368,1033.282048,s,10,10.554966979980469,1.055496697998047,0.019562259450065147,1.0613449096679688,1.0746231079101562,1.076492706298828,1.0779883850097656,"[1.0216134643554688, 1.019472412109375, 1.0551478271484376, 1.0742076416015625, 1.071249755859375, 1.0658709716796875, 1.06161083984375, 1.046352783203125, 1.0783623046875, 
1.0610789794921875]",tokens/s,59.68753869101784,kWh,2.952359973044379e-05,3.255993862737637e-06,1.4851884688515072e-05,4.7631478281696486e-05,tokens/kWh,1322654.7290305123,,s,630,10.548605572700493,0.016743818369365872,0.0004883465879551693,0.016839056015014647,0.017199705696105957,0.017319140911102295,0.017867099857330326,"[0.015735648155212403, 0.016029695510864257, 0.0160328311920166, 0.01604243278503418, 0.015974464416503905, 0.016023296356201172, 0.016036544799804688, 0.017674495697021484, 0.016321632385253908, 0.016054943084716798, 0.016043455123901366, 0.016112192153930664, 0.01601740837097168, 0.016089088439941408, 0.016088607788085938, 0.016128032684326173, 0.01617145538330078, 0.016051200866699217, 0.01614463996887207, 0.016037664413452148, 0.016076959609985352, 0.01610016059875488, 0.016183296203613282, 0.01608697509765625, 0.016150592803955078, 0.016082239151000977, 0.01610406494140625, 0.016195648193359374, 0.016128000259399415, 0.016496639251708984, 0.01596726417541504, 0.016001535415649415, 0.016041439056396486, 0.016153600692749022, 0.01612953567504883, 0.016041696548461912, 0.016371679306030274, 0.016505727767944334, 0.016426944732666017, 0.016332799911499024, 0.016385791778564453, 0.016278976440429686, 0.016296768188476564, 0.01621401596069336, 0.016183296203613282, 0.016060415267944335, 0.01600223922729492, 0.015999808311462402, 0.016017055511474608, 0.016078624725341797, 0.016155199050903322, 0.015984095573425294, 0.01602409553527832, 0.016262208938598633, 0.016014272689819337, 0.01603379249572754, 0.01602681541442871, 0.016895872116088867, 0.0175031681060791, 0.01760665512084961, 0.016152576446533205, 0.016281183242797852, 0.01628758430480957, 0.015768608093261718, 0.015981535911560058, 0.015935487747192383, 0.01598591995239258, 0.01641753578186035, 0.016683008193969725, 0.016441408157348632, 0.016324256896972655, 0.016214303970336914, 0.016297536849975584, 0.016320415496826172, 0.016322816848754883, 0.016174976348876952, 0.016126495361328125, 0.016121248245239257, 0.016017887115478517, 0.016273344039916992, 0.016175167083740234, 0.016039039611816405, 0.01624153518676758, 0.016107519149780272, 0.016018463134765626, 0.01605116844177246, 0.016006816864013673, 0.016021856307983397, 0.017209344863891602, 0.01609884834289551, 0.016044448852539063, 0.01607686424255371, 0.016088096618652344, 0.01603843116760254, 0.01605062484741211, 0.01605788803100586, 0.016019264221191407, 0.016368127822875975, 0.016300191879272462, 0.016652288436889647, 0.016696672439575195, 0.01655695915222168, 0.016360864639282227, 0.01609539222717285, 0.016152767181396483, 0.016205087661743164, 0.015977184295654298, 0.016004831314086913, 0.015994303703308104, 0.016048896789550782, 0.016133567810058595, 0.016111679077148437, 0.016028255462646485, 0.016164735794067384, 0.016054048538208007, 0.016007104873657228, 0.01603830337524414, 0.016138240814208983, 0.016111616134643555, 0.016039743423461914, 0.01643846321105957, 0.016089696884155274, 0.01625904083251953, 0.01607651138305664, 0.016032480239868165, 0.016090208053588868, 0.0157260799407959, 0.015909055709838867, 0.016009536743164063, 0.016273408889770507, 0.016209312438964844, 0.016144256591796875, 0.016095968246459962, 0.0160296630859375, 0.016172607421875, 0.016134431838989258, 0.0161112003326416, 0.016074783325195314, 0.016102079391479493, 0.0160515193939209, 0.016048704147338867, 0.016150527954101563, 0.016072704315185548, 0.016225791931152343, 0.0160317440032959, 0.01606447982788086, 0.016022048950195312, 0.01619055938720703, 0.016560415267944335, 
0.016674720764160156, 0.017380191802978517, 0.017051519393920897, 0.01705743980407715, 0.017076095581054686, 0.01695996856689453, 0.017227327346801758, 0.017111488342285156, 0.017641471862792968, 0.01706972885131836, 0.0172445125579834, 0.017063936233520507, 0.01726438331604004, 0.01700057601928711, 0.01697385597229004, 0.017112159729003908, 0.01703424072265625, 0.016903999328613282, 0.016742591857910157, 0.01728102493286133, 0.016887807846069337, 0.01721958351135254, 0.017137664794921875, 0.016859136581420898, 0.01696076774597168, 0.01703785514831543, 0.017029056549072264, 0.017166624069213866, 0.017133567810058595, 0.017176607131958007, 0.017237535476684572, 0.017308095932006835, 0.01717612838745117, 0.017160640716552735, 0.01717625617980957, 0.016970048904418944, 0.01680179214477539, 0.017479679107666016, 0.017108991622924806, 0.01713961601257324, 0.01660412788391113, 0.01707414436340332, 0.01686960029602051, 0.016716960906982423, 0.0169007682800293, 0.016889856338500975, 0.016732160568237304, 0.01656559944152832, 0.01684342384338379, 0.016699392318725585, 0.01659699249267578, 0.016658143997192384, 0.01705603218078613, 0.016842815399169923, 0.01669865608215332, 0.016691583633422852, 0.016787839889526368, 0.01712268829345703, 0.01745952033996582, 0.01720137596130371, 0.017499391555786132, 0.019490848541259764, 0.018039199829101564, 0.017139936447143556, 0.016836511611938478, 0.01686137580871582, 0.016791263580322267, 0.016924224853515624, 0.01713385581970215, 0.017101247787475585, 0.016881023406982422, 0.017132192611694335, 0.017317855834960937, 0.016922399520874022, 0.01707766342163086, 0.016802623748779298, 0.017004512786865236, 0.01697932815551758, 0.016927391052246093, 0.016970848083496092, 0.017133600234985353, 0.017019392013549805, 0.01694963264465332, 0.01701888084411621, 0.01689616012573242, 0.016933984756469726, 0.017889919281005858, 0.016887935638427734, 0.017308832168579102, 0.017179519653320312, 0.017304832458496094, 0.01675075149536133, 0.017304128646850585, 0.017059743881225584, 0.017016927719116212, 0.017035263061523438, 0.016920576095581053, 0.01691788864135742, 0.017207679748535157, 0.01695155143737793, 0.01704960060119629, 0.017016000747680664, 0.016941343307495117, 0.01683008003234863, 0.016934688568115235, 0.017101343154907227, 0.017596063613891603, 0.017020448684692383, 0.01705068778991699, 0.01714156723022461, 0.017049407958984374, 0.017006175994873047, 0.01715827178955078, 0.01697849655151367, 0.0171560001373291, 0.017129472732543945, 0.017449184417724608, 0.0171428165435791, 0.01698508834838867, 0.017168127059936523, 0.01710470390319824, 0.01690812873840332, 0.017310047149658205, 0.016863231658935548, 0.016689151763916017, 0.016855039596557618, 0.017303552627563477, 0.01717043113708496, 0.017145856857299805, 0.01717180824279785, 0.017076896667480468, 0.016988256454467773, 0.01695120048522949, 0.01707241630554199, 0.016762239456176758, 0.01715439987182617, 0.016772512435913087, 0.016833120346069336, 0.017077312469482422, 0.016853952407836915, 0.01687548828125, 0.016947328567504884, 0.016959327697753906, 0.017223743438720702, 0.017158432006835936, 0.017413856506347657, 0.016990207672119142, 0.016986112594604492, 0.016949247360229493, 0.01661948776245117, 0.01665827178955078, 0.016977407455444335, 0.01674934387207031, 0.016746400833129883, 0.016785408020019533, 0.016855039596557618, 0.01681203269958496, 0.01682841682434082, 0.017004831314086914, 0.017100511550903322, 0.016942975997924804, 0.016630975723266602, 0.01633523178100586, 0.0169451847076416, 
0.017103391647338866, 0.01703340721130371, 0.016616800308227538, 0.016629663467407227, 0.016738496780395507, 0.016527103424072265, 0.016677343368530273, 0.016856704711914063, 0.017048383712768556, 0.01727065658569336, 0.016943136215209962, 0.016651872634887696, 0.01661292839050293, 0.01673616027832031, 0.01655072021484375, 0.016924736022949218, 0.01669126319885254, 0.016363519668579102, 0.016007104873657228, 0.016673887252807617, 0.017052223205566406, 0.017199520111083985, 0.016957504272460937, 0.016935935974121095, 0.01691334342956543, 0.016670719146728515, 0.016515071868896485, 0.016594112396240233, 0.016783327102661134, 0.0168723201751709, 0.01683987236022949, 0.016950368881225586, 0.016844480514526368, 0.016918527603149415, 0.01683046340942383, 0.016702911376953126, 0.016877695083618163, 0.01683705520629883, 0.016821535110473632, 0.017025760650634766, 0.01735856056213379, 0.017192960739135742, 0.017168031692504884, 0.016917119979858397, 0.017004032135009766, 0.016648544311523437, 0.01672412872314453, 0.016930240631103516, 0.01694927978515625, 0.01683305549621582, 0.01737107276916504, 0.01671993637084961, 0.016801055908203126, 0.01647177505493164, 0.01691766357421875, 0.016973087310791016, 0.016917055130004882, 0.016871423721313478, 0.01695088005065918, 0.019883712768554686, 0.017387840270996095, 0.017113664627075195, 0.017219423294067383, 0.017301055908203126, 0.016910783767700194, 0.016424800872802733, 0.016643360137939454, 0.016883392333984375, 0.017431936264038085, 0.017148672103881837, 0.016998176574707032, 0.016732160568237304, 0.01664009666442871, 0.017137664794921875, 0.016626848220825195, 0.016675615310668947, 0.016680288314819335, 0.016858112335205077, 0.01693462371826172, 0.016982015609741212, 0.016744447708129884, 0.016644096374511717, 0.01702911949157715, 0.01699260711669922, 0.016887584686279298, 0.017104448318481444, 0.016875839233398436, 0.016975360870361327, 0.016861919403076173, 0.01681545639038086, 0.01687366485595703, 0.01711568069458008, 0.016803455352783204, 0.016765024185180662, 0.01676255989074707, 0.01688960075378418, 0.016937311172485352, 0.016838239669799804, 0.016856767654418944, 0.016697568893432616, 0.016454368591308593, 0.016539264678955078, 0.01675916862487793, 0.016584447860717774, 0.01634124755859375, 0.016725120544433595, 0.01714441680908203, 0.017104639053344726, 0.016986879348754882, 0.016903968811035158, 0.016805919647216797, 0.01687548828125, 0.016955392837524414, 0.016846975326538085, 0.016885631561279296, 0.01697587203979492, 0.01686083221435547, 0.016890207290649415, 0.016873472213745116, 0.01659699249267578, 0.016670400619506837, 0.016777151107788085, 0.016510847091674805, 0.01657907295227051, 0.0163492488861084, 0.01676691246032715, 0.018147327423095702, 0.01682431983947754, 0.016661792755126952, 0.01697455978393555, 0.016766016006469726, 0.016810815811157228, 0.016449151992797853, 0.016224544525146486, 0.016248128890991212, 0.01618217658996582, 0.016162080764770506, 0.016253664016723634, 0.016476415634155275, 0.01689356803894043, 0.016647359848022462, 0.0163624324798584, 0.01617100715637207, 0.016174240112304686, 0.016190303802490234, 0.016154624938964843, 0.016670719146728515, 0.016717824935913086, 0.016862464904785157, 0.016742719650268554, 0.016726463317871094, 0.016447488784790038, 0.016321760177612304, 0.016083711624145507, 0.01606768035888672, 0.016177951812744142, 0.0162653751373291, 0.016338815689086916, 0.01617897605895996, 0.016385759353637695, 0.01608768081665039, 0.01601068878173828, 0.01624940872192383, 0.016044031143188475, 
0.016022880554199218, 0.016165536880493166, 0.016166431427001953, 0.016058847427368163, 0.016106527328491212, 0.015979616165161133, 0.016150400161743163, 0.01598259162902832, 0.01642300796508789, 0.0165579833984375, 0.01741823959350586, 0.01635103988647461, 0.016734399795532227, 0.01706710433959961, 0.017089439392089845, 0.016941247940063478, 0.01781123161315918, 0.020127071380615234, 0.017105855941772462, 0.017173728942871093, 0.016986848831176758, 0.017149728775024416, 0.01705561637878418, 0.017591615676879883, 0.017134687423706055, 0.017103904724121093, 0.0170133113861084, 0.01682044792175293, 0.017222911834716796, 0.017117727279663087, 0.01699430465698242, 0.01697590446472168, 0.01714787292480469, 0.01700399971008301, 0.01722217559814453, 0.01730294418334961, 0.017320192337036133, 0.017471839904785156, 0.017056896209716798, 0.01705459213256836, 0.01696767997741699, 0.01697567939758301, 0.017123199462890624, 0.017099071502685546, 0.017098751068115235, 0.01721139144897461, 0.017139360427856444, 0.01708847999572754, 0.017071935653686525, 0.017068607330322266, 0.017131519317626954, 0.017242271423339842, 0.01707811164855957, 0.017257984161376954, 0.01709926414489746, 0.01710652732849121, 0.01733286476135254, 0.017198879241943358, 0.017238016128540038, 0.01705513572692871, 0.01700271987915039, 0.016953344345092772, 0.017184160232543946, 0.017066976547241212, 0.017039039611816405, 0.017090879440307617, 0.0175916805267334, 0.016969600677490235, 0.016978015899658205, 0.01715852737426758, 0.01703753662109375, 0.017006784439086913, 0.01703327941894531, 0.01719043159484863, 0.017170400619506837, 0.017434944152832033, 0.017268735885620116, 0.017196928024291992, 0.017165760040283203, 0.017553951263427733, 0.01718492889404297, 0.01679302406311035, 0.016891456604003905, 0.016806848526000978, 0.01701091194152832, 0.017106880187988283, 0.01687558364868164, 0.01675257682800293, 0.016844736099243165, 0.017045408248901366, 0.0165513916015625, 0.016777759552001954, 0.017000064849853516, 0.01718729591369629, 0.0173156795501709, 0.017073568344116212, 0.01687558364868164, 0.016775775909423828, 0.01679545593261719, 0.01692185592651367, 0.017187776565551757, 0.01690118408203125, 0.01694611167907715, 0.016781343460083007, 0.016730112075805666, 0.016586143493652342, 0.016627328872680664, 0.016755264282226564, 0.016714111328125, 0.01661952018737793, 0.01662156867980957, 0.01682784080505371, 0.017351232528686523, 0.017133663177490235, 0.01688675117492676, 0.016792768478393554, 0.01682815933227539, 0.01820240020751953, 0.017045728683471678, 0.016780799865722656, 0.016765439987182617, 0.016857311248779296, 0.016879295349121092, 0.016852575302124022, 0.016908191680908204, 0.016750688552856444, 0.016695327758789062, 0.01670569610595703, 0.01668947219848633, 0.01635327911376953, 0.01613804817199707, 0.0166627197265625, 0.017133567810058595, 0.017024896621704103, 0.016902496337890625, 0.016841535568237306, 0.01692720031738281, 0.01693084716796875, 0.017205024719238283, 0.01684883117675781, 0.016841215133666994, 0.01676313591003418, 0.016742399215698242, 0.01658470344543457, 0.016695295333862305, 0.01664204788208008, 0.016582656860351562, 0.016711679458618164, 0.016514688491821288, 0.01621343994140625, 0.016183935165405273, 0.016951616287231446, 0.017260671615600586]",tokens/s,59.723533661209466,, 
float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.255552,3354.329088,0.0,2959.081472,2942.567424,s,1,7.56006298828125,7.56006298828125,0.0,7.56006298828125,7.56006298828125,7.56006298828125,7.56006298828125,[7.56006298828125],,kWh,1.030244897499036e-05,1.129170060222218e-06,4.934170613993261e-06,1.636578964920584e-05,,MB,1145.683968,3549.364224,0.0,3141.533696,3105.830912,s,10,0.3191483516693115,0.03191483516693115,0.001310640518279943,0.03153615951538086,0.0330703311920166,0.03414887790679931,0.03501171527862549,"[0.03522742462158203, 0.03179126358032226, 0.03283065414428711, 0.032280128479003904, 0.030215551376342773, 0.032129951477050785, 0.03128105545043945, 0.03119024085998535, 0.030974496841430665, 0.031227584838867187]",tokens/s,8021.348023920134,kWh,1.1245226313461203e-06,1.239511474870338e-07,7.47360640623088e-07,1.995834419456242e-06,tokens/kWh,128267153.57967736,MB,1174.114304,3591.307264,0.0,3183.476736,3163.048448,s,10,10.659780151367189,1.0659780151367189,0.013331280476035226,1.0704387817382812,1.0778973876953126,1.08138466796875,1.0841744921875,"[1.0745426025390625, 1.0615531005859375, 1.0421826171875, 1.0848719482421876, 1.068078125, 1.0727994384765625, 1.0771224365234375, 1.0770347900390624, 1.049612060546875, 1.0519830322265624]",tokens/s,59.1006560223663,kWh,3.100471784532199e-05,3.419352761774082e-06,2.0457878545777293e-05,5.488194915287337e-05,tokens/kWh,1147918.4134753277,,s,630,10.656761390686036,0.016915494270930215,0.00042222898674190197,0.016895071983337402,0.01727531833648682,0.01764526271820068,0.01838836977005005,"[0.017031999588012697, 0.01689187240600586, 0.016939008712768554, 0.01679155158996582, 0.016696416854858398, 0.01682115173339844, 0.01739129638671875, 0.017162559509277343, 0.017147296905517577, 0.016937503814697264, 0.017070144653320313, 0.017983488082885742, 0.02095235252380371, 0.01775689506530762, 0.016965631484985352, 0.016955392837524414, 0.017227264404296876, 0.01702348709106445, 0.0169881591796875, 0.016957439422607423, 0.016889184951782227, 0.016878240585327147, 0.016842752456665038, 0.0166046085357666, 0.01683888053894043, 0.017377376556396484, 0.017266944885253908, 0.017156095504760743, 0.01695052719116211, 0.017171199798583985, 0.01697532844543457, 0.016945695877075194, 0.0176843204498291, 0.01694326400756836, 0.016904096603393554, 0.01711503982543945, 0.017010816574096678, 0.016957504272460937, 0.016916479110717773, 0.016955007553100587, 0.016845184326171873, 0.017035263061523438, 0.016721920013427736, 0.016787296295166017, 0.016965791702270507, 0.017297407150268555, 0.017165472030639648, 0.01708937644958496, 0.016875360488891603, 0.016824480056762695, 0.016746271133422853, 0.01687126350402832, 0.016902528762817382, 0.016987808227539064, 0.016943456649780274, 0.017315839767456053, 0.01680384063720703, 0.016701440811157226, 0.016682783126831056, 0.016670495986938476, 0.01677510452270508, 0.01669990348815918, 
0.01651273536682129, 0.017988576889038085, 0.017946624755859376, 0.01725644874572754, 0.0167956485748291, 0.01685215950012207, 0.016728384017944336, 0.016623775482177736, 0.01680214309692383, 0.016930112838745116, 0.016910655975341797, 0.016795616149902343, 0.01679747200012207, 0.01675436782836914, 0.016701887130737305, 0.016646656036376953, 0.016478208541870116, 0.016604991912841798, 0.017174720764160156, 0.01886207962036133, 0.017231679916381835, 0.01680512046813965, 0.01665119934082031, 0.017086463928222655, 0.017059839248657227, 0.016961536407470702, 0.01713283157348633, 0.017989952087402342, 0.016932512283325197, 0.01689574432373047, 0.01678985595703125, 0.016951904296875, 0.0170098876953125, 0.016871648788452147, 0.017111648559570314, 0.016954816818237305, 0.01687126350402832, 0.016638368606567384, 0.016671072006225585, 0.01657423973083496, 0.01688598442077637, 0.016920576095581053, 0.01678950309753418, 0.016699392318725585, 0.016596511840820314, 0.016435007095336913, 0.01685068893432617, 0.017111967086791992, 0.01699865531921387, 0.016762624740600585, 0.016764928817749023, 0.016381952285766603, 0.016467967987060548, 0.016343040466308592, 0.01689116859436035, 0.01639049530029297, 0.016391551971435547, 0.016299007415771484, 0.016354719161987306, 0.01639894485473633, 0.01655388832092285, 0.016384096145629884, 0.016355327606201172, 0.01636147117614746, 0.0179303035736084, 0.018018367767333985, 0.017269216537475585, 0.016719871520996094, 0.01657379150390625, 0.016730783462524414, 0.0165086727142334, 0.01643129539489746, 0.016281663894653322, 0.016407808303833007, 0.016366304397583006, 0.016463584899902343, 0.0162860164642334, 0.01698585510253906, 0.01793811225891113, 0.017041984558105468, 0.016537471771240233, 0.01660531234741211, 0.01647955131530762, 0.016534208297729492, 0.01647760009765625, 0.016763359069824218, 0.016468095779418945, 0.016357248306274413, 0.016410751342773436, 0.016403743743896484, 0.016253120422363283, 0.016382495880126954, 0.016541696548461913, 0.0164270076751709, 0.01643267250061035, 0.016306528091430662, 0.016342239379882814, 0.016322687149047853, 0.016378463745117186, 0.01645136070251465, 0.016404895782470702, 0.016448511123657226, 0.016583423614501953, 0.0164453125, 0.01642464065551758, 0.01643769645690918, 0.01637196731567383, 0.01638809585571289, 0.0165086727142334, 0.01634124755859375, 0.01638185691833496, 0.016351295471191407, 0.0163656005859375, 0.016363519668579102, 0.01641267204284668, 0.01651257514953613, 0.01653376007080078, 0.016476255416870117, 0.016428672790527343, 0.01634761619567871, 0.016324607849121094, 0.016361215591430663, 0.016326911926269533, 0.01633273506164551, 0.016360992431640624, 0.016343584060668947, 0.016496639251708984, 0.018335296630859376, 0.018438688278198244, 0.01752467155456543, 0.01724617576599121, 0.01708255958557129, 0.01722480010986328, 0.01723075294494629, 0.01743052864074707, 0.01719705581665039, 0.017061567306518553, 0.017178304672241212, 0.01711497688293457, 0.01721833610534668, 0.01713148880004883, 0.017141792297363283, 0.017137887954711915, 0.01722956848144531, 0.01712950325012207, 0.017130783081054687, 0.017191455841064452, 0.017178815841674806, 0.017141759872436522, 0.01715171241760254, 0.017316064834594726, 0.017086528778076173, 0.017024223327636718, 0.017257247924804688, 0.01726851272583008, 0.017135839462280273, 0.01721548843383789, 0.017154016494750977, 0.017057823181152343, 0.01739366340637207, 0.01702707290649414, 0.016873472213745116, 0.01707811164855957, 0.01718492889404297, 0.017286304473876954, 
0.017215583801269533, 0.017222400665283202, 0.01722310447692871, 0.017140031814575196, 0.017149824142456055, 0.01716876792907715, 0.017093664169311525, 0.017052640914916994, 0.01729097557067871, 0.0172956485748291, 0.017164287567138673, 0.017303552627563477, 0.0172106876373291, 0.017265344619750978, 0.017307647705078123, 0.01737049674987793, 0.017054336547851563, 0.01708624076843262, 0.01720921516418457, 0.017102624893188475, 0.01696211242675781, 0.017127424240112304, 0.016998367309570314, 0.017274911880493165, 0.017039360046386717, 0.017170751571655273, 0.01673200035095215, 0.016605279922485353, 0.01669126319885254, 0.0170250244140625, 0.016852479934692383, 0.01690880012512207, 0.016876544952392578, 0.016458751678466797, 0.016684896469116212, 0.01751030349731445, 0.01722598457336426, 0.017125375747680666, 0.016920160293579102, 0.016773536682128908, 0.01683830451965332, 0.016691551208496094, 0.016715328216552736, 0.01683705520629883, 0.017154048919677735, 0.017069984436035156, 0.016922719955444337, 0.01678335952758789, 0.01700864028930664, 0.016981407165527342, 0.016906848907470705, 0.01681612777709961, 0.017027040481567383, 0.01686300849914551, 0.016851200103759765, 0.01681612777709961, 0.016738304138183592, 0.016705535888671876, 0.01738956832885742, 0.017147903442382813, 0.017077823638916016, 0.01698975944519043, 0.01712169647216797, 0.01698396873474121, 0.01690825653076172, 0.0169531192779541, 0.016826591491699218, 0.016830528259277344, 0.017127744674682616, 0.016948991775512696, 0.016841184616088866, 0.01680588722229004, 0.01679974365234375, 0.016893951416015626, 0.01682636833190918, 0.017073152542114257, 0.016902656555175782, 0.017076736450195314, 0.017651296615600585, 0.016974016189575194, 0.016832576751708985, 0.016795711517333986, 0.016959583282470703, 0.017210752487182616, 0.01776473617553711, 0.01699660873413086, 0.016945152282714843, 0.0168407039642334, 0.018430559158325196, 0.0183110408782959, 0.017455232620239257, 0.016942848205566408, 0.017125631332397463, 0.017031328201293945, 0.017835872650146484, 0.016857088088989256, 0.01737932777404785, 0.01706598472595215, 0.016832000732421876, 0.01668070411682129, 0.01708665657043457, 0.017023712158203124, 0.017157983779907227, 0.016951295852661134, 0.01669059181213379, 0.016921087265014647, 0.01686332893371582, 0.016715776443481444, 0.016725536346435546, 0.016894432067871095, 0.016875328063964842, 0.017042816162109373, 0.01701968002319336, 0.01682579231262207, 0.017637887954711915, 0.016781408309936522, 0.016815872192382814, 0.016896352767944336, 0.017034719467163086, 0.016662847518920897, 0.016908416748046873, 0.017126432418823244, 0.01703183937072754, 0.017060096740722657, 0.016948511123657226, 0.016827167510986327, 0.016855039596557618, 0.0166495361328125, 0.01683737564086914, 0.0169769287109375, 0.016907167434692384, 0.016699392318725585, 0.016893951416015626, 0.016860736846923827, 0.016871871948242186, 0.01701273536682129, 0.016928768157958983, 0.016997888565063478, 0.016957759857177734, 0.01680179214477539, 0.017274816513061522, 0.017395967483520507, 0.017278976440429687, 0.0169881591796875, 0.017075584411621093, 0.016921215057373047, 0.016873472213745116, 0.016914432525634765, 0.01700022315979004, 0.017090272903442384, 0.016957792282104492, 0.017672191619873046, 0.018103296279907227, 0.017461759567260742, 0.017084928512573243, 0.016990207672119142, 0.016701440811157226, 0.016728000640869142, 0.01665439987182617, 0.01721507263183594, 0.017082784652709963, 0.01701251220703125, 0.016918752670288088, 0.016899999618530274, 
0.016806175231933593, 0.016821855545043944, 0.016856672286987305, 0.016799840927124023, 0.016779808044433592, 0.017005760192871092, 0.01690707206726074, 0.01688150405883789, 0.01705999946594238, 0.016885759353637696, 0.016846847534179688, 0.01705504035949707, 0.01702364730834961, 0.017399839401245117, 0.01705369567871094, 0.017121280670166016, 0.017110591888427736, 0.017119680404663086, 0.01689571189880371, 0.01692086410522461, 0.019200000762939453, 0.01750822448730469, 0.017000127792358398, 0.01704185676574707, 0.01693657684326172, 0.01710323143005371, 0.016979232788085937, 0.01716092872619629, 0.017143808364868163, 0.016947200775146484, 0.016907327651977538, 0.016870336532592775, 0.016869375228881836, 0.01680780792236328, 0.017438848495483397, 0.017147903442382813, 0.017138816833496093, 0.017052543640136718, 0.016905567169189454, 0.01685161590576172, 0.016787296295166017, 0.01693302345275879, 0.016936063766479492, 0.017022911071777343, 0.01776313591003418, 0.017092607498168946, 0.017194303512573242, 0.017003328323364257, 0.017170143127441407, 0.01795305633544922, 0.017828832626342772, 0.01779097557067871, 0.017250303268432618, 0.017133567810058595, 0.017377279281616212, 0.01794047927856445, 0.017143327713012694, 0.016941535949707032, 0.017069503784179686, 0.016873023986816407, 0.01691267204284668, 0.016980703353881837, 0.017176576614379883, 0.016855039596557618, 0.016910335540771485, 0.01702911949157715, 0.016957439422607423, 0.016883295059204103, 0.016850719451904295, 0.016949888229370116, 0.01703856086730957, 0.016846879959106446, 0.017071968078613282, 0.017210271835327147, 0.017307647705078123, 0.017122880935668945, 0.017102848052978514, 0.01718726348876953, 0.01687318420410156, 0.016906496047973632, 0.016981504440307618, 0.017201663970947266, 0.01708598327636719, 0.01691257667541504, 0.017131839752197266, 0.017104000091552735, 0.0170578556060791, 0.016902080535888674, 0.016995199203491212, 0.017303552627563477, 0.01696767997741699, 0.016979936599731446, 0.01708812713623047, 0.017222047805786133, 0.017166080474853514, 0.017182592391967774, 0.01702911949157715, 0.0169781436920166, 0.016879167556762695, 0.016869951248168945, 0.016805728912353515, 0.017049472808837892, 0.0174902400970459, 0.017811264038085937, 0.01711123275756836, 0.017096607208251954, 0.016934112548828126, 0.01709129524230957, 0.01696169662475586, 0.017067743301391602, 0.01700182342529297, 0.0169051513671875, 0.016866687774658204, 0.019163232803344726, 0.017799072265625, 0.017244319915771484, 0.01669728088378906, 0.016480255126953124, 0.016453632354736326, 0.016504703521728517, 0.016608896255493163, 0.01659942436218262, 0.016492671966552734, 0.01639219284057617, 0.01640652847290039, 0.016430368423461916, 0.01635606384277344, 0.01654374313354492, 0.01642239952087402, 0.016810207366943358, 0.016607168197631837, 0.01649660873413086, 0.01666249656677246, 0.01671824073791504, 0.016508352279663085, 0.016453407287597657, 0.016501535415649415, 0.016571744918823243, 0.016437631607055664, 0.01643721580505371, 0.016569664001464843, 0.016703968048095704, 0.016475744247436523, 0.016669631958007813, 0.01789952087402344, 0.017153247833251953, 0.01656015968322754, 0.016595712661743166, 0.01685443115234375, 0.01656278419494629, 0.016500736236572267, 0.01645676803588867, 0.01632352066040039, 0.016465919494628906, 0.016359199523925783, 0.016533727645874022, 0.01643929672241211, 0.01642073631286621, 0.01641484832763672, 0.016627712249755858, 0.01636966323852539, 0.016529407501220703, 0.016582656860351562, 0.016480255126953124, 
0.016440959930419923, 0.016474496841430663, 0.016459775924682618, 0.017380447387695314, 0.01656515121459961, 0.016506879806518555, 0.01656399917602539, 0.01673561668395996, 0.016615776062011717, 0.016707071304321287, 0.016671743392944336, 0.01683612823486328, 0.01841004753112793, 0.01769808006286621, 0.01730838394165039, 0.0168222713470459, 0.016555200576782225, 0.016509759902954103, 0.016529375076293946, 0.01652720069885254, 0.01664224052429199, 0.01664156723022461, 0.016677343368530273, 0.0165928955078125, 0.016664575576782227, 0.016664575576782227, 0.016669824600219728, 0.01663680076599121, 0.016576351165771483, 0.01648361587524414, 0.016503488540649414, 0.01645136070251465, 0.016394655227661134, 0.01641164779663086, 0.01640732765197754, 0.016422719955444337, 0.016480672836303712, 0.016465919494628906, 0.01643110466003418, 0.016521215438842773, 0.01659859275817871, 0.016984031677246093, 0.016822399139404295, 0.01678505516052246, 0.01688137626647949, 0.01657881546020508, 0.016454368591308593, 0.016434431076049805, 0.016530176162719727, 0.0167587833404541, 0.016738304138183592, 0.01683456039428711, 0.01681612777709961, 0.01696870422363281, 0.01664102363586426, 0.016781312942504883, 0.016711679458618164, 0.016639999389648438, 0.016711679458618164, 0.016736255645751954, 0.016739936828613283, 0.01667487907409668, 0.01659529685974121, 0.016616800308227538, 0.016704160690307616, 0.016687103271484375, 0.016756479263305663, 0.01671603202819824, 0.01658576011657715, 0.016720863342285158, 0.016689151763916017, 0.016685056686401366, 0.016694911956787108, 0.0166808967590332, 0.01665273666381836]",tokens/s,59.11739757546015,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 955, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 707, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 707, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 504, in __init__ self.self_attn = MISTRAL_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 196, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, 
in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 162, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in 
your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 
557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 164, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,811.880448,12523.077632,0.0,12127.830016,12122.08896,s,1,7.14912158203125,7.14912158203125,0.0,7.14912158203125,7.14912158203125,7.14912158203125,7.14912158203125,[7.14912158203125],,kWh,1.16733577624989e-05,1.2804078687359217e-06,6.025560375999724e-06,1.8979326007234543e-05,,MB,1144.537088,12697.141248,0.0,12289.31072,12248.586752,s,10,1.830730926513672,0.1830730926513672,0.004234504773003929,0.1828845748901367,0.18805665130615234,0.18874602279663086,0.18929751998901367,"[0.17489613342285157, 0.1831339569091797, 0.18228807067871095, 0.1866171875, 0.1813321533203125, 0.18943539428710937, 0.18263519287109375, 0.18790345764160157, 0.1776706848144531, 0.18481869506835938]",tokens/s,1398.3485846689125,kWh,5.5184299355345865e-06,6.085838316859708e-07,3.665511318150949e-06,9.792525085371505e-06,tokens/kWh,26142388.99243912,MB,1185.230848,12705.529856,0.0,12297.699328,12248.589312,s,10,33.465481201171876,3.3465481201171867,0.0032630793536641643,3.3474332275390624,3.3502978515625,3.3506911621093747,3.3510058105468747,"[3.341596923828125, 3.343175048828125, 3.341919189453125, 3.34682958984375, 3.345089111328125, 3.34810986328125, 3.348036865234375, 3.35021044921875, 3.3494296875, 3.35108447265625]",tokens/s,18.8253680325965,kWh,9.774854880654882e-05,1.0781801465979355e-05,6.493179355964916e-05,0.00017346214383217733,tokens/kWh,363191.6371387165,,s,630,33.46292974090575,0.053115761493501217,0.00048685466314468635,0.053035871505737305,0.053312567901611325,0.05350089111328125,0.05635084976196289,"[0.056864990234375, 0.05423715209960937, 0.05326361465454101, 0.05293033599853516, 0.05284262466430664, 0.05283107376098633, 0.05295718383789062, 0.05289363098144531, 0.05282003021240234, 0.05274323272705078, 0.05282502365112305, 0.052751937866210935, 0.05267910385131836, 0.05283225631713867, 0.052779006958007815, 0.05272371292114258, 
0.05263679885864258, 0.05282438278198242, 0.0535107192993164, 0.05336886215209961, 0.05309369659423828, 0.05307254409790039, 0.0529705924987793, 0.05297449493408203, 0.05297532653808594, 0.05300252914428711, 0.05307392120361328, 0.05286297607421875, 0.05279334259033203, 0.05274166488647461, 0.05284457778930664, 0.052873119354248044, 0.05281644821166992, 0.052743839263916015, 0.05269945526123047, 0.05284403228759765, 0.05301708984375, 0.05314963150024414, 0.0530203857421875, 0.05301283264160156, 0.052944896697998046, 0.052891681671142575, 0.053026782989501954, 0.05299609756469727, 0.052916160583496095, 0.05293231964111328, 0.05296572875976562, 0.05313238525390625, 0.05309123229980469, 0.053052703857421876, 0.05297020721435547, 0.052985855102539066, 0.05288959884643555, 0.05286902236938477, 0.05313455963134766, 0.053162879943847656, 0.05311283111572265, 0.05309772872924805, 0.05302899169921875, 0.05316636657714844, 0.05294457626342773, 0.05305001449584961, 0.05308415985107422, 0.056467041015625, 0.054403072357177736, 0.0535280647277832, 0.05318713760375977, 0.05289388656616211, 0.05288505554199219, 0.05284713745117187, 0.052832321166992186, 0.05285014343261719, 0.05286540985107422, 0.05286707305908203, 0.05284220886230469, 0.0527400016784668, 0.05282870483398437, 0.052762622833251956, 0.052715137481689454, 0.052660606384277345, 0.05270281600952149, 0.0529453125, 0.05316201782226562, 0.05312905502319336, 0.05314524841308594, 0.05304774475097656, 0.0531388168334961, 0.05305001449584961, 0.053055423736572266, 0.05290195083618164, 0.05292851257324219, 0.05284211349487305, 0.05306729507446289, 0.05284540939331055, 0.05289507293701172, 0.05293942260742188, 0.05271862411499023, 0.05274012756347656, 0.05263622283935547, 0.05269334411621094, 0.05305347061157226, 0.053233665466308595, 0.05320899200439453, 0.05298799896240235, 0.05314889526367188, 0.053203102111816405, 0.05304383850097656, 0.052822017669677736, 0.05291212844848633, 0.052975582122802733, 0.053225440979003905, 0.05330281448364258, 0.0531495361328125, 0.05318931198120117, 0.052929920196533205, 0.052974208831787106, 0.05295308685302735, 0.05294675064086914, 0.053211326599121096, 0.0535470085144043, 0.05303868865966797, 0.05334675216674805, 0.05318473434448242, 0.05291795349121094, 0.052813888549804684, 0.052851776123046874, 0.055715904235839844, 0.05353148651123047, 0.05297308731079101, 0.05286345672607422, 0.052832286834716795, 0.052912094116210937, 0.0530247688293457, 0.05293033599853516, 0.05299836730957031, 0.05322668838500977, 0.053012287139892575, 0.052853759765625, 0.0528524169921875, 0.052832576751708986, 0.052752384185791014, 0.05267865753173828, 0.05270249557495117, 0.05302345657348633, 0.05322956848144531, 0.05341603088378906, 0.053122974395751955, 0.05295820617675781, 0.052867488861083986, 0.05286137771606445, 0.05292252731323242, 0.053059585571289064, 0.052918270111083986, 0.053036064147949216, 0.05284534454345703, 0.05294918441772461, 0.053020160675048826, 0.05310310363769531, 0.05292031860351563, 0.05293056106567383, 0.05274012756347656, 0.052714656829833985, 0.05262828826904297, 0.053100543975830077, 0.05335836791992187, 0.05317244720458984, 0.05294899368286133, 0.052993759155273434, 0.05287964630126953, 0.05287321472167969, 0.052891647338867184, 0.05290115356445312, 0.05309513473510742, 0.053143520355224606, 0.05323574447631836, 0.053161087036132815, 0.05330745697021484, 0.05301536178588867, 0.05293625640869141, 0.05288924789428711, 0.053297119140625, 0.05317715072631836, 0.05311078262329102, 0.05330739212036133, 
0.05312220764160156, 0.05306777572631836, 0.0529252815246582, 0.052866943359375, 0.05293587112426758, 0.056655902862548825, 0.05414064025878906, 0.05344076919555664, 0.052944896697998046, 0.05295513534545898, 0.05284659194946289, 0.05285820770263672, 0.05287923049926758, 0.053007102966308596, 0.05298179244995117, 0.052980960845947264, 0.05284124755859375, 0.05298518371582031, 0.052865089416503905, 0.052811809539794925, 0.05284272003173828, 0.05272143936157227, 0.05312160110473633, 0.05324380874633789, 0.05327881622314453, 0.05345280075073242, 0.05321318435668945, 0.05305475234985352, 0.053080223083496095, 0.053084735870361326, 0.05314713668823242, 0.05302937698364258, 0.052989761352539064, 0.05285014343261719, 0.05298454284667969, 0.05296131134033203, 0.053069793701171875, 0.05297484970092774, 0.052996864318847654, 0.05294879913330078, 0.05306745529174805, 0.05318502426147461, 0.05300428771972656, 0.05295487976074219, 0.05323769760131836, 0.05321964645385742, 0.05329919815063477, 0.053231616973876954, 0.05295446395874023, 0.052955806732177736, 0.052803585052490234, 0.052951038360595705, 0.05320294570922852, 0.053082111358642575, 0.05303039932250977, 0.053142017364501956, 0.052942718505859375, 0.052975265502929685, 0.05321980667114258, 0.05318860626220703, 0.05314329528808594, 0.05297097778320312, 0.05296412658691406, 0.05295878219604492, 0.053193153381347655, 0.053180065155029294, 0.05315209579467774, 0.05312307357788086, 0.056371200561523435, 0.0541736946105957, 0.053305343627929686, 0.052893695831298826, 0.05288259124755859, 0.052873119354248044, 0.05304991912841797, 0.05293494415283203, 0.053004383087158206, 0.053008384704589843, 0.053135551452636716, 0.05288473510742187, 0.052789825439453125, 0.05284572982788086, 0.0528306884765625, 0.052687232971191406, 0.05269475173950195, 0.0530366096496582, 0.05316886520385742, 0.05358182525634766, 0.053571456909179686, 0.05307609558105469, 0.05293868637084961, 0.052813919067382815, 0.05291823959350586, 0.05297971343994141, 0.05294870376586914, 0.05304361724853516, 0.053026687622070315, 0.053016574859619144, 0.05291382217407226, 0.05283260726928711, 0.05283379364013672, 0.052942943572998044, 0.0528633918762207, 0.05272576141357422, 0.053030815124511715, 0.05316182327270508, 0.053106945037841795, 0.053305343627929686, 0.05326623916625976, 0.05307411193847656, 0.052852607727050784, 0.05303091049194336, 0.05287097549438476, 0.0529279670715332, 0.05338556671142578, 0.05338982391357422, 0.05334988784790039, 0.05313897705078125, 0.052910465240478516, 0.05277308654785156, 0.05284668731689453, 0.05298614501953125, 0.053300926208496094, 0.05319712066650391, 0.05299776077270508, 0.05318899154663086, 0.05328236770629883, 0.053182910919189454, 0.05289779281616211, 0.05291417694091797, 0.05286502456665039, 0.05668511962890625, 0.05410153579711914, 0.05316806411743164, 0.053093921661376955, 0.05300060653686523, 0.05288175964355469, 0.05303116989135742, 0.05304115295410156, 0.053026782989501954, 0.053120094299316405, 0.05307865524291992, 0.05292678451538086, 0.052665950775146485, 0.05266435241699219, 0.05273977661132812, 0.052798145294189455, 0.05281302261352539, 0.05330115127563476, 0.053334911346435546, 0.05331148910522461, 0.05330944061279297, 0.05312307357788086, 0.05293260955810547, 0.052910079956054686, 0.05291334533691406, 0.05299020767211914, 0.05304787063598633, 0.05302067184448242, 0.05297356796264648, 0.05306140899658203, 0.053037120819091794, 0.052908191680908205, 0.05283996963500977, 0.05287369537353516, 0.05329103851318359, 0.053086177825927734, 
0.052995326995849606, 0.05334092712402344, 0.0532880973815918, 0.05337990570068359, 0.052999488830566405, 0.05312176132202148, 0.053174304962158206, 0.05305750274658203, 0.05294899368286133, 0.05293414306640625, 0.05320755386352539, 0.05317196655273437, 0.05307606506347656, 0.05309455871582031, 0.053136608123779294, 0.05298278427124024, 0.05337644958496094, 0.053131168365478515, 0.05307027053833008, 0.05336678314208984, 0.05322137451171875, 0.05305865478515625, 0.05299609756469727, 0.05323196792602539, 0.053406272888183594, 0.053016609191894534, 0.0529653434753418, 0.05629951858520508, 0.05410380935668945, 0.05306803131103516, 0.05288550567626953, 0.05289539337158203, 0.052807838439941406, 0.052887649536132814, 0.052948577880859375, 0.053067905426025394, 0.0530456314086914, 0.05298755264282227, 0.05303932952880859, 0.05304537582397461, 0.05297151947021484, 0.0528353271484375, 0.0527534065246582, 0.052726879119873046, 0.05296419143676758, 0.053340160369873046, 0.05351007843017578, 0.05327990341186523, 0.053128158569335934, 0.0532644157409668, 0.053323486328125, 0.053114433288574216, 0.05308803176879883, 0.053025760650634766, 0.05293868637084961, 0.052983070373535154, 0.05329987335205078, 0.052865089416503905, 0.05293027114868164, 0.05305168151855469, 0.0530794563293457, 0.05292092895507813, 0.05306547164916992, 0.053096702575683594, 0.053065727233886716, 0.05304076766967773, 0.05316032028198242, 0.05304528045654297, 0.053357601165771484, 0.05313017654418945, 0.05288140869140625, 0.052883678436279294, 0.05304467010498047, 0.053182815551757814, 0.05312239837646485, 0.05307459259033203, 0.05325619125366211, 0.053147647857666014, 0.05316534423828125, 0.05300457763671875, 0.052964832305908205, 0.05299708938598633, 0.0532715835571289, 0.05310870361328125, 0.05336576080322265, 0.053192577362060546, 0.05320512008666992, 0.05330905532836914, 0.053117313385009766, 0.053028865814208986, 0.056301025390625, 0.053897727966308595, 0.053232929229736325, 0.052969566345214845, 0.052908382415771484, 0.052920639038085936, 0.05289267349243164, 0.05292035293579102, 0.05302479934692383, 0.05304617691040039, 0.0529958381652832, 0.05307398223876953, 0.05302076721191406, 0.05292246246337891, 0.05287936019897461, 0.05308415985107422, 0.0528056640625, 0.05342819213867187, 0.05339136123657227, 0.05354694366455078, 0.05331155014038086, 0.05326230239868164, 0.053053184509277346, 0.053269985198974606, 0.05316444778442383, 0.05303443145751953, 0.05299708938598633, 0.053098400115966796, 0.05294646453857422, 0.05297932815551758, 0.05306816101074219, 0.053008159637451174, 0.05287401580810547, 0.05287526321411133, 0.05312102508544922, 0.0531082878112793, 0.05306780624389648, 0.053090721130371096, 0.053106334686279295, 0.05354025650024414, 0.05325423812866211, 0.05323980712890625, 0.05317510223388672, 0.05299929428100586, 0.05294172668457031, 0.05297356796264648, 0.05306163024902344, 0.05306367874145508, 0.053267616271972656, 0.0533287353515625, 0.053245281219482424, 0.05309036636352539, 0.05287587356567383, 0.05310579299926758, 0.05298995208740234, 0.0532383041381836, 0.05329955291748047, 0.05344460678100586, 0.05337059020996094, 0.05336297607421875, 0.053139041900634766, 0.05319071960449219, 0.05306531143188477, 0.05669683074951172, 0.054657024383544923, 0.05362649536132812, 0.05325372695922852, 0.052972320556640626, 0.05289363098144531, 0.05284159851074219, 0.052951999664306644, 0.05321830368041992, 0.053064449310302735, 0.052977279663085936, 0.052783744812011715, 0.052891326904296876, 0.05301279830932617, 
0.053036449432373046, 0.052902496337890625, 0.05290595245361328, 0.05300537490844726, 0.053197792053222656, 0.053321727752685545, 0.053294654846191405, 0.05330579376220703, 0.05320054244995117, 0.05300259017944336, 0.05296332931518555, 0.05297257614135742, 0.052902240753173825, 0.05290252685546875, 0.05300390243530274, 0.053026206970214845, 0.053023712158203125, 0.05299817657470703, 0.052985824584960935, 0.05289295959472656, 0.05297020721435547, 0.0530513916015625, 0.053130592346191406, 0.05303567886352539, 0.053016574859619144, 0.053065727233886716, 0.05317622375488281, 0.05336687850952149, 0.05322956848144531, 0.05308006286621094, 0.053034526824951175, 0.05285279846191406, 0.05295759963989258, 0.053130817413330075, 0.053262367248535156, 0.053604766845703124, 0.05313689422607422, 0.05301094436645508, 0.053144607543945316, 0.05307468795776367, 0.05312124633789062, 0.053298686981201174, 0.053175807952880856, 0.05325904083251953, 0.05315804672241211, 0.05306540679931641, 0.05309686279296875, 0.05291999816894531, 0.05304899215698242, 0.05680332946777344, 0.05424284744262695, 0.05320751953125, 0.05307526397705078, 0.0529488639831543, 0.05304198455810547, 0.053000190734863284, 0.05297971343994141, 0.053133312225341796, 0.05317987060546875, 0.05297788619995117, 0.05281209564208984, 0.052838401794433595, 0.053040321350097654, 0.05312172698974609, 0.052932769775390624, 0.052889568328857425, 0.05323980712890625, 0.05367193603515625, 0.05363302230834961, 0.053123104095458985, 0.05322963333129883, 0.053041057586669924, 0.052870559692382815, 0.0529923210144043, 0.0530926399230957, 0.05303894424438477, 0.053097919464111326, 0.05287395095825195, 0.05286707305908203, 0.05289779281616211, 0.05301769638061524, 0.053095329284667966, 0.05312102508544922, 0.05290393447875977, 0.052981311798095704, 0.053106590270996096, 0.05324857711791992, 0.05337651062011719, 0.053281246185302736, 0.05300223922729492, 0.05304729461669922, 0.05309215927124023, 0.05313532638549805, 0.05302908706665039, 0.05312921524047851, 0.05320867156982422, 0.0531583023071289, 0.053110305786132815, 0.05327283096313477, 0.053357822418212894, 0.053049343109130856, 0.05308063888549805, 0.05295759963989258, 0.053171199798583986, 0.05327974319458008, 0.05348966217041016, 0.053471233367919924, 0.05323980712890625, 0.05322684860229492, 0.05314345550537109, 0.053108959197998046, 0.05303350448608399]",tokens/s,18.826803417331245,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) 
ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 162, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 982, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 555, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 219, in __init__ self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, 
in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.727168,806.289408,0.0,411.041792,391.374848,s,1,7.34471484375,7.34471484375,0.0,7.34471484375,7.34471484375,7.34471484375,7.34471484375,[7.34471484375],,kWh,4.979785358329991e-06,5.420543576206471e-07,1.008334140004119e-06,6.530173855954757e-06,,MB,1157.623808,881.78688,0.0,473.956352,454.832128,s,18,0.17947494411468504,0.009970830228593614,0.00041730724994131976,0.009976736068725586,0.010160755348205566,0.010392897748947142,0.011255180959701535,"[0.010007264137268067, 0.009718848228454589, 0.01008521556854248, 0.010202688217163086, 0.009753952026367188, 0.009642751693725585, 0.009979840278625488, 0.009712639808654786, 0.009546496391296387, 0.009600768089294434, 0.009583616256713867, 0.010026559829711915, 0.009973631858825684, 0.010042240142822265, 0.009878175735473633, 0.010106719970703126, 0.011470751762390137, 0.010142784118652343]",tokens/s,25674.89307619159,kWh,2.869601984267479e-07,3.164608360747545e-08,1.79733564523107e-07,4.983398465573304e-07,tokens/kWh,513705660.4414013,MB,1186.168832,909.049856,0.0,501.219328,454.834688,s,18,10.204578491210937,0.5669210272894964,0.010994312838018253,0.5672750854492188,0.5786530029296875,0.5795169067382813,0.5801510864257813,"[0.5803096313476562, 0.5649026489257812, 0.5646107788085938, 0.5665227661132812, 0.5680274047851562, 0.57097021484375, 0.5630609741210938, 0.5465955810546875, 0.5445765380859375, 0.546088134765625, 0.5648844604492187, 0.5793770141601563, 0.5783427124023437, 0.5777233276367187, 0.5779256591796875, 0.5652088012695312, 0.57047607421875, 0.5749757690429688]",tokens/s,111.12658900871787,kWh,1.611264126638789e-05,1.7769285780013281e-06,7.406182998032452e-06,2.529575284242167e-05,tokens/kWh,2490536.6680507436,,s,1134,10.194637586593622,0.008989980235091384,0.0002737373905640035,0.009007359981536865,0.00931346254348755,0.009394726514816284,0.009587025899887087,"[0.009422847747802734, 0.009308064460754394, 0.009404512405395507, 0.009428159713745117, 0.009292608261108398, 0.00938361644744873, 0.009435456275939941, 0.009379743576049804, 0.009396320343017578, 0.009295647621154784, 0.009268896102905273, 0.009192000389099121, 0.009263104438781738, 0.009144319534301757, 0.009259008407592773, 0.009332736015319825, 0.009464159965515136, 0.009314208030700684, 0.009199359893798827, 0.009211903572082519, 0.009334783554077148, 0.009252863883972168, 0.009203488349914551, 0.009160672187805176, 0.00925705623626709, 0.009261216163635254, 0.009248671531677246, 0.009336928367614745, 
0.009347295761108399, 0.009195296287536622, 0.009383935928344727, 0.009297599792480469, 0.00915443229675293, 0.009131775856018067, 0.00902233600616455, 0.009092960357666015, 0.00908022403717041, 0.009120320320129395, 0.009250240325927734, 0.009259584426879883, 0.009268608093261719, 0.009378432273864746, 0.009406463623046875, 0.009397695541381836, 0.009359840393066407, 0.009346367835998536, 0.00921455955505371, 0.009047391891479493, 0.008957056045532227, 0.008928383827209473, 0.008945247650146485, 0.009150527954101562, 0.008868800163269044, 0.008819968223571777, 0.008755776405334472, 0.008980287551879882, 0.009119808197021484, 0.00893779182434082, 0.009252863883972168, 0.009072575569152832, 0.009037887573242188, 0.00888764762878418, 0.008878751754760742, 0.00851417636871338, 0.008738816261291504, 0.008873984336853028, 0.008887583732604981, 0.008769439697265626, 0.008719200134277344, 0.008769503593444824, 0.00890675163269043, 0.008815711975097656, 0.008790528297424317, 0.008802720069885254, 0.008859199523925781, 0.008688063621520995, 0.00896992015838623, 0.009326911926269532, 0.009486335754394531, 0.009266655921936035, 0.00935968017578125, 0.00910051155090332, 0.00890777587890625, 0.00890006446838379, 0.008946208000183106, 0.008869888305664063, 0.008855839729309083, 0.008856448173522949, 0.008761823654174805, 0.008763039588928222, 0.00882051181793213, 0.008808480262756348, 0.008811424255371094, 0.008935423851013183, 0.009224287986755371, 0.009137503623962403, 0.009251392364501954, 0.009261152267456055, 0.009158464431762695, 0.0088472318649292, 0.008841440200805665, 0.008857407569885255, 0.008853280067443847, 0.00895631980895996, 0.00889241600036621, 0.008771871566772461, 0.008901663780212402, 0.00903222370147705, 0.009186944007873536, 0.008790559768676758, 0.008695808410644532, 0.008619647979736328, 0.008677696228027344, 0.008582655906677245, 0.009029472351074219, 0.009464832305908203, 0.009360383987426758, 0.009278335571289063, 0.009143808364868163, 0.00928598403930664, 0.008988672256469727, 0.009065567970275879, 0.009099712371826172, 0.00906611156463623, 0.009079551696777344, 0.009041248321533203, 0.009126879692077637, 0.009217568397521973, 0.009429696083068848, 0.009115424156188965, 0.009019583702087402, 0.00899401569366455, 0.008919648170471191, 0.008761343955993652, 0.008764863967895508, 0.008771295547485352, 0.008888319969177246, 0.008825695991516114, 0.008867839813232421, 0.008828831672668456, 0.008788064002990722, 0.008734047889709473, 0.008690336227416991, 0.008882176399230958, 0.008814592361450196, 0.008681471824645997, 0.008668928146362305, 0.008591775894165038, 0.008654687881469727, 0.008755200386047364, 0.009107711791992188, 0.009412351608276368, 0.009325823783874511, 0.009179648399353027, 0.009319680213928223, 0.009153023719787597, 0.009011712074279785, 0.008804351806640624, 0.008818752288818359, 0.008839167594909669, 0.008951744079589844, 0.008853471755981446, 0.00882652759552002, 0.008806912422180176, 0.008750975608825684, 0.008720383644104004, 0.00908083152770996, 0.008998175621032715, 0.008946399688720703, 0.008967840194702148, 0.009050463676452637, 0.0090862398147583, 0.0090447998046875, 0.008990912437438965, 0.009237567901611328, 0.009147040367126465, 0.00903708839416504, 0.008970303535461426, 0.009054688453674317, 0.009038335800170898, 0.009017024040222168, 0.009053631782531739, 0.00890937614440918, 0.008950048446655274, 0.00892467212677002, 0.008765151977539062, 0.00876540756225586, 0.00897439956665039, 0.009310751914978028, 0.009320096015930176, 0.009319135665893555, 
0.009262944221496582, 0.009048128128051757, 0.008931008338928223, 0.009074175834655761, 0.00903446388244629, 0.008959296226501465, 0.008919520378112793, 0.008843487739562988, 0.008808639526367188, 0.008832832336425782, 0.008928832054138183, 0.00927519989013672, 0.009138879776000976, 0.008984512329101562, 0.008951871871948243, 0.008849344253540038, 0.008914943695068359, 0.008779135704040528, 0.00875331211090088, 0.008791520118713378, 0.008985152244567871, 0.008927680015563965, 0.008838303565979004, 0.008712512016296386, 0.008669599533081055, 0.008783391952514649, 0.008824895858764649, 0.008748831748962403, 0.008648960113525391, 0.00857369613647461, 0.008547167778015136, 0.008759488105773925, 0.008836959838867188, 0.009403264045715332, 0.009566207885742188, 0.009387264251708985, 0.009312864303588866, 0.009058655738830567, 0.008939328193664551, 0.009005311965942384, 0.009016160011291505, 0.008985088348388673, 0.008960639953613281, 0.008843168258666993, 0.009076607704162598, 0.008971551895141601, 0.009207615852355957, 0.009349023818969727, 0.009083392143249512, 0.009055007934570312, 0.009034815788269044, 0.009033632278442384, 0.00913702392578125, 0.0091278076171875, 0.009023327827453614, 0.009005215644836426, 0.009001055717468261, 0.009027487754821777, 0.00901910400390625, 0.008923423767089845, 0.008877344131469726, 0.008674719810485839, 0.00886025619506836, 0.00876364803314209, 0.00872217559814453, 0.00871014404296875, 0.008661151885986328, 0.008626015663146972, 0.0086080961227417, 0.00921987247467041, 0.009463680267333985, 0.009400383949279785, 0.00933071994781494, 0.009267040252685547, 0.00900102424621582, 0.009107456207275391, 0.009108672142028809, 0.009007136344909667, 0.008989695549011231, 0.009013119697570801, 0.008863648414611817, 0.008843263626098634, 0.008810336112976075, 0.009181216239929199, 0.009477472305297852, 0.009083680152893066, 0.008939519882202148, 0.008826784133911133, 0.008931424140930176, 0.008957951545715333, 0.009009152412414552, 0.008838399887084961, 0.008782272338867187, 0.008895808219909668, 0.00899772834777832, 0.009034048080444336, 0.008853119850158691, 0.008792384147644043, 0.008717311859130859, 0.00887833595275879, 0.008987584114074708, 0.009283295631408691, 0.008988672256469727, 0.009211135864257812, 0.009054783821105958, 0.009128128051757813, 0.009234687805175782, 0.009381407737731934, 0.009455615997314454, 0.00926534366607666, 0.00926681613922119, 0.00925654411315918, 0.00926585578918457, 0.009109951972961427, 0.009011936187744141, 0.008954560279846192, 0.008942943572998047, 0.008971199989318848, 0.008996864318847657, 0.008897695541381836, 0.008835647583007812, 0.009015071868896484, 0.008942303657531738, 0.008836992263793945, 0.008502176284790039, 0.00880784034729004, 0.009112064361572265, 0.010286944389343262, 0.010262528419494628, 0.010143744468688964, 0.009098943710327148, 0.0089965763092041, 0.009091903686523437, 0.008937248229980469, 0.008914943695068359, 0.008955904006958008, 0.009111488342285156, 0.00916431999206543, 0.00895798397064209, 0.008710783958435058, 0.008710016250610351, 0.008727999687194824, 0.009170880317687988, 0.009484928131103515, 0.009469311714172363, 0.00938976001739502, 0.009464832305908203, 0.009243583679199219, 0.009184479713439941, 0.009045568466186524, 0.008847583770751953, 0.008765439987182617, 0.008784992218017578, 0.008778656005859375, 0.009676223754882812, 0.008823712348937989, 0.008779328346252441, 0.008757344245910645, 0.008964096069335938, 0.008978336334228516, 0.00904412841796875, 0.008837056159973145, 
0.008766528129577636, 0.00904854393005371, 0.009265536308288575, 0.009134176254272462, 0.008972384452819825, 0.008789919853210449, 0.008705663681030273, 0.008704000473022461, 0.008914336204528809, 0.008825632095336914, 0.00870969581604004, 0.009134112358093261, 0.009363840103149414, 0.009013471603393554, 0.00879747200012207, 0.008737504005432129, 0.008683072090148927, 0.008843711853027344, 0.008763615608215331, 0.008873760223388672, 0.00910927963256836, 0.009344896316528321, 0.009257311820983887, 0.009274623870849609, 0.009327391624450683, 0.009162752151489258, 0.009096416473388672, 0.009007583618164063, 0.008894335746765137, 0.009071040153503417, 0.008959456443786621, 0.008854047775268555, 0.008851455688476563, 0.008901984214782714, 0.008896448135375977, 0.008819616317749024, 0.00894979190826416, 0.009021023750305175, 0.008957119941711425, 0.008804800033569336, 0.008788800239562989, 0.00903551959991455, 0.00921737575531006, 0.009126655578613281, 0.008935327529907227, 0.008725760459899902, 0.00873532772064209, 0.008755647659301757, 0.008812159538269042, 0.008861215591430664, 0.008902432441711426, 0.00890713596343994, 0.009146783828735352, 0.009086879730224609, 0.008912863731384278, 0.008903967857360839, 0.008766528129577636, 0.008671008110046386, 0.00861184024810791, 0.00863980770111084, 0.008569631576538086, 0.00886070442199707, 0.009313216209411621, 0.009289664268493652, 0.009326047897338867, 0.009230879783630372, 0.009174495697021485, 0.009066975593566895, 0.00911571216583252, 0.009011167526245117, 0.00894547176361084, 0.008866016387939452, 0.009027584075927735, 0.009191424369812011, 0.008917056083679199, 0.009504575729370118, 0.008814944267272948, 0.009017024040222168, 0.008899968147277832, 0.008815327644348144, 0.008678943634033202, 0.008673184394836426, 0.008649215698242188, 0.008609184265136719, 0.008622752189636231, 0.008914943695068359, 0.008976736068725587, 0.008713983535766601, 0.0084399995803833, 0.008846783638000488, 0.008683199882507325, 0.00888492774963379, 0.009137248039245606, 0.008833600044250487, 0.008669535636901855, 0.00867033576965332, 0.0086496000289917, 0.00858521556854248, 0.008775679588317872, 0.009214303970336913, 0.009157535552978515, 0.009073408126831054, 0.008957951545715333, 0.008790016174316406, 0.008712191581726075, 0.008640512466430664, 0.008560640335083008, 0.008566783905029298, 0.008581119537353516, 0.008531968116760253, 0.008566816329956055, 0.008597472190856933, 0.008597503662109375, 0.008617919921875, 0.008689855575561524, 0.00869331169128418, 0.008634143829345704, 0.008651295661926269, 0.008622079849243165, 0.0086179838180542, 0.008585344314575196, 0.008595552444458008, 0.008572128295898438, 0.008567359924316406, 0.008527968406677246, 0.008525983810424805, 0.008588095664978028, 0.008575743675231933, 0.008616127967834473, 0.00857692813873291, 0.00858675193786621, 0.008563296318054199, 0.0086179838180542, 0.008631839752197265, 0.008663519859313965, 0.00860979175567627, 0.008643967628479004, 0.00870032024383545, 0.008775103569030761, 0.008703104019165039, 0.008633088111877442, 0.00859763240814209, 0.008582176208496094, 0.00859216022491455, 0.008590304374694824, 0.008623711585998535, 0.008564767837524414, 0.008644672393798828, 0.00858348846435547, 0.00861184024810791, 0.0086364164352417, 0.008398847579956055, 0.00860159969329834, 0.008550111770629882, 0.008577312469482422, 0.008558367729187012, 0.008532256126403808, 0.009459648132324219, 0.00900924777984619, 0.008589119911193847, 0.00858448028564453, 0.00859382438659668, 0.008636832237243652, 
0.008598784446716308, 0.008643424034118653, 0.008590880393981934, 0.008595135688781739, 0.008642784118652344, 0.008550880432128906, 0.008589311599731446, 0.008612095832824707, 0.00861353588104248, 0.008579520225524903, 0.008545184135437011, 0.008555264472961425, 0.00856287956237793, 0.00857596778869629, 0.0085696964263916, 0.008665056228637695, 0.008636608123779296, 0.00860979175567627, 0.008585056304931641, 0.008670495986938477, 0.00863920021057129, 0.00862822437286377, 0.008589311599731446, 0.008703680038452148, 0.008577343940734863, 0.00857907199859619, 0.008556096076965333, 0.008581503868103028, 0.008525888442993164, 0.008572064399719238, 0.008581983566284179, 0.00858521556854248, 0.008643808364868165, 0.008577759742736817, 0.00858937644958496, 0.008519359588623047, 0.00855686378479004, 0.00855395221710205, 0.00854032039642334, 0.008620736122131347, 0.00857260799407959, 0.008580767631530761, 0.00856611156463623, 0.008647808074951172, 0.008707967758178711, 0.00883670425415039, 0.00937548828125, 0.009038496017456054, 0.008652799606323243, 0.00865180778503418, 0.008678367614746094, 0.008376192092895508, 0.008861503601074218, 0.0086844482421875, 0.00865187168121338, 0.008612256050109863, 0.00864633560180664, 0.008617759704589844, 0.008598336219787598, 0.00860979175567627, 0.008586239814758301, 0.008668160438537598, 0.008673343658447265, 0.008638272285461426, 0.008638591766357421, 0.00876035213470459, 0.008713184356689453, 0.008666496276855468, 0.008595775604248047, 0.008659263610839844, 0.008584544181823731, 0.008516480445861816, 0.008599328041076661, 0.008568384170532227, 0.008576704025268554, 0.008551136016845703, 0.008550432205200196, 0.00856390380859375, 0.008579039573669433, 0.008586079597473144, 0.008553728103637696, 0.008575551986694336, 0.00854975986480713, 0.008563520431518555, 0.008598655700683593, 0.008561440467834473, 0.008597599983215331, 0.008559679985046387, 0.008812928199768066, 0.008642623901367188, 0.008591872215270996, 0.008665375709533692, 0.008644319534301758, 0.008584927558898926, 0.00858255958557129, 0.008607999801635742, 0.00854697608947754, 0.008582719802856445, 0.008526528358459472, 0.008595168113708496, 0.008566975593566895, 0.008633440017700195, 0.008639007568359376, 0.008658143997192383, 0.00862435245513916, 0.009238656044006349, 0.008744768142700195, 0.00947868824005127, 0.008918975830078125, 0.00959727954864502, 0.008677151679992676, 0.008705951690673829, 0.008650208473205566, 0.008610912322998047, 0.008331263542175293, 0.008650015830993653, 0.008618720054626465, 0.00861184024810791, 0.008541952133178711, 0.008634336471557617, 0.008565024375915527, 0.008632320404052735, 0.008566847801208496, 0.008578144073486327, 0.008632255554199218, 0.008622688293457031, 0.008595775604248047, 0.008675423622131348, 0.009055392265319824, 0.008632575988769531, 0.008565247535705567, 0.008553471565246582, 0.008535200119018555, 0.008619135856628417, 0.008602335929870605, 0.008598591804504395, 0.008813247680664063, 0.008614336013793945, 0.008636223793029784, 0.008753151893615722, 0.00876540756225586, 0.008884256362915038, 0.008919232368469239, 0.009077664375305175, 0.00897555160522461, 0.009048031806945802, 0.009080063819885255, 0.00912332820892334, 0.009152607917785644, 0.009193599700927734, 0.009163552284240723, 0.00924783992767334, 0.00936847972869873, 0.009426912307739259, 0.009250271797180175, 0.009281439781188965, 0.00919593620300293, 0.00928179168701172, 0.009256959915161133, 0.009115648269653321, 0.009066495895385742, 0.009183232307434081, 0.009204863548278808, 
0.009538432121276856, 0.009218111991882324, 0.009287551879882812, 0.009266752243041992, 0.00923852825164795, 0.009458175659179687, 0.00924403190612793, 0.009149279594421387, 0.009277215957641602, 0.009307711601257325, 0.009150912284851074, 0.009005375862121582, 0.009182656288146972, 0.00945321559906006, 0.008957440376281739, 0.00918393611907959, 0.009283391952514649, 0.009277440071105958, 0.009441280364990234, 0.009510911941528321, 0.009209407806396484, 0.00912656021118164, 0.009257920265197754, 0.009130111694335937, 0.009119872093200684, 0.009112159729003906, 0.009276576042175293, 0.00936569595336914, 0.00922486400604248, 0.009441280364990234, 0.009332032203674316, 0.009093855857849121, 0.009148256301879883, 0.009136159896850586, 0.009307552337646484, 0.009525952339172363, 0.009195520401000976, 0.00930611228942871, 0.009289376258850098, 0.009145824432373047, 0.009357536315917969, 0.009282015800476074, 0.009104864120483398, 0.009025792121887206, 0.008968671798706055, 0.008922783851623536, 0.00899728012084961, 0.009027520179748534, 0.009347071647644043, 0.009263104438781738, 0.009173055648803712, 0.009201375961303711, 0.009210047721862792, 0.00917033576965332, 0.009439231872558594, 0.00917363166809082, 0.009197567939758301, 0.009117695808410644, 0.009013152122497559, 0.008933216094970704, 0.00906265640258789, 0.009258111953735352, 0.009268095970153808, 0.009051872253417968, 0.009119392395019531, 0.009267871856689452, 0.009391424179077148, 0.009222816467285157, 0.009138175964355469, 0.009032928466796875, 0.008899359703063965, 0.0089619197845459, 0.00901318359375, 0.009321760177612305, 0.009202143669128419, 0.009134528160095214, 0.009093119621276855, 0.009160479545593262, 0.009395808219909667, 0.00939414405822754, 0.00942956829071045, 0.009278656005859375, 0.009175968170166016, 0.009295167922973633, 0.009155263900756836, 0.009117088317871093, 0.009155167579650878, 0.009135807991027832, 0.008974080085754394, 0.009061216354370117, 0.009042719841003417, 0.009159616470336914, 0.009211903572082519, 0.009179231643676757, 0.009021344184875489, 0.00921126365661621, 0.009240192413330078, 0.009115839958190918, 0.009140704154968262, 0.009046367645263672, 0.009009152412414552, 0.009046015739440917, 0.00902284812927246, 0.009222911834716798, 0.00928054428100586, 0.009145024299621583, 0.008966303825378419, 0.008888544082641602, 0.009129055976867676, 0.009749183654785156, 0.009058303833007812, 0.009029055595397949, 0.008978464126586915, 0.009183775901794434, 0.009389408111572265, 0.009273664474487305, 0.009257311820983887, 0.009101375579833985, 0.009131967544555664, 0.009244640350341797, 0.009254688262939454, 0.00913987159729004, 0.009032447814941407, 0.009056096076965331, 0.009089280128479003, 0.009209440231323243, 0.009207839965820313, 0.009100416183471679, 0.009217023849487305, 0.009158656120300293, 0.009101311683654785, 0.009410847663879394, 0.00915129566192627, 0.009130016326904297, 0.009076704025268554, 0.009030495643615723, 0.00920800018310547, 0.009336704254150391, 0.009328831672668457, 0.009301823616027832, 0.009228351593017579, 0.009265151977539063, 0.00925228786468506, 0.009192000389099121, 0.00925875186920166, 0.009253120422363282, 0.009245856285095215, 0.009231040000915527, 0.009146528244018555, 0.009101311683654785, 0.009125568389892579, 0.009016863822937012, 0.009034527778625488, 0.008976351737976075, 0.009101344108581542, 0.009154175758361817, 0.009046208381652833, 0.008980223655700684, 0.00907526397705078, 0.009183103561401368, 0.00923852825164795, 0.009213983535766601, 
0.009176128387451172, 0.009247648239135741, 0.009352224349975586, 0.00919215965270996, 0.009021023750305175, 0.00894428825378418, 0.008900863647460937, 0.008962112426757813, 0.009155584335327148, 0.009214655876159668, 0.009363615989685058, 0.009291616439819336, 0.00954918384552002, 0.009261695861816407, 0.009193632125854492, 0.009061504364013671, 0.009222816467285157, 0.009266528129577636, 0.009106143951416016, 0.009087072372436524, 0.009193375587463378, 0.009240575790405273, 0.009255135536193847, 0.009285280227661133, 0.009203840255737304, 0.009377792358398437, 0.009203712463378906, 0.009115455627441406, 0.009099136352539063, 0.009157024383544921, 0.009131551742553711, 0.009025919914245605, 0.009054207801818847, 0.009237919807434082, 0.009343839645385743, 0.009272704124450684, 0.009202048301696778, 0.009086976051330567, 0.008957951545715333, 0.00889583969116211, 0.008957951545715333, 0.00865328025817871, 0.009000960350036622, 0.00920576000213623, 0.009158143997192383, 0.009122271537780762, 0.008998944282531738, 0.009025535583496093, 0.009082015991210937, 0.009222208023071289, 0.008996831893920898, 0.009244768142700196, 0.009177824020385743, 0.009379839897155762, 0.009652223587036133, 0.009451519966125489, 0.009928704261779785, 0.009515263557434083, 0.009676223754882812, 0.00989568042755127, 0.009249183654785156, 0.009289024353027343, 0.00912179183959961, 0.009040736198425292, 0.008902655601501466, 0.008978464126586915, 0.008986207962036133, 0.009152671813964845, 0.00909670352935791, 0.009022175788879394, 0.008988096237182617, 0.009011775970458984, 0.009184288024902345, 0.009108448028564453, 0.009076800346374512, 0.009074624061584474, 0.008972064018249511, 0.00899839973449707, 0.008954591751098633, 0.009089216232299805, 0.00917689609527588, 0.00916703987121582, 0.009197183609008789, 0.009223936080932617, 0.009053855895996095, 0.0090283842086792, 0.008986528396606446, 0.008984031677246094, 0.009357760429382325, 0.00900268840789795, 0.009019519805908203, 0.008945343971252441, 0.009064352035522461, 0.009132896423339844, 0.00920195198059082, 0.009293760299682617, 0.009364831924438476, 0.009369983673095703, 0.009215999603271484, 0.009082176208496094, 0.00906719970703125, 0.009209856033325196, 0.009176480293273925, 0.009278047561645507, 0.009110048294067382, 0.009154335975646972, 0.008954079627990722, 0.008859647750854491, 0.0088722562789917, 0.009008831977844238, 0.009043264389038085, 0.008849920272827149, 0.008709792137145997, 0.00891977596282959, 0.009028639793395996, 0.00902019214630127, 0.009110527992248535, 0.009101344108581542, 0.008963040351867676, 0.008795807838439942, 0.008730976104736329, 0.008736767768859864, 0.008832032203674317, 0.008887264251708984, 0.008779104232788087, 0.00872105598449707, 0.009076064109802245, 0.00913475227355957, 0.008800224304199218, 0.008710176467895507, 0.008859647750854491, 0.008763392448425293, 0.008652480125427246, 0.008743231773376464, 0.008552063941955566, 0.008569215774536133, 0.008748448371887207, 0.009091839790344239, 0.009438464164733887, 0.009295583724975586, 0.009354111671447753, 0.009311264038085937, 0.009268192291259766, 0.009252896308898926, 0.009084511756896972, 0.008884639739990234, 0.008939488410949707, 0.008987775802612305, 0.008991583824157715, 0.00886406421661377, 0.008859359741210937, 0.008824288368225098, 0.008978976249694823, 0.009265151977539063, 0.00910147190093994, 0.009314144134521484, 0.008973983764648438, 0.00887782382965088, 0.008770367622375489, 0.008809696197509766, 0.008917280197143555, 0.00906668758392334, 
0.009216095924377441, 0.009072256088256837, 0.008888704299926758, 0.008962047576904298, 0.00910540771484375, 0.010554143905639649, 0.009851903915405273, 0.009129983901977539, 0.009019455909729003, 0.00912384033203125, 0.00912172794342041, 0.0090600004196167, 0.009004639625549317, 0.008944160461425781, 0.008859999656677246, 0.008738592147827148, 0.008697152137756348, 0.008683520317077637, 0.00878889560699463, 0.009366815567016601, 0.00941868782043457, 0.009364128112792969, 0.009420255661010742, 0.00931884765625, 0.009324511528015137, 0.009162303924560546, 0.009157183647155762, 0.009164959907531738, 0.009101152420043945, 0.009074048042297363, 0.009115519523620606, 0.008908576011657714, 0.008886431694030762, 0.00897862434387207, 0.009382399559020996, 0.009109791755676269, 0.008984416007995606, 0.008739904403686523, 0.008769536018371582, 0.008963007926940918, 0.009279616355895996, 0.009310432434082031, 0.009150112152099609, 0.009015104293823243, 0.009081024169921875, 0.009048064231872559, 0.008955904006958008, 0.00885865592956543, 0.008927424430847168, 0.008905856132507325, 0.008831999778747558, 0.00881935977935791, 0.009033727645874023, 0.008990495681762696, 0.00884115219116211, 0.00880668830871582, 0.008892640113830567, 0.008928863525390626, 0.008893792152404784, 0.00889737606048584, 0.008950816154479981, 0.00897532844543457, 0.008843263626098634, 0.008828448295593263, 0.008809056282043457, 0.008826751708984375, 0.008843263626098634, 0.009054400444030762, 0.009355775833129883, 0.009312735557556153, 0.00940236759185791, 0.009293824195861817, 0.009313568115234375, 0.009159296035766602, 0.009201760292053223, 0.00903551959991455, 0.009084671974182128, 0.00912656021118164, 0.008980511665344238, 0.009082688331604003, 0.008867456436157226, 0.008968576431274414, 0.009215935707092286, 0.009174400329589844, 0.00917363166809082, 0.009135199546813964, 0.009079775810241698, 0.009207807540893554, 0.009236543655395509, 0.009082816123962402, 0.008957311630249024, 0.008953503608703613, 0.009012191772460938, 0.008970239639282226, 0.009089088439941407, 0.009048031806945802, 0.009180928230285644, 0.009164992332458497, 0.009037535667419433, 0.009168607711791992, 0.00924947166442871, 0.009013152122497559, 0.00902137565612793, 0.00910547161102295, 0.009072287559509278, 0.008960576057434081, 0.009297663688659669, 0.009059647560119628, 0.008889056205749512, 0.008798208236694336, 0.008900351524353027, 0.009261311531066895, 0.009397567749023438, 0.00934291172027588, 0.009399040222167969, 0.009309503555297851, 0.009204128265380859, 0.009281760215759277, 0.009306367874145507, 0.009166432380676269, 0.009150752067565918, 0.00903264045715332, 0.008954879760742187, 0.00898252773284912, 0.008956064224243164, 0.008824671745300293, 0.00917244815826416, 0.009213824272155762, 0.009077216148376465, 0.008984319686889649, 0.008974944114685059]",tokens/s,111.23494978293853,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.886336,3354.329088,0.0,2959.081472,2942.567424,s,1,7.60200439453125,7.60200439453125,0.0,7.60200439453125,7.60200439453125,7.60200439453125,7.60200439453125,[7.60200439453125],,kWh,1.0000995695833125e-05,1.0958325234668118e-06,3.3088915359982818e-06,1.4405719755298219e-05,,MB,1145.524224,3549.364224,0.0,3141.533696,3105.830912,s,10,0.314550048828125,0.03145500488281249,0.0017512524114522284,0.03136390399932861,0.03279800224304199,0.03422858524322509,0.03537305164337158,"[0.0356591682434082, 0.029427936553955078, 0.02969139289855957, 0.03184819221496582, 0.03121228790283203, 0.030395008087158202, 0.03151552009582519, 0.029893503189086915, 0.03248009490966797, 0.03242694473266602]",tokens/s,8138.609450347993,kWh,1.1479109921405104e-06,1.265949505202019e-07,7.620790410353302e-07,2.0365849836960425e-06,tokens/kWh,125700622.3896462,MB,1173.696512,3591.307264,0.0,3183.476736,3163.048448,s,10,10.851031616210937,1.0851031616210938,0.013687952523551393,1.0892457885742188,1.09405625,1.0982018310546875,1.1015182958984375,"[1.093135009765625, 1.102347412109375, 1.08806591796875, 1.092810791015625, 1.0870216064453124, 1.0904256591796875, 1.0930145263671875, 1.0854609375, 1.062905029296875, 1.0558447265625]",tokens/s,58.05899588927648,kWh,3.152952058369203e-05,3.477122149222979e-06,2.0829549343364018e-05,5.5836192076279034e-05,tokens/kWh,1128300.4384313016,,s,630,10.847838054656979,0.017218790562947592,0.0003928363628905611,0.017219743728637694,0.017526509666442872,0.01780540027618408,0.018400972499847415,"[0.017661983489990235, 0.01719891166687012, 0.01731616020202637, 0.01738137626647949, 0.017250240325927733, 0.017360960006713867, 0.017319936752319336, 0.017358240127563478, 0.017492351531982423, 0.017370880126953123, 0.017455583572387696, 0.017567743301391603, 0.017489919662475584, 0.01736832046508789, 0.01737343978881836, 0.017690879821777344, 0.017473663330078125, 0.01723520088195801, 0.01719718360900879, 0.01747635269165039, 0.017453216552734376, 0.01725859260559082, 0.017282976150512695, 0.017385183334350587, 0.01759062385559082, 0.017321760177612305, 0.017364992141723632, 0.01719424057006836, 0.01720806312561035, 0.0172677116394043, 0.017212127685546873, 0.017219871520996095, 0.017319936752319336, 0.017358848571777344, 0.017258655548095702, 0.01787273597717285, 0.017613983154296874, 0.017234432220458985, 0.017322175979614256, 0.017295520782470705, 0.017349983215332033, 0.017566368103027342, 0.017430015563964844, 0.017453567504882812, 0.017433759689331054, 0.017216224670410157, 0.017082239151000978, 0.017186975479125975, 0.017207391738891603, 0.017272544860839845, 0.01729155158996582, 0.017154048919677735, 0.01719500732421875, 0.017299455642700197, 0.017252351760864256, 0.017375232696533204, 0.017129472732543945, 0.017366527557373047, 0.01728060722351074, 0.01722051239013672, 0.017522687911987304, 0.017276927947998046, 0.01717628860473633, 0.017629728317260743, 0.017875135421752928, 0.01758236885070801, 0.017876863479614258, 0.01724345588684082, 0.017353471755981446, 0.01727084732055664, 0.01731283187866211, 0.01735148811340332, 0.01778086471557617, 0.017348703384399415, 0.017341567993164064, 0.017395999908447264, 0.01737779235839844, 0.01728441619873047, 0.017212095260620116, 0.017114784240722655, 0.017530784606933594, 0.017159776687622072, 0.017465919494628907, 0.01730739212036133, 0.02096796798706055, 0.019359743118286133, 0.017477632522583008, 0.017555456161499023, 0.01739094352722168, 0.017261215209960937, 
0.017180671691894533, 0.017123327255249024, 0.01722163200378418, 0.0171378231048584, 0.01725628852844238, 0.01745305633544922, 0.017444576263427734, 0.01729475212097168, 0.01787379264831543, 0.018478912353515627, 0.01818841552734375, 0.0173702392578125, 0.0174268798828125, 0.017379423141479493, 0.017515968322753907, 0.017425119400024416, 0.01721164894104004, 0.01716223907470703, 0.017526496887207033, 0.017228063583374024, 0.017324031829833983, 0.017319839477539064, 0.017334112167358397, 0.017202688217163087, 0.0174517765045166, 0.017245376586914062, 0.01732691192626953, 0.017172479629516603, 0.01727462387084961, 0.017139328002929687, 0.01726527976989746, 0.017432191848754882, 0.017496448516845703, 0.017551359176635743, 0.017454816818237306, 0.01722960090637207, 0.017766975402832032, 0.017862432479858397, 0.017281408309936522, 0.01722064018249512, 0.01701798439025879, 0.01729315185546875, 0.017489471435546876, 0.01741868782043457, 0.017154239654541017, 0.017452640533447264, 0.017173824310302736, 0.01719772720336914, 0.017328384399414063, 0.01714995193481445, 0.017190271377563477, 0.01720547294616699, 0.016984447479248047, 0.016940832138061523, 0.017105152130126953, 0.017470975875854493, 0.01750271987915039, 0.01749318313598633, 0.017271680831909178, 0.017242368698120118, 0.017342144012451172, 0.01710393524169922, 0.01717955207824707, 0.017209375381469726, 0.01728102493286133, 0.017137664794921875, 0.017102848052978514, 0.017150976181030272, 0.017130399703979494, 0.017101951599121094, 0.01725129508972168, 0.017502208709716797, 0.017708864212036133, 0.01727097511291504, 0.017155744552612304, 0.017328479766845702, 0.0172359676361084, 0.017059488296508787, 0.017073919296264648, 0.017554143905639648, 0.01753251266479492, 0.01736323165893555, 0.017238016128540038, 0.017247871398925783, 0.01713190460205078, 0.01699430465698242, 0.01710895919799805, 0.01736297607421875, 0.017175615310668944, 0.017187776565551757, 0.017133472442626953, 0.01727827262878418, 0.017144128799438475, 0.01714838409423828, 0.017421600341796874, 0.017785600662231445, 0.017217504501342774, 0.017053440093994142, 0.017107200622558594, 0.018931711196899414, 0.018153472900390624, 0.01752662467956543, 0.017121440887451173, 0.017047456741333008, 0.01725040054321289, 0.017096704483032226, 0.017202207565307617, 0.017156320571899412, 0.017269407272338867, 0.01724015998840332, 0.017336320877075196, 0.017082176208496093, 0.017096895217895508, 0.017501760482788085, 0.01723436737060547, 0.017094655990600584, 0.01704876708984375, 0.01726972770690918, 0.017571680068969725, 0.017348608016967772, 0.01734003257751465, 0.017388927459716798, 0.017300064086914063, 0.01712995147705078, 0.017116479873657227, 0.01730374336242676, 0.017296863555908204, 0.017148895263671873, 0.017372352600097656, 0.01728623962402344, 0.017074111938476563, 0.017344127655029296, 0.01773923110961914, 0.017048255920410156, 0.0171560001373291, 0.017358272552490235, 0.017134111404418947, 0.01718070411682129, 0.017088640213012696, 0.017117151260375975, 0.017231775283813477, 0.01747727966308594, 0.017424800872802734, 0.017330207824707032, 0.017170400619506837, 0.017260671615600586, 0.017125280380249023, 0.017258432388305663, 0.01725791931152344, 0.017232511520385744, 0.017264095306396485, 0.017343008041381835, 0.017187839508056642, 0.018181119918823242, 0.018701728820800782, 0.017225696563720704, 0.017332735061645507, 0.017364479064941405, 0.017451648712158204, 0.017514495849609374, 0.017366783142089844, 0.01730531120300293, 0.017662336349487304, 0.018061023712158203, 
0.017432863235473633, 0.017247711181640625, 0.017117183685302736, 0.017056640625, 0.017123584747314454, 0.017176319122314453, 0.017278976440429687, 0.01718681526184082, 0.017097856521606444, 0.01706662368774414, 0.016936511993408204, 0.017006399154663086, 0.017507200241088868, 0.017426591873168945, 0.017383167266845703, 0.017143903732299806, 0.017100799560546876, 0.017174367904663087, 0.017277088165283203, 0.017190656661987304, 0.017315391540527345, 0.0171711368560791, 0.017133535385131834, 0.017200544357299806, 0.017369728088378906, 0.017498111724853514, 0.017119232177734374, 0.017113088607788086, 0.017390880584716797, 0.017175264358520508, 0.01717219161987305, 0.017317792892456055, 0.017367424011230467, 0.017301504135131835, 0.01727280044555664, 0.01714384078979492, 0.01712646484375, 0.01710553550720215, 0.017258880615234375, 0.017008575439453125, 0.016977920532226562, 0.01708361625671387, 0.01729977607727051, 0.017821599960327148, 0.017297983169555664, 0.017096704483032226, 0.017254463195800783, 0.01722707176208496, 0.017248640060424803, 0.017228031158447267, 0.01708185577392578, 0.017121440887451173, 0.017926496505737306, 0.017059295654296876, 0.017089056015014648, 0.01719059181213379, 0.017162559509277343, 0.017362943649291994, 0.017364992141723632, 0.017295360565185547, 0.017305599212646485, 0.018191455841064453, 0.018140064239501954, 0.01768409538269043, 0.01727097511291504, 0.01714512062072754, 0.017243040084838866, 0.01716633605957031, 0.017149856567382812, 0.017160287857055666, 0.01732371139526367, 0.017130975723266603, 0.017015199661254882, 0.01718726348876953, 0.01720319938659668, 0.017111040115356444, 0.017133567810058595, 0.017524736404418945, 0.017352256774902344, 0.017406015396118163, 0.017166303634643554, 0.017964704513549805, 0.017412864685058593, 0.01722368049621582, 0.01709791946411133, 0.01722662353515625, 0.017141248703002928, 0.01723641586303711, 0.017249471664428712, 0.017202016830444335, 0.017266559600830077, 0.01718681526184082, 0.017160287857055666, 0.017183839797973634, 0.017410528182983397, 0.017405439376831054, 0.01714044761657715, 0.01713907241821289, 0.01834623908996582, 0.01699286460876465, 0.017209184646606444, 0.017416479110717774, 0.017381248474121095, 0.017326080322265625, 0.017309471130371092, 0.017051456451416015, 0.017182655334472656, 0.017231775283813477, 0.017340351104736328, 0.017461343765258788, 0.017385951995849608, 0.01729324722290039, 0.017143936157226564, 0.01715932846069336, 0.017159008026123048, 0.017242111206054688, 0.01745305633544922, 0.01741168022155762, 0.017154464721679686, 0.0172542724609375, 0.01712335968017578, 0.017243648529052736, 0.01700105667114258, 0.01743052864074707, 0.017946239471435546, 0.017891712188720703, 0.017180639266967772, 0.01728451156616211, 0.01720911979675293, 0.01728783988952637, 0.017229888916015627, 0.017203296661376953, 0.017262624740600585, 0.017187999725341796, 0.01705865669250488, 0.017024480819702148, 0.01705958366394043, 0.017154783248901368, 0.017739839553833008, 0.01721897506713867, 0.017174240112304687, 0.017174623489379884, 0.01701148796081543, 0.017469728469848633, 0.0200185604095459, 0.01784662437438965, 0.017336191177368163, 0.017391136169433594, 0.01734511947631836, 0.01730988883972168, 0.017354560852050782, 0.017393503189086914, 0.01720307159423828, 0.01730748748779297, 0.017565343856811525, 0.017498912811279296, 0.01739673614501953, 0.01726908874511719, 0.01729996871948242, 0.017209503173828126, 0.017332096099853516, 0.01737923240661621, 0.0169597110748291, 0.01720319938659668, 
0.01770832061767578, 0.017418176651000976, 0.017353504180908204, 0.01731180763244629, 0.017055456161499023, 0.017067359924316405, 0.017228063583374024, 0.017287776947021483, 0.01733171272277832, 0.01729996871948242, 0.017358848571777344, 0.01721343994140625, 0.017193183898925782, 0.017395263671875, 0.017120639801025392, 0.017386335372924805, 0.017445920944213867, 0.017144800186157227, 0.017258495330810548, 0.017071136474609373, 0.01683964729309082, 0.017309696197509765, 0.01745510482788086, 0.018185184478759765, 0.0184233283996582, 0.01777302360534668, 0.017219743728637694, 0.017204511642456056, 0.017238592147827147, 0.017094655990600584, 0.01767628860473633, 0.01748601531982422, 0.017368896484375, 0.01727622413635254, 0.01739411163330078, 0.017676544189453126, 0.017295360565185547, 0.01723187255859375, 0.01724937629699707, 0.01705855941772461, 0.017082527160644533, 0.017532415390014648, 0.017400032043457032, 0.01744236755371094, 0.01716044807434082, 0.017158655166625975, 0.01741561508178711, 0.017285663604736327, 0.017379520416259765, 0.017024831771850588, 0.017147903442382813, 0.017303552627563477, 0.01722764778137207, 0.017163967132568358, 0.01718726348876953, 0.01713577651977539, 0.01734025573730469, 0.01750774383544922, 0.01733078384399414, 0.017219743728637694, 0.017029056549072264, 0.016885759353637696, 0.017177663803100585, 0.017268640518188477, 0.01710176086425781, 0.017119039535522462, 0.016994495391845704, 0.016885759353637696, 0.01693801689147949, 0.017103839874267578, 0.017154239654541017, 0.017260351181030274, 0.017120479583740234, 0.017021631240844725, 0.01708233642578125, 0.017037439346313476, 0.017151647567749024, 0.01716873550415039, 0.016891616821289063, 0.01667100715637207, 0.01700864028930664, 0.0168690242767334, 0.016955583572387696, 0.017043264389038085, 0.01701513671875, 0.016912384033203123, 0.01792585563659668, 0.018128448486328125, 0.017611488342285157, 0.01794223976135254, 0.01809030342102051, 0.01693641662597656, 0.016947711944580078, 0.016934911727905275, 0.01699772834777832, 0.016929407119750977, 0.017037343978881837, 0.01732329559326172, 0.017328351974487306, 0.01679587173461914, 0.016648479461669922, 0.01660313606262207, 0.01666265678405762, 0.01718796730041504, 0.016797439575195312, 0.01693712043762207, 0.016814943313598632, 0.016777215957641603, 0.01679769515991211, 0.016719871520996094, 0.016760223388671874, 0.01694166374206543, 0.01702911949157715, 0.01683046340942383, 0.01683046340942383, 0.016639999389648438, 0.01661337661743164, 0.01681814384460449, 0.01673423957824707, 0.016665727615356445, 0.01668716812133789, 0.016542528152465822, 0.016574464797973632, 0.016948896408081053, 0.016596479415893553, 0.016604000091552735, 0.016916479110717773, 0.016674400329589844, 0.016762592315673827, 0.016663232803344728, 0.01665023994445801, 0.016658143997192384, 0.0166976318359375, 0.016695295333862305, 0.016655935287475585, 0.016662975311279297, 0.01663088035583496, 0.0165897274017334, 0.016669952392578125, 0.016587520599365236, 0.01659699249267578, 0.016586336135864257, 0.016576927185058595, 0.016801376342773438, 0.016729536056518553, 0.016626655578613283, 0.01665433692932129, 0.01701478385925293, 0.016859136581420898, 0.018188127517700194, 0.018187456130981446, 0.01733510398864746, 0.016828607559204102, 0.016777023315429688, 0.017040767669677735, 0.016684831619262694, 0.01666543960571289, 0.016656383514404297, 0.0166297607421875, 0.016695295333862305, 0.0167476806640625, 0.01662191963195801, 0.016588832855224608, 0.01655855941772461, 0.016640256881713868, 
0.01670844841003418, 0.016733087539672852, 0.016604223251342774, 0.016570720672607422, 0.016560575485229493, 0.01667647933959961, 0.016673311233520508, 0.01660723114013672, 0.016594944000244142, 0.01660326385498047, 0.016563583374023437, 0.01654630470275879, 0.016635904312133788, 0.016569984436035155, 0.01657913589477539, 0.016621023178100585, 0.017758560180664063, 0.016602880477905275, 0.016687360763549805, 0.01674019241333008, 0.016631967544555665, 0.016688671112060547, 0.016681503295898438, 0.016655391693115234, 0.016697439193725586, 0.01677395248413086, 0.01661756706237793, 0.01664227294921875, 0.016621248245239258, 0.01653116798400879, 0.016619583129882813, 0.01665046310424805, 0.01672969627380371, 0.016945568084716797, 0.016893184661865235, 0.016708351135253905, 0.016776735305786133, 0.016593536376953124, 0.016559968948364256, 0.01660927963256836, 0.016723648071289062, 0.017016223907470703, 0.016839584350585936, 0.016662527084350585, 0.016648191452026367, 0.016746496200561522, 0.01684867286682129]",tokens/s,58.07608823304111,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,836.018176,4675.534848,0.0,4280.287232,4115.121152,s,1,8.0935927734375,8.0935927734375,0.0,8.0935927734375,8.0935927734375,8.0935927734375,8.0935927734375,[8.0935927734375],,kWh,1.0311723070882786e-05,1.130143968225839e-06,4.96389286000487e-06,1.6405759899113496e-05,,MB,1174.614016,4983.816192,0.0,4575.985664,4408.408064,s,10,0.4055550994873047,0.04055550994873047,0.0018906092084394035,0.04021809577941894,0.04169289970397949,0.04372158527374267,0.04534453372955322,"[0.04575027084350586, 0.04020310211181641, 0.040419647216796875, 0.03980409622192383, 0.04040787124633789, 0.03984611129760742, 0.04023308944702148, 0.039516319274902345, 0.04124208068847656, 0.03813251113891602]",tokens/s,6312.335865672272,kWh,1.4787861881734404e-06,1.6299069460646011e-07,9.893933279898385e-07,2.631170210769739e-06,tokens/kWh,97295111.86777543,MB,1203.552256,4983.816192,0.0,4575.985664,4408.410624,s,10,13.95576257324219,1.395576257324219,0.007290252780742853,1.3983818359375,1.4026697021484376,1.40456220703125,1.4060762109375,"[1.38884130859375, 1.4064547119140625, 1.39838720703125, 1.399352294921875, 1.39837646484375, 1.395798828125, 1.38310400390625, 1.38431884765625, 1.4022491455078125, 1.3988797607421875]",tokens/s,45.14264245279712,kWh,4.0314031168912836e-05,4.44631747783814e-06,2.6443989729811237e-05,7.120433837656222e-05,tokens/kWh,884777.5491828349,,s,630,13.953395492553726,0.022148246813577317,0.00035882815347562457,0.02210428810119629,0.022449370384216308,0.022625327491760255,0.023180296020507814,"[0.02265292739868164, 0.022458368301391602, 0.022116352081298828, 0.022278144836425783, 0.022360063552856444, 0.022321151733398437, 0.02226380729675293, 0.022046720504760742, 0.022244543075561524, 0.02204960060119629, 0.022237184524536133, 0.022209920883178712, 0.02222496032714844, 0.022226943969726562, 0.02204444885253906, 
0.022170400619506835, 0.022382591247558595, 0.022267520904541014, 0.022343263626098633, 0.022112703323364256, 0.022094079971313477, 0.022316415786743163, 0.022466272354125977, 0.022445056915283205, 0.022089696884155272, 0.02208736038208008, 0.022132863998413087, 0.0219486083984375, 0.021932064056396486, 0.02191564750671387, 0.022009855270385743, 0.02188083267211914, 0.022001440048217774, 0.021975263595581055, 0.02200371170043945, 0.021939807891845704, 0.021969120025634767, 0.021907936096191405, 0.022162431716918944, 0.021955039978027342, 0.022098175048828127, 0.021875999450683595, 0.02200649642944336, 0.022117759704589842, 0.022196863174438478, 0.022147071838378905, 0.02216524887084961, 0.021815200805664063, 0.021801120758056642, 0.021684415817260744, 0.021716575622558593, 0.021722528457641603, 0.021619712829589844, 0.021985279083251954, 0.02159539222717285, 0.021792863845825194, 0.021864511489868163, 0.021800928115844727, 0.021765888214111326, 0.021699455261230467, 0.021681247711181642, 0.021610719680786133, 0.021626815795898438, 0.022214208602905273, 0.022192960739135743, 0.02190889549255371, 0.022146879196166994, 0.021861120223999022, 0.021825536727905274, 0.02194384002685547, 0.02178233528137207, 0.02169923210144043, 0.021954559326171876, 0.022150943756103516, 0.022476287841796876, 0.022123008728027343, 0.02239094352722168, 0.02237392044067383, 0.02225974464416504, 0.02240768051147461, 0.022331552505493166, 0.022352863311767578, 0.022196992874145508, 0.02231513595581055, 0.022515520095825196, 0.022231231689453124, 0.022837247848510742, 0.022284095764160156, 0.022325439453125, 0.022168607711791993, 0.02208867263793945, 0.022433759689331055, 0.02254198455810547, 0.02267788887023926, 0.022437887191772463, 0.02244812774658203, 0.022417407989501953, 0.022109792709350585, 0.02219254493713379, 0.022196224212646484, 0.02241244888305664, 0.022254432678222656, 0.022194496154785158, 0.022791807174682616, 0.022763456344604492, 0.02220044708251953, 0.022527999877929687, 0.022257471084594728, 0.022603391647338867, 0.022628927230834962, 0.022449216842651366, 0.02219919967651367, 0.02423811149597168, 0.02319491195678711, 0.0222194881439209, 0.022235071182250977, 0.02227168083190918, 0.022226783752441408, 0.02242729568481445, 0.02226880073547363, 0.022152671813964842, 0.022128223419189453, 0.022122592926025392, 0.02219036865234375, 0.022274623870849608, 0.0221265926361084, 0.022955743789672852, 0.022495487213134765, 0.02235411262512207, 0.02233718490600586, 0.022134048461914062, 0.02235215950012207, 0.02209404754638672, 0.022074911117553712, 0.022256511688232422, 0.02217363166809082, 0.02226131248474121, 0.022092159271240235, 0.02210147285461426, 0.02214476776123047, 0.022090656280517578, 0.02216655921936035, 0.022069280624389648, 0.02239788818359375, 0.022607872009277344, 0.022380352020263672, 0.02234796714782715, 0.022147071838378905, 0.022273279190063475, 0.022620927810668944, 0.022237184524536133, 0.022084703445434572, 0.02207836723327637, 0.022306528091430664, 0.02234956741333008, 0.022004255294799803, 0.022070816040039062, 0.022214719772338867, 0.022249631881713867, 0.021958335876464844, 0.02227801513671875, 0.022790304183959963, 0.02210256004333496, 0.022212608337402344, 0.02211840057373047, 0.022161407470703123, 0.02230067253112793, 0.021988447189331056, 0.02203887939453125, 0.021936704635620117, 0.022228992462158204, 0.022093856811523437, 0.022091392517089845, 0.021924192428588868, 0.022145023345947267, 0.021914623260498048, 0.022135295867919923, 0.022516223907470705, 0.022202495574951173, 
0.02207935905456543, 0.022138879776000975, 0.02202822494506836, 0.022083648681640623, 0.02217091178894043, 0.02201260757446289, 0.02203241539001465, 0.02202956771850586, 0.021977247238159178, 0.021924415588378907, 0.022384639739990234, 0.022173696517944336, 0.02276118469238281, 0.022388832092285156, 0.022042816162109374, 0.02222447967529297, 0.022032800674438476, 0.02247065544128418, 0.02209526443481445, 0.022046304702758788, 0.022004735946655272, 0.02225276756286621, 0.022020896911621093, 0.021859807968139647, 0.021769952774047852, 0.022174528121948242, 0.021968896865844727, 0.021977088928222657, 0.022476512908935545, 0.022429983139038087, 0.02226121520996094, 0.02219910430908203, 0.02221232032775879, 0.022001407623291017, 0.022300928115844727, 0.022155263900756835, 0.02197292709350586, 0.02204579162597656, 0.022340576171875, 0.02244607925415039, 0.02197305679321289, 0.02193939208984375, 0.02189798355102539, 0.022087200164794922, 0.022317535400390626, 0.022386688232421875, 0.022001663208007814, 0.022437887191772463, 0.022466527938842774, 0.02238377571105957, 0.022278848648071288, 0.02201747131347656, 0.022186111450195313, 0.022233728408813477, 0.024420352935791017, 0.02215443229675293, 0.021907360076904296, 0.022097824096679687, 0.022471263885498048, 0.02209833526611328, 0.022478847503662108, 0.02235148811340332, 0.02224985694885254, 0.02223695945739746, 0.02208585548400879, 0.021927967071533203, 0.02222831916809082, 0.02212723159790039, 0.022094976425170897, 0.02211520004272461, 0.02207744026184082, 0.02208358383178711, 0.021825536727905274, 0.02224127960205078, 0.022147071838378905, 0.02266111946105957, 0.022267871856689454, 0.022077472686767578, 0.022180864334106445, 0.022019168853759766, 0.02209721565246582, 0.022114591598510744, 0.02209823989868164, 0.021999616622924805, 0.021958784103393556, 0.022101600646972655, 0.02198361587524414, 0.021899168014526366, 0.02199660873413086, 0.02198624038696289, 0.022955392837524412, 0.025219711303710937, 0.02224892807006836, 0.02210665512084961, 0.02208345603942871, 0.022214784622192382, 0.022169599533081053, 0.0220446720123291, 0.022003679275512694, 0.022013343811035157, 0.022015775680541992, 0.022168415069580078, 0.02205695915222168, 0.02176582336425781, 0.022004032135009767, 0.02287958335876465, 0.022302944183349608, 0.02210652732849121, 0.022126623153686523, 0.02214236831665039, 0.022057567596435547, 0.02209587287902832, 0.02208358383178711, 0.021893119812011717, 0.022255136489868165, 0.022125024795532227, 0.02214499282836914, 0.022173728942871094, 0.022066560745239258, 0.021936128616333008, 0.022063615798950196, 0.02314451217651367, 0.022220191955566407, 0.022147743225097657, 0.022246944427490235, 0.022104543685913088, 0.02239641571044922, 0.022266368865966796, 0.022054784774780272, 0.02197657585144043, 0.022050975799560547, 0.02203286361694336, 0.02230790328979492, 0.022092735290527344, 0.02200371170043945, 0.021759456634521484, 0.02286534309387207, 0.022354496002197265, 0.022920543670654298, 0.023920927047729492, 0.022344064712524415, 0.022279680252075194, 0.022114816665649413, 0.022081087112426758, 0.02207583999633789, 0.022041856765747072, 0.021721216201782228, 0.021979743957519532, 0.022044160842895507, 0.022157855987548828, 0.022300575256347658, 0.022467807769775392, 0.02224371147155762, 0.022094335556030274, 0.022091775894165038, 0.02204857635498047, 0.02233145523071289, 0.02224550437927246, 0.02212224006652832, 0.022069503784179687, 0.022219903945922853, 0.02267225646972656, 0.02209382438659668, 0.02209791946411133, 
0.02211020851135254, 0.022171648025512695, 0.022589439392089843, 0.022351295471191406, 0.022153823852539063, 0.02206480026245117, 0.02210028839111328, 0.022345632553100587, 0.022088863372802733, 0.022074304580688476, 0.02187468719482422, 0.022122432708740234, 0.021950527191162108, 0.021807104110717773, 0.02168422317504883, 0.021946176528930664, 0.022071487426757814, 0.021727231979370116, 0.021753856658935547, 0.022136831283569337, 0.022475807189941407, 0.022106271743774414, 0.021881248474121092, 0.02194063949584961, 0.02214816093444824, 0.022157472610473634, 0.022190879821777344, 0.02186444854736328, 0.021999616622924805, 0.022095903396606446, 0.022104032516479494, 0.021796415328979492, 0.021633472442626953, 0.021941568374633787, 0.022097663879394533, 0.022413055419921876, 0.022702655792236327, 0.022215744018554688, 0.022393407821655272, 0.021983135223388673, 0.022109760284423827, 0.022420352935791015, 0.022024192810058595, 0.02188287925720215, 0.021893119812011717, 0.021960704803466798, 0.022023616790771486, 0.02185273551940918, 0.021831424713134765, 0.02197491264343262, 0.022038911819458006, 0.022023359298706056, 0.02199193572998047, 0.022109983444213867, 0.022261632919311523, 0.022151872634887694, 0.02181100845336914, 0.021643423080444337, 0.021516000747680664, 0.02148748779296875, 0.02157814407348633, 0.021399551391601563, 0.021527999877929686, 0.02169913673400879, 0.021677152633666992, 0.021767072677612305, 0.021935840606689454, 0.021911840438842773, 0.021983232498168945, 0.022124544143676757, 0.02201580810546875, 0.021821632385253906, 0.0217509765625, 0.02183865547180176, 0.022749183654785156, 0.021745664596557617, 0.0216944637298584, 0.02175721549987793, 0.02197372817993164, 0.021790464401245116, 0.021740928649902343, 0.021808223724365236, 0.021792543411254882, 0.02185420799255371, 0.02175574493408203, 0.021702816009521484, 0.021829120635986327, 0.021846527099609374, 0.02174550437927246, 0.02152668762207031, 0.021618688583374023, 0.021932031631469725, 0.022018400192260743, 0.02196553611755371, 0.021905887603759767, 0.02292732810974121, 0.023066816329956056, 0.022391103744506837, 0.024497823715209963, 0.022268287658691405, 0.021917535781860353, 0.02193401527404785, 0.02182476806640625, 0.022027103424072266, 0.022128288269042968, 0.02196054458618164, 0.0218240966796875, 0.021904991149902343, 0.02193040084838867, 0.022007167816162108, 0.021792415618896485, 0.021776351928710937, 0.021895999908447265, 0.0220960636138916, 0.02186240005493164, 0.021778432846069336, 0.02168160057067871, 0.02223161506652832, 0.021812223434448243, 0.02182655906677246, 0.021938175201416017, 0.021977088928222657, 0.02212819290161133, 0.021956031799316406, 0.021969919204711915, 0.0219238395690918, 0.02207846450805664, 0.022145792007446288, 0.021777887344360352, 0.021979936599731444, 0.021780479431152345, 0.022159231185913085, 0.021931360244750977, 0.02187696075439453, 0.021791296005249025, 0.021910879135131837, 0.022008480072021483, 0.021704736709594726, 0.021678047180175783, 0.02209334373474121, 0.021869024276733397, 0.021898719787597658, 0.021963167190551757, 0.021907167434692384, 0.022018463134765624, 0.0221441593170166, 0.021826400756835937, 0.021841983795166015, 0.021935359954833984, 0.021893407821655272, 0.021764127731323243, 0.021703039169311523, 0.02173516845703125, 0.021833984375, 0.022009855270385743, 0.02224051284790039, 0.022037248611450195, 0.02202009582519531, 0.021827583312988282, 0.02209382438659668, 0.021747711181640626, 0.021984256744384766, 0.02233907127380371, 0.02180339241027832, 
0.02180726432800293, 0.021835136413574218, 0.021851743698120117, 0.02184239959716797, 0.02196329689025879, 0.022013952255249023, 0.02207139205932617, 0.02195027160644531, 0.02209187126159668, 0.021962751388549806, 0.022005184173583985, 0.02206979179382324, 0.021921087265014648, 0.021682912826538087, 0.021587968826293946, 0.021702495574951172, 0.021874528884887695, 0.02190745544433594, 0.02185843276977539, 0.021823680877685547, 0.021884288787841797, 0.021966880798339843, 0.02204323196411133, 0.022189760208129884, 0.02222876739501953, 0.022569503784179688, 0.022642688751220705, 0.022420799255371094, 0.02242953681945801, 0.02245075225830078, 0.022410560607910156, 0.022309856414794924, 0.022280191421508787, 0.02229212760925293, 0.02250992012023926, 0.022355520248413085, 0.02242195129394531, 0.022523551940917968, 0.022483295440673828, 0.02248294448852539, 0.022347776412963868, 0.022433792114257813, 0.022771711349487304, 0.022540288925170897, 0.023017471313476562, 0.025333696365356446, 0.02252364730834961, 0.022434112548828124, 0.022222848892211915, 0.02234377670288086, 0.022306175231933595, 0.02213532829284668, 0.022494207382202147, 0.0222873592376709, 0.022571008682250978, 0.02227609634399414, 0.02231430435180664, 0.022229696273803713, 0.02226585578918457, 0.022278144836425783, 0.02290278434753418, 0.02237811279296875, 0.022718175888061524, 0.02257695960998535, 0.02224006462097168, 0.022392736434936524, 0.022156991958618165, 0.022278560638427734, 0.0221265926361084, 0.021946367263793946, 0.022128639221191407, 0.02232035255432129, 0.02207619285583496, 0.02211020851135254, 0.02225904083251953, 0.022198944091796874, 0.022195711135864257, 0.022424064636230468, 0.02232249641418457, 0.02231091117858887, 0.022221504211425783, 0.02234880065917969, 0.02225868797302246, 0.022220800399780274, 0.022032384872436524, 0.0220897274017334, 0.022347583770751953, 0.022028255462646484, 0.021968704223632812, 0.021718496322631835, 0.02202239990234375, 0.022022848129272462, 0.021807104110717773, 0.02240716743469238, 0.022345727920532226, 0.022360063552856444, 0.022169599533081053, 0.022109344482421876, 0.022214847564697264, 0.022245759963989257, 0.022308416366577148, 0.02216009521484375, 0.02210358428955078, 0.022251840591430663, 0.02206096076965332, 0.02216886329650879, 0.022059999465942382, 0.02201190376281738, 0.02204876708984375, 0.022486272811889647, 0.02236288070678711, 0.022103071212768555, 0.02207423973083496, 0.022132831573486327, 0.021972864151000977, 0.02225369644165039, 0.022024192810058595, 0.022251487731933594, 0.02222012710571289, 0.022184608459472656, 0.022147296905517578, 0.022095680236816406, 0.02214240074157715]",tokens/s,45.150300536969816,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1068, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 634, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 230, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1054, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 856, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 596, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 426, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1522, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1613, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpyvc25izx/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1013, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 839, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 199, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1153, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 918, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 918, in [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 691, in __init__ self.self_attn = QWEN2MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 391, in __init__ super().__init__(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 294, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 634, in __init__ self.attention = GPT_NEOX_ATTENTION_CLASSES[config._attn_implementation](config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 306, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 102, in __init__ self.query_key_value = nn.Linear(config.hidden_size, 3 * config.hidden_size, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return 
self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,810.139648,14639.104,0.0,14243.856384,14221.3376,s,1,7.5382119140625,7.5382119140625,0.0,7.5382119140625,7.5382119140625,7.5382119140625,7.5382119140625,[7.5382119140625],,kWh,1.5008527887503457e-05,1.6481913225325745e-06,8.933062701999006e-06,2.558978191203504e-05,,MB,1123.958784,14735.572992,0.0,14329.839616,14290.688,s,10,14.040768432617188,1.4040768432617188,0.0048848589672792955,1.405280517578125,1.4091292724609374,1.4101729736328126,1.4110079345703126,"[1.3953970947265626, 1.399153076171875, 1.3992520751953126, 1.400876953125, 1.404310302734375, 1.4079962158203125, 1.40741796875, 1.4112166748046875, 1.4088973388671875, 1.406250732421875]",tokens/s,182.32620331897448,kWh,4.105210496041688e-05,4.5276012484695975e-06,2.716368839760005e-05,7.274339460648653e-05,tokens/kWh,3519219.873981142,MB,1152.958464,14750.253056,0.0,14344.51968,14290.69056,s,10,39.413060791015624,3.9413060791015617,0.003247624827919594,3.9413275146484374,3.943664404296875,3.946299365234375,3.948407333984375,"[3.9380703125, 3.93835595703125, 3.936633544921875, 3.941100830078125, 3.94066552734375, 3.94183056640625, 3.94155419921875, 3.942836669921875, 3.943078857421875, 3.948934326171875]",tokens/s,15.984548963109491,kWh,0.00011535337135291625,1.2724405187126664e-05,7.679317254560004e-05,0.00020487094908564293,tokens/kWh,307510.65625055454,,s,630,39.40928758239751,0.0625544247339642,0.0002290329818859967,0.06253827095031739,0.0628556354522705,0.06295140991210937,0.06317164352416993,"[0.06301465606689453, 0.062229217529296874, 0.062402591705322266, 0.062215137481689456, 0.06197545623779297, 0.06248233413696289, 0.062167102813720704, 0.06237936019897461, 0.06253241729736328, 0.06231763076782226, 0.062069793701171876, 0.062396289825439454, 0.06242675018310547, 0.06287807846069336, 0.06252073669433594, 0.06271958541870117, 0.062311424255371096, 0.06230534362792969, 0.06220233535766601, 0.062204383850097654, 0.061972415924072266, 0.0622490234375, 0.06250291061401367, 0.062382080078125, 0.06239846420288086, 0.06239641571044922, 0.062407936096191406, 0.062497535705566404, 0.06253321456909179, 0.0627347526550293, 0.06249676895141602, 0.06255820846557616, 0.06289801788330078, 0.06245977783203125, 0.062368030548095706, 0.06234294509887695, 0.062488800048828126, 0.06255193710327149, 0.062400192260742185, 0.06242566299438477, 0.06238422393798828, 0.06246377563476563, 0.06233283233642578, 0.06340208053588867, 0.0627231674194336, 0.06289497756958008, 0.06259478378295899, 0.06267526245117187, 0.06269132614135742, 0.06253772735595703, 0.06240256118774414, 0.06249628829956055, 0.062396030426025394, 0.0631099853515625, 0.06281798553466797, 0.06265478515625, 0.06256028747558594, 0.0625909423828125, 0.06259423828125, 0.06275360107421875, 0.0624189453125, 0.06265862274169921, 
0.06274041748046875, 0.06332227325439453, 0.06269110488891602, 0.0620689582824707, 0.062261280059814454, 0.062285247802734374, 0.06233961486816406, 0.061960193634033205, 0.06228124618530274, 0.06250294494628907, 0.0627242546081543, 0.06233660888671875, 0.06266336059570313, 0.06259219360351563, 0.06235638427734375, 0.06241059112548828, 0.06237571334838867, 0.062286113739013674, 0.06242502212524414, 0.062356990814208986, 0.0627116470336914, 0.06240943908691406, 0.06232835388183594, 0.06231497573852539, 0.062282783508300785, 0.06221676635742188, 0.06266307067871094, 0.062443519592285154, 0.0624947509765625, 0.06279727935791016, 0.06278505706787109, 0.06251824188232422, 0.062410751342773435, 0.06253500747680664, 0.06250358581542968, 0.06235033416748047, 0.06241094589233399, 0.062329662322998046, 0.06229756927490234, 0.062376480102539066, 0.062328865051269534, 0.06246329498291016, 0.06270383834838868, 0.0628023681640625, 0.06274662399291993, 0.06262764739990234, 0.06250617599487304, 0.06235855865478516, 0.06241494369506836, 0.06248767852783203, 0.06250777435302735, 0.062912353515625, 0.06270582580566406, 0.06268313598632813, 0.06253263854980469, 0.06242201614379883, 0.06253769683837891, 0.06255599975585938, 0.06254784011840821, 0.06275305557250976, 0.0627856330871582, 0.0629466552734375, 0.06270601654052735, 0.0625539207458496, 0.06293852615356445, 0.0622372817993164, 0.06213014221191406, 0.062183521270751954, 0.06217932891845703, 0.0624202880859375, 0.06216099166870117, 0.06233967971801758, 0.06250086212158203, 0.06250627136230469, 0.0625650863647461, 0.062306304931640626, 0.06214041519165039, 0.06239231872558594, 0.06254991912841797, 0.06258899307250977, 0.06262543869018555, 0.06276729583740234, 0.06234857559204102, 0.06225603103637695, 0.06215238571166992, 0.06246368026733398, 0.06224140930175781, 0.06246604919433594, 0.062273536682128906, 0.06258892822265626, 0.06288544082641602, 0.062472640991210936, 0.06227305603027344, 0.06248291015625, 0.06254959869384766, 0.06262214279174805, 0.06241891098022461, 0.06261759948730469, 0.06264774322509765, 0.06248883056640625, 0.06232633590698242, 0.062416831970214845, 0.06231865692138672, 0.06256921768188477, 0.062316574096679685, 0.06247222518920898, 0.0625458869934082, 0.06247529602050781, 0.06251820755004883, 0.0627094383239746, 0.06255791854858399, 0.06266684722900391, 0.06259107208251953, 0.06271590423583985, 0.0625115852355957, 0.06242720031738281, 0.062319583892822265, 0.06252022552490234, 0.06263001632690429, 0.06289395141601563, 0.06253158569335937, 0.06262707138061524, 0.06262246322631836, 0.06255324935913086, 0.062499679565429685, 0.06261350250244141, 0.06258483123779297, 0.0634106559753418, 0.06242652893066406, 0.06216953659057617, 0.062212032318115236, 0.062185630798339844, 0.06234511947631836, 0.062220287322998044, 0.06227507019042969, 0.06251718521118164, 0.062281982421875, 0.06216633605957031, 0.06251417541503906, 0.06242230224609375, 0.06253846359252929, 0.062394367218017575, 0.06223388671875, 0.062405441284179686, 0.06263596725463867, 0.06250086212158203, 0.06238819122314453, 0.06207078552246094, 0.062227680206298826, 0.06280047988891602, 0.06242700958251953, 0.06267939376831054, 0.06247366333007812, 0.06235087966918945, 0.06262790298461914, 0.06261238479614258, 0.06269286346435547, 0.06244972610473633, 0.062419296264648434, 0.0627276496887207, 0.06273500823974609, 0.06277072143554688, 0.06293532943725585, 0.06271811294555664, 0.062312095642089844, 0.062371711730957034, 0.06251772689819336, 0.06245587158203125, 0.06246118545532227, 
0.06244966506958008, 0.0627207374572754, 0.06277228927612305, 0.06295641708374024, 0.062644287109375, 0.06251484680175781, 0.06247663879394531, 0.06255516815185547, 0.06256940841674805, 0.06266652679443359, 0.062486785888671875, 0.06263391876220703, 0.06278729629516601, 0.06276335906982422, 0.06284297561645508, 0.06266979217529296, 0.06253577423095703, 0.06285523223876953, 0.06299523162841797, 0.06285286331176758, 0.06290422439575195, 0.06305772781372071, 0.06225526428222656, 0.062169281005859375, 0.06220800018310547, 0.062195358276367185, 0.062406368255615234, 0.06218937683105469, 0.06230307388305664, 0.0623595199584961, 0.062455329895019535, 0.062519775390625, 0.06225823974609375, 0.062292831420898434, 0.06233715057373047, 0.062488544464111326, 0.06287062454223633, 0.06252803039550782, 0.06259955215454102, 0.06234112167358399, 0.062304031372070315, 0.062333152770996096, 0.06255820846557616, 0.062216159820556644, 0.06237392044067383, 0.06270083236694336, 0.06277548980712891, 0.06260377502441407, 0.06266643142700196, 0.06256880187988281, 0.06241689682006836, 0.062470142364501956, 0.06276300811767578, 0.06253673553466797, 0.062388225555419924, 0.06237216186523437, 0.06257251358032226, 0.06256438446044922, 0.06264284896850586, 0.06272735977172851, 0.0626102409362793, 0.0626684799194336, 0.06266502380371093, 0.06284672164916992, 0.06243318557739258, 0.06276108932495117, 0.062619873046875, 0.06270115280151367, 0.06265078353881835, 0.062437374114990236, 0.06260265731811523, 0.062473857879638675, 0.06240262222290039, 0.06235023880004883, 0.06258470535278321, 0.06271603012084961, 0.06295142364501953, 0.06271753692626954, 0.06297027206420898, 0.06266060638427734, 0.06264012908935547, 0.06292835235595703, 0.0626849594116211, 0.06275513458251954, 0.0631978874206543, 0.062252639770507816, 0.06214083099365234, 0.06196364974975586, 0.06255007934570313, 0.062578369140625, 0.062434177398681644, 0.06225715255737305, 0.06245580673217774, 0.06233695983886719, 0.062390335083007814, 0.062473342895507815, 0.06230499267578125, 0.06231260681152344, 0.06221619033813477, 0.06243260955810547, 0.06287635040283203, 0.06271996688842774, 0.06230220794677734, 0.06239004898071289, 0.062288127899169925, 0.06266249465942383, 0.06271603012084961, 0.06234521484375, 0.062416927337646484, 0.0625656967163086, 0.06255683135986329, 0.06266470336914062, 0.06252044677734375, 0.06263897705078125, 0.06251849746704101, 0.06256105422973633, 0.06273843383789063, 0.06262086486816407, 0.06225132751464844, 0.06250342559814454, 0.06252748870849609, 0.06261356735229492, 0.06278953552246094, 0.0625316162109375, 0.06254182434082031, 0.06255785751342774, 0.06260976028442383, 0.0626319351196289, 0.06247219085693359, 0.06250495910644531, 0.06271491241455078, 0.06287980651855468, 0.06284281539916992, 0.06256534576416016, 0.06244895935058594, 0.062576416015625, 0.06257977676391602, 0.06276889419555665, 0.06302278518676757, 0.0627729606628418, 0.0628037109375, 0.06280239868164063, 0.06281036758422852, 0.06300227355957032, 0.06278204727172852, 0.0626769905090332, 0.06248239898681641, 0.06316787338256837, 0.06242889785766602, 0.06221030426025391, 0.062058368682861326, 0.06238063812255859, 0.06222652816772461, 0.062117889404296876, 0.06243673706054687, 0.0625814094543457, 0.06250492858886719, 0.0627732810974121, 0.062349281311035155, 0.06234454345703125, 0.06228044891357422, 0.06233216094970703, 0.06267337417602539, 0.06269318389892578, 0.06286374282836914, 0.06248448181152344, 0.062339038848876954, 0.062426944732666016, 0.06212944030761719, 
0.06226220703125, 0.062386207580566407, 0.06247011184692383, 0.06273833465576172, 0.06254131317138673, 0.06270550537109375, 0.06250576019287109, 0.06247625732421875, 0.06254771041870118, 0.06278374481201172, 0.0625860481262207, 0.06286214447021485, 0.06241628646850586, 0.06263868713378906, 0.06245775985717773, 0.06260528182983398, 0.0625830078125, 0.06261955261230469, 0.06253363037109375, 0.06256633758544922, 0.062475841522216795, 0.0625558090209961, 0.0625590705871582, 0.0626770896911621, 0.06266668701171875, 0.06276006317138672, 0.06269369506835938, 0.06278355026245117, 0.06251375961303711, 0.06251660919189453, 0.06242969512939453, 0.06260943984985351, 0.0630123519897461, 0.0627163848876953, 0.06255759811401367, 0.06290697479248047, 0.06272819137573242, 0.0628592643737793, 0.062740478515625, 0.06270361709594727, 0.06262979125976563, 0.06318255996704102, 0.062269599914550784, 0.06216511917114258, 0.062209022521972655, 0.06236656188964844, 0.0624901123046875, 0.06224137496948242, 0.06230227279663086, 0.0625541114807129, 0.0627276496887207, 0.062277568817138675, 0.06256646347045898, 0.06241535949707031, 0.06251087951660156, 0.06259875106811523, 0.06269161605834961, 0.06253948974609375, 0.06244572830200195, 0.06253807830810547, 0.06231670379638672, 0.062205951690673826, 0.06230809783935547, 0.06222441482543945, 0.06222396850585937, 0.06266329574584961, 0.0627691535949707, 0.06263603210449219, 0.0628326416015625, 0.06250675201416016, 0.06260268783569337, 0.06282118225097656, 0.06252953720092773, 0.06229196929931641, 0.06249679946899414, 0.06246806335449219, 0.06257664108276367, 0.062349342346191404, 0.062487583160400394, 0.062446529388427735, 0.06250230407714844, 0.06258544158935547, 0.06250291061401367, 0.06247622299194336, 0.06269343948364257, 0.06265651321411132, 0.06283065414428711, 0.06296979141235351, 0.06273027038574219, 0.06263804626464843, 0.0625885124206543, 0.06251356887817383, 0.06255401611328125, 0.062304031372070315, 0.06287308883666992, 0.06284371185302734, 0.06287360000610352, 0.06291254425048828, 0.06288927841186523, 0.06267878341674804, 0.06300969696044922, 0.06287926483154296, 0.06291215896606445, 0.06317318344116211, 0.06300243377685547, 0.0621956787109375, 0.062082942962646485, 0.062284286499023435, 0.062397823333740235, 0.06235529708862304, 0.06255081558227539, 0.06257459259033203, 0.062467838287353514, 0.06260966491699219, 0.06244147109985351, 0.062281726837158206, 0.06223427200317383, 0.062494686126708984, 0.06245974349975586, 0.06258848190307617, 0.06235030364990234, 0.06236735916137695, 0.06239401626586914, 0.062362335205078126, 0.062255104064941405, 0.06236511993408203, 0.06247436904907227, 0.06276959991455078, 0.06257254409790039, 0.06281126403808594, 0.0627534065246582, 0.0625973777770996, 0.06246745681762695, 0.06252108764648437, 0.062473087310791015, 0.06248432159423828, 0.06224297714233398, 0.06265856170654296, 0.06258480072021484, 0.06259238433837891, 0.06244419097900391, 0.06264012908935547, 0.06264403152465821, 0.06293318557739258, 0.06257600021362304, 0.06259779357910156, 0.06295139312744141, 0.06298009490966797, 0.06270361709594727, 0.062814208984375, 0.06276451110839844, 0.06250345611572265, 0.06233705520629883, 0.0624964485168457, 0.06267516708374024, 0.06274208068847656, 0.06265292739868164, 0.06295062255859375, 0.06288668823242187, 0.06278963088989258, 0.06269337463378906, 0.0628056640625, 0.06295695877075196, 0.06284384155273437, 0.06281795120239257, 0.06277155303955079, 0.06258483123779297, 0.06310518264770508, 0.06265372848510742, 
0.06231711959838867, 0.06251929473876953, 0.062287200927734376, 0.06242355346679687, 0.062416702270507815, 0.06248483276367187, 0.062740478515625, 0.06252953720092773, 0.06231449508666992, 0.06223180770874023, 0.06230435180664062, 0.0627344970703125, 0.06274054336547852, 0.06266518402099609, 0.0627421760559082, 0.06252988815307617, 0.06260451126098633, 0.062488414764404296, 0.06236662292480469, 0.062422271728515624, 0.06280476760864258, 0.06304111862182617, 0.06304764938354492, 0.06302102279663085, 0.0627204475402832, 0.062461952209472656, 0.06250086212158203, 0.06234483337402344, 0.06247257614135742, 0.06258428955078126, 0.06279363250732421, 0.06295971298217773, 0.0624686393737793, 0.06277088165283203, 0.06257430267333984, 0.0625547218322754, 0.0627476806640625, 0.06265472030639649, 0.06250979232788086, 0.06255567932128907, 0.06261759948730469, 0.06296134567260742, 0.06260406494140625, 0.06279116821289063, 0.06278927993774414, 0.06263040161132813, 0.0630071029663086, 0.06263123321533202, 0.06268175888061524, 0.06276265716552734, 0.06282070541381836, 0.06276857757568359, 0.06290899276733398, 0.06280944061279296, 0.06292752075195313, 0.06318694305419922, 0.06268928146362304, 0.06277260971069336, 0.06304412841796875, 0.06288790512084962, 0.06301507186889649]",tokens/s,15.986079390112998,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1054, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 856, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 596, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 426, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 955, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 707, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 707, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 504, in __init__ self.self_attn = MISTRAL_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 275, in __init__ super().__init__(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 196, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 162, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in 
range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", 
line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 164, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained 
model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", 
line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1141, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 944, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 677, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 500, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 162, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = 
Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 982, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 555, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 312, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 219, in 
__init__ self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 
1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1054, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 856, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 596, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 426, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1522, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1613, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpbsd9hq6k/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1068, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 634, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 230, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, 
in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.472064,14274.199552,0.0,13878.951936,13865.632768,s,1,7.765259765625,7.765259765625,0.0,7.765259765625,7.765259765625,7.765259765625,7.765259765625,[7.765259765625],,kWh,1.3779381262509106e-05,1.4764872093362495e-06,7.662228351995592e-06,2.2918096823840947e-05,,MB,1187.766272,14697.824256,0.0,14289.993728,14241.298944,s,10,1.9902919158935548,0.19902919158935545,0.005375349645325746,0.20046517944335937,0.20273565673828123,0.20393392181396483,0.2048925338745117,"[0.18456034851074218, 0.19604205322265625, 0.19803517150878908, 0.1994424591064453, 0.2007923126220703, 0.20222332763671874, 0.20013804626464843, 0.20246937561035155, 0.20513218688964843, 0.20145663452148438]",tokens/s,1286.2434799423236,kWh,5.892661629166924e-06,6.496177779984675e-07,3.9293142545601675e-06,1.047159366172556e-05,tokens/kWh,24447090.697922964,MB,1210.658816,14865.596416,0.0,14457.765888,14413.156352,s,10,41.13065380859375,4.113065380859375,0.008199940178961002,4.114877197265625,4.122566162109375,4.123821899414062,4.124826489257813,"[4.101380859375, 4.10262841796875, 4.10452734375, 4.10723095703125, 4.11317529296875, 4.1165791015625, 4.11658154296875, 4.121185546875, 4.122287109375, 4.12507763671875]",tokens/s,15.317043170083748,kWh,0.00012011495879833228,1.3249209875758396e-05,7.958091922023958e-05,0.00021294508789433027,tokens/kWh,295850.9192344577,,s,630,41.126564216613794,0.06528026066129171,0.00045338844554255027,0.06525511932373046,0.06575869522094727,0.06594753646850586,0.06722484588623047,"[0.06688768005371094, 0.0653589096069336, 0.06472592163085937, 0.0646266860961914, 0.06463184356689453, 0.06459696197509765, 0.06469532775878906, 0.06461334228515625, 0.06452194976806641, 0.0645348129272461, 0.06508707427978516, 0.06486466979980468, 0.06461350250244141, 0.06462886047363281, 0.06497321319580078, 0.0652353286743164, 0.06527587127685547, 0.065017822265625, 0.06500313568115235, 0.06471926116943359, 0.06495184326171875, 0.06478665924072266, 0.06488703918457031, 0.0648458251953125, 0.06476387023925781, 0.06487042999267578, 0.06487654113769531, 0.06481430053710938, 0.06491593933105469, 0.06508985900878907, 0.06543974304199218, 0.0653148193359375, 0.06519344329833984, 0.06514332580566407, 0.06540220642089843, 0.06521878051757812, 0.06534803009033203, 0.06488880157470703, 0.06495030212402343, 0.06569369506835937, 0.06514482879638672, 0.06507724761962891, 0.06527356719970703, 0.06526595306396485, 0.06537529754638671, 0.06530758666992187, 0.06532918548583984, 0.06528809356689454, 0.06539884948730469, 
0.06524720001220703, 0.06525257873535156, 0.06532908630371094, 0.06523792266845703, 0.0654130859375, 0.06503977966308594, 0.06519667053222657, 0.065091552734375, 0.0650785903930664, 0.06526412963867187, 0.06522393798828124, 0.06533010864257813, 0.06534143829345704, 0.0655376968383789, 0.06706604766845703, 0.0655789794921875, 0.06449359893798828, 0.06445193481445312, 0.06458777618408203, 0.06454319763183594, 0.0646362533569336, 0.06454262542724609, 0.06444127655029297, 0.06461849975585937, 0.06464511871337891, 0.06460415649414063, 0.06521218872070313, 0.06493001556396484, 0.06509772491455078, 0.06539266967773437, 0.06538851165771484, 0.06515302276611327, 0.06483660888671874, 0.06467276763916016, 0.06472492980957031, 0.06475167846679687, 0.06474931335449219, 0.06476534271240235, 0.0649277114868164, 0.06512115478515625, 0.06497689819335938, 0.06477401733398437, 0.06499894714355468, 0.0650451202392578, 0.06520829010009765, 0.06542131042480469, 0.06580429077148438, 0.06508748626708985, 0.06525468444824219, 0.06502473449707032, 0.06510591888427734, 0.0649233627319336, 0.06504889678955078, 0.06507929229736328, 0.06504035186767577, 0.06524269104003906, 0.06511660766601562, 0.06510358428955078, 0.0651833953857422, 0.06536851501464844, 0.06554803466796875, 0.06540684509277343, 0.0655038070678711, 0.06531276702880859, 0.06527740478515626, 0.06528797149658203, 0.06524390411376953, 0.06513452911376953, 0.06530368041992188, 0.06523385620117188, 0.0651878433227539, 0.065506591796875, 0.06527980804443359, 0.06539907073974609, 0.06554483032226563, 0.0657940444946289, 0.06553600311279296, 0.06737567901611329, 0.06539405059814453, 0.06488127899169922, 0.0645711669921875, 0.0648911361694336, 0.06461030578613282, 0.0646121597290039, 0.06468624114990235, 0.0646176986694336, 0.06475615692138671, 0.06463299560546874, 0.0644935073852539, 0.06460838317871094, 0.06470873260498047, 0.06521446228027344, 0.06551760101318359, 0.06529430389404296, 0.06512985229492188, 0.0649591064453125, 0.06519308471679687, 0.06478265380859374, 0.06483411407470703, 0.06483763122558593, 0.06497478485107422, 0.0647496337890625, 0.06488835144042969, 0.06473571014404297, 0.06473113250732422, 0.06510797119140625, 0.06515007781982422, 0.06530258941650391, 0.06550611114501953, 0.06550732421875, 0.06520809936523438, 0.06514089965820312, 0.06483116912841796, 0.06506739044189454, 0.06518169403076172, 0.06491340637207031, 0.06493593597412109, 0.06508099365234375, 0.06481308746337891, 0.06491986846923828, 0.0650890884399414, 0.06516297912597656, 0.0655101089477539, 0.06561532592773438, 0.06540956878662109, 0.06574479675292969, 0.06530262756347656, 0.06517056274414063, 0.06522073364257812, 0.06521910095214843, 0.06528594970703125, 0.06535004425048828, 0.06526499176025391, 0.06539126586914062, 0.06515711975097656, 0.06522390747070313, 0.0655565414428711, 0.0665013427734375, 0.06575862121582031, 0.06585814666748047, 0.06715280151367188, 0.06544179534912109, 0.06503628540039062, 0.06464102172851563, 0.0646737289428711, 0.06466764831542969, 0.06465340423583985, 0.06470150756835938, 0.06468685150146485, 0.06462592315673828, 0.06469110107421874, 0.06471820831298829, 0.06480140686035156, 0.06497702026367187, 0.06532406616210938, 0.06555734252929687, 0.06548252868652343, 0.06522207641601563, 0.06510262298583984, 0.06478230285644532, 0.06479440307617187, 0.06468019104003907, 0.06480076599121094, 0.0648740463256836, 0.06481881713867188, 0.06490767669677734, 0.06485443115234375, 0.06513664245605469, 0.06508287811279297, 0.06518985748291016, 
0.06558979034423829, 0.06535987091064453, 0.0654028778076172, 0.06580633544921875, 0.0653985595703125, 0.06500784301757813, 0.06503823852539062, 0.06486605072021484, 0.06498544311523438, 0.06499132537841797, 0.06516726684570312, 0.06509363555908203, 0.06518169403076172, 0.06511980438232422, 0.06542176055908203, 0.06553314971923828, 0.06557366180419921, 0.06539398193359375, 0.065635009765625, 0.06539059448242188, 0.06529222106933594, 0.06523091125488281, 0.0652465591430664, 0.06516598510742187, 0.0652262420654297, 0.06542582702636719, 0.06529430389404296, 0.06525555419921875, 0.06548617553710938, 0.06556237030029297, 0.06580521392822265, 0.06560972595214844, 0.06624050903320312, 0.06744220733642578, 0.06557148742675781, 0.06496630096435548, 0.0646654052734375, 0.06454496002197266, 0.06466012573242187, 0.06463203430175782, 0.06473369598388672, 0.06467929840087891, 0.06479964447021484, 0.06580633544921875, 0.06494617462158203, 0.06483753967285157, 0.06493193817138672, 0.06500064086914062, 0.06536585235595703, 0.06531747436523437, 0.06526374053955078, 0.06513279724121093, 0.06482943725585938, 0.06487245178222656, 0.06498099517822266, 0.06481919860839844, 0.06488473510742188, 0.0649318389892578, 0.06490716552734375, 0.06483891296386719, 0.06512521362304688, 0.06495027160644531, 0.0652735366821289, 0.06539910125732422, 0.06547014617919922, 0.06551570892333984, 0.06560781097412109, 0.06541311645507812, 0.06504857635498047, 0.06523280334472656, 0.06510128021240234, 0.06515776062011719, 0.0652081298828125, 0.06574713897705078, 0.06526361846923828, 0.0653755874633789, 0.06538009643554688, 0.06562438201904297, 0.06562060546875, 0.06575305938720703, 0.06556671905517578, 0.06570393371582031, 0.06556192016601563, 0.0653790054321289, 0.06529369354248046, 0.06561219024658203, 0.06528227233886719, 0.06577356719970703, 0.0653864974975586, 0.06535753631591797, 0.06555471801757813, 0.06574694061279297, 0.06564371490478516, 0.06572319793701172, 0.06572637176513672, 0.0657652130126953, 0.06747750091552734, 0.06561996459960938, 0.06490876770019531, 0.06489888000488281, 0.06483017730712891, 0.06473932647705079, 0.06481510162353515, 0.06484786987304687, 0.06470188903808594, 0.06469058990478516, 0.06496012878417969, 0.06480732727050781, 0.06488451385498047, 0.06496806335449219, 0.06563645172119141, 0.06619840240478515, 0.06566502380371093, 0.06528585815429687, 0.0651328353881836, 0.06496988677978516, 0.06477664184570313, 0.06488105773925781, 0.06492070770263672, 0.0649716796875, 0.06507068634033203, 0.06515545654296875, 0.06492313385009765, 0.06508780670166016, 0.06519417572021484, 0.06551065826416015, 0.06601321411132813, 0.06557360076904296, 0.06541926574707031, 0.06537014770507812, 0.06519350433349609, 0.06511251068115234, 0.06536809539794922, 0.065244384765625, 0.06512102508544922, 0.06525718688964843, 0.06529164886474609, 0.06518415832519531, 0.06531737518310547, 0.06520012664794922, 0.06549504089355469, 0.06563024139404297, 0.06569945526123047, 0.06567155456542968, 0.06583293151855468, 0.0653680648803711, 0.06572646331787109, 0.065617919921875, 0.0655257568359375, 0.06546227264404297, 0.06581247711181641, 0.06568946838378906, 0.06548492431640625, 0.06545986938476563, 0.06546435546875, 0.06545030212402343, 0.06598451232910156, 0.06579750061035156, 0.06576396942138672, 0.0672542724609375, 0.0656096954345703, 0.06496192169189453, 0.06472767639160157, 0.06465126037597656, 0.06480451202392579, 0.06479087829589844, 0.06474092864990234, 0.06479837036132813, 0.06467174530029297, 0.06484166717529297, 
0.06471670532226563, 0.06477426910400391, 0.06474396514892578, 0.06559734344482422, 0.065474365234375, 0.06551404571533204, 0.0652452163696289, 0.0653967056274414, 0.06493132781982422, 0.0651514892578125, 0.06501286315917969, 0.06515532684326172, 0.0650244140625, 0.06516553497314453, 0.06494822692871094, 0.06515449523925781, 0.0650505599975586, 0.06535395050048828, 0.06574956512451172, 0.0657113265991211, 0.06560157012939453, 0.06572707366943359, 0.06553097534179687, 0.06549801635742188, 0.0652943344116211, 0.06530035400390626, 0.06513062286376953, 0.06535686492919922, 0.06528070068359375, 0.06540438079833985, 0.06518863677978516, 0.06537792205810547, 0.06551181030273437, 0.06573868560791016, 0.06567123413085937, 0.06559334564208984, 0.06573670196533203, 0.06581238555908203, 0.06545801544189453, 0.06527922821044922, 0.06514176177978516, 0.06545315551757812, 0.0652747802734375, 0.06555033874511719, 0.06536969757080079, 0.0655294418334961, 0.06544255828857422, 0.06558060455322266, 0.06575081634521485, 0.0658746566772461, 0.06610908508300781, 0.0657899169921875, 0.06775190734863282, 0.06590262603759765, 0.06522672271728516, 0.06472499084472656, 0.064753662109375, 0.06484130859375, 0.06490902709960937, 0.06479328155517577, 0.06489907073974609, 0.06527177429199219, 0.06489295959472656, 0.06478972625732422, 0.06481158447265625, 0.06518169403076172, 0.06533519744873047, 0.0654830093383789, 0.06574457550048828, 0.06567769622802734, 0.06541417694091797, 0.06509164428710937, 0.06494636535644531, 0.0650159683227539, 0.06520480346679687, 0.06508544158935547, 0.06519580841064453, 0.06513072204589844, 0.0650505599975586, 0.06538041687011718, 0.06518716430664062, 0.06530319976806641, 0.06554217529296875, 0.0657828140258789, 0.06559142303466797, 0.06582701110839843, 0.0653359375, 0.06535987091064453, 0.06640435028076172, 0.06528147125244141, 0.06522528076171875, 0.06535561370849609, 0.06511756896972656, 0.06566172790527344, 0.06533529663085938, 0.06544300842285156, 0.06554300689697265, 0.06544380950927735, 0.06611148834228515, 0.06583881378173828, 0.06558134460449219, 0.06561795043945312, 0.06554569244384766, 0.06561023712158204, 0.06540624237060547, 0.06537904357910156, 0.06547049713134766, 0.06545760345458984, 0.06544233703613281, 0.06537830352783203, 0.06542950439453125, 0.06567526245117188, 0.0656527328491211, 0.06594319915771485, 0.06604428863525391, 0.0673634262084961, 0.06581206512451172, 0.06515158081054688, 0.06481100463867187, 0.06483708953857421, 0.06468617248535156, 0.06494214630126953, 0.06487197113037109, 0.06490402984619141, 0.0654233627319336, 0.06484105682373047, 0.06487110137939453, 0.06503347015380859, 0.06497917175292969, 0.0651817626953125, 0.06543389129638671, 0.0657020492553711, 0.06557449340820312, 0.06529065704345703, 0.06503628540039062, 0.064997314453125, 0.06498297882080079, 0.0650466537475586, 0.06523289489746094, 0.06522182464599609, 0.06523372650146485, 0.06532278442382812, 0.0650071029663086, 0.06498377227783203, 0.0655175323486328, 0.06573638153076172, 0.06569760131835937, 0.06574748992919922, 0.06573465728759766, 0.06554214477539062, 0.06538444519042969, 0.06534963226318359, 0.0651325454711914, 0.06543292999267578, 0.06554051208496094, 0.06532470703125, 0.0654546890258789, 0.06535987091064453, 0.06573465728759766, 0.06547993469238281, 0.06552243041992188, 0.06599593353271484, 0.06598278045654297, 0.06605007934570313, 0.06588262176513672, 0.06598860931396484, 0.06574899291992188, 0.06547660827636718, 0.06551532745361328, 0.06557328033447266, 0.0655398712158203, 
0.06539469146728516, 0.06532466888427735, 0.06553826904296875, 0.06557097625732422, 0.06604393768310547, 0.0657735366821289, 0.06596598052978515, 0.06756524658203125, 0.06577455902099609, 0.0651182098388672, 0.06502371215820313, 0.0649136962890625, 0.06488790130615234, 0.06487133026123047, 0.065074462890625, 0.06495919799804688, 0.06495846557617188, 0.06493798065185546, 0.06492774200439454, 0.06509158325195312, 0.06503977966308594, 0.06561251068115234, 0.06659059143066406, 0.06583296203613281, 0.065512451171875, 0.06530764770507813, 0.06511366271972656, 0.06510636901855468, 0.0654028778076172, 0.06491913604736328, 0.06517504119873047, 0.06513062286376953, 0.06566095733642578, 0.06507997131347656, 0.06512387084960937, 0.06542189025878906, 0.06552783966064453, 0.06614422607421876, 0.06560959625244141, 0.06575936126708984, 0.0655946273803711, 0.06543846130371093, 0.06510387420654297, 0.06534143829345704, 0.0652943344116211, 0.06520003509521484, 0.06519391632080078, 0.0654109115600586, 0.06519609832763672, 0.06540927886962891, 0.06536713409423828, 0.06594239807128906, 0.06594489288330078, 0.06595855712890625, 0.06581584167480468, 0.06581123352050781, 0.06551142120361328, 0.06539059448242188, 0.0654172134399414, 0.06541516876220703, 0.06560562896728515, 0.06551145935058594, 0.0654620132446289, 0.06562019348144531, 0.06601113891601562, 0.06582886505126953, 0.06568345642089844, 0.06599680328369141, 0.06605619049072266, 0.06594969940185547]",tokens/s,15.318566284355471,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,836.231168,1903.099904,0.0,1507.852288,1469.840384,s,1,7.52754248046875,7.52754248046875,0.0,7.52754248046875,7.52754248046875,7.52754248046875,7.52754248046875,[7.52754248046875],,kWh,9.503668124996995e-06,1.0401331762255093e-06,3.1658358660008323e-06,1.3709637167223338e-05,,MB,1226.58816,1945.042944,0.0,1537.212416,1426.272256,s,10,0.27097811317443843,0.02709781131744384,0.0003786107303017915,0.027143296241760255,0.027483865928649903,0.02757210931777954,0.02764270402908325,"[0.02708073616027832, 0.027005279541015625, 0.026945632934570314, 0.026169376373291017, 0.026936735153198242, 0.02766035270690918, 0.02728428840637207, 0.027205856323242187, 0.02722559928894043, 0.027464256286621094]",tokens/s,9447.257455630874,kWh,7.997867326598076e-07,8.820181491284647e-08,5.278565866685053e-07,1.4158451342411593e-06,tokens/kWh,180810735.44615212,MB,1255.46496,1955.528704,0.0,1547.698176,1426.274816,s,10,14.865883300781249,1.4865883300781249,0.022758679453859385,1.4982611694335937,1.5034682861328124,1.5080211303710938,1.5116634057617186,"[1.4967606201171875, 1.4830277099609375, 1.4520919189453125, 1.438096435546875, 1.50245654296875, 1.512573974609375, 1.500075927734375, 1.5007305908203126, 1.49976171875, 
1.480307861328125]",tokens/s,42.37891467686226,kWh,4.2963057214423454e-05,4.738280142252147e-06,2.0673327040931977e-05,6.837466439760757e-05,tokens/kWh,921393.9191517899,,s,630,14.863477323532083,0.023592821148463658,0.0005836044082770866,0.023636879920959475,0.02402796497344971,0.024213948917388917,0.025956943264007576,"[0.02380396842956543, 0.02381167984008789, 0.023557727813720702, 0.023343296051025392, 0.02356902313232422, 0.02336511993408203, 0.0234901123046875, 0.02411516761779785, 0.023535648345947267, 0.023407552719116213, 0.02365964889526367, 0.023612287521362303, 0.023468032836914062, 0.02350284767150879, 0.02510652732849121, 0.02385296058654785, 0.024130975723266602, 0.02401340866088867, 0.02364825630187988, 0.02354694366455078, 0.02357548713684082, 0.0235231990814209, 0.02374176025390625, 0.0236429443359375, 0.023686975479125977, 0.023616863250732423, 0.023540576934814452, 0.023545856475830077, 0.027241952896118166, 0.02977574348449707, 0.02368783950805664, 0.023632959365844728, 0.023755584716796875, 0.023781280517578125, 0.02357788848876953, 0.023829439163208007, 0.024065120697021485, 0.023741344451904296, 0.02341587257385254, 0.02346659278869629, 0.02344585609436035, 0.023464128494262694, 0.02360704040527344, 0.023590335845947264, 0.023494688034057618, 0.023583616256713867, 0.023550687789916994, 0.023415487289428712, 0.023342815399169922, 0.023204448699951172, 0.023337055206298828, 0.02344108772277832, 0.023666559219360353, 0.023576927185058594, 0.023317600250244142, 0.023651519775390626, 0.02339596748352051, 0.023322368621826173, 0.023351648330688476, 0.023317760467529296, 0.023206655502319335, 0.023297088623046875, 0.023542720794677733, 0.023474336624145508, 0.02344918441772461, 0.023247039794921875, 0.023474239349365236, 0.023444799423217772, 0.023517887115478517, 0.023377727508544922, 0.023298240661621093, 0.023475807189941408, 0.02336502456665039, 0.023376895904541017, 0.023472127914428712, 0.02368716812133789, 0.0234105281829834, 0.023914655685424804, 0.023959775924682618, 0.02378489685058594, 0.023439712524414062, 0.023349407196044922, 0.02360918426513672, 0.02393087959289551, 0.02349459266662598, 0.023458911895751954, 0.0233973445892334, 0.02371513557434082, 0.023389888763427735, 0.02345881652832031, 0.02343462371826172, 0.023675519943237303, 0.02333251190185547, 0.02329840087890625, 0.023619583129882812, 0.023445407867431642, 0.023549280166625976, 0.023488704681396484, 0.023410303115844727, 0.023566591262817384, 0.023802528381347655, 0.02349270439147949, 0.023351104736328124, 0.02369139289855957, 0.023855104446411132, 0.02337126350402832, 0.023773311614990234, 0.023439712524414062, 0.023996448516845702, 0.02345779228210449, 0.023513088226318358, 0.023468032836914062, 0.023365631103515624, 0.023500799179077148, 0.023600959777832033, 0.023433216094970705, 0.023655872344970703, 0.023603967666625977, 0.023349248886108398, 0.023858463287353516, 0.023343807220458986, 0.02329360008239746, 0.02428351974487305, 0.023580768585205077, 0.023627487182617188, 0.023293567657470704, 0.023424383163452148, 0.023358079910278322, 0.02338809585571289, 0.023304256439208984, 0.023494239807128905, 0.02319561576843262, 0.022937536239624023, 0.02311961555480957, 0.02314931106567383, 0.023158784866333007, 0.023568384170532225, 0.023158784866333007, 0.02304355239868164, 0.023231008529663085, 0.023225919723510742, 0.022958528518676757, 0.022872064590454103, 0.022906879425048828, 0.02306252861022949, 0.02305638313293457, 0.022957632064819336, 0.022868415832519532, 0.023344736099243164, 
0.022835552215576174, 0.022761663436889647, 0.022859647750854493, 0.022740352630615236, 0.022781631469726563, 0.022829151153564452, 0.023085760116577148, 0.02274857521057129, 0.022839487075805662, 0.02299347114562988, 0.023170848846435547, 0.024883424758911133, 0.02627174377441406, 0.023320608139038086, 0.0229703369140625, 0.022971839904785157, 0.022968896865844725, 0.022923263549804687, 0.02289459228515625, 0.02289254379272461, 0.022853631973266602, 0.022734848022460938, 0.02291868782043457, 0.023126495361328124, 0.023019519805908203, 0.0229552001953125, 0.022846271514892578, 0.022711872100830078, 0.02277737617492676, 0.02286207962036133, 0.022672031402587892, 0.02272051239013672, 0.02255286407470703, 0.022695295333862303, 0.02258710479736328, 0.022605791091918945, 0.022626720428466796, 0.022757631301879883, 0.02264678382873535, 0.022691839218139647, 0.023012928009033203, 0.023002559661865235, 0.022960159301757814, 0.022600671768188477, 0.02266873550415039, 0.02268115234375, 0.02274844741821289, 0.022660415649414064, 0.02259190368652344, 0.022719648361206053, 0.022703968048095702, 0.022700544357299804, 0.022667776107788085, 0.022681440353393555, 0.0227923526763916, 0.02313145637512207, 0.022983360290527343, 0.02294112014770508, 0.022695999145507812, 0.022753791809082033, 0.022601728439331056, 0.022462495803833006, 0.022708192825317382, 0.02264860725402832, 0.0225795841217041, 0.022681440353393555, 0.023358783721923827, 0.022681343078613282, 0.022647743225097657, 0.022564863204956053, 0.022607872009277344, 0.022898687362670898, 0.0227259521484375, 0.022770368576049804, 0.022597631454467772, 0.02272870445251465, 0.02268694305419922, 0.02268035125732422, 0.022648223876953123, 0.022700639724731447, 0.02265088081359863, 0.022740896224975587, 0.022775583267211914, 0.022657567977905274, 0.022820640563964843, 0.022841344833374022, 0.022795648574829103, 0.022675775527954103, 0.022747264862060548, 0.022661312103271485, 0.022627967834472656, 0.02261030387878418, 0.022895904541015626, 0.023030496597290038, 0.022775423049926757, 0.023173503875732422, 0.023169023513793945, 0.023228416442871092, 0.02339142417907715, 0.023538496017456053, 0.023602336883544923, 0.023401504516601564, 0.023366655349731445, 0.02361759948730469, 0.023513248443603516, 0.023361824035644532, 0.02356777572631836, 0.023419071197509765, 0.023277856826782226, 0.023377471923828125, 0.023607423782348633, 0.02346028709411621, 0.023392255783081056, 0.024458751678466797, 0.024061920166015625, 0.023828128814697265, 0.023688064575195313, 0.02405177688598633, 0.02386636734008789, 0.023912384033203126, 0.023802303314208986, 0.02371785545349121, 0.023806560516357423, 0.024253856658935546, 0.024481727600097657, 0.02376969528198242, 0.02400467109680176, 0.024172544479370117, 0.023883775711059572, 0.023725824356079103, 0.02386115264892578, 0.02386390495300293, 0.0239552001953125, 0.023776416778564454, 0.023845727920532227, 0.02385411262512207, 0.0240644474029541, 0.024154655456542967, 0.023975328445434572, 0.023751264572143556, 0.024221696853637696, 0.023965696334838867, 0.023829919815063477, 0.02401750373840332, 0.023805536270141602, 0.023658912658691408, 0.02378156852722168, 0.023747776031494142, 0.023741056442260742, 0.023883775711059572, 0.023734272003173826, 0.024414207458496092, 0.023793664932250977, 0.023832576751708984, 0.023859199523925782, 0.024335615158081053, 0.023847679138183593, 0.024025087356567384, 0.023828479766845705, 0.023926015853881835, 0.02368569564819336, 0.02379385566711426, 0.02381817626953125, 0.023748672485351563, 
0.023848960876464844, 0.023837823867797852, 0.02399292755126953, 0.023832576751708984, 0.02473936080932617, 0.023992799758911134, 0.023961599349975587, 0.025796607971191408, 0.026394880294799805, 0.023875328063964845, 0.023956703186035155, 0.023896831512451172, 0.024010784149169923, 0.023879680633544922, 0.02397929573059082, 0.02368342399597168, 0.023861631393432618, 0.024786943435668944, 0.02412748718261719, 0.024246271133422852, 0.023905696868896483, 0.023730783462524413, 0.023792736053466795, 0.023800479888916017, 0.024037023544311524, 0.023751264572143556, 0.023769088745117187, 0.023773183822631837, 0.02498703956604004, 0.0238656005859375, 0.023748319625854494, 0.02376460838317871, 0.02387164878845215, 0.02442326354980469, 0.023905664443969726, 0.02375459289550781, 0.024118047714233398, 0.023975967407226562, 0.023770816802978517, 0.023647680282592773, 0.023802719116210937, 0.023908447265625, 0.023885631561279298, 0.02372003173828125, 0.02388364791870117, 0.02392025566101074, 0.02443247985839844, 0.02390902328491211, 0.023973888397216796, 0.02406399917602539, 0.02401417541503906, 0.02381439971923828, 0.023856639862060547, 0.02388675117492676, 0.023989376068115235, 0.023817087173461916, 0.02392985534667969, 0.02369024085998535, 0.023897632598876953, 0.0237607364654541, 0.023695999145507813, 0.023750656127929686, 0.02372403144836426, 0.023658496856689453, 0.02391209602355957, 0.024124895095825195, 0.02368476867675781, 0.023908832550048827, 0.023756256103515627, 0.023681856155395507, 0.02371552085876465, 0.023728288650512696, 0.02385772705078125, 0.023678655624389647, 0.023793119430541992, 0.023724607467651367, 0.023769088745117187, 0.023967744827270508, 0.023666688919067383, 0.0237076473236084, 0.023582719802856447, 0.02364959907531738, 0.02359980773925781, 0.023559520721435547, 0.02369193649291992, 0.023595008850097656, 0.02371583938598633, 0.02418832015991211, 0.023736223220825196, 0.023714656829833983, 0.023805183410644533, 0.02364681625366211, 0.023859199523925782, 0.023990335464477538, 0.02360099220275879, 0.024791135787963867, 0.02373347282409668, 0.023851423263549804, 0.023779712677001952, 0.02390630340576172, 0.024115135192871093, 0.02375004768371582, 0.023571199417114257, 0.023725984573364257, 0.02361903953552246, 0.023532352447509765, 0.024018911361694335, 0.023826175689697266, 0.023613439559936524, 0.023641151428222658, 0.023501632690429687, 0.023709823608398437, 0.02369945526123047, 0.023567487716674804, 0.023815040588378907, 0.02367283248901367, 0.02364735984802246, 0.02370256042480469, 0.023819456100463866, 0.025094816207885742, 0.024528287887573243, 0.02444758415222168, 0.023905311584472656, 0.02373756790161133, 0.023670495986938475, 0.023705631256103515, 0.023642112731933593, 0.023791616439819335, 0.024204479217529298, 0.02398912048339844, 0.023961631774902344, 0.024047584533691407, 0.023826431274414063, 0.023842815399169923, 0.02392268753051758, 0.023850944519042967, 0.023760927200317382, 0.02358428764343262, 0.02390399932861328, 0.023876352310180662, 0.024778751373291014, 0.023859199523925782, 0.0235885124206543, 0.02363632011413574, 0.023678495407104493, 0.023556447982788085, 0.023592447280883787, 0.02378550338745117, 0.02403571128845215, 0.023742176055908202, 0.023572799682617187, 0.023595199584960938, 0.023586816787719726, 0.023532575607299804, 0.023542688369750975, 0.023748319625854494, 0.023719648361206054, 0.023753343582153322, 0.023615488052368162, 0.023762943267822266, 0.023664287567138672, 0.024021343231201173, 0.02405990409851074, 0.024401792526245115, 
0.02395123291015625, 0.0240948486328125, 0.02412371253967285, 0.023885631561279298, 0.02373222351074219, 0.023801759719848634, 0.023656543731689454, 0.024112415313720704, 0.02360393524169922, 0.023788768768310545, 0.02351388740539551, 0.023805952072143553, 0.023842815399169923, 0.023940128326416017, 0.023688095092773438, 0.023637407302856444, 0.023588544845581056, 0.023643104553222657, 0.024147968292236328, 0.023852415084838867, 0.023682720184326173, 0.0236408634185791, 0.02378976058959961, 0.02362303924560547, 0.0238209285736084, 0.023957504272460937, 0.02395955276489258, 0.0236810245513916, 0.023545856475830077, 0.023494272232055663, 0.023736320495605468, 0.02362214469909668, 0.023653472900390625, 0.023636768341064453, 0.023767040252685546, 0.02367692756652832, 0.023699296951293945, 0.02346771240234375, 0.023486080169677733, 0.024060768127441408, 0.023793664932250977, 0.02363916778564453, 0.023571008682250975, 0.024027456283569337, 0.023732383728027343, 0.023652191162109374, 0.02360758399963379, 0.02352262306213379, 0.024024959564208984, 0.023632415771484373, 0.023662591934204103, 0.023576576232910155, 0.023858335494995116, 0.024064863204956054, 0.023863296508789062, 0.023748287200927733, 0.023608671188354493, 0.023623712539672853, 0.02384787178039551, 0.024151391983032226, 0.023748672485351563, 0.026085407257080077, 0.02425503921508789, 0.023908607482910155, 0.023947008132934572, 0.023666688919067383, 0.024032543182373047, 0.024097471237182616, 0.0240230712890625, 0.02376911926269531, 0.023805919647216796, 0.02370560073852539, 0.02384486389160156, 0.024588287353515623, 0.02372812843322754, 0.023744512557983398, 0.023737600326538086, 0.02363612747192383, 0.02351900863647461, 0.02364044761657715, 0.024152511596679686, 0.02392803192138672, 0.023712543487548827, 0.023752479553222655, 0.023643423080444335, 0.023544960021972657, 0.023754560470581054, 0.023711103439331055, 0.023511680603027343, 0.023584735870361327, 0.023821855545043947, 0.023706079483032227, 0.02367068862915039, 0.02369955253601074, 0.023834623336791993, 0.023639072418212892, 0.023464927673339842, 0.023467775344848632, 0.023576831817626952, 0.023395519256591796, 0.026346176147460938, 0.023564224243164063, 0.023417024612426757, 0.023599103927612306, 0.023636991500854493, 0.02353219223022461, 0.023234912872314453, 0.0232421760559082, 0.02321788787841797, 0.023192096710205078, 0.023291263580322265, 0.023120832443237305, 0.023240703582763672, 0.02328985595703125, 0.02335651206970215, 0.02305423927307129, 0.02306255912780762, 0.02321219253540039, 0.023313215255737305, 0.02342911911010742, 0.023404544830322265, 0.023259136199951173, 0.0233240966796875, 0.023107263565063478, 0.023147392272949218, 0.023271135330200195, 0.023517471313476562, 0.02355200004577637, 0.023398143768310547, 0.02325119972229004, 0.023245920181274415, 0.02339455986022949, 0.02381599998474121, 0.024038240432739257, 0.02359065628051758, 0.023439584732055666, 0.02322435188293457, 0.02311369514465332, 0.024137216567993162, 0.026022432327270507, 0.02377903938293457, 0.023736671447753908, 0.02422761535644531, 0.023486015319824218, 0.023419296264648438, 0.023162912368774415, 0.023154783248901366, 0.02314022445678711, 0.023054176330566407, 0.02299465560913086, 0.023007328033447266, 0.02305622482299805, 0.023001792907714844]",tokens/s,42.38577462641084,, 
float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 582, in __init__ self.transformer = CodeGenModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 400, in __init__ self.h = nn.ModuleList([CodeGenBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 400, in self.h = nn.ModuleList([CodeGenBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 258, in __init__ self.mlp = CodeGenMLP(inner_dim, config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 236, in __init__ self.fc_in = nn.Linear(embed_dim, intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,843.194368,12640.518144,0.0,12245.270528,12234.440192,s,1,7.641587890625,7.641587890625,0.0,7.641587890625,7.641587890625,7.641587890625,7.641587890625,[7.641587890625],,kWh,1.3793814154164843e-05,1.512477956816801e-06,6.774172085997551e-06,2.2080464196979194e-05,,MB,1152.118784,12925.730816,0.0,12517.900288,12440.744448,s,10,1.8887086944580078,0.18887086944580078,0.0031780540365539452,0.18875924682617187,0.19160263061523436,0.19316996765136718,0.19442383728027343,"[0.18195571899414062, 0.18829545593261718, 0.1900277099609375, 0.18892422485351562, 0.1861235809326172, 0.1881266632080078, 0.19125433349609375, 0.19066943359375, 0.18859426879882812, 0.1947373046875]",tokens/s,1355.4234210451543,kWh,5.656758292948766e-06,6.235645935288457e-07,3.7684218181538376e-06,1.004874470463145e-05,tokens/kWh,25475818.87337729,MB,1174.585344,12988.645376,0.0,12580.814848,12543.681024,s,10,36.67357470703126,3.667357470703125,0.006592214839360489,3.6670753173828126,3.675392626953125,3.677593408203125,3.679354033203125,"[3.655662109375, 3.6608779296875, 3.6623349609375, 3.66615576171875, 3.667674072265625, 3.668391357421875, 3.6664765625, 3.67130419921875, 3.674903564453125, 3.679794189453125]",tokens/s,17.17858171811141,kWh,0.000107243756794551,1.1829444516997074e-05,7.122219372984619e-05,0.00019029539504139423,tokens/kWh,331064.23824021517,,s,630,36.67032344436643,0.05820686261010548,0.0005326349760603359,0.058130367279052735,0.0585946159362793,0.05883475112915039,0.06157141201019287,"[0.0616734733581543, 0.05863423919677734, 0.05784972763061524, 0.05770662307739258, 0.057624576568603515, 0.057617568969726564, 0.05753273773193359, 0.05757804870605469, 0.05756480026245117, 0.057741630554199216, 0.057827487945556644, 0.057826751708984374, 0.05787897491455078, 0.057568862915039064, 0.05755945587158203, 0.057540607452392575, 0.058060832977294925, 0.058225822448730466, 0.05803606414794922, 0.05805721664428711, 0.05789334487915039, 0.057665313720703125, 0.057708415985107425, 0.057831424713134766, 0.0577724494934082, 0.057638240814208985, 0.05784870529174805, 0.05781884765625, 0.05777151870727539, 0.05781145477294922, 0.05788217544555664, 0.05788499069213867, 0.05808697509765625, 0.058490943908691403, 0.05812905502319336, 0.058234752655029295, 0.05814064025878906, 0.05808153533935547, 0.057945503234863284, 0.05794198226928711, 0.057885311126708985, 0.058000385284423826, 0.0578897590637207, 0.05812339019775391, 0.0579131851196289, 0.058012481689453124, 0.058137985229492185, 0.058095840454101565, 0.05795449447631836, 0.05808355331420898, 0.05812588882446289, 
0.05821488189697266, 0.058198177337646484, 0.05842105484008789, 0.05818982315063476, 0.05820415878295898, 0.05817958450317383, 0.05830854415893555, 0.05828607940673828, 0.05811820983886719, 0.05816729736328125, 0.05801574325561523, 0.058159103393554686, 0.06135603332519531, 0.05881856155395508, 0.0578416633605957, 0.057649150848388675, 0.05765529632568359, 0.05750697708129883, 0.05751007843017578, 0.057581600189208985, 0.05787881469726563, 0.05784998321533203, 0.05774153518676758, 0.057775711059570314, 0.05756707382202148, 0.05762217712402344, 0.05780368041992188, 0.05773926544189453, 0.05796393585205078, 0.058261566162109375, 0.05823542404174804, 0.05810383987426758, 0.05805385589599609, 0.05791206359863281, 0.05784521484375, 0.057858463287353515, 0.05776947021484375, 0.05791603088378906, 0.057891040802001956, 0.057966400146484375, 0.05798908615112305, 0.05797683334350586, 0.05801574325561523, 0.058011425018310546, 0.058046783447265625, 0.05829827117919922, 0.058298366546630856, 0.05848905563354492, 0.058240352630615236, 0.05824121475219726, 0.058396961212158205, 0.058135711669921875, 0.05802169418334961, 0.05827276611328125, 0.05820828628540039, 0.05797990417480469, 0.05795267105102539, 0.058194496154785155, 0.05815091323852539, 0.05809878540039062, 0.05804864120483398, 0.05812067031860352, 0.05827616119384765, 0.058324417114257815, 0.05850300979614258, 0.05853257751464844, 0.05838582229614258, 0.05826211166381836, 0.05826969528198242, 0.05825081634521485, 0.05815311813354492, 0.058105792999267575, 0.05817379379272461, 0.058175487518310545, 0.0582509765625, 0.061288478851318356, 0.05884662246704102, 0.0577562255859375, 0.057779743194580076, 0.0576475830078125, 0.057667423248291015, 0.057686302185058595, 0.05767750549316406, 0.05754604721069336, 0.05775040054321289, 0.05773311996459961, 0.05769625473022461, 0.05776793670654297, 0.05793791961669922, 0.05784371185302734, 0.057843742370605467, 0.058286048889160155, 0.05846550369262695, 0.05837424087524414, 0.058184383392333984, 0.05807923126220703, 0.05792486572265625, 0.05814080047607422, 0.057872608184814454, 0.05783388900756836, 0.05795782470703125, 0.05798380661010742, 0.057955265045166016, 0.05798937606811523, 0.058036800384521484, 0.05816128158569336, 0.05801504135131836, 0.058073665618896486, 0.05829571151733398, 0.05840342330932617, 0.058386016845703125, 0.05839846420288086, 0.05822127914428711, 0.058230720520019534, 0.05830656051635742, 0.058064479827880856, 0.05800592041015625, 0.05788195037841797, 0.05806707382202148, 0.058001953125, 0.05800703811645508, 0.05803641510009765, 0.05803852844238281, 0.05801910400390625, 0.058120990753173826, 0.058464256286621094, 0.05837004852294922, 0.05837619018554688, 0.058396129608154296, 0.05832553482055664, 0.058139999389648436, 0.058433727264404295, 0.05831727981567383, 0.05819801712036133, 0.058396671295166014, 0.05809561538696289, 0.05808745574951172, 0.05809968185424805, 0.061937248229980466, 0.058886463165283204, 0.05790934371948242, 0.057737342834472655, 0.05775347137451172, 0.05772675323486328, 0.05765987014770508, 0.05816239929199219, 0.05766403198242188, 0.05779632186889649, 0.05808566284179688, 0.058039905548095704, 0.05792809677124024, 0.0579090576171875, 0.058187839508056644, 0.057853214263916014, 0.0582309455871582, 0.0582171516418457, 0.05822895812988281, 0.05819369506835938, 0.05803945541381836, 0.05786710357666015, 0.057788448333740236, 0.057823200225830075, 0.05781449508666992, 0.05793804931640625, 0.05796905517578125, 0.058058303833007814, 0.05800377655029297, 
0.058011775970458986, 0.05803392028808594, 0.05801398468017578, 0.05798908615112305, 0.05843360137939453, 0.058247104644775394, 0.0583741455078125, 0.05818982315063476, 0.058191295623779296, 0.058186302185058596, 0.05840812683105469, 0.05827462387084961, 0.05813452911376953, 0.05814476776123047, 0.05810358428955078, 0.05801596832275391, 0.05831270217895508, 0.058114078521728514, 0.05805871963500977, 0.058133823394775394, 0.0581802864074707, 0.05827993774414063, 0.0584571533203125, 0.058373054504394534, 0.05859942245483398, 0.05828006362915039, 0.05865039825439453, 0.05836582565307617, 0.05834979248046875, 0.058322017669677734, 0.05820883178710937, 0.0583355827331543, 0.058308609008789064, 0.058353824615478514, 0.06163248062133789, 0.05907939147949219, 0.058413311004638674, 0.05795753479003906, 0.05793264007568359, 0.05788832092285156, 0.05788681411743164, 0.057938270568847657, 0.05790291213989258, 0.057914718627929684, 0.0578138542175293, 0.057775390625, 0.057747711181640626, 0.057811233520507814, 0.0580753288269043, 0.05834854507446289, 0.05808425521850586, 0.058314655303955076, 0.058380481719970706, 0.058340545654296874, 0.05800815963745117, 0.057971073150634767, 0.0579192008972168, 0.058131679534912106, 0.05806991958618164, 0.057929729461669924, 0.057995262145996096, 0.058060798645019535, 0.05798425674438477, 0.05794892883300781, 0.057903102874755856, 0.057987167358398435, 0.05805670547485352, 0.058149856567382814, 0.05824812698364258, 0.05852057647705078, 0.05824124908447265, 0.05825414276123047, 0.058138206481933595, 0.05839091110229492, 0.05828607940673828, 0.058226432800292965, 0.05800508880615234, 0.058120193481445315, 0.058038463592529295, 0.05821900939941406, 0.05804848098754883, 0.05807686233520508, 0.05805692672729492, 0.058310047149658206, 0.05832112121582031, 0.05838691329956055, 0.05820751953125, 0.05841340637207031, 0.05839091110229492, 0.058170528411865235, 0.05818454360961914, 0.058449790954589846, 0.05850284957885742, 0.05835558319091797, 0.05830227279663086, 0.05860547256469727, 0.058536575317382815, 0.06161423873901367, 0.05906713485717773, 0.057949695587158206, 0.0576847038269043, 0.057638687133789064, 0.057654529571533206, 0.057655071258544924, 0.05794611358642578, 0.057909854888916014, 0.05792601776123047, 0.0577760009765625, 0.057771678924560546, 0.057739742279052736, 0.05770153427124024, 0.057680736541748046, 0.057731071472167966, 0.05841100692749023, 0.05871206283569336, 0.05843548965454102, 0.05842339324951172, 0.05799718475341797, 0.05806041717529297, 0.057786880493164064, 0.05802598571777344, 0.05802188873291016, 0.05797884750366211, 0.05802384185791016, 0.05796486282348633, 0.057984832763671876, 0.058001407623291014, 0.058076351165771485, 0.05796851348876953, 0.05839763259887695, 0.05820137786865234, 0.05828003311157227, 0.05855705642700195, 0.0582902717590332, 0.05828303909301758, 0.058184288024902345, 0.058386848449707034, 0.05818966293334961, 0.058285953521728516, 0.05816361618041992, 0.058119937896728514, 0.058052223205566404, 0.05816972732543945, 0.05845196914672852, 0.058916862487792966, 0.05815929412841797, 0.05833504104614258, 0.058400032043457034, 0.05854844665527344, 0.05841542434692383, 0.05842963027954102, 0.05844521713256836, 0.05847715377807617, 0.058343425750732425, 0.05825235366821289, 0.058385345458984376, 0.058375457763671874, 0.05823766326904297, 0.05859123229980469, 0.05842943954467773, 0.06165760040283203, 0.05901926422119141, 0.05797628784179688, 0.057740833282470705, 0.05768499374389648, 0.057631935119628906, 0.05777635192871094, 
0.057844318389892575, 0.05765439987182617, 0.05771353530883789, 0.05787551879882812, 0.05794911956787109, 0.05778988647460938, 0.057635391235351566, 0.05779251098632812, 0.05804854583740234, 0.058268798828125, 0.05844678497314453, 0.05824911880493164, 0.058060798645019535, 0.05790924835205078, 0.057857185363769534, 0.05771731185913086, 0.057743648529052734, 0.05776176071166992, 0.05784579086303711, 0.05814700698852539, 0.05791084671020508, 0.05798937606811523, 0.05800755310058594, 0.057915233612060545, 0.05803206253051758, 0.058183456420898436, 0.0583623046875, 0.05857024002075195, 0.05865727996826172, 0.05826150512695313, 0.05819913482666016, 0.058188705444335936, 0.058191871643066405, 0.05809561538696289, 0.05825107192993164, 0.05810195159912109, 0.05813398361206055, 0.05816128158569336, 0.05863862228393555, 0.0581448974609375, 0.05803113555908203, 0.05799417495727539, 0.05817971038818359, 0.058480545043945314, 0.05858083343505859, 0.05881174468994141, 0.05870998382568359, 0.058953857421875, 0.058290912628173826, 0.05819801712036133, 0.05841100692749023, 0.05837209701538086, 0.05823897552490234, 0.058357982635498046, 0.05840070343017578, 0.05833916854858399, 0.06146656036376953, 0.05897689437866211, 0.05806694412231445, 0.05793344116210938, 0.05790156936645508, 0.057880447387695315, 0.05782284927368164, 0.057710975646972654, 0.05769625473022461, 0.05794377517700195, 0.05803036880493164, 0.05779235076904297, 0.057893024444580075, 0.05796422576904297, 0.05791379165649414, 0.05787839889526367, 0.0580423698425293, 0.05851340866088867, 0.05823833465576172, 0.05798086547851562, 0.05786495971679687, 0.057939903259277344, 0.057953472137451174, 0.05807596969604492, 0.057890655517578125, 0.05794972610473633, 0.05808780670166016, 0.057995521545410156, 0.05799321746826172, 0.05809721755981445, 0.05807558441162109, 0.05800860977172852, 0.05804540634155273, 0.058353023529052736, 0.05957900619506836, 0.058922721862792966, 0.05843356704711914, 0.058372318267822264, 0.05828406524658203, 0.058294273376464846, 0.05822067260742188, 0.058423168182373045, 0.05816134262084961, 0.0582031021118164, 0.05802275085449219, 0.05811366271972656, 0.05840934371948242, 0.058287361145019534, 0.058327392578125, 0.05847046279907227, 0.0585272331237793, 0.05861891174316406, 0.058858463287353516, 0.05869657516479492, 0.05857686233520508, 0.05832089614868164, 0.05841030502319336, 0.05831955337524414, 0.05824012756347656, 0.05850815963745117, 0.05858015823364258, 0.05853084945678711, 0.05831248092651367, 0.06185087966918945, 0.05904569625854492, 0.05799580764770508, 0.05780115127563477, 0.05783555221557617, 0.05787865447998047, 0.057888641357421874, 0.05790105438232422, 0.057665534973144535, 0.05787798309326172, 0.05777385711669922, 0.05783148956298828, 0.05865542221069336, 0.05798083114624023, 0.05780284881591797, 0.057951904296875, 0.05830595016479492, 0.058912704467773434, 0.05862707138061524, 0.05808246231079101, 0.05822671890258789, 0.0578870735168457, 0.05800598526000977, 0.058011905670166015, 0.05805849456787109, 0.05798092651367188, 0.058062110900878906, 0.05802671813964844, 0.0579317741394043, 0.057888736724853514, 0.05804035186767578, 0.05799935913085937, 0.0582369270324707, 0.058504768371582035, 0.05880201721191406, 0.058650337219238284, 0.05866175842285156, 0.0585011215209961, 0.05823823928833008, 0.05825404739379883, 0.058080734252929686, 0.05811049652099609, 0.058068416595458985, 0.05834169769287109, 0.05840908813476563, 0.05831897735595703, 0.05826355361938477, 0.058369407653808596, 0.05838502502441406, 
0.05859449768066406, 0.05870675277709961, 0.05885737609863281, 0.058687454223632814, 0.058579071044921875, 0.05866889572143555, 0.05865654373168945, 0.05839091110229492, 0.058498783111572264, 0.058288448333740236, 0.058234848022460935, 0.0592097282409668, 0.05863116836547851, 0.058595680236816404, 0.06166793441772461, 0.059229598999023435, 0.05820905685424805, 0.058025054931640625, 0.05783795166015625, 0.05795801544189453, 0.057971614837646485, 0.05809673690795898, 0.05787289428710937, 0.057966529846191404, 0.058038558959960934, 0.058005599975585936, 0.05794569778442383, 0.05803891372680664, 0.058046337127685546, 0.05823078536987305, 0.05875500869750976, 0.0588936653137207, 0.05862614440917969, 0.058468673706054686, 0.058207775115966795, 0.058098464965820315, 0.05799116897583008, 0.05799731063842774, 0.058072929382324216, 0.05810515213012695, 0.05821295928955078, 0.058517696380615235, 0.05804652786254883, 0.058173439025878904, 0.05814803314208984, 0.05814969635009765, 0.05838569641113281, 0.05866934585571289, 0.05889683151245117, 0.05884467315673828, 0.05898291015625, 0.05861580657958984, 0.05827913665771484, 0.05830073547363281, 0.05828384017944336, 0.05819881439208984, 0.05824908828735351, 0.058224639892578124, 0.058327041625976565, 0.05828403091430664, 0.058265598297119144, 0.05857279968261719, 0.058241024017333984, 0.05871820831298828, 0.05850931167602539, 0.05901724624633789, 0.05894553756713867, 0.05882262420654297, 0.05869772720336914, 0.05854412841796875, 0.058422721862792966, 0.05828054428100586, 0.05827171325683594, 0.05846137619018555, 0.058391361236572265, 0.058619903564453124, 0.05849087905883789]",tokens/s,17.18010480479645,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.693824,6223.233024,0.0,5827.985408,5712.718848,s,1,7.30068115234375,7.30068115234375,0.0,7.30068115234375,7.30068115234375,7.30068115234375,7.30068115234375,[7.30068115234375],,kWh,1.03632283999976e-05,1.1358689035069413e-06,3.512780588001141e-06,1.5011877891505682e-05,,MB,1169.432576,6453.919744,0.0,6046.089216,5989.425664,s,10,0.8358417587280274,0.08358417587280273,0.0030288228730164053,0.0849144630432129,0.08570270614624023,0.08598114433288574,0.08620389488220215,"[0.07983344268798828, 0.08402767944335937, 0.08529759979248047, 0.08336016082763673, 0.08564083099365234, 0.07612076568603515, 0.08546422576904297, 0.08453132629394532, 0.08530614471435546, 
0.08625958251953125]",tokens/s,3062.7806917612893,kWh,2.6076465513019306e-06,2.87574112138717e-07,1.7332479143750208e-06,4.628468577815668e-06,tokens/kWh,55309871.0072296,MB,1202.556928,6516.834304,0.0,6109.003776,6090.851328,s,10,20.951066650390622,2.0951066650390624,0.006738284949658318,2.0973076171875,2.1025949951171876,2.1030796020507814,2.1034672875976566,"[2.0802802734375, 2.09609912109375, 2.098637939453125, 2.091754150390625, 2.087480712890625, 2.098539794921875, 2.1024873046875, 2.09851611328125, 2.103564208984375, 2.09370703125]",tokens/s,30.07006805490039,kWh,6.0810682840364244e-05,6.707186377326684e-06,4.030136904662445e-05,0.00010781923826431536,tokens/kWh,584311.3067220671,,s,630,20.94752470016479,0.033250039206610786,0.0006555988821365542,0.03314591979980469,0.03360196647644043,0.0339087797164917,0.036691193504333496,"[0.03542473602294922, 0.034076736450195315, 0.03308556747436524, 0.03282115173339844, 0.03268815994262695, 0.03273315048217774, 0.03255094528198242, 0.032616416931152345, 0.03264678573608398, 0.03254009628295899, 0.032613311767578125, 0.032511070251464845, 0.03255305480957031, 0.03256403350830078, 0.03256899261474609, 0.03253692626953125, 0.032679233551025394, 0.032723648071289066, 0.03267583847045898, 0.032675201416015626, 0.03317382431030273, 0.032868576049804685, 0.03279449462890625, 0.03276319885253906, 0.03288140869140625, 0.032833087921142576, 0.0327599983215332, 0.03270019149780273, 0.03288719940185547, 0.032895198822021486, 0.032950271606445314, 0.033200096130371094, 0.032971870422363284, 0.03293894577026367, 0.03312966537475586, 0.03307193756103516, 0.032894977569580076, 0.03300966262817383, 0.03295804977416992, 0.03290768051147461, 0.03304227066040039, 0.03290867233276367, 0.03294464111328125, 0.033003807067871094, 0.033157119750976564, 0.03312153625488281, 0.03321446228027344, 0.03318860626220703, 0.03344332885742188, 0.03486310577392578, 0.033057376861572264, 0.033228702545166015, 0.033108097076416015, 0.03300518417358399, 0.03314259338378906, 0.033083839416503905, 0.033157119750976564, 0.03321241760253906, 0.03317145538330078, 0.03309568023681641, 0.033255550384521486, 0.033352672576904295, 0.033366207122802735, 0.0367059211730957, 0.034375679016113284, 0.033544063568115234, 0.03307551956176758, 0.032939582824707034, 0.03287919998168945, 0.03272662353515625, 0.03269472122192383, 0.03282944107055664, 0.03292979049682617, 0.0329167366027832, 0.03286415863037109, 0.03293679809570312, 0.03287859344482422, 0.032888801574707034, 0.033001216888427734, 0.03294806289672852, 0.03297529602050781, 0.03297280120849609, 0.03290726470947265, 0.033277950286865234, 0.03293289566040039, 0.03303094482421875, 0.0329606704711914, 0.032939136505126955, 0.03303071975708008, 0.03315542221069336, 0.03305628967285156, 0.03309936141967774, 0.03318009567260742, 0.03338899230957031, 0.033570816040039066, 0.03340697479248047, 0.03336806488037109, 0.03321571350097656, 0.033151775360107424, 0.03339174270629883, 0.033186687469482425, 0.03322262573242187, 0.033219936370849606, 0.03317216110229492, 0.03313808059692383, 0.03327376174926758, 0.033208030700683594, 0.033129440307617185, 0.033189407348632814, 0.03331097412109375, 0.03338652801513672, 0.03335187149047852, 0.03323494338989258, 0.03330825424194336, 0.03322915267944336, 0.03347257614135742, 0.03327356719970703, 0.03320979309082031, 0.03334979248046875, 0.03357766342163086, 0.033288192749023435, 0.033529857635498046, 0.033333217620849606, 0.03405769729614258, 0.03425667190551758, 0.033648799896240235, 0.03823616027832031, 
0.03472793579101562, 0.033503231048583985, 0.03302220916748047, 0.03283766555786133, 0.03282710266113281, 0.03275980758666992, 0.03289606475830078, 0.0328221435546875, 0.0327347183227539, 0.03278905487060547, 0.03275900650024414, 0.03285686492919922, 0.03294003295898437, 0.03289478302001953, 0.032831680297851565, 0.032868350982666016, 0.0328392333984375, 0.033067264556884766, 0.03305683135986328, 0.03296063995361328, 0.03330799865722656, 0.03297859191894531, 0.033037311553955076, 0.033323009490966796, 0.033040382385253905, 0.03313663864135742, 0.03294617462158203, 0.033097984313964844, 0.0332224006652832, 0.03357263946533203, 0.03332937622070312, 0.03339878463745117, 0.0333496322631836, 0.033261566162109374, 0.03316716766357422, 0.0333191032409668, 0.033538047790527346, 0.03320774459838867, 0.03320479965209961, 0.033159168243408206, 0.033310527801513674, 0.033285888671875, 0.03339923095703125, 0.03326284790039063, 0.033078014373779295, 0.03311174392700195, 0.03323436737060547, 0.03330918502807617, 0.033278335571289064, 0.03324230575561524, 0.03339347076416015, 0.033453342437744144, 0.033551071166992186, 0.0334205436706543, 0.033342208862304684, 0.03478428649902344, 0.03326051330566406, 0.03362105560302735, 0.03353900909423828, 0.033562625885009766, 0.0336629753112793, 0.03336771011352539, 0.03665513610839844, 0.03467667388916015, 0.03360156631469727, 0.03312844848632813, 0.03298918533325195, 0.03280812835693359, 0.03278726577758789, 0.03272284698486328, 0.03279459381103516, 0.03274969482421875, 0.03301990509033203, 0.03295641708374023, 0.0330742073059082, 0.034243457794189455, 0.03277628707885742, 0.03272457504272461, 0.03304415893554687, 0.03286627197265625, 0.033143550872802734, 0.03294793701171875, 0.03303798294067383, 0.03291196823120117, 0.032995361328125, 0.03285715103149414, 0.032959423065185546, 0.032918815612792966, 0.03290800094604492, 0.03299737548828125, 0.03321241760253906, 0.03362329483032227, 0.033641216278076175, 0.03342156982421875, 0.03347840118408203, 0.033409278869628904, 0.03323468780517578, 0.03325523376464844, 0.03329267120361328, 0.033189697265625, 0.033122303009033204, 0.033027198791503905, 0.03295935821533203, 0.033036384582519535, 0.033071006774902344, 0.03318937683105469, 0.03320060729980469, 0.03310518264770508, 0.03314896011352539, 0.03314352035522461, 0.033175552368164066, 0.03302918243408203, 0.0331396484375, 0.03311577606201172, 0.033154624938964844, 0.03325215911865234, 0.033173057556152345, 0.033062496185302735, 0.033046592712402345, 0.03309344100952148, 0.03323545455932617, 0.0332295036315918, 0.033371936798095705, 0.033140735626220705, 0.03316121673583984, 0.03710134506225586, 0.03480403137207031, 0.03370102310180664, 0.03291836929321289, 0.0326446418762207, 0.03279062271118164, 0.03388412857055664, 0.032764129638671875, 0.03265907287597656, 0.03261702346801758, 0.03284377670288086, 0.03255507278442383, 0.03268739318847656, 0.03289564895629883, 0.03277545547485351, 0.032613086700439456, 0.03271475219726563, 0.03277164840698242, 0.032747615814208986, 0.03268233489990234, 0.03279209518432617, 0.03283155059814453, 0.03283599853515625, 0.033006816864013674, 0.03300150299072266, 0.0330860481262207, 0.03278351974487305, 0.03293491363525391, 0.03292979049682617, 0.03301580810546875, 0.033037887573242185, 0.03298144149780274, 0.03317327880859375, 0.03300921630859375, 0.03314067077636719, 0.03314556884765625, 0.033187744140625, 0.03305187225341797, 0.03302864074707031, 0.03302844619750977, 0.03309568023681641, 0.03297795104980469, 0.03307414245605469, 
0.03323904037475586, 0.03306092834472656, 0.03302739334106446, 0.03308009719848633, 0.03308697509765625, 0.033050975799560546, 0.03311001586914063, 0.03311215972900391, 0.033114017486572264, 0.03322880172729492, 0.033230846405029296, 0.03314627075195312, 0.03330879974365234, 0.033408958435058596, 0.03325993728637695, 0.03331676864624023, 0.03373897552490234, 0.03364044952392578, 0.033314815521240236, 0.03336959838867187, 0.034716575622558594, 0.034632736206054685, 0.03375510406494141, 0.03307465744018555, 0.03297683334350586, 0.03298992156982422, 0.03289199829101563, 0.032946720123291015, 0.033046783447265624, 0.03294822311401367, 0.032835582733154296, 0.03292598342895508, 0.033029823303222655, 0.03295849609375, 0.03294800186157226, 0.03292559814453125, 0.03290758514404297, 0.032884735107421875, 0.03296051025390625, 0.03303014373779297, 0.03313459014892578, 0.032904670715332034, 0.03377411270141602, 0.03342051315307617, 0.03333814239501953, 0.033078624725341794, 0.033034591674804686, 0.033022335052490234, 0.033183521270751956, 0.03317907333374023, 0.03341567993164062, 0.03330860900878906, 0.033556766510009765, 0.03337372970581055, 0.03336172866821289, 0.033563297271728514, 0.03322880172729492, 0.03341516876220703, 0.033688961029052736, 0.033202816009521484, 0.0332492790222168, 0.03318726348876953, 0.03329391860961914, 0.03335638427734375, 0.03330086517333984, 0.03315507125854492, 0.03328956985473633, 0.03341331100463867, 0.03330915069580078, 0.03350527954101563, 0.03330047988891602, 0.03320431900024414, 0.033398143768310545, 0.033331199645996096, 0.03346691131591797, 0.033564350128173825, 0.033331520080566404, 0.03339263916015625, 0.03355401611328125, 0.03388457489013672, 0.03392054367065429, 0.033460609436035155, 0.03369993591308594, 0.039167934417724606, 0.03525568008422852, 0.03392374420166016, 0.033279998779296875, 0.03319193649291992, 0.03288848114013672, 0.0328175048828125, 0.032798431396484376, 0.03278467178344727, 0.03294822311401367, 0.03291654586791992, 0.0327894401550293, 0.03286592102050781, 0.0328353271484375, 0.03284352111816406, 0.03315776062011719, 0.03307545471191406, 0.03299020767211914, 0.03311215972900391, 0.0330720329284668, 0.033050273895263674, 0.03319843292236328, 0.03302809524536133, 0.03306278228759765, 0.03306304168701172, 0.033062240600585935, 0.03301043319702148, 0.03301161575317383, 0.03318374252319336, 0.03361177444458008, 0.03389440155029297, 0.03335168075561523, 0.033667198181152345, 0.033529727935791016, 0.033808383941650394, 0.0335175666809082, 0.03325276947021484, 0.03324364852905273, 0.03324528121948242, 0.03323494338989258, 0.033255424499511715, 0.03309497451782226, 0.03316585540771484, 0.033163425445556644, 0.033295455932617186, 0.03316419219970703, 0.033331199645996096, 0.03337612915039063, 0.033511550903320315, 0.033209823608398435, 0.03324924850463867, 0.03343999862670898, 0.033265983581542966, 0.033331199645996096, 0.03338652801513672, 0.03321171188354492, 0.03327862548828125, 0.03341107177734375, 0.03335168075561523, 0.03374694442749023, 0.03379404830932617, 0.033421184539794924, 0.033875137329101565, 0.03704867172241211, 0.034586334228515626, 0.033444095611572265, 0.03322675323486328, 0.033041824340820314, 0.03285420989990234, 0.03288515090942383, 0.03285414505004883, 0.03293171310424805, 0.03288195037841797, 0.03285475158691406, 0.032877632141113285, 0.03303519821166992, 0.0328724479675293, 0.032849918365478514, 0.03292694473266602, 0.032922401428222656, 0.032882816314697264, 0.03301532745361328, 0.03315289688110352, 0.03321084976196289, 
0.03296236801147461, 0.032996574401855466, 0.03295945739746094, 0.03315209579467773, 0.03308022308349609, 0.03316707229614258, 0.03312847900390625, 0.03314092636108398, 0.033276096343994144, 0.03480115127563477, 0.03352819061279297, 0.03344793701171875, 0.033377601623535154, 0.033384864807128906, 0.033181472778320314, 0.03318425750732422, 0.03316227340698242, 0.033377246856689455, 0.03323849487304688, 0.03321500778198242, 0.033213760375976564, 0.03336588668823242, 0.033379135131835935, 0.033271808624267575, 0.033253185272216795, 0.03328838348388672, 0.03314688110351562, 0.033165313720703124, 0.03328947067260742, 0.03345280075073242, 0.03316326522827148, 0.03331603240966797, 0.03344057464599609, 0.033271808624267575, 0.033320159912109376, 0.03358390426635742, 0.03352979278564453, 0.033491008758544924, 0.03336601638793945, 0.03364422225952148, 0.033605567932128905, 0.033462238311767575, 0.03948457717895508, 0.03513177490234375, 0.033874401092529295, 0.03327590560913086, 0.03342480087280274, 0.03303484725952149, 0.03295974349975586, 0.03298175811767578, 0.03299123382568359, 0.03282534408569336, 0.03284707260131836, 0.03286044692993164, 0.03282505416870117, 0.03278287887573242, 0.03297491073608398, 0.032993408203125, 0.03297443389892578, 0.032860641479492185, 0.03301580810546875, 0.03303424072265625, 0.032891136169433594, 0.03304544067382813, 0.0329552001953125, 0.033124256134033206, 0.03317769622802735, 0.03296432113647461, 0.032986942291259765, 0.033032318115234376, 0.033337600708007814, 0.0334431037902832, 0.03361171340942383, 0.03341196823120117, 0.03336601638793945, 0.03360752105712891, 0.033552032470703125, 0.033231361389160156, 0.03323875045776367, 0.03401052856445313, 0.03316169738769531, 0.03324143981933594, 0.033353855133056644, 0.03314828872680664, 0.03314950561523437, 0.03331891250610351, 0.03382684707641601, 0.0337632942199707, 0.033215648651123045, 0.033452896118164065, 0.03328432083129883, 0.033158302307128906, 0.03310211181640625, 0.033446239471435546, 0.0333496322631836, 0.033494430541992186, 0.03349155044555664, 0.03321855926513672, 0.03338572692871094, 0.03343030548095703, 0.033545246124267576, 0.03351238250732422, 0.033923072814941405, 0.033393665313720705, 0.03364742279052734, 0.03776847839355469, 0.03490886306762695, 0.03363388824462891, 0.033143199920654294, 0.03300742340087891, 0.03288899230957031, 0.032900768280029295, 0.03290560150146484, 0.03305043029785156, 0.03300166320800781, 0.03295641708374023, 0.032876190185546876, 0.03294831848144531, 0.03287260818481445, 0.032890270233154294, 0.032791072845458985, 0.03287030410766602, 0.03281878280639648, 0.03290793609619141, 0.03291449737548828, 0.03292006301879883, 0.032860607147216794, 0.03296649551391601, 0.03278160095214844, 0.03301055908203125, 0.0329890251159668, 0.03290332794189453, 0.032916961669921876, 0.03305936050415039, 0.03329391860961914, 0.0334381103515625, 0.03321446228027344, 0.03385935974121094, 0.033315040588378905, 0.03323904037475586, 0.03316454315185547, 0.03316368103027344, 0.03313423919677734, 0.03307180786132812, 0.03311001586914063, 0.03295155334472656, 0.032989566802978515, 0.03300508880615234, 0.03304889678955078, 0.03311465454101563, 0.03309308624267578, 0.03343395233154297, 0.033029953002929685, 0.03305654525756836, 0.032952510833740234, 0.03293788909912109, 0.03295187377929688, 0.03290617752075195, 0.032942081451416014, 0.03337625503540039, 0.03297280120849609, 0.03303974533081055, 0.03307379150390625, 0.03317510223388672, 0.0332845458984375, 0.035880767822265625, 0.03451091384887695, 
0.03320230484008789]",tokens/s,30.07515250692335,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1153, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 918, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 918, in [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 691, in __init__ self.self_attn = QWEN2MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 294, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 634, in __init__ self.attention = GPT_NEOX_ATTENTION_CLASSES[config._attn_implementation](config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 102, in __init__ self.query_key_value = nn.Linear(config.hidden_size, 3 * config.hidden_size, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.337472,569.311232,0.0,174.063616,172.57984,s,1,7.0809599609375,7.0809599609375,0.0,7.0809599609375,7.0809599609375,7.0809599609375,7.0809599609375,[7.0809599609375],,kWh,4.605726533350207e-06,4.989117345350963e-07,1.0019452460069411e-06,6.106583513892244e-06,,MB,1164.67712,642.711552,0.0,234.881024,215.589888,s,25,0.18156364774703979,0.0072625459098815915,0.0001934262272951333,0.007157440185546875,0.007528467178344727,0.007562796592712402,0.0076320192527771,"[0.007651296138763428, 0.007126143932342529, 0.007109407901763916, 0.007078271865844726, 0.0070833277702331545, 0.007101984024047852, 0.0070431680679321285, 0.007100096225738525, 0.007518655776977539, 0.007526048183441162, 0.007385503768920898, 0.007530079841613769, 0.007375487804412842, 0.007451680183410645, 0.00757097578048706, 0.007209311962127685, 0.0073846721649169925, 0.0074481601715087895, 0.007241727828979493, 0.007157440185546875, 0.0070797119140625, 0.007059904098510742, 0.007146719932556152, 0.007109344005584717, 0.007074528217315674]",tokens/s,35249.34687871375,kWh,2.154723453840459e-07,2.3762854741778743e-08,1.1348072429837874e-07,3.5271592442420335e-07,tokens/kWh,725796546.946133,MB,1198.40768,644.808704,0.0,236.978176,215.592448,s,25,9.855248168945309,0.3942099267578125,0.0258072195663266,0.3893913879394531,0.4026221801757813,0.4114611938476562,0.4888181787109373,"[0.38299411010742185, 0.5126096801757812, 0.38021795654296875, 0.38084127807617185, 0.38131674194335935, 0.3791335144042969, 0.38811654663085937, 0.401467041015625, 0.40339227294921876, 0.41347842407226565, 0.40133578491210936, 0.3942486572265625, 0.3913739013671875, 0.3932178955078125, 
0.39696780395507814, 0.3935213012695313, 0.3916291198730469, 0.3952633972167969, 0.3893913879394531, 0.38041622924804686, 0.37945755004882814, 0.3835711669921875, 0.37981634521484375, 0.379656494140625, 0.38181356811523437]",tokens/s,159.81332717353104,kWh,1.1285612854936907e-05,1.2445976165048161e-06,4.527772988701155e-06,1.7057983460142875e-05,tokens/kWh,3693285.325736406,,s,1575,9.8430794839859,0.006249574275546604,0.0032463893249899843,0.006076767921447754,0.006460102462768555,0.006543299198150635,0.007097601194381713,"[0.006322175979614258, 0.00658841609954834, 0.007006207942962647, 0.006119423866271972, 0.006081759929656982, 0.006062304019927979, 0.00606060791015625, 0.006148255825042724, 0.0060761280059814455, 0.006119359970092773, 0.006111231803894043, 0.006586527824401856, 0.0061411519050598145, 0.006069056034088135, 0.006111231803894043, 0.006054111957550049, 0.006077439785003662, 0.006072959899902344, 0.006053088188171386, 0.0060215678215026856, 0.00608512020111084, 0.00611030387878418, 0.006019455909729004, 0.006023903846740722, 0.006032256126403809, 0.006007167816162109, 0.006017600059509277, 0.0060661759376525876, 0.006036767959594727, 0.00603004789352417, 0.0060207037925720215, 0.006043807983398437, 0.006027520179748535, 0.006008800029754639, 0.005991648197174073, 0.006021599769592285, 0.005986464023590088, 0.006003039836883545, 0.005990240097045898, 0.005997600078582764, 0.00598956823348999, 0.00597327995300293, 0.0059920639991760255, 0.005986176013946533, 0.006007904052734375, 0.006014880180358887, 0.006014527797698975, 0.006025248050689698, 0.0059697279930114745, 0.006002912044525147, 0.005995903968811035, 0.006015999794006348, 0.006016064167022705, 0.006040512084960937, 0.005994495868682862, 0.006023231983184814, 0.006006720066070556, 0.006010079860687256, 0.006001440048217774, 0.005992447853088379, 0.00601087999343872, 0.006035520076751709, 0.0060210561752319335, 0.005936031818389893, 0.006013984203338623, 0.006022016048431397, 0.006033408164978027, 0.006006847858428955, 0.006000768184661865, 0.006018879890441894, 0.006021503925323486, 0.006026815891265869, 0.006024735927581787, 0.006010848045349121, 0.006025055885314942, 0.006031392097473144, 0.006015679836273193, 0.13460809326171874, 0.006400352001190186, 0.006250815868377686, 0.006111487865447998, 0.006227359771728515, 0.006592991828918457, 0.006094175815582276, 0.0060934720039367675, 0.0060293121337890625, 0.006074368000030517, 0.006043039798736573, 0.0060680961608886716, 0.006044447898864746, 0.006106912136077881, 0.006094687938690185, 0.006062623977661133, 0.006073567867279053, 0.006206111907958984, 0.006085536003112793, 0.006056896209716797, 0.006002751827239991, 0.0066468482017517086, 0.0060835199356079105, 0.006047743797302246, 0.006070591926574707, 0.006024479866027832, 0.006057983875274659, 0.006029439926147461, 0.006025504112243652, 0.0060308480262756346, 0.0060380158424377444, 0.006164480209350586, 0.006458847999572754, 0.006049439907073974, 0.006046656131744385, 0.006047647953033447, 0.006104479789733887, 0.006039999961853028, 0.006076416015625, 0.006025119781494141, 0.006102399826049805, 0.006040736198425293, 0.00606169605255127, 0.006070400238037109, 0.006063712120056152, 0.0060700798034667965, 0.006072319984436035, 0.006050687789916992, 0.006040480136871338, 0.0064778242111206055, 0.0060044159889221195, 0.006025536060333252, 0.006008992195129395, 0.006017216205596924, 0.005998047828674316, 0.006008224010467529, 0.006004608154296875, 0.006003967761993408, 0.006053055763244629, 0.006037983894348144, 
0.005990399837493897, 0.006000639915466309, 0.005988351821899414, 0.0059978880882263184, 0.006010623931884765, 0.006190271854400635, 0.006034463882446289, 0.0060013761520385745, 0.006008831977844238, 0.0060026879310607914, 0.006000448226928711, 0.005994847774505615, 0.006032735824584961, 0.006033088207244873, 0.006031487941741943, 0.006056863784790039, 0.006039199829101563, 0.0060499200820922855, 0.006039391994476318, 0.006051360130310058, 0.006115647792816162, 0.00608460807800293, 0.006047423839569092, 0.006105728149414062, 0.006060031890869141, 0.006072319984436035, 0.006000639915466309, 0.006039552211761475, 0.005994495868682862, 0.0060273919105529785, 0.005991775989532471, 0.0060217599868774415, 0.005942495822906494, 0.006015679836273193, 0.0059714560508728025, 0.006035967826843262, 0.005968224048614502, 0.006024864196777344, 0.005974016189575195, 0.006024191856384278, 0.005983232021331787, 0.006025023937225342, 0.005984799861907959, 0.006027967929840088, 0.005964928150177002, 0.0060191359519958494, 0.005975840091705323, 0.006018335819244385, 0.0059788479804992675, 0.0060356159210205075, 0.006006303787231445, 0.006035488128662109, 0.005922016143798828, 0.0060284161567687984, 0.005995808124542236, 0.006059967994689942, 0.006006591796875, 0.006032351970672607, 0.005997920036315918, 0.006032832145690918, 0.00597654390335083, 0.006039968013763428, 0.006023168087005615, 0.006066304206848144, 0.005996575832366943, 0.006047584056854248, 0.00601087999343872, 0.006059072017669678, 0.006016128063201905, 0.006059840202331543, 0.0059978880882263184, 0.006027679920196533, 0.005976480007171631, 0.006112192153930664, 0.005982463836669922, 0.006077119827270508, 0.0060028800964355465, 0.006074463844299317, 0.006029024124145508, 0.0060457921028137206, 0.0060024957656860355, 0.006057568073272705, 0.0059725441932678225, 0.006039487838745117, 0.006002079963684082, 0.006025536060333252, 0.006041855812072754, 0.0060330238342285155, 0.006031775951385498, 0.006018496036529541, 0.006019775867462158, 0.00602950382232666, 0.006023039817810058, 0.006023231983184814, 0.00602239990234375, 0.006009344100952148, 0.006025216102600098, 0.006011903762817383, 0.006032383918762207, 0.0060247998237609865, 0.006019680023193359, 0.006044703960418701, 0.006041920185089112, 0.006080128192901611, 0.006142144203186035, 0.006082911968231201, 0.006215519905090332, 0.006085375785827637, 0.006138912200927734, 0.0060720000267028805, 0.006120128154754639, 0.0060381760597229, 0.006090400218963623, 0.006025152206420898, 0.006047840118408203, 0.005937344074249268, 0.006043488025665283, 0.006442624092102051, 0.006046207904815673, 0.005977439880371094, 0.0063515520095825196, 0.006012928009033203, 0.006061344146728515, 0.00602185583114624, 0.006025407791137695, 0.006037407875061035, 0.006045599937438965, 0.006060287952423095, 0.006029056072235107, 0.006027455806732178, 0.005993855953216553, 0.006053343772888183, 0.005995584011077881, 0.00602239990234375, 0.005988831996917725, 0.0060356478691101076, 0.006141952037811279, 0.006049791812896729, 0.0060395197868347164, 0.006074399948120117, 0.00600867223739624, 0.006012671947479248, 0.00600435209274292, 0.006038303852081299, 0.005993696212768554, 0.0060191359519958494, 0.006013984203338623, 0.00601087999343872, 0.006078144073486328, 0.006019392013549805, 0.005990079879760742, 0.006015168190002441, 0.006061888217926025, 0.006080512046813965, 0.006014976024627685, 0.006063648223876953, 0.006015456199645996, 0.006060031890869141, 0.006034815788269043, 0.00604966402053833, 0.006008992195129395, 
0.006079071998596191, 0.006018335819244385, 0.006060768127441407, 0.00603545618057251, 0.006051839828491211, 0.0060067839622497555, 0.00603545618057251, 0.006014431953430176, 0.006359583854675293, 0.006002431869506836, 0.006048255920410156, 0.005998335838317871, 0.006025216102600098, 0.006014976024627685, 0.006016672134399414, 0.005994175910949707, 0.006001247882843018, 0.005904575824737549, 0.0060293121337890625, 0.006008863925933838, 0.006044832229614258, 0.006040095806121826, 0.006037439823150635, 0.005996863842010498, 0.006060192108154297, 0.005989823818206787, 0.006066944122314453, 0.005993919849395752, 0.0060433921813964845, 0.005974656105041504, 0.0060293121337890625, 0.0059999680519104, 0.006023839950561523, 0.005990399837493897, 0.0060293121337890625, 0.0060000958442687985, 0.006054431915283203, 0.005988351821899414, 0.006021024227142334, 0.005972064018249511, 0.0060284481048583985, 0.005981023788452148, 0.006015007972717285, 0.0059550080299377445, 0.006013023853302002, 0.005982399940490723, 0.006025472164154053, 0.005988416194915772, 0.0060349440574645995, 0.005988800048828125, 0.006039552211761475, 0.005995872020721435, 0.006025184154510498, 0.005984127998352051, 0.006009376049041748, 0.005976352214813232, 0.006002592086791992, 0.006006879806518555, 0.006030367851257324, 0.005970143795013428, 0.006011680126190185, 0.0059818878173828124, 0.006203135967254639, 0.006019872188568115, 0.006004479885101319, 0.0060067839622497555, 0.005994495868682862, 0.0059944639205932616, 0.0059818878173828124, 0.0060070080757141115, 0.005986591815948486, 0.006050911903381348, 0.006017183780670166, 0.006035935878753662, 0.0060087041854858395, 0.006002943992614746, 0.0059901118278503414, 0.006006080150604248, 0.0060037441253662106, 0.006030623912811279, 0.005932576179504394, 0.005972576141357422, 0.006006720066070556, 0.0059903359413146975, 0.006014976024627685, 0.005973152160644531, 0.0060096001625061036, 0.00662883186340332, 0.0065953278541564945, 0.006104063987731933, 0.005990911960601806, 0.006007487773895264, 0.00599616003036499, 0.006006624221801758, 0.006029056072235107, 0.006010848045349121, 0.0059686717987060545, 0.0060020160675048825, 0.006012671947479248, 0.006039840221405029, 0.005984127998352051, 0.006046080112457276, 0.005994592189788818, 0.006205440044403076, 0.006103040218353272, 0.006238560199737549, 0.006147712230682373, 0.0060989117622375485, 0.006002336025238037, 0.006039904117584228, 0.006029727935791015, 0.0060423359870910645, 0.006619840145111084, 0.006107391834259034, 0.008075519561767578, 0.008646080017089844, 0.00631331205368042, 0.006026400089263916, 0.0064299840927124026, 0.00609500789642334, 0.006093183994293213, 0.006107103824615478, 0.006045663833618164, 0.006072639942169189, 0.0061847038269042965, 0.006046783924102783, 0.006035935878753662, 0.006050240039825439, 0.006008863925933838, 0.0060356159210205075, 0.006020959854125977, 0.006031360149383545, 0.006008831977844238, 0.006029600143432617, 0.005993343830108642, 0.006011040210723877, 0.0059987521171569825, 0.006154784202575684, 0.006100992202758789, 0.006059967994689942, 0.006008895874023437, 0.006053855895996094, 0.005974368095397949, 0.005967904090881348, 0.005986271858215332, 0.006020448207855225, 0.006032032012939453, 0.006039552211761475, 0.006078464031219482, 0.006050848007202149, 0.0059913921356201175, 0.006037248134613037, 0.006078559875488282, 0.006023071765899658, 0.005988736152648926, 0.006027135848999023, 0.006000639915466309, 0.006131711959838867, 0.0061430401802062985, 0.0061511039733886715, 
0.0060999999046325686, 0.006128608226776123, 0.006176767826080322, 0.0062111678123474125, 0.006267136096954346, 0.006277279853820801, 0.0062873601913452145, 0.0062353601455688475, 0.006318367958068847, 0.006363647937774658, 0.006342656135559082, 0.006432703971862793, 0.006369408130645752, 0.00640121603012085, 0.006419199943542481, 0.006496255874633789, 0.006500351905822754, 0.006360576152801513, 0.006324160099029541, 0.006280992031097412, 0.006351808071136475, 0.006434271812438965, 0.007294496059417724, 0.006849376201629639, 0.006461120128631591, 0.006289728164672851, 0.006336480140686035, 0.006237504005432129, 0.006426464080810547, 0.006410816192626953, 0.006308032035827637, 0.006418560028076172, 0.00640556812286377, 0.006435743808746338, 0.006366879940032959, 0.006272128105163574, 0.0072549118995666505, 0.006603040218353271, 0.008226495742797851, 0.0064553279876708985, 0.006469567775726318, 0.007096735954284668, 0.006987391948699951, 0.006436863899230957, 0.006545472145080567, 0.006453216075897217, 0.006474976062774658, 0.006400800228118896, 0.006377471923828125, 0.006461440086364746, 0.00642252779006958, 0.007579616069793701, 0.007379295825958252, 0.006391488075256348, 0.006297152042388916, 0.006455904006958008, 0.006498144149780273, 0.006442719936370849, 0.006467264175415039, 0.0064637441635131835, 0.0065623679161071775, 0.006588191986083985, 0.00653107213973999, 0.006551551818847656, 0.0064163517951965334, 0.006336703777313233, 0.006471519947052002, 0.006496223926544189, 0.006520864009857178, 0.006589568138122558, 0.006447999954223633, 0.006536799907684326, 0.006601471900939942, 0.006477344036102295, 0.006523231983184815, 0.006542367935180664, 0.0064167361259460445, 0.006314720153808594, 0.006270143985748291, 0.006226431846618652, 0.006168575763702393, 0.006268256187438965, 0.0062707839012146, 0.00616048002243042, 0.006199456214904785, 0.006255199909210205, 0.0062568001747131344, 0.0061699519157409665, 0.006173183917999267, 0.006117216110229492, 0.006146399974822998, 0.006111040115356446, 0.006223584175109863, 0.006322336196899414, 0.006295775890350342, 0.006180223941802979, 0.006134304046630859, 0.0061584959030151366, 0.006131135940551758, 0.0061485118865966795, 0.006176032066345215, 0.006343423843383789, 0.006506624221801757, 0.006375264167785644, 0.006520832061767578, 0.00638976001739502, 0.0063975038528442385, 0.006340320110321045, 0.0063656320571899416, 0.006494080066680908, 0.006326272010803223, 0.006305471897125244, 0.006267199993133545, 0.007161664009094238, 0.006427840232849121, 0.006776095867156983, 0.006501503944396973, 0.0063719358444213866, 0.006277120113372803, 0.006276288032531738, 0.006294400215148926, 0.006477759838104248, 0.006414463996887207, 0.006272064208984375, 0.006374176025390625, 0.007100063800811768, 0.007599775791168213, 0.00793673610687256, 0.01093222427368164, 0.009020895957946777, 0.006556191921234131, 0.006602335929870606, 0.006707615852355957, 0.006596960067749023, 0.006588064193725586, 0.006441088199615479, 0.006330463886260986, 0.006405983924865723, 0.00644704008102417, 0.006452415943145752, 0.0064039998054504395, 0.006321023941040039, 0.00630790376663208, 0.006237823963165284, 0.006210048198699952, 0.0062503361701965335, 0.006232063770294189, 0.006152095794677734, 0.006262176036834717, 0.0061385598182678225, 0.00611737585067749, 0.006264832019805908, 0.0064488000869750975, 0.006392159938812256, 0.006303743839263916, 0.006300064086914062, 0.006239840030670166, 0.006170623779296875, 0.006254208087921142, 0.0062102718353271484, 0.006489759922027588, 
0.006326272010803223, 0.00642790412902832, 0.006408959865570068, 0.006462656021118164, 0.006482751846313477, 0.006286911964416504, 0.006293504238128662, 0.00637500810623169, 0.006529695987701416, 0.006488255977630615, 0.006388927936553955, 0.006350944042205811, 0.006406367778778076, 0.006240992069244384, 0.006216671943664551, 0.0061931519508361815, 0.006274687767028809, 0.006259071826934815, 0.006401023864746094, 0.006532447814941406, 0.006480576038360596, 0.0065913920402526855, 0.006496384143829346, 0.006456736087799072, 0.0063554878234863285, 0.006415775775909424, 0.006471776008605957, 0.006382080078125, 0.006378975868225098, 0.006418015956878662, 0.006501311779022217, 0.00659449577331543, 0.006463232040405273, 0.0063851518630981445, 0.006421311855316162, 0.006507967948913574, 0.006504320144653321, 0.0065504322052001955, 0.006468992233276367, 0.006517151832580567, 0.006496384143829346, 0.006555808067321777, 0.0064572482109069825, 0.006335455894470215, 0.006280288219451904, 0.00623529577255249, 0.006418528079986573, 0.0064824318885803225, 0.006710527896881103, 0.0065157442092895505, 0.006525728225708008, 0.00649721622467041, 0.006434879779815674, 0.006305727958679199, 0.006327648162841797, 0.0065194878578186035, 0.006331552028656006, 0.00624505615234375, 0.0061147198677062985, 0.006123648166656494, 0.006083072185516358, 0.00605017614364624, 0.006072288036346436, 0.006197311878204346, 0.006218688011169433, 0.0062657279968261715, 0.006328192234039307, 0.006159808158874512, 0.006150784015655518, 0.006104544162750244, 0.00616534423828125, 0.006151936054229736, 0.006253983974456787, 0.006404575824737549, 0.0065577921867370605, 0.00641923189163208, 0.0064143681526184085, 0.00633190393447876, 0.006176064014434815, 0.00610371208190918, 0.006107327938079834, 0.006082496166229248, 0.006076767921447754, 0.0060495038032531735, 0.006057983875274659, 0.006039552211761475, 0.006154240131378174, 0.006176608085632324, 0.0064208641052246095, 0.0064691839218139644, 0.00645904016494751, 0.006558015823364258, 0.006529280185699463, 0.006420063972473145, 0.006259391784667969, 0.0061129918098449705, 0.00609881591796875, 0.006109312057495118, 0.006121664047241211, 0.0060700798034667965, 0.006078464031219482, 0.006076416015625, 0.006154047966003418, 0.006082272052764893, 0.006113088130950928, 0.0060975680351257324, 0.006436960220336914, 0.006082687854766846, 0.006231135845184326, 0.006275040149688721, 0.00662172794342041, 0.00616809606552124, 0.0061200962066650395, 0.006137792110443115, 0.006067359924316406, 0.006123904228210449, 0.006271520137786865, 0.006175839900970459, 0.0062137598991394045, 0.006326784133911132, 0.006500127792358398, 0.006298111915588379, 0.0062211198806762695, 0.0061057920455932614, 0.006073760032653809, 0.006043327808380127, 0.006093728065490723, 0.006182911872863769, 0.006434815883636475, 0.00660646390914917, 0.006616864204406738, 0.006517343997955323, 0.006453120231628418, 0.006424704074859619, 0.0064767999649047855, 0.006257184028625488, 0.006203872203826904, 0.006132351875305176, 0.0061354880332946775, 0.006139711856842041, 0.006107647895812988, 0.006177792072296143, 0.006200352191925049, 0.006168928146362305, 0.00610265588760376, 0.006260735988616943, 0.006349055767059326, 0.006247392177581787, 0.006154208183288574, 0.006170656204223633, 0.006183135986328125, 0.0061198720932006834, 0.006094816207885742, 0.0061294717788696286, 0.006054240226745606, 0.005984255790710449, 0.006059296131134033, 0.006068960189819336, 0.0062828478813171384, 0.006393311977386474, 0.006202303886413575, 
0.006137951850891114, 0.006093791961669922, 0.006112192153930664, 0.006067840099334717, 0.006068672180175781, 0.006039487838745117, 0.006049791812896729, 0.006109183788299561, 0.006158336162567139, 0.006112448215484619, 0.006148928165435791, 0.006060031890869141, 0.00615334415435791, 0.006036352157592773, 0.006076416015625, 0.0060067839622497555, 0.006168575763702393, 0.006336351871490478, 0.006443424224853516, 0.006674176216125489, 0.006612448215484619, 0.006526815891265869, 0.006574336051940918, 0.006379615783691406, 0.006813632011413574, 0.006381984233856201, 0.006333759784698486, 0.006334720134735107, 0.006195648193359375, 0.006176576137542724, 0.0061298561096191405, 0.006167935848236084, 0.0061682558059692385, 0.006112448215484619, 0.006381311893463135, 0.006100800037384033, 0.006135072231292725, 0.006058752059936523, 0.006279327869415283, 0.006194431781768799, 0.006255008220672607, 0.006265408039093018, 0.006239744186401367, 0.006199935913085938, 0.0062278399467468265, 0.006672383785247803, 0.006293087959289551, 0.006207935810089112, 0.006103199958801269, 0.006076223850250244, 0.006059904098510743, 0.006072447776794433, 0.006088223934173584, 0.006074848175048828, 0.006135072231292725, 0.0062696638107299805, 0.006098048210144043, 0.006072703838348389, 0.006300159931182861, 0.006272223949432373, 0.006267871856689453, 0.006112095832824707, 0.006123583793640136, 0.006109792232513428, 0.006084256172180176, 0.006135744094848633, 0.006310624122619629, 0.006227968215942382, 0.00610211181640625, 0.0061244478225708, 0.0060677118301391605, 0.0062490878105163575, 0.006113152027130127, 0.006108352184295654, 0.006087488174438477, 0.006205344200134277, 0.0061641278266906735, 0.006077951908111572, 0.006089024066925049, 0.006186912059783936, 0.006509119987487793, 0.006520959854125976, 0.006706367969512939, 0.006514848232269287, 0.0065133762359619145, 0.0065001602172851565, 0.006525023937225342, 0.006398015975952148, 0.006459296226501465, 0.006460639953613281, 0.006357888221740723, 0.0061296639442443845, 0.007016448020935059, 0.006104415893554688, 0.006049600124359131, 0.0061528959274292, 0.006176928043365479, 0.006127744197845459, 0.006080383777618408, 0.006154240131378174, 0.006057119846343994, 0.006086880207061767, 0.006482687950134278, 0.006608575820922852, 0.006342336177825928, 0.006291423797607422, 0.006192512035369873, 0.0061380801200866695, 0.00609878396987915, 0.006097824096679688, 0.006465536117553711, 0.006184447765350342, 0.006642176151275635, 0.006643712043762207, 0.00702784013748169, 0.006234655857086182, 0.006237728118896484, 0.006212416172027588, 0.006115039825439453, 0.0061073598861694335, 0.006238272190093994, 0.006359039783477783, 0.006252863883972168, 0.0062399358749389644, 0.006213632106781006, 0.006135807991027832, 0.006031360149383545, 0.006083775997161865, 0.006013855934143066, 0.006070432186126709, 0.006104832172393799, 0.0061272640228271485, 0.006277599811553955, 0.006473760128021241, 0.006649280071258545, 0.006566304206848145, 0.0065474557876586915, 0.006662144184112549, 0.0064880638122558594, 0.006492159843444824, 0.006387360095977783, 0.006408544063568116, 0.006401663780212403, 0.00625497579574585, 0.006241312026977539, 0.006215936183929444, 0.006148223876953125, 0.006130271911621093, 0.00615334415435791, 0.006221824169158936, 0.0061829757690429685, 0.006468607902526856, 0.006379327774047852, 0.006371103763580323, 0.006285344123840332, 0.006443359851837158, 0.00645308780670166, 0.006191103935241699, 0.006174176216125488, 0.0062280001640319825, 0.006210048198699952, 
0.006131455898284912, 0.0060941438674926756, 0.006087520122528076, 0.006091040134429931, 0.006388895988464356, 0.006177216053009033, 0.0061485118865966795, 0.006147488117218018, 0.0061077442169189455, 0.006045951843261719, 0.006102208137512207, 0.006050047874450684, 0.006144320011138916, 0.006065279960632324, 0.006391776084899902, 0.006463520050048828, 0.006179168224334717, 0.0061038718223571774, 0.006057695865631103, 0.007448575973510742, 0.006615039825439453, 0.006524928092956543, 0.006469791889190674, 0.006346047878265381, 0.006359583854675293, 0.006360415935516357, 0.006226208209991455, 0.006223231792449951, 0.006218751907348633, 0.006231647968292236, 0.006201759815216064, 0.006200448036193847, 0.0062984957695007325, 0.006501408100128174, 0.006487008094787598, 0.006365280151367187, 0.00637440013885498, 0.006375967979431152, 0.0062317438125610355, 0.006161215782165527, 0.006304992198944092, 0.006300672054290772, 0.006076064109802246, 0.006080351829528809, 0.006205088138580323, 0.006193664073944092, 0.006127520084381104, 0.0060416641235351565, 0.006061728000640869, 0.006063680171966553, 0.0060629119873046875, 0.006087935924530029, 0.006111775875091553, 0.006072735786437989, 0.006075424194335937, 0.0060731201171875, 0.006123104095458984, 0.006285727977752686, 0.006279200077056885, 0.006221759796142578, 0.00616860818862915, 0.00613321590423584, 0.00610368013381958, 0.0061439042091369625, 0.00611030387878418, 0.006330848217010498, 0.006373824119567871, 0.006097343921661377, 0.006153791904449463, 0.006136032104492187, 0.006080927848815918, 0.00617033576965332, 0.006146336078643799, 0.006146048069000244, 0.006212800025939941, 0.00638047981262207, 0.00659065580368042, 0.006678207874298096, 0.0064973440170288084, 0.006542272090911865, 0.006432767868041992, 0.006524831771850586, 0.006401631832122803, 0.006229663848876953, 0.006212448120117188, 0.0061972479820251464, 0.006210944175720215, 0.006104991912841797, 0.006191711902618408, 0.006058303833007812, 0.006109119892120362, 0.0060778560638427735, 0.006087135791778564, 0.0060661759376525876, 0.006053120136260986, 0.006087264060974121, 0.0061413440704345705, 0.0062841281890869145, 0.006690271854400635, 0.006207968235015869, 0.0060867519378662105, 0.006112224102020264, 0.006665120124816895, 0.006275231838226318, 0.006131552219390869, 0.006283008098602295, 0.0064486398696899415, 0.006271743774414062, 0.006205664157867431, 0.006102431774139405, 0.006053664207458496, 0.006090784072875976, 0.006127647876739502, 0.006102911949157715, 0.006084512233734131, 0.006092576026916504, 0.006070240020751953, 0.006095104217529297, 0.006076863765716553, 0.006162752151489257, 0.006151999950408936, 0.006135200023651123, 0.00617139196395874, 0.006156320095062256, 0.006209760189056396, 0.006161824226379394, 0.006101376056671143, 0.006082560062408447, 0.006031167984008789, 0.006154431819915772, 0.006596511840820313, 0.006645631790161133, 0.00661897611618042, 0.006531455993652344, 0.006479872226715088, 0.006502399921417237, 0.006409952163696289, 0.006206751823425293, 0.006187615871429443, 0.006135647773742676, 0.006125376224517822, 0.006087488174438477, 0.0060787200927734375, 0.006067903995513916, 0.006123839855194092, 0.006231584072113037, 0.006135968208312989, 0.0061972479820251464, 0.006081984043121338, 0.006275455951690673, 0.006317376136779785, 0.006249216079711914, 0.006205567836761475, 0.006313151836395263, 0.006208320140838623, 0.006252543926239014, 0.006211008071899414, 0.006193727970123291, 0.006162687778472901, 0.006251584053039551, 0.006346687793731689, 
0.006273983955383301, 0.006160607814788818, 0.006113887786865235, 0.0061262078285217285, 0.006095488071441651, 0.006094207763671875, 0.006105247974395752, 0.006105311870574951, 0.006050848007202149, 0.006054848194122314, 0.006176832199096679, 0.006213215827941895, 0.006156447887420654, 0.006074111938476562, 0.006124000072479248, 0.0060928001403808595, 0.006365375995635986, 0.006374944210052491, 0.006408864021301269, 0.006452320098876953, 0.006420608043670654, 0.006445343971252441, 0.006368735790252686, 0.006482592105865478, 0.006516287803649903, 0.006552031993865967, 0.006465184211730957, 0.006398176193237305, 0.006324319839477539, 0.006303391933441162, 0.006209887981414795, 0.006223872184753418, 0.006067903995513916, 0.006144224166870117, 0.006108767986297607, 0.006095456123352051, 0.006031328201293945, 0.006125311851501464, 0.00611568021774292, 0.00609503984451294, 0.0060680961608886716, 0.006067999839782715, 0.006047455787658691, 0.0060472960472106934, 0.0062490878105163575, 0.0063816637992858884, 0.006437215805053711, 0.006772384166717529, 0.006270815849304199, 0.006250847816467285, 0.006183743953704834, 0.006149472236633301, 0.006065408229827881, 0.006071872234344482, 0.006055871963500977, 0.006099584102630615, 0.006093088150024414, 0.006077919960021973, 0.006112927913665771, 0.00625651216506958, 0.006165472030639649, 0.006129695892333985, 0.006229472160339355, 0.006430463790893555, 0.006228352069854736, 0.006097311973571777, 0.00613753604888916, 0.006028895854949951, 0.006044095993041993, 0.006213151931762696, 0.006350719928741455, 0.006577023983001709, 0.006512639999389648, 0.006475776195526123, 0.006382847785949707, 0.0064048638343811035, 0.0063777599334716795, 0.0062665920257568355, 0.006198272228240966, 0.006156544208526612, 0.0061010241508483885, 0.006074719905853272, 0.0060993280410766605, 0.00603545618057251, 0.00606822395324707, 0.006012864112854004, 0.006061888217926025, 0.006170783996582031, 0.00615231990814209, 0.006137311935424805, 0.006031871795654297, 0.005994495868682862, 0.006024352073669434, 0.005987167835235596, 0.006041600227355957, 0.005903744220733643, 0.0061348161697387694, 0.006002816200256347, 0.006050848007202149, 0.006035232067108155, 0.006019680023193359, 0.0060022082328796385, 0.006032095909118652, 0.006002943992614746, 0.00606547212600708, 0.006032127857208252, 0.006024960041046142, 0.005975615978240967, 0.0060850558280944825, 0.005996543884277344, 0.006037248134613037, 0.0060265278816223145, 0.006044640064239502, 0.0060992960929870605, 0.006086592197418213, 0.0060804481506347655, 0.0060486397743225095, 0.00604694414138794, 0.006032224178314209, 0.006052095890045166, 0.006120031833648682, 0.006045695781707764, 0.006078464031219482, 0.006091104030609131, 0.006117343902587891, 0.0060778560638427735, 0.006018496036529541, 0.006077280044555664, 0.006033696174621582, 0.006072256088256836, 0.006001728057861328, 0.006049568176269531, 0.005992640018463135, 0.006036384105682373, 0.005982048034667968, 0.006033440113067627, 0.005969791889190674, 0.006023136138916016, 0.005961855888366699, 0.006044703960418701, 0.005959904193878174, 0.006022240161895752, 0.00596451187133789, 0.006032639980316162, 0.006049471855163574, 0.006015103816986084, 0.005971903800964355, 0.006170464038848877, 0.005976160049438476, 0.006021120071411133, 0.0059658241271972655, 0.0060059518814086915, 0.006050496101379394, 0.006008959770202636, 0.005986303806304932, 0.006027200222015381, 0.005959551811218261, 0.006033696174621582, 0.005905888080596924, 0.006017568111419677, 0.005994495868682862, 
0.006045536041259765, 0.006000800132751465, 0.006096288204193116, 0.006001183986663819, 0.0060338878631591795, 0.005995840072631836, 0.0060234560966491695, 0.005973440170288086, 0.006050303936004638, 0.005967936038970947, 0.005995935916900634, 0.00598195219039917, 0.006017951965332031, 0.005972064018249511, 0.006114655971527099, 0.0060236802101135255, 0.006030655860900879, 0.005995200157165527, 0.006014463901519776, 0.0059704318046569825, 0.006041600227355957, 0.005998015880584717, 0.006042175769805909, 0.005992288112640381, 0.0061133761405944825, 0.005988480091094971, 0.006043583869934082, 0.0059985918998718265, 0.006031199932098389, 0.006002399921417236, 0.006049248218536377, 0.005995488166809082, 0.006051199913024902, 0.005988480091094971, 0.0060375680923461916, 0.005986688137054444, 0.0060249919891357424, 0.005988639831542969, 0.006033311843872071, 0.005996032238006592, 0.006048351764678955, 0.005982207775115967, 0.006012928009033203, 0.0060061440467834475, 0.006029952049255371, 0.006018784046173096, 0.0060289278030395506, 0.005990816116333008, 0.0060011520385742185, 0.005996640205383301, 0.0061231679916381836, 0.006024223804473877, 0.0060152320861816405, 0.005994527816772461, 0.006003647804260254, 0.006039360046386719, 0.006018815994262695, 0.006011072158813476, 0.0060026879310607914, 0.006031007766723633, 0.005943295955657959, 0.005970240116119385, 0.006029088020324707, 0.0060239357948303224, 0.006035391807556152, 0.006115520000457763, 0.0071840319633483885, 0.006700928211212158, 0.0065998082160949705, 0.0060999679565429685, 0.006330368041992188, 0.006032639980316162, 0.006044415950775146, 0.0060293121337890625, 0.006172671794891358, 0.00601087999343872, 0.00601907205581665, 0.006028575897216797, 0.006023903846740722, 0.0060067839622497555, 0.006038943767547608, 0.005997151851654053, 0.006131711959838867, 0.006080512046813965, 0.006074560165405273, 0.00600816011428833, 0.006243135929107666, 0.006094207763671875, 0.006076704025268555, 0.006145855903625488, 0.006097055912017822, 0.006002719879150391, 0.005992288112640381, 0.0059987521171569825, 0.006045567989349365, 0.006017151832580566, 0.006012800216674804, 0.006021183967590332, 0.006014592170715332, 0.006024672031402588, 0.006140895843505859, 0.006111231803894043, 0.006024608135223389, 0.0060442562103271485, 0.0059985918998718265, 0.0060293121337890625, 0.006002304077148438, 0.006037888050079346, 0.006023104190826416, 0.006027455806732178, 0.005992320060729981, 0.006012928009033203, 0.0059881601333618166, 0.006004928112030029, 0.006031072139739991, 0.006007071971893311, 0.006027647972106934, 0.006020768165588379, 0.006010655879974365, 0.006033599853515625, 0.006000671863555908, 0.006027232170104981, 0.0060028800964355465, 0.005905471801757813, 0.006014976024627685, 0.006006752014160156, 0.0060152320861816405, 0.006010848045349121, 0.005979231834411621, 0.006002655982971191, 0.0060068159103393555, 0.006006432056427002, 0.005971968173980713, 0.005990399837493897, 0.0059699201583862304, 0.005994336128234863, 0.0059967041015625, 0.005990399837493897, 0.005987328052520752, 0.00598905611038208, 0.006008384227752686, 0.00603007984161377, 0.005997920036315918, 0.006051743984222412, 0.006001408100128174, 0.0060160961151123045, 0.006025983810424805, 0.006017183780670166, 0.0062722558975219726, 0.006034175872802735, 0.006045951843261719, 0.0060433921813964845, 0.006062079906463623, 0.006017024040222168, 0.006012928009033203, 0.006036736011505127, 0.0060136961936950685, 0.006039552211761475, 0.0060026879310607914, 0.0060538239479064945, 
0.0060226240158081056, 0.0060730881690979005, 0.005990464210510254, 0.00603113603591919, 0.005992447853088379, 0.006066239833831787, 0.005987967967987061, 0.00602675199508667, 0.00598745584487915, 0.006049471855163574, 0.00599622392654419, 0.006043968200683594, 0.005975103855133056, 0.006052800178527832, 0.006008255958557129, 0.006063007831573487, 0.006012576103210449, 0.006053760051727295, 0.006025152206420898, 0.006047935962677002, 0.00598137617111206, 0.006074495792388916, 0.006021984100341797, 0.0060412797927856445, 0.0059712638854980465, 0.006149055957794189, 0.005900191783905029, 0.006160223960876465, 0.006052095890045166, 0.0062399678230285645, 0.00597760009765625, 0.0060050878524780275, 0.005982656002044678, 0.006023519992828369, 0.005987679958343506, 0.006024576187133789, 0.005983168125152588, 0.006008959770202636, 0.0060026879310607914, 0.006006400108337403, 0.006016543865203857, 0.0060095682144165035, 0.0060022401809692386, 0.00601087999343872, 0.006011360168457032, 0.006052127838134766, 0.006020800113677978, 0.0060536317825317385, 0.005986559867858887, 0.006050111770629883, 0.006053120136260986, 0.006058688163757324, 0.0060208640098571775, 0.006021120071411133, 0.006045919895172119, 0.006027040004730225, 0.006078080177307129, 0.006035295963287354, 0.00604963207244873, 0.006009535789489746, 0.0059985918998718265, 0.006000639915466309, 0.006006015777587891, 0.00598419189453125, 0.005994688034057618, 0.005989183902740478, 0.006010687828063964, 0.005974016189575195, 0.0060026879310607914, 0.006020736217498779, 0.0059920639991760255, 0.006000448226928711, 0.006005631923675537, 0.006025023937225342, 0.006014431953430176, 0.0060280637741088865, 0.006014976024627685, 0.006000639915466309, 0.006008575916290284, 0.006023295879364014, 0.006002592086791992, 0.005980703830718994, 0.006016640186309814, 0.005986368179321289, 0.006043295860290527, 0.006008895874023437, 0.006035744190216064, 0.006047743797302246, 0.006021247863769531, 0.005968448162078858, 0.005998655796051026, 0.006021120071411133, 0.00604310417175293, 0.006029856204986572, 0.006037312030792236, 0.006013440132141113, 0.006028704166412354, 0.006029600143432617, 0.006012928009033203, 0.005988351821899414, 0.006281216144561768, 0.006037504196166992, 0.006060256004333496, 0.00616425609588623, 0.006041600227355957, 0.006042655944824219, 0.006013919830322265, 0.00601087999343872, 0.005994495868682862, 0.006010240077972412, 0.006013152122497559, 0.0060360321998596195, 0.006053728103637695, 0.00606547212600708, 0.006066880226135254, 0.006039872169494629, 0.006051743984222412, 0.006024064064025879, 0.006064159870147705, 0.005995039939880371, 0.0060698561668396, 0.005993216037750244, 0.006078464031219482, 0.006000448226928711, 0.00604588794708252, 0.0059983677864074705, 0.006064288139343262, 0.005977536201477051, 0.006081151962280274, 0.006021120071411133, 0.006055007934570313, 0.006003615856170655, 0.006047743797302246, 0.006013023853302002, 0.00602668809890747, 0.005974495887756347, 0.00602623987197876, 0.005962751865386963, 0.006041696071624756, 0.006028768062591553, 0.00608464002609253, 0.006036223888397217, 0.00601039981842041, 0.006041728019714356, 0.0060067839622497555, 0.006062079906463623, 0.006061279773712158, 0.006136608123779297, 0.006170623779296875, 0.006188672065734863, 0.006123807907104492, 0.006647903919219971]",tokens/s,160.0108992884219,, 
float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,739.06176,6315.507712,0.0,5920.260096,5695.433728,s,1,7.27074365234375,7.27074365234375,0.0,7.27074365234375,7.27074365234375,7.27074365234375,7.27074365234375,[7.27074365234375],,kWh,8.849139250005086e-06,9.601664135668122e-07,3.5350028280006773e-06,1.3344308491572575e-05,,MB,1099.22304,6330.187776,0.0,5922.357248,5577.220096,s,10,0.9366836853027344,0.09366836853027345,0.0030483283167822284,0.09478851318359374,0.09545802764892578,0.09604574813842773,0.09651592453002929,"[0.08512000274658203, 0.0924968032836914, 0.09479142761230469, 0.09525901031494141, 0.09663346862792968, 0.09478559875488281, 0.09532742309570312, 0.09490019226074219, 0.09411737823486328, 0.09325238037109375]",tokens/s,2733.0464276983885,kWh,2.7320048962226894e-06,3.012278133645236e-07,1.8089526413458205e-06,4.8421853509330335e-06,tokens/kWh,52868690.7762983,MB,1132.351488,6330.187776,0.0,5922.357248,5663.963136,s,10,20.591097900390626,2.0591097900390625,0.007451632914587384,2.0561278076171874,2.071295068359375,2.0724998291015626,2.0734636376953124,"[2.0528154296875, 2.0534404296875, 2.0631806640625, 2.055403564453125, 2.05194140625, 2.052717529296875, 2.060014892578125, 2.07370458984375, 2.07102734375, 2.05685205078125]",tokens/s,30.595745940678984,kWh,6.005933179169355e-05,6.624405903750141e-06,3.9696497229654024e-05,0.00010638023492509772,tokens/kWh,592215.2742411058,,s,630,20.58804969024657,0.032679443952772357,0.0004708449742492666,0.032596769332885744,0.033009909057617186,0.033275018310546876,0.03555262310028076,"[0.03432243347167969, 0.03297459030151367, 0.032454910278320315, 0.03223551940917969, 0.032292385101318356, 0.03224812698364258, 0.03224387359619141, 0.03234201431274414, 0.032307201385498044, 0.032263648986816405, 0.0322911376953125, 0.03226144027709961, 0.03232364654541016, 0.032174945831298825, 0.03224576187133789, 0.03224316787719726, 0.032309791564941404, 0.03238284683227539, 0.03230847930908203, 0.03240419387817383, 0.03236054229736328, 0.03234796905517578, 0.03244800186157227, 0.03227507019042969, 0.03243171310424805, 0.03240512084960938, 0.03257436752319336, 0.0323583984375, 0.03238281631469726, 0.03249987030029297, 0.03256320190429687, 0.033054271697998044, 0.03274911880493164, 0.03263577651977539, 0.03254886245727539, 0.03263897705078125, 0.0328007698059082, 0.03266892623901367, 0.03268703842163086, 0.03300947189331055, 0.03318937683105469, 0.032669952392578125, 0.03259827041625977, 0.03242598342895508, 0.03255091094970703, 0.032415744781494144, 0.032557056427001956, 0.03248252868652344, 0.03248617553710938, 0.03242393493652344, 0.03273318481445313, 0.03280486297607422, 0.032851966857910156, 0.032745471954345705, 0.0327823371887207, 0.03270041656494141, 0.03265945434570312, 0.032729087829589845, 0.03288883209228516, 0.032849918365478514, 0.03283148956298828, 0.03287449645996094, 0.03319388961791992, 0.03557328033447266, 
0.03347711944580078, 0.032763870239257815, 0.03256115341186523, 0.03227606582641602, 0.03232400131225586, 0.03228684616088867, 0.032153472900390626, 0.03226009750366211, 0.03231868743896484, 0.03229977416992187, 0.03213520050048828, 0.0321712646484375, 0.03218035125732422, 0.03226483154296875, 0.03253657531738281, 0.03257539367675781, 0.03239945602416992, 0.03248537445068359, 0.032337024688720704, 0.0324554557800293, 0.03232688140869141, 0.03245555114746094, 0.03251980972290039, 0.03249728012084961, 0.03241241455078125, 0.032437950134277346, 0.032640960693359374, 0.03255043029785156, 0.032537345886230466, 0.03254595184326172, 0.032607135772705076, 0.032732383728027344, 0.0325599365234375, 0.03255302429199219, 0.03277536010742187, 0.03246697616577148, 0.032507648468017576, 0.0325022087097168, 0.03244086456298828, 0.03241923141479492, 0.03241427230834961, 0.032585536956787106, 0.03244815826416016, 0.03255558395385742, 0.03244230270385742, 0.03282134246826172, 0.03263654327392578, 0.03261270523071289, 0.03256729507446289, 0.03261561584472656, 0.03268281555175781, 0.03265327835083008, 0.03275503921508789, 0.03256390380859375, 0.03253644943237305, 0.03253216171264649, 0.03258188629150391, 0.032825023651123046, 0.0328279037475586, 0.03304150390625, 0.03306588745117187, 0.03305472183227539, 0.03558595275878906, 0.03328992080688477, 0.03272745513916016, 0.03239731216430664, 0.032384864807128905, 0.03238723373413086, 0.032284671783447266, 0.032148799896240234, 0.03224031829833984, 0.03232153701782227, 0.03247420883178711, 0.032462753295898435, 0.032508926391601564, 0.03259187316894531, 0.032546142578125, 0.03241846466064453, 0.03237273788452148, 0.032376094818115236, 0.03255574417114258, 0.03249356842041016, 0.03260211181640625, 0.032449920654296874, 0.032487808227539064, 0.03277350234985352, 0.03250627136230469, 0.032541152954101565, 0.03249116897583008, 0.03248777770996094, 0.03259187316894531, 0.0328803825378418, 0.03286412811279297, 0.032883071899414064, 0.032860160827636715, 0.032794624328613284, 0.03405619049072266, 0.03339059066772461, 0.032616127014160154, 0.032549182891845704, 0.03267379379272461, 0.03277619171142578, 0.032718017578125, 0.032645118713378905, 0.03276473617553711, 0.03261439895629883, 0.03263071823120117, 0.032706623077392576, 0.0328243522644043, 0.032611297607421874, 0.03273241424560547, 0.03259059143066406, 0.03261439895629883, 0.03278643035888672, 0.03346803283691406, 0.03329840087890625, 0.033012126922607424, 0.032896736145019534, 0.032792865753173826, 0.03270655822753906, 0.032868350982666016, 0.033073150634765625, 0.03308297729492188, 0.032864063262939454, 0.03275632095336914, 0.0354752311706543, 0.03328752136230469, 0.03272998428344726, 0.032495201110839846, 0.0325184326171875, 0.032315521240234374, 0.0325522575378418, 0.03220908737182617, 0.032323455810546874, 0.03230374526977539, 0.03227238464355469, 0.03222118377685547, 0.032315391540527344, 0.03239731216430664, 0.03265068817138672, 0.032490047454833984, 0.03262464141845703, 0.0324956169128418, 0.03246249771118164, 0.03266185760498047, 0.03256524658203125, 0.03259088134765625, 0.0324884147644043, 0.03246080017089844, 0.03266940689086914, 0.0325819206237793, 0.03255014419555664, 0.03246771240234375, 0.032589824676513675, 0.032589824676513675, 0.032745471954345705, 0.03327084732055664, 0.032604415893554686, 0.032662208557128904, 0.03268198394775391, 0.03256320190429687, 0.03266355133056641, 0.032471038818359374, 0.03244646453857422, 0.032290817260742184, 0.03241187286376953, 0.032383937835693356, 0.03241244888305664, 
0.03257759857177735, 0.032601089477539064, 0.03262102508544922, 0.03244287872314453, 0.032489246368408206, 0.03265766525268555, 0.032595966339111326, 0.03279667282104492, 0.0327344970703125, 0.03267068862915039, 0.03264281463623047, 0.03253225708007813, 0.032620319366455076, 0.03264956665039063, 0.03267139053344727, 0.03266521453857422, 0.032694366455078124, 0.03285385513305664, 0.03281711959838867, 0.0328262710571289, 0.035722145080566405, 0.03348278427124023, 0.03267334365844726, 0.032811454772949215, 0.03270870590209961, 0.03228457641601563, 0.03223302459716797, 0.032284862518310545, 0.03231564712524414, 0.032126976013183595, 0.032405502319335935, 0.03233792114257812, 0.03232745742797852, 0.0329475212097168, 0.032904094696044925, 0.03233996963500976, 0.03255897521972656, 0.032221057891845706, 0.03231769561767578, 0.032368640899658206, 0.03230713653564453, 0.03232361602783203, 0.032274463653564456, 0.03225190353393555, 0.03236025619506836, 0.03247446441650391, 0.03244323348999024, 0.03240755081176758, 0.03241068649291992, 0.03254278564453125, 0.03257228851318359, 0.03262464141845703, 0.0325591049194336, 0.032546817779541014, 0.03254476928710937, 0.03247459030151367, 0.032485919952392577, 0.03246284866333008, 0.03245414352416992, 0.03239350509643555, 0.03236044692993164, 0.03247305679321289, 0.03244857788085938, 0.03234998321533203, 0.03244668960571289, 0.03227865600585938, 0.03259603118896484, 0.032456703186035156, 0.032544670104980467, 0.032503326416015624, 0.0326682243347168, 0.032530433654785154, 0.03283763122558594, 0.032812896728515624, 0.03256083297729492, 0.0325882568359375, 0.03263059234619141, 0.03255519866943359, 0.032589824676513675, 0.03275702285766602, 0.03285475158691406, 0.032755680084228515, 0.032784416198730466, 0.03550204849243164, 0.033290241241455076, 0.03271692657470703, 0.03255699157714844, 0.03241542434692383, 0.03225798416137695, 0.03220012664794922, 0.032194561004638675, 0.03265951919555664, 0.03214416122436523, 0.032214366912841796, 0.032432830810546875, 0.032368640899658206, 0.03225804901123047, 0.03240716934204101, 0.03221952056884766, 0.03280879974365234, 0.032851486206054686, 0.03239619064331055, 0.03235523223876953, 0.03241862487792969, 0.032411102294921876, 0.03236713409423828, 0.03234611129760742, 0.03228672027587891, 0.032350208282470705, 0.03233187103271484, 0.03240950393676758, 0.032513729095458986, 0.0326690559387207, 0.032722080230712894, 0.0327083854675293, 0.03268175888061523, 0.03269859313964844, 0.03265945434570312, 0.03254441452026367, 0.032522590637207034, 0.032481151580810545, 0.032444000244140625, 0.03240131378173828, 0.032411903381347654, 0.03254719924926758, 0.03260950469970703, 0.03251279830932617, 0.032505855560302735, 0.03282665634155273, 0.032602848052978514, 0.0324587516784668, 0.032530433654785154, 0.03243417739868164, 0.03256489562988281, 0.03260860824584961, 0.03256524658203125, 0.03266355133056641, 0.032589824676513675, 0.03258569717407227, 0.03262262344360352, 0.03256115341186523, 0.03279872131347656, 0.03269734573364258, 0.032796897888183595, 0.03281999969482422, 0.03288604736328125, 0.03569382476806641, 0.03369647979736328, 0.032696319580078126, 0.032513473510742186, 0.03242969512939453, 0.032325950622558594, 0.03236108779907226, 0.03225980758666992, 0.03257503890991211, 0.03226454544067383, 0.03242409515380859, 0.032293087005615236, 0.03227852630615234, 0.03238054275512695, 0.03228652954101562, 0.032260673522949215, 0.032400928497314456, 0.032362911224365236, 0.032428096771240235, 0.03233990478515625, 0.03244022369384766, 
0.03230326461791992, 0.03249356842041016, 0.032540672302246096, 0.032454303741455075, 0.03233827209472656, 0.032419551849365236, 0.03240784072875977, 0.03265705490112305, 0.03257276916503906, 0.032799232482910154, 0.03291596984863281, 0.032952320098876955, 0.03299123382568359, 0.032794017791748044, 0.03273174285888672, 0.03266300964355469, 0.0325453109741211, 0.03266336059570313, 0.0324568977355957, 0.03249151992797852, 0.03301580810546875, 0.032599903106689455, 0.032503231048583985, 0.03260079956054687, 0.03258755111694336, 0.03273292922973633, 0.03260435104370117, 0.03263449478149414, 0.03273795318603516, 0.032683967590332035, 0.03279264068603516, 0.03292111968994141, 0.0332784309387207, 0.032991008758544924, 0.03290544128417969, 0.03294822311401367, 0.03300966262817383, 0.0330189437866211, 0.03315603256225586, 0.03301375961303711, 0.03303184127807617, 0.03300128173828125, 0.03610348892211914, 0.03358787155151367, 0.033070465087890626, 0.03280140686035156, 0.032742401123046876, 0.032653343200683596, 0.03257648086547851, 0.03250790405273438, 0.03259801483154297, 0.032573089599609376, 0.032565601348876955, 0.03263692855834961, 0.033966079711914066, 0.03266764831542969, 0.032630561828613285, 0.032731361389160156, 0.03261161422729492, 0.03257110214233398, 0.03269734573364258, 0.03252364730834961, 0.032696670532226565, 0.03280310440063477, 0.03273318481445313, 0.03277536010742187, 0.032670528411865234, 0.03248534393310547, 0.03259532928466797, 0.03263555145263672, 0.03274137496948242, 0.033972225189208984, 0.03308889770507813, 0.03305971145629883, 0.03284966278076172, 0.032935455322265626, 0.03305065536499024, 0.03283808135986328, 0.03305446243286133, 0.032727294921875, 0.032718849182128903, 0.03279052734375, 0.03271424102783203, 0.033012161254882814, 0.03276332855224609, 0.03273587036132813, 0.03295356750488281, 0.032887584686279295, 0.03278035354614258, 0.03269740676879883, 0.033089534759521484, 0.032912254333496094, 0.032911361694335936, 0.03285606384277344, 0.03295641708374023, 0.033091583251953126, 0.032925697326660154, 0.03282329559326172, 0.03284377670288086, 0.032871681213378905, 0.03293056106567383, 0.032884735107421875, 0.03312607955932617, 0.033259136199951175, 0.0332848014831543, 0.03605833435058594, 0.033718048095703126, 0.03292873764038086, 0.032722782135009766, 0.03274716949462891, 0.03256086349487305, 0.03277494430541992, 0.03317878341674805, 0.0325384635925293, 0.03249049758911133, 0.032568801879882814, 0.03249615859985352, 0.03261644744873047, 0.03250102233886719, 0.032654048919677735, 0.03252617645263672, 0.0326657600402832, 0.03254476928710937, 0.03262259292602539, 0.032659263610839845, 0.03259616088867188, 0.03265331268310547, 0.03271225738525391, 0.03264556884765625, 0.03266969680786133, 0.032622337341308594, 0.032702720642089844, 0.03276595306396484, 0.03285932922363281, 0.03355433654785156, 0.033325984954833986, 0.03310182571411133, 0.032950271606445314, 0.03278643035888672, 0.03289817428588867, 0.03281798553466797, 0.032736961364746096, 0.032688350677490235, 0.032729248046875, 0.032778240203857424, 0.032833534240722655, 0.032699649810791015, 0.03275635147094726, 0.032745601654052735, 0.033252574920654296, 0.03287324905395508, 0.03330047988891602, 0.032806686401367184, 0.03267606353759766, 0.032591552734375, 0.03268025588989258, 0.03278643035888672, 0.032833534240722655, 0.032982078552246094, 0.03280499267578125, 0.03272582244873047, 0.0328600959777832, 0.03286227035522461, 0.033414337158203126, 0.03298537445068359, 0.03310851287841797, 0.03306496047973633, 
0.0329090576171875, 0.036259166717529295, 0.03353593444824219, 0.03302060699462891, 0.03259737777709961, 0.03249379348754883, 0.03247964859008789, 0.03244879913330078, 0.0325013427734375, 0.03274150466918945, 0.03235363388061523, 0.032301055908203126, 0.03225228881835938, 0.03242790222167969, 0.03234857559204102, 0.032468353271484375, 0.03225254440307617, 0.03228876876831055, 0.0323133430480957, 0.03251718521118164, 0.03238547134399414, 0.03253299331665039, 0.032405502319335935, 0.03236454391479492, 0.03273510360717773, 0.03264313507080078, 0.03253359985351562, 0.03265840148925781, 0.032464897155761716, 0.03249356842041016, 0.03262464141845703, 0.032737281799316405, 0.03264716720581055, 0.03266463851928711, 0.032591934204101565, 0.03250166320800781, 0.032414688110351565, 0.03245843124389648, 0.032358497619628904, 0.03253190231323242, 0.03243907165527344, 0.032411361694335936, 0.032319774627685545, 0.03244851303100586, 0.03265945434570312, 0.03250175857543945, 0.032589824676513675, 0.032486976623535155, 0.03242438507080078, 0.03246284866333008, 0.03255817413330078, 0.03261123275756836, 0.03262156677246094, 0.03268505477905274, 0.03261439895629883, 0.03279433441162109, 0.03260649490356445, 0.032894977569580076, 0.03278646469116211, 0.033670398712158205, 0.032911327362060545, 0.03290995025634766, 0.03292787170410156, 0.032839744567871094]",tokens/s,30.600275862868994,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,810.344448,14639.104,0.0,14243.856384,14221.3376,s,1,7.5134609375,7.5134609375,0.0,7.5134609375,7.5134609375,7.5134609375,7.5134609375,[7.5134609375],,kWh,1.4797649766668522e-05,1.591941333074149e-06,6.8258387939997694e-06,2.321542989374244e-05,,MB,1185.431552,14737.670144,0.0,14329.839616,14290.688,s,10,2.1331998596191406,0.21331998596191407,0.00621579085342007,0.21396086120605468,0.21881604003906252,0.21890145874023437,0.21896979370117187,"[0.19669474792480468, 0.21166371154785157, 0.21879705810546876, 0.2109894714355469, 0.21761846923828124, 0.2128002166748047, 0.2177275848388672, 0.21898687744140624, 0.21454396057128905, 0.2133777618408203]",tokens/s,1200.075083661903,kWh,6.355655424999946e-06,7.009066060996178e-07,4.22104202417391e-06,1.1277604055273475e-05,tokens/kWh,22699857.056986574,MB,1206.10816,14752.350208,0.0,14344.51968,14290.69056,s,10,38.713044921874996,3.8713044921874995,0.006590696282903404,3.87208447265625,3.8785936767578124,3.8798875366210934,3.8809226245117188,"[3.859248046875, 3.86244873046875, 3.86632080078125, 3.871842529296875, 3.87074365234375, 3.872326416015625, 3.873673095703125, 3.8769541015625, 3.87830615234375, 3.881181396484375]",tokens/s,16.273584298816427,kWh,0.00011356573131749991,1.2526579352459377e-05,7.55003828640261e-05,0.00020159269353398537,tokens/kWh,312511.32615765755,,s,630,38.70906281661986,0.06144295685177758,0.0005424991378040025,0.06134395027160645,0.06177381134033203,0.06188752746582031,0.06480036430358888,"[0.06405974578857422, 
0.062161758422851564, 0.061213825225830076, 0.061046974182128906, 0.06102067184448242, 0.06102588653564453, 0.06105859375, 0.061370399475097655, 0.060830398559570314, 0.06098457717895508, 0.061190654754638675, 0.06119014358520508, 0.06118239974975586, 0.061128158569335934, 0.060923839569091795, 0.06110879898071289, 0.0611409912109375, 0.061515777587890626, 0.06115024185180664, 0.061141983032226566, 0.061265918731689455, 0.06106524658203125, 0.061020160675048826, 0.06111142349243164, 0.060992351531982424, 0.061216766357421876, 0.06101196670532227, 0.06129641723632812, 0.061335777282714846, 0.06107340621948242, 0.061034015655517575, 0.06101449584960938, 0.06148294448852539, 0.061482494354248046, 0.06132297515869141, 0.06133436965942383, 0.061079425811767576, 0.06098956680297852, 0.06104883193969726, 0.061042686462402344, 0.06105702209472656, 0.061110271453857425, 0.06110819244384766, 0.06100380706787109, 0.061159423828125, 0.06123846435546875, 0.061217601776123044, 0.06126182556152344, 0.06138380813598633, 0.061367198944091796, 0.06120240020751953, 0.06129244613647461, 0.06142985534667969, 0.061459583282470705, 0.061295486450195315, 0.061402240753173826, 0.06144438552856445, 0.06120684814453125, 0.06145817565917969, 0.06136819076538086, 0.06121539306640625, 0.061302143096923827, 0.06120054244995117, 0.06465602874755859, 0.06275273513793946, 0.06161552047729492, 0.061132480621337894, 0.06117264175415039, 0.060909599304199216, 0.061389888763427734, 0.0613139533996582, 0.06109779357910156, 0.06121039962768555, 0.061082015991210936, 0.06079484939575195, 0.061273887634277345, 0.06093644714355469, 0.061143039703369144, 0.06141241455078125, 0.06156793594360352, 0.06172390365600586, 0.06135270309448242, 0.06141747283935547, 0.06125139236450195, 0.06113670349121094, 0.06125606536865234, 0.06111203384399414, 0.06100409698486328, 0.06098873519897461, 0.061203102111816406, 0.06111161422729492, 0.061023937225341794, 0.06088806533813477, 0.06140620803833008, 0.0612259521484375, 0.06145753479003906, 0.06153696060180664, 0.06148323059082031, 0.06149264144897461, 0.0613361930847168, 0.0610629768371582, 0.061033824920654296, 0.061020992279052735, 0.060911296844482425, 0.06089324951171875, 0.06098479843139649, 0.06093011093139648, 0.06114905548095703, 0.06103740692138672, 0.06117788696289062, 0.06132940673828125, 0.06159356689453125, 0.0613642578125, 0.06163644790649414, 0.06142755126953125, 0.06164620971679687, 0.061475391387939456, 0.06109961700439453, 0.06142355346679688, 0.061069408416748044, 0.06094716644287109, 0.061197662353515626, 0.06124816131591797, 0.06109731292724609, 0.06121129608154297, 0.061208576202392576, 0.06485542297363281, 0.06265711975097656, 0.06143939208984375, 0.06112515258789063, 0.06097708892822266, 0.06115686416625977, 0.061233726501464844, 0.061061214447021485, 0.06128201675415039, 0.06125324630737305, 0.06087478256225586, 0.06093827056884766, 0.061078113555908205, 0.061085697174072265, 0.06107340621948242, 0.061419551849365234, 0.061773792266845706, 0.061714305877685546, 0.061689823150634766, 0.06156623840332031, 0.061286945343017575, 0.06143830490112305, 0.06100937652587891, 0.061139198303222654, 0.06111577606201172, 0.06101417541503906, 0.06101295852661133, 0.0612534065246582, 0.06137235260009766, 0.06130233764648438, 0.06127872085571289, 0.06145228958129883, 0.061548030853271485, 0.06157158279418945, 0.06163148880004883, 0.061395263671875, 0.061344158172607424, 0.061329696655273436, 0.0611247673034668, 0.06118182373046875, 0.06091772842407227, 0.06098912048339844, 
0.061028705596923825, 0.06106662368774414, 0.06148492813110352, 0.061117023468017576, 0.06129411315917969, 0.061630207061767577, 0.061518688201904294, 0.06158502578735352, 0.06167385482788086, 0.061818878173828126, 0.061603839874267576, 0.061284351348876956, 0.061290496826171874, 0.061297760009765626, 0.06114985656738281, 0.06131305694580078, 0.061212512969970705, 0.0610873908996582, 0.06108438491821289, 0.06104012680053711, 0.06132371139526367, 0.06549359893798828, 0.06340780639648437, 0.06182675170898438, 0.06153043365478516, 0.06103481674194336, 0.0612044792175293, 0.06116966247558594, 0.06127740859985351, 0.06131180953979492, 0.061102046966552734, 0.061292030334472655, 0.0610302734375, 0.06105145645141601, 0.06128582382202148, 0.061446784973144535, 0.0619310417175293, 0.06177142333984375, 0.06179235076904297, 0.06195609664916992, 0.06155747222900391, 0.061504798889160155, 0.06134473419189453, 0.061091167449951175, 0.06111891174316406, 0.06104256057739258, 0.06120803070068359, 0.061134624481201175, 0.060991424560546875, 0.061037471771240234, 0.061134273529052735, 0.06114572906494141, 0.06125155258178711, 0.06130217742919922, 0.06152969741821289, 0.06139136123657227, 0.06145894241333008, 0.061573089599609374, 0.06163455963134765, 0.061663230895996096, 0.061386302947998045, 0.061129150390625, 0.06150147247314453, 0.06124540710449219, 0.06134550476074219, 0.061205982208251956, 0.06107769775390625, 0.0612083854675293, 0.061449024200439455, 0.06146262359619141, 0.06161801528930664, 0.06183260726928711, 0.06138745498657226, 0.06167548751831055, 0.061464576721191405, 0.06129257583618164, 0.06130252838134766, 0.06130691146850586, 0.061220447540283204, 0.061176414489746096, 0.06134579086303711, 0.061394622802734375, 0.06124585723876953, 0.0612020149230957, 0.06507174682617188, 0.06283673477172852, 0.06168899154663086, 0.06141219329833984, 0.06101916885375976, 0.061273056030273436, 0.06110617446899414, 0.061040481567382815, 0.06103673553466797, 0.06110822296142578, 0.06123721694946289, 0.06088911819458008, 0.061052894592285155, 0.06095449447631836, 0.06125743865966797, 0.061843841552734376, 0.06178118515014648, 0.06166409683227539, 0.06170355224609375, 0.061309024810791014, 0.06105667114257812, 0.06101900863647461, 0.06122905731201172, 0.06129663848876953, 0.0611545295715332, 0.061481536865234374, 0.06128051376342773, 0.061205951690673825, 0.06129052734375, 0.06101046371459961, 0.06132035064697266, 0.06150444793701172, 0.06157913589477539, 0.06192127990722656, 0.06160588836669922, 0.06149529647827148, 0.06140313720703125, 0.06115433502197266, 0.06117270278930664, 0.061451519012451175, 0.061069278717041015, 0.06116854476928711, 0.06120230484008789, 0.06117113494873047, 0.06129248046875, 0.060985504150390626, 0.06140156936645508, 0.06138016128540039, 0.061305248260498046, 0.061755008697509765, 0.06182454299926758, 0.06186809539794922, 0.06161491012573242, 0.06155673599243164, 0.061615455627441404, 0.06146323013305664, 0.06136201477050781, 0.06124291229248047, 0.061305438995361325, 0.06146358489990234, 0.06120732879638672, 0.061507774353027345, 0.06168169784545898, 0.06466556549072265, 0.06253107070922852, 0.061438465118408205, 0.061255233764648434, 0.06116582489013672, 0.06119142532348633, 0.06115423965454102, 0.06128572845458984, 0.06117238235473633, 0.061142433166503904, 0.06126208114624023, 0.06081571197509766, 0.061050880432128904, 0.0611748161315918, 0.061510623931884764, 0.0617775993347168, 0.06179257583618164, 0.061795520782470706, 0.06164896011352539, 0.06141414260864258, 
0.06140723037719727, 0.06124550247192383, 0.06114297485351562, 0.06145788955688476, 0.061262367248535156, 0.061240638732910156, 0.061120609283447265, 0.061131393432617184, 0.061357601165771485, 0.06121516799926758, 0.06135948944091797, 0.06160857772827148, 0.061779712677001955, 0.06189494323730469, 0.06155043029785156, 0.06169817733764649, 0.0617341423034668, 0.06122371292114258, 0.061570625305175784, 0.061399486541748045, 0.06124291229248047, 0.061345600128173826, 0.06150822448730469, 0.06172256088256836, 0.06127817535400391, 0.06114009475708008, 0.06144099044799805, 0.06143952178955078, 0.06145395278930664, 0.06174601745605469, 0.061586624145507814, 0.061315902709960936, 0.06138851165771484, 0.06130847930908203, 0.061207263946533204, 0.06127382278442383, 0.06144419097900391, 0.06122304153442383, 0.06159097671508789, 0.06143606567382812, 0.061253566741943356, 0.06135657501220703, 0.06160761642456054, 0.06552387237548828, 0.06310531234741211, 0.06176358413696289, 0.06137011337280274, 0.061208831787109376, 0.06110614395141602, 0.06117529678344726, 0.06137702560424805, 0.061128799438476565, 0.061093505859375, 0.06117814254760742, 0.06114896011352539, 0.061085952758789065, 0.06111433410644531, 0.061222911834716794, 0.06164070510864258, 0.06162432098388672, 0.0617960319519043, 0.06167279815673828, 0.06167820739746094, 0.0615181770324707, 0.06148473739624023, 0.06120393753051758, 0.06121353530883789, 0.06115900802612305, 0.06126019287109375, 0.06116761779785156, 0.061216766357421876, 0.061400577545166014, 0.06131353759765625, 0.061315071105957034, 0.06141033554077149, 0.06149014282226563, 0.06166540908813477, 0.061505409240722654, 0.061505535125732425, 0.061547584533691406, 0.06129350280761719, 0.061357471466064455, 0.061212417602539065, 0.0615629768371582, 0.061743358612060546, 0.061288032531738285, 0.06129462432861328, 0.061270912170410155, 0.061314720153808594, 0.061404960632324215, 0.06149289703369141, 0.061657279968261716, 0.061811424255371096, 0.061765216827392576, 0.06176607894897461, 0.06161814498901367, 0.06157699203491211, 0.06162579345703125, 0.0613507194519043, 0.06119366455078125, 0.061118080139160154, 0.06132815933227539, 0.06107968139648438, 0.061192192077636716, 0.06131011199951172, 0.06128271865844727, 0.06490729522705078, 0.06290224075317383, 0.06169935989379883, 0.061251808166503906, 0.06106291198730469, 0.06126438522338867, 0.061257984161376955, 0.061337631225585935, 0.061112289428710935, 0.06115523147583008, 0.06118550491333008, 0.06122150421142578, 0.061282302856445314, 0.06110396957397461, 0.061593536376953126, 0.061829345703125, 0.06195199966430664, 0.06179635238647461, 0.06191241455078125, 0.06165724945068359, 0.06142617416381836, 0.061252769470214845, 0.06119715118408203, 0.06113644790649414, 0.06141996765136719, 0.061484897613525394, 0.0612138557434082, 0.06158848190307617, 0.061292545318603515, 0.06124748611450195, 0.06173081588745117, 0.061598846435546875, 0.06177471923828125, 0.061683521270751954, 0.0616833610534668, 0.06150191879272461, 0.061472801208496096, 0.061304862976074216, 0.06131670379638672, 0.061357952117919924, 0.06108009719848633, 0.06115532684326172, 0.061259777069091796, 0.06173286437988281, 0.06170169448852539, 0.061558334350585935, 0.06179449462890625, 0.06170800018310547, 0.061812961578369144, 0.06168857574462891, 0.061951648712158205, 0.06179232025146485, 0.061642559051513675, 0.06140137481689453, 0.061327232360839846, 0.06134201431274414, 0.06127558517456055, 0.06108127975463867, 0.06125657653808594, 0.06148668670654297, 0.06120223999023437, 
0.061424320220947265, 0.06167337417602539, 0.06490252685546875, 0.0627410545349121, 0.061370433807373045, 0.061381729125976565, 0.061295520782470705, 0.06139884948730469, 0.06143814468383789, 0.06172819137573242, 0.06134841537475586, 0.06134172821044922, 0.06159312057495117, 0.06148051071166992, 0.061610881805419924, 0.06125686264038086, 0.06164771270751953, 0.06206991958618164, 0.06199980926513672, 0.061773983001708985, 0.06157267379760742, 0.06140777587890625, 0.06142556762695312, 0.06117375946044922, 0.06101308822631836, 0.06097958374023438, 0.061122718811035155, 0.06119417572021484, 0.06146297454833984, 0.06147174453735352, 0.06133248138427734, 0.0614799690246582, 0.061510623931884764, 0.06154764938354492, 0.06175836944580078, 0.06185964965820313, 0.06160604858398438, 0.06156492614746094, 0.0615731201171875, 0.06144371032714844, 0.06142102432250977, 0.061295486450195315, 0.06126515197753906, 0.06117574310302734, 0.06120534515380859, 0.0612426872253418, 0.061216545104980466, 0.06138070297241211, 0.06146131134033203, 0.06125158309936524, 0.06174460983276367, 0.061858177185058594, 0.061878463745117185, 0.06186188888549805, 0.06161539077758789, 0.061731521606445315, 0.061505569458007815, 0.06158137512207031, 0.06166281509399414, 0.06156934356689453, 0.061413406372070316, 0.06146656036376953, 0.061319137573242186, 0.06157708740234375, 0.06130291366577149, 0.06526566314697266, 0.06307129669189453, 0.06162454223632813, 0.061301441192626954, 0.061367454528808596, 0.061354881286621095, 0.06135539245605469, 0.06131110382080078, 0.06135424041748047, 0.06147865676879883, 0.06132374572753906, 0.06146047973632812, 0.061704193115234375, 0.06123865509033203, 0.06153263854980469, 0.06226755142211914, 0.0620687370300293, 0.06199071884155274, 0.061742431640625, 0.06163337707519531, 0.06154415893554688, 0.061792545318603516, 0.06119571304321289, 0.06167577743530273, 0.06123689651489258, 0.06145500946044922, 0.061095294952392576, 0.06138937759399414, 0.06120864105224609, 0.0614420166015625, 0.0613458251953125, 0.0615096321105957, 0.061677120208740235, 0.06167596817016602, 0.06187539291381836, 0.061778751373291016, 0.061679359436035155, 0.06149321746826172, 0.061411231994628904, 0.06134374237060547, 0.06133388900756836, 0.06134991836547852, 0.06119222259521485, 0.061450302124023436, 0.06161328125, 0.06117609786987305, 0.06134777450561523, 0.06139731216430664, 0.06151910400390625, 0.0614365119934082, 0.06169411087036133, 0.06185324859619141, 0.06180720138549805, 0.06185881423950195, 0.06153014373779297, 0.0614835205078125, 0.06164665603637695, 0.06137305450439453, 0.061421600341796875, 0.06141334533691406, 0.06154214477539063, 0.06155427169799805, 0.06148735809326172]",tokens/s,16.275258406140157,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,738.668544,3450.79808,0.0,3055.550464,2937.680896,s,1,7.21254931640625,7.21254931640625,0.0,7.21254931640625,7.21254931640625,7.21254931640625,7.21254931640625,[7.21254931640625],,kWh,7.65544858750123e-06,8.368047426373106e-07,2.206668432001846e-06,1.0698921762140386e-05,,MB,1104.44544,3522.101248,0.0,3114.27072,2817.473024,s,10,0.44898410034179687,0.04489841003417969,0.0006442349365678308,0.04467651176452637,0.045460602951049804,0.045966013526916503,0.046370341987609864,"[0.0464714241027832, 0.04416009521484375, 0.04534828948974609, 0.04467161560058594, 0.044681407928466796, 0.044641857147216794, 0.04422576141357422, 0.04450051116943359, 0.045274112701416014, 0.04500902557373047]",tokens/s,5701.760926614453,kWh,1.4554342901533722e-06,1.6050690768969044e-07,9.612100005870587e-07,2.577151198430121e-06,tokens/kWh,99334490.01981068,MB,1137.303552,3522.101248,0.0,3114.27072,2877.80864,s,10,11.548456054687499,1.15484560546875,0.004692471931810019,1.1568258666992188,1.159875244140625,1.1607574462890626,1.1614632080078124,"[1.15967919921875, 1.1498546142578125, 1.1500992431640624, 1.1482830810546876, 1.158281494140625, 1.1577117919921875, 1.1616396484375, 1.157550537109375, 1.1561011962890626, 1.1492552490234376]",tokens/s,54.55274687946568,kWh,3.3499842479430185e-05,3.6945752013604073e-06,2.2181919625012554e-05,5.937633730580316e-05,tokens/kWh,1061028.7339809134,,s,630,11.545618431091308,0.018326378462049698,0.00035041837722673565,0.018253119468688964,0.01856883583068848,0.018941068649291994,0.02004849069595337,"[0.019441120147705077, 0.018897504806518556, 0.018577791213989257, 0.018319360733032225, 0.01818009567260742, 0.01811395263671875, 0.0181376953125, 0.01805516815185547, 0.018792287826538086, 0.01797881507873535, 0.018034912109375, 0.018000383377075196, 0.01800707244873047, 0.018150367736816407, 0.018104032516479494, 0.018001855850219725, 0.018032703399658203, 0.017952640533447264, 0.0186841926574707, 0.019297632217407226, 0.018570016860961915, 0.018333023071289062, 0.018301567077636718, 0.018230464935302733, 0.018131231307983397, 0.018145856857299806, 0.01803059196472168, 0.018101503372192383, 0.018078208923339844, 0.018112768173217775, 0.018370559692382812, 0.018124671936035158, 0.018210079193115233, 0.01824166488647461, 0.018274112701416014, 0.018315168380737306, 0.01822003173828125, 0.018127967834472656, 0.018076576232910157, 0.02145484733581543, 0.02059833526611328, 0.01829692840576172, 0.018153823852539063, 0.018128896713256838, 0.018214879989624025, 0.018157535552978516, 0.018324800491333008, 0.018264223098754882, 0.018227071762084962, 0.01828883171081543, 0.018386720657348633, 0.01861299133300781, 0.018579200744628908, 0.018620512008666993, 0.01860009574890137, 0.018563072204589845, 0.018582847595214842, 0.01859654426574707, 0.018521247863769533, 0.018420576095581054, 0.01840127944946289, 0.018359935760498047, 0.018319744110107422, 0.02004256057739258, 0.019277215957641602, 0.0187807674407959, 0.018503679275512695, 0.01824358367919922, 0.018319360733032225, 0.01814240074157715, 0.0180948486328125, 0.018171968460083007, 0.018112512588500978, 0.01799692726135254, 0.017949567794799805, 0.01803379249572754, 0.01827315139770508, 0.01845043182373047, 0.018017696380615233, 0.018100223541259765, 0.01859849548339844, 0.0181942081451416, 0.0182458553314209, 0.018163711547851562, 0.01802614402770996, 0.017993471145629884, 0.018073728561401367, 0.018119136810302736, 0.018112512588500978, 0.018167808532714845, 
0.018083839416503905, 0.01804287910461426, 0.01805891227722168, 0.01837910461425781, 0.018259967803955078, 0.01809769630432129, 0.018040895462036133, 0.018178464889526368, 0.01814303970336914, 0.018370752334594728, 0.018167327880859375, 0.018139392852783202, 0.018094303131103516, 0.018212160110473632, 0.018203136444091796, 0.018085376739501953, 0.018096832275390624, 0.018192127227783204, 0.01819878387451172, 0.018238880157470702, 0.018201183319091797, 0.01816966438293457, 0.018155712127685547, 0.018137088775634767, 0.01826742362976074, 0.018295520782470702, 0.018316287994384766, 0.0182609920501709, 0.018274303436279296, 0.018302047729492187, 0.018383743286132813, 0.01833782386779785, 0.01823744010925293, 0.01825382423400879, 0.018253568649291993, 0.018254079818725587, 0.020050912857055663, 0.01904665565490723, 0.01877577590942383, 0.01840947151184082, 0.01841289520263672, 0.018205343246459962, 0.018056224822998047, 0.01803539276123047, 0.018009824752807616, 0.0179836483001709, 0.017973535537719725, 0.01800124740600586, 0.018036735534667968, 0.01798838424682617, 0.018008064270019532, 0.01805232048034668, 0.018002431869506837, 0.01808332824707031, 0.018016223907470704, 0.018058048248291016, 0.018325504302978517, 0.018069503784179687, 0.01802649688720703, 0.018067455291748045, 0.018092031478881835, 0.018182144165039063, 0.018155519485473632, 0.0182108154296875, 0.0195665283203125, 0.018267263412475587, 0.01817900848388672, 0.01825564765930176, 0.018129119873046873, 0.01823744010925293, 0.01798534393310547, 0.01826416015625, 0.018155168533325196, 0.018157855987548828, 0.018122880935668946, 0.01809187126159668, 0.018155647277832032, 0.018132095336914063, 0.01811756706237793, 0.018147552490234375, 0.01820444869995117, 0.018202207565307618, 0.018227615356445313, 0.01823904037475586, 0.0181907844543457, 0.018153472900390624, 0.018343551635742188, 0.018272287368774415, 0.01823094367980957, 0.018311872482299804, 0.018300928115844727, 0.018350080490112306, 0.01846067237854004, 0.018495487213134765, 0.01833683204650879, 0.018367424011230468, 0.018282495498657226, 0.018328704833984376, 0.01823798370361328, 0.019194976806640625, 0.018704416275024414, 0.01843494415283203, 0.018249727249145507, 0.01810207939147949, 0.01808118438720703, 0.01797961616516113, 0.017997983932495118, 0.018024864196777343, 0.01824470329284668, 0.018082719802856445, 0.01804697608947754, 0.018231231689453124, 0.018101343154907225, 0.018097120285034178, 0.01817190361022949, 0.01803664016723633, 0.01806959915161133, 0.018118656158447266, 0.018094079971313477, 0.01803264045715332, 0.018084991455078126, 0.018082687377929688, 0.01823744010925293, 0.01827596855163574, 0.018207103729248046, 0.01818623924255371, 0.018226207733154295, 0.018109407424926758, 0.018171104431152343, 0.018326303482055665, 0.018145280838012694, 0.01807548713684082, 0.018100383758544922, 0.018056928634643556, 0.018112127304077148, 0.01824835205078125, 0.018120704650878908, 0.01811155128479004, 0.018160575866699218, 0.018122047424316407, 0.018115264892578125, 0.01810963249206543, 0.01811689567565918, 0.018221311569213867, 0.01827577590942383, 0.018258623123168945, 0.018294464111328124, 0.01824611282348633, 0.018186176300048828, 0.018133056640625, 0.01846451187133789, 0.018353567123413086, 0.018408287048339845, 0.01844793510437012, 0.018360767364501953, 0.01836182403564453, 0.018527776718139648, 0.018380992889404296, 0.018408256530761717, 0.018350080490112306, 0.018410816192626953, 0.018313087463378907, 0.019470464706420897, 0.01901523208618164, 
0.018704832077026366, 0.02062335968017578, 0.018351903915405275, 0.018208255767822267, 0.01820537567138672, 0.018087711334228516, 0.01817011260986328, 0.01832476806640625, 0.018268735885620117, 0.018169824600219726, 0.01845471954345703, 0.018206720352172853, 0.018241535186767577, 0.018317312240600587, 0.01820582389831543, 0.018191232681274414, 0.01820057678222656, 0.01825779151916504, 0.018161792755126954, 0.018228736877441407, 0.018332160949707032, 0.018159616470336915, 0.018210432052612305, 0.01832979202270508, 0.018228607177734377, 0.018266944885253905, 0.018124351501464842, 0.018182207107543945, 0.018248064041137695, 0.01827840042114258, 0.018147327423095702, 0.01826576042175293, 0.018175872802734375, 0.01826243209838867, 0.018264127731323243, 0.018159616470336915, 0.018232416152954102, 0.018289567947387696, 0.018331615447998047, 0.018345279693603514, 0.018403711318969725, 0.018319520950317383, 0.018521791458129884, 0.018456064224243163, 0.018375680923461913, 0.018341888427734376, 0.01837401580810547, 0.018310848236083983, 0.018323999404907226, 0.018416128158569335, 0.018479007720947266, 0.018542591094970702, 0.018486751556396484, 0.01855855941772461, 0.018458719253540038, 0.01855574417114258, 0.018458303451538087, 0.018567487716674803, 0.01834716796875, 0.01833660888671875, 0.018450048446655272, 0.019408895492553712, 0.01905254364013672, 0.018589696884155273, 0.01841766357421875, 0.018388063430786132, 0.01826883125305176, 0.018311424255371092, 0.018264064788818358, 0.018297855377197265, 0.018291711807250977, 0.018176000595092775, 0.018231296539306642, 0.018192384719848635, 0.018265792846679688, 0.018349632263183594, 0.018975488662719725, 0.01824563217163086, 0.0182959041595459, 0.018356672286987306, 0.01840176010131836, 0.018222272872924803, 0.01831808090209961, 0.018214975357055664, 0.018284543991088868, 0.01830076789855957, 0.018393184661865233, 0.018222368240356446, 0.01823209571838379, 0.01825584030151367, 0.01816988754272461, 0.018259967803955078, 0.018140512466430662, 0.018146976470947266, 0.01819340705871582, 0.018075424194335936, 0.018150848388671877, 0.01821776008605957, 0.018198528289794923, 0.018229248046875, 0.01823539161682129, 0.01824358367919922, 0.01840643119812012, 0.018381919860839844, 0.018252927780151366, 0.018303712844848632, 0.01835830307006836, 0.01846067237854004, 0.018499584197998048, 0.018276159286499023, 0.01839676856994629, 0.018418272018432616, 0.01855897521972656, 0.018593599319458008, 0.018501663208007814, 0.018515647888183592, 0.018444255828857423, 0.018643455505371095, 0.018642112731933592, 0.018447168350219728, 0.018384735107421876, 0.018410816192626953, 0.018413408279418945, 0.0184901123046875, 0.02029657554626465, 0.019533632278442382, 0.018962623596191407, 0.018689504623413088, 0.018543039321899414, 0.018348127365112304, 0.018325504302978517, 0.018363967895507812, 0.018264511108398437, 0.01820159912109375, 0.0182794246673584, 0.018206016540527344, 0.018141887664794923, 0.018247007369995117, 0.01821558380126953, 0.01817724800109863, 0.018201375961303713, 0.018096128463745118, 0.018161184310913087, 0.01811020851135254, 0.018088672637939455, 0.018096063613891603, 0.018208831787109376, 0.01841971206665039, 0.01942639923095703, 0.018357152938842772, 0.018197919845581053, 0.0181847038269043, 0.018232608795166017, 0.018635583877563477, 0.01835759925842285, 0.018571680068969726, 0.018249504089355467, 0.018267871856689454, 0.018494207382202147, 0.018386528015136717, 0.018779808044433594, 0.018420480728149415, 0.01831260871887207, 0.018351871490478514, 
0.01845737648010254, 0.018376319885253907, 0.018330047607421875, 0.018373695373535157, 0.01832441520690918, 0.01861631965637207, 0.018990816116333006, 0.018379039764404297, 0.018372608184814454, 0.018347583770751952, 0.018295232772827148, 0.01832111930847168, 0.018352415084838865, 0.01828236770629883, 0.01837273597717285, 0.01843404769897461, 0.01836636734008789, 0.018614368438720705, 0.01856870460510254, 0.01849395179748535, 0.01849475288391113, 0.018399967193603515, 0.018394975662231444, 0.02001456069946289, 0.019399200439453125, 0.018933536529541016, 0.018665439605712892, 0.018368543624877928, 0.01822537612915039, 0.018158815383911134, 0.018148128509521484, 0.01816192054748535, 0.018150400161743165, 0.018252159118652345, 0.018279903411865233, 0.018332351684570314, 0.018387168884277345, 0.01839891242980957, 0.01830940818786621, 0.018393119812011718, 0.018561023712158203, 0.01826201629638672, 0.01844223976135254, 0.01820876884460449, 0.018141183853149414, 0.018108287811279298, 0.018130943298339842, 0.019060863494873046, 0.018253311157226563, 0.018081663131713867, 0.01808857536315918, 0.018181888580322266, 0.018157184600830076, 0.018258432388305663, 0.018249120712280274, 0.018106592178344726, 0.019118080139160155, 0.01835372734069824, 0.018269119262695314, 0.018288639068603514, 0.01816294479370117, 0.018143999099731446, 0.0181341438293457, 0.01825267219543457, 0.018364416122436524, 0.01829033660888672, 0.018319711685180665, 0.01847279930114746, 0.01838038444519043, 0.018395263671875, 0.018411968231201174, 0.018206720352172853, 0.018206464767456056, 0.018265535354614258, 0.01829555130004883, 0.018421823501586915, 0.01836796760559082, 0.018405920028686525, 0.01834297561645508, 0.01830393600463867, 0.018485248565673826, 0.018291807174682616, 0.01829318428039551, 0.018301408767700197, 0.018431999206542968, 0.01842790412902832, 0.019533344268798828, 0.01897318458557129, 0.01865727996826172, 0.018380800247192384, 0.018247360229492186, 0.01813478469848633, 0.01809056091308594, 0.018039072036743164, 0.018177440643310547, 0.01807097625732422, 0.018834304809570313, 0.020147552490234377, 0.01827702331542969, 0.018348031997680665, 0.0184682559967041, 0.01815932846069336, 0.018144128799438476, 0.018235328674316407, 0.01825388717651367, 0.01824563217163086, 0.018151424407958985, 0.018040256500244142, 0.01806710433959961, 0.018103200912475585, 0.01822105598449707, 0.01827596855163574, 0.01814156723022461, 0.0181343994140625, 0.01813350486755371, 0.018144800186157228, 0.018174560546875, 0.018233343124389647, 0.018116607666015624, 0.018184192657470705, 0.018323392868041993, 0.018362432479858398, 0.01835212707519531, 0.018298879623413086, 0.018182144165039063, 0.018431999206542968, 0.01894723129272461, 0.01914147186279297, 0.01836851119995117, 0.01830431938171387, 0.018262815475463868, 0.018249536514282228, 0.01823321533203125, 0.01825200080871582, 0.018153472900390624, 0.0182108154296875, 0.01823744010925293, 0.018288639068603514, 0.018298784255981446, 0.018393184661865233, 0.018378400802612306, 0.01832940864562988, 0.018415136337280272, 0.018397344589233398, 0.018363231658935546, 0.01826201629638672, 0.018290687561035156, 0.018284543991088868, 0.018293792724609376, 0.02017750358581543, 0.01919308853149414, 0.018897632598876953, 0.018549983978271484, 0.018272735595703124, 0.018253536224365235, 0.01811721611022949, 0.018184032440185547, 0.01820022392272949, 0.018203136444091796, 0.018100223541259765, 0.01820364761352539, 0.018240575790405274, 0.018229183197021485, 0.018308927536010742, 0.018093568801879883, 
0.018043519973754883, 0.018061376571655272, 0.01800720024108887, 0.018025344848632812, 0.018081504821777342, 0.018102527618408203, 0.01818009567260742, 0.01814851188659668, 0.01812156867980957, 0.018233343124389647, 0.018110464096069336, 0.018069503784179687, 0.018046464920043945, 0.018092544555664062, 0.018114559173583983, 0.018149375915527344, 0.018077695846557617, 0.018104320526123048, 0.01807155227661133, 0.018167808532714845, 0.01809328079223633, 0.01814159965515137, 0.0180731201171875, 0.018094560623168946, 0.018055551528930663, 0.01819161605834961, 0.018098943710327147, 0.01811625671386719, 0.018104671478271484, 0.018173215866088867, 0.018162399291992187, 0.018185792922973634, 0.018135488510131834, 0.018181568145751954, 0.018158143997192382, 0.018241535186767577, 0.018251775741577148, 0.018284543991088868, 0.018279455184936524, 0.01827939224243164, 0.018284479141235353, 0.018391008377075194, 0.018400415420532227, 0.01830121612548828, 0.0182524471282959, 0.01845583915710449, 0.018370399475097655]",tokens/s,54.56615457717421,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,838.209536,9637.39648,0.0,9242.148864,8603.568128,s,1,7.56006689453125,7.56006689453125,0.0,7.56006689453125,7.56006689453125,7.56006689453125,7.56006689453125,[7.56006689453125],,kWh,1.2805606391702895e-05,1.4049791025938898e-06,6.6475053179942956e-06,2.085809081229108e-05,,MB,1194.815488,9889.05472,0.0,9481.224192,8972.090368,s,10,1.128035583496094,0.11280355834960938,0.0011751249572338504,0.11267300796508789,0.11414554901123046,0.11428141555786132,0.11439010879516602,"[0.11201679992675781, 0.11153091430664062, 0.11100089263916016, 0.1131654052734375, 0.11218061065673827, 0.11388262176513672, 0.11441728210449219, 0.11393654632568359, 0.11178915405273437, 0.1141153564453125]",tokens/s,2269.431955387305,kWh,3.5066526572917507e-06,3.867173765259819e-07,2.328391412976124e-06,6.221761446793857e-06,tokens/kWh,41145904.12847147,MB,1223.151616,9893.249024,0.0,9485.418496,8972.092928,s,10,25.124321044921878,2.512432104492188,0.01365794814109764,2.508675903320312,2.5302686279296878,2.5304315551757814,2.530561896972656,"[2.523666748046875, 2.509734130859375, 2.530594482421875, 2.527587890625, 2.530232421875, 2.502421875, 2.49313671875, 2.495555908203125, 2.503773193359375, 2.50761767578125]",tokens/s,25.07530447782331,kWh,7.272253918687309e-05,8.02116478447727e-06,4.820976012862395e-05,0.0001289534640999743,tokens/kWh,488548.333615588,,s,630,25.12142214202883,0.03987527324131557,0.0008178128978666541,0.03974924850463867,0.04038665008544922,0.04076124038696289,0.044132319068908694,"[0.04416851043701172, 0.04000019073486328, 0.03940556716918946, 0.03972051239013672, 0.039725440979003906, 0.039647296905517576, 0.03943219375610352, 0.04027801513671875, 0.039779903411865235, 0.039735744476318356, 0.04018399810791016, 0.039707744598388675, 0.03994112014770508, 0.03994384002685547, 0.039516448974609375, 0.040199966430664064, 
0.039792640686035156, 0.03975167846679688, 0.03993600082397461, 0.040013824462890625, 0.040013824462890625, 0.03999667358398438, 0.03985897445678711, 0.04008752059936523, 0.039755775451660154, 0.03988889694213867, 0.0397918701171875, 0.03990399932861328, 0.03994236755371094, 0.03968115234375, 0.039760543823242185, 0.039757694244384766, 0.040193214416503906, 0.039801151275634765, 0.039828033447265626, 0.040419391632080075, 0.04000153732299805, 0.039726112365722654, 0.03989193725585938, 0.03954457473754883, 0.039815425872802734, 0.040129791259765624, 0.03973936080932617, 0.039678752899169924, 0.04025324630737305, 0.03996895980834961, 0.039814334869384765, 0.03992659378051758, 0.039997665405273435, 0.043370079040527344, 0.0397457275390625, 0.03973494338989258, 0.039780033111572265, 0.04000425720214844, 0.039890209197998044, 0.03996131134033203, 0.03978854370117187, 0.04299980926513672, 0.040011489868164066, 0.04002025604248047, 0.040199649810791015, 0.039909919738769534, 0.03989871978759765, 0.04362803268432617, 0.039938526153564455, 0.03957350540161133, 0.039777313232421875, 0.04047766494750977, 0.04005062484741211, 0.03976134490966797, 0.040256126403808594, 0.0399441909790039, 0.03975065612792969, 0.039638015747070314, 0.03959366226196289, 0.0398191032409668, 0.03998972702026367, 0.03973484802246094, 0.03990367889404297, 0.04003180694580078, 0.039500064849853515, 0.03930944061279297, 0.039690273284912106, 0.04067132949829102, 0.039699615478515624, 0.04114246368408203, 0.039737503051757814, 0.04002012634277344, 0.03959436798095703, 0.03970220947265625, 0.0393238410949707, 0.03935388946533203, 0.03992009735107422, 0.039215103149414066, 0.03931340789794922, 0.039476577758789065, 0.039860897064208985, 0.03951366424560547, 0.0393322868347168, 0.03911679840087891, 0.039360321044921875, 0.039183616638183594, 0.03932841491699219, 0.03906588745117188, 0.03933388900756836, 0.039585792541503906, 0.03956326293945313, 0.03936595153808594, 0.040080062866210936, 0.04001331329345703, 0.040178176879882815, 0.04095795059204101, 0.03959177780151367, 0.03958297729492188, 0.0397334098815918, 0.03973606491088867, 0.03968819046020508, 0.040269824981689455, 0.0397127685546875, 0.039538272857666014, 0.039542911529541015, 0.039417537689208984, 0.04095651245117188, 0.04067532730102539, 0.03991551971435547, 0.03972614288330078, 0.044502368927001955, 0.04037311935424805, 0.04230348968505859, 0.03965692901611328, 0.039975456237792965, 0.03926607894897461, 0.03982521438598633, 0.03956089782714844, 0.03954147338867187, 0.03987017440795899, 0.039893280029296874, 0.039809024810791016, 0.03949756622314453, 0.03942211151123047, 0.03929087829589844, 0.039122943878173826, 0.03937068939208985, 0.03959609603881836, 0.03932160186767578, 0.039790592193603515, 0.039686145782470705, 0.040296192169189456, 0.04003036880493164, 0.04019619369506836, 0.04005478286743164, 0.040038047790527345, 0.03989132690429688, 0.04014182281494141, 0.040133598327636716, 0.0404370231628418, 0.04017635345458984, 0.039981056213378906, 0.04025276947021485, 0.04007183837890625, 0.040153087615966795, 0.040099071502685546, 0.040174335479736326, 0.040293697357177735, 0.03999609756469726, 0.040237056732177735, 0.04029644775390625, 0.04022995376586914, 0.04005574417114258, 0.040118270874023435, 0.040002750396728515, 0.040003616333007815, 0.04000611114501953, 0.040255809783935545, 0.04002191925048828, 0.04069136047363281, 0.040188350677490235, 0.04013868713378906, 0.04013388824462891, 0.04007404708862305, 0.04065280151367188, 0.04011212921142578, 
0.04064051055908203, 0.04130815887451172, 0.04233027267456055, 0.0404826545715332, 0.040065025329589846, 0.03983769607543945, 0.040325023651123046, 0.04454598236083984, 0.04073878479003906, 0.03982432174682617, 0.03990867233276367, 0.039755550384521485, 0.03968912124633789, 0.03947708892822266, 0.03940572738647461, 0.039686145782470705, 0.04053919982910156, 0.039947200775146484, 0.039876609802246096, 0.0398289909362793, 0.04188415908813477, 0.039866336822509764, 0.040192031860351564, 0.040310081481933595, 0.04068239974975586, 0.04062547302246094, 0.039874561309814455, 0.04004092788696289, 0.040021854400634764, 0.03982966232299805, 0.03983564758300781, 0.03964723205566406, 0.04021247863769531, 0.04009769439697266, 0.03981286239624023, 0.04001827239990234, 0.0397946891784668, 0.039583744049072264, 0.03993190383911133, 0.039774208068847655, 0.040564735412597655, 0.039880702972412106, 0.03983564758300781, 0.040054336547851566, 0.03994854354858399, 0.040683712005615234, 0.03969228744506836, 0.040185855865478515, 0.04021583938598633, 0.04042211151123047, 0.04015465545654297, 0.03976611328125, 0.04021491241455078, 0.03987043380737305, 0.039779552459716795, 0.03981110382080078, 0.03974943923950195, 0.04038339233398437, 0.04019401550292969, 0.039888065338134764, 0.040308734893798825, 0.039697246551513674, 0.03986188888549805, 0.0402334098815918, 0.039749057769775394, 0.040632896423339844, 0.04041638565063477, 0.03982368087768555, 0.04014547348022461, 0.039853279113769534, 0.04419638442993164, 0.040647262573242186, 0.04017289733886719, 0.039680416107177735, 0.03966988754272461, 0.03995647811889649, 0.039876094818115236, 0.03988460922241211, 0.03977462387084961, 0.040083999633789065, 0.04032396697998047, 0.0397334098815918, 0.0398394889831543, 0.04041596984863281, 0.03963452911376953, 0.039674495697021486, 0.03987254333496094, 0.03968172836303711, 0.04043193435668945, 0.0401860466003418, 0.03982460784912109, 0.040020481109619144, 0.040244895935058596, 0.039877056121826175, 0.040994430541992186, 0.03979507064819336, 0.040101886749267575, 0.04036403274536133, 0.040013824462890625, 0.04011196899414062, 0.040183967590332034, 0.03992521667480469, 0.040024608612060544, 0.0398047981262207, 0.04005625534057617, 0.04019587326049805, 0.04310927963256836, 0.03971686553955078, 0.03985190582275391, 0.03957078552246094, 0.039772415161132814, 0.039893535614013674, 0.03971651077270508, 0.04048112106323242, 0.04131148910522461, 0.04000214385986328, 0.039860641479492184, 0.04032460784912109, 0.04018320083618164, 0.04069462585449219, 0.039874561309814455, 0.040310176849365234, 0.03986697769165039, 0.03973500823974609, 0.040054656982421874, 0.03984016036987305, 0.040597503662109374, 0.04007872009277344, 0.03984022521972656, 0.03994844818115234, 0.04029439926147461, 0.03989654541015625, 0.03979727935791016, 0.044315807342529295, 0.04021337509155273, 0.039763904571533205, 0.03956121444702149, 0.039376895904541014, 0.039636863708496096, 0.039701759338378904, 0.039457439422607425, 0.039912990570068356, 0.039701183319091796, 0.0393438720703125, 0.03923993682861328, 0.03911654281616211, 0.03942015838623047, 0.03958784103393555, 0.03967795181274414, 0.039964672088623046, 0.039901153564453125, 0.039487232208251954, 0.039534881591796876, 0.039232990264892575, 0.03936105728149414, 0.03945062255859375, 0.039583744049072264, 0.03944607925415039, 0.039637439727783205, 0.03950175857543945, 0.03947731018066406, 0.03930112075805664, 0.03944156646728516, 0.03965574264526367, 0.03964339065551758, 0.03953692626953125, 0.04002627182006836, 
0.04156947326660156, 0.0396151351928711, 0.039196670532226564, 0.03935539245605469, 0.039152641296386716, 0.03936460876464844, 0.03943219375610352, 0.03949977493286133, 0.03947865676879883, 0.03946470260620117, 0.0392487678527832, 0.04017110443115234, 0.043108768463134765, 0.03973324966430664, 0.039403518676757815, 0.03924780654907226, 0.03909145736694336, 0.039322433471679685, 0.03950796890258789, 0.03980287933349609, 0.03946905517578125, 0.03958335876464844, 0.039518592834472656, 0.04061974334716797, 0.03964137649536133, 0.03956121444702149, 0.04031488037109375, 0.03924979019165039, 0.03921702575683594, 0.046403553009033205, 0.04075110244750976, 0.039499839782714846, 0.03943529510498047, 0.039175071716308595, 0.0390184326171875, 0.040470592498779295, 0.03879731369018555, 0.039651329040527344, 0.03889926528930664, 0.038986270904541015, 0.03884431838989258, 0.03925785446166992, 0.03955532836914062, 0.039172096252441405, 0.038997791290283204, 0.039230846405029295, 0.039050048828125, 0.03960335922241211, 0.0401396484375, 0.03922083282470703, 0.03927081680297852, 0.039115806579589844, 0.03900310516357422, 0.03983564758300781, 0.03937279891967774, 0.03908185577392578, 0.03906572723388672, 0.039093505859375, 0.039185150146484375, 0.039066879272460935, 0.039472991943359376, 0.039142433166503905, 0.03929651260375976, 0.04003478240966797, 0.039954334259033206, 0.039480705261230466, 0.039244415283203125, 0.039218433380126955, 0.03912726211547852, 0.03949350357055664, 0.03929155349731445, 0.0395338249206543, 0.04009651184082031, 0.040769535064697264, 0.041603073120117184, 0.04005411148071289, 0.039471073150634764, 0.03953129577636719, 0.03940752029418945, 0.03930646514892578, 0.039581985473632814, 0.0392545280456543, 0.039403263092041015, 0.03940991973876953, 0.039272449493408204, 0.03945808029174805, 0.03982566452026367, 0.03963913726806641, 0.039672191619873044, 0.03934620666503906, 0.0390491828918457, 0.03919664001464844, 0.04495942306518555, 0.040583839416503904, 0.03956115341186523, 0.03994585418701172, 0.039266529083251955, 0.03923510360717773, 0.03886137771606445, 0.03885391998291016, 0.038742881774902344, 0.03907583999633789, 0.039065601348876954, 0.03985203170776367, 0.0394886703491211, 0.03958208084106445, 0.0388325424194336, 0.03878713607788086, 0.03895065689086914, 0.03881804656982422, 0.038825984954833984, 0.03919257736206055, 0.0394886703491211, 0.039656288146972654, 0.03964271926879883, 0.03931584167480469, 0.03894889450073242, 0.039060993194580076, 0.03941017532348633, 0.03950140762329102, 0.0398803825378418, 0.03954537582397461, 0.0392869758605957, 0.03935027313232422, 0.03939689636230469, 0.039379425048828125, 0.03932956695556641, 0.039485088348388674, 0.03970073699951172, 0.03971097564697266, 0.03979388809204101, 0.03952316665649414, 0.039479297637939455, 0.039569408416748046, 0.039577598571777346, 0.039652862548828126, 0.03975628662109375, 0.03971072006225586, 0.03957468795776367, 0.03950646209716797, 0.0420560302734375, 0.039530464172363285, 0.03953039932250976, 0.040149089813232425, 0.039907329559326174, 0.040314208984375, 0.03966195297241211, 0.03973926544189453, 0.039666046142578126, 0.0394486083984375, 0.0398616943359375, 0.039561790466308595, 0.03949382400512695, 0.03981497573852539, 0.0398131217956543, 0.04395113754272461, 0.04020611190795898, 0.03978374481201172, 0.039664222717285154, 0.03952588653564453, 0.039413761138916016, 0.03952620697021485, 0.0396973762512207, 0.039757823944091795, 0.03965340805053711, 0.03953865432739258, 0.03948134231567383, 0.039559295654296875, 
0.03969216156005859, 0.03980489730834961, 0.039830944061279294, 0.03978099060058594, 0.03984998321533203, 0.039642688751220706, 0.03972121429443359, 0.039819454193115236, 0.03972403335571289, 0.03970560073852539, 0.03982953643798828, 0.039514080047607425, 0.04192051315307617, 0.04002220916748047, 0.039862079620361326, 0.03935846328735351, 0.03936665725708008, 0.03949772644042969, 0.0397844467163086, 0.04230348968505859, 0.039981056213378906, 0.039775390625, 0.03953286361694336, 0.0399318733215332, 0.03956793594360351, 0.04051116943359375, 0.03909049606323242, 0.03952560043334961, 0.03932140731811523, 0.03964617538452148, 0.03977011108398437, 0.03956934356689453, 0.039849952697753904, 0.0393155517578125, 0.03943958282470703, 0.03950048065185547, 0.03920294570922851, 0.03937071990966797, 0.03948134231567383, 0.03990854263305664, 0.039586624145507815, 0.03920896148681641, 0.03927603149414063, 0.039284801483154295, 0.03922784042358399, 0.03924582290649414, 0.03894476699829102, 0.03919177627563476, 0.039348896026611326, 0.03906777572631836, 0.0440437126159668, 0.04017593765258789, 0.039144832611083986, 0.03923238372802734, 0.039236961364746095, 0.0405654067993164, 0.040202239990234374, 0.04010598373413086, 0.0396943359375, 0.039696063995361325, 0.03956972885131836, 0.03943833541870117, 0.03944607925415039, 0.03920665740966797, 0.03946566390991211, 0.039865665435791016, 0.039497665405273434, 0.039578369140625, 0.039043041229248045, 0.03991059112548828, 0.0393155517578125, 0.039556926727294925, 0.039772319793701175, 0.04103408050537109, 0.03951270294189453, 0.03949926376342774, 0.039133502960205076, 0.03932364654541016, 0.03923286437988281, 0.03954140853881836, 0.03996464157104492, 0.03998275375366211, 0.039741825103759766, 0.039790592193603515, 0.041825439453125, 0.04246409606933594, 0.039567359924316405, 0.03976396942138672, 0.039642528533935545, 0.040071041107177734, 0.039570335388183595, 0.03959174346923828, 0.0391511344909668, 0.03928931045532227, 0.03916799926757813, 0.03944985580444336, 0.03984870529174805, 0.04031049728393555, 0.039741569519042966, 0.03960438537597656, 0.039365665435791015, 0.03961318588256836, 0.03951638412475586, 0.03954278564453125, 0.039288833618164064, 0.039292606353759765, 0.04049270248413086, 0.03993814468383789, 0.03994812774658203, 0.04008009719848633, 0.039653377532958986, 0.03951577758789063, 0.03950627136230469]",tokens/s,25.07819805893844,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.181824,1326.383104,0.0,931.135488,917.648384,s,1,7.267576171875,7.267576171875,0.0,7.267576171875,7.267576171875,7.267576171875,7.267576171875,[7.267576171875],,kWh,9.731543208325394e-06,1.0588403921696833e-06,4.187781127998336e-06,1.4978164728493414e-05,,MB,1164.394496,1458.50368,0.0,1050.673152,1018.330112,s,10,0.2378742084503174,0.023787420845031736,0.00025684186075807625,0.023689487457275393,0.02418208408355713,0.024227458477020264,0.02426375799179077,"[0.024172000885009766, 0.023683807373046876, 0.02393600082397461, 0.023694143295288086, 0.023820640563964844, 0.023541439056396486, 0.023403263092041014, 0.0242728328704834, 0.023684831619262697, 0.02366524887084961]",tokens/s,10761.99061965427,kWh,6.842642028688759e-07,7.546128079848815e-08,4.5220111637936463e-07,1.2119266000467288e-06,tokens/kWh,211233914.65302378,MB,1198.395392,1475.280896,0.0,1067.450368,1032.767488,s,10,13.457886840820311,1.3457886840820312,0.017177029152594957,1.3504623413085937,1.359035498046875,1.360448779296875,1.361579404296875,"[1.35027099609375, 1.34962744140625, 1.358721435546875, 1.3506536865234375, 1.3226339111328125, 1.3043353271484375, 1.3483697509765624, 1.361862060546875, 1.3536444091796875, 1.357767822265625]",tokens/s,46.812698564910725,kWh,3.787052177546552e-05,4.176658680802899e-06,1.7827452396220437e-05,5.987463285248884e-05,tokens/kWh,1052198.5187819193,,s,630,13.451185438156136,0.021351087997073217,0.00045205151538563394,0.02138086414337158,0.021686100006103517,0.02196893768310547,0.022835756683349608,"[0.02106595230102539, 0.021346879959106446, 0.02127872085571289, 0.02129100799560547, 0.021303295135498047, 0.021448991775512696, 0.021298912048339842, 0.021391008377075197, 0.02154489517211914, 0.021416351318359374, 0.02126185607910156, 0.021357023239135742, 0.02118572807312012, 0.021412128448486327, 0.021381664276123046, 0.021553279876708985, 0.021544704437255858, 0.021479551315307616, 0.021571104049682616, 0.021453279495239258, 0.021335487365722657, 0.02129158401489258, 0.021364288330078127, 0.0213623046875, 0.021316415786743165, 0.021583871841430666, 0.021978784561157226, 0.024228288650512696, 0.021284576416015624, 0.0214420166015625, 0.021383007049560546, 0.021314111709594727, 0.021451072692871095, 0.021391199111938475, 0.021065536499023436, 0.021246015548706056, 0.02138947105407715, 0.021878751754760742, 0.022534303665161133, 0.02166988754272461, 0.02131385612487793, 0.021712575912475586, 0.021139455795288087, 0.021212736129760743, 0.021174623489379884, 0.021401472091674804, 0.021395679473876952, 0.021402751922607422, 0.021304191589355467, 0.021182464599609374, 0.02115488052368164, 0.021255104064941407, 0.02123980712890625, 0.02138051223754883, 0.021162080764770507, 0.021642847061157225, 0.021600288391113283, 0.021261184692382813, 0.020927999496459963, 0.021076480865478517, 0.021190656661987304, 0.021192607879638673, 0.021180511474609375, 0.021114208221435546, 0.02127052879333496, 0.021258079528808593, 0.021300031661987306, 0.021374975204467773, 0.02131113624572754, 0.021439903259277342, 0.02142201614379883, 0.02123673629760742, 0.0211661434173584, 0.021147136688232423, 0.021078208923339843, 0.021301504135131835, 0.021383167266845703, 0.02197248077392578, 0.021680992126464845, 0.021490495681762697, 0.021133344650268556, 0.02122960090637207, 0.02188934326171875, 0.021295040130615235, 0.022047264099121094, 0.021900320053100587, 0.021379903793334962, 0.021665824890136718, 0.02129462432861328, 0.02153926467895508, 
0.021493120193481444, 0.021322719573974608, 0.021364255905151366, 0.021317920684814452, 0.021214496612548827, 0.021318368911743164, 0.021243904113769533, 0.021315584182739256, 0.02121660804748535, 0.021426847457885742, 0.021370880126953123, 0.02154297637939453, 0.021323423385620117, 0.02129897689819336, 0.021254623413085937, 0.021262208938598634, 0.021155168533325195, 0.02148137664794922, 0.021607648849487304, 0.02148137664794922, 0.021534143447875978, 0.021324127197265626, 0.022230976104736327, 0.021281919479370116, 0.021187519073486327, 0.021656736373901368, 0.02139411163330078, 0.021382495880126952, 0.021541919708251953, 0.02118771171569824, 0.021481472015380858, 0.02149443244934082, 0.021429759979248047, 0.02148953628540039, 0.021413759231567384, 0.02155388832092285, 0.021386079788208008, 0.021405023574829103, 0.02153251266479492, 0.021594816207885743, 0.022244735717773436, 0.02132044792175293, 0.021739168167114256, 0.021184864044189452, 0.021433759689331054, 0.02137763214111328, 0.02141606330871582, 0.021448575973510742, 0.021360992431640625, 0.021468832015991212, 0.021401599884033205, 0.02156870460510254, 0.02152876853942871, 0.02126630401611328, 0.021346368789672852, 0.02144291114807129, 0.021506399154663087, 0.02145894432067871, 0.021460384368896485, 0.021430879592895507, 0.022981760025024413, 0.022840192794799805, 0.021941471099853515, 0.021535295486450196, 0.021229440689086915, 0.02148918342590332, 0.021540800094604493, 0.02157043266296387, 0.021462400436401366, 0.021494047164916992, 0.021485919952392577, 0.02143846321105957, 0.02142617607116699, 0.02138710403442383, 0.0214880313873291, 0.02145635223388672, 0.02131177520751953, 0.021591903686523438, 0.021612287521362305, 0.021540256500244142, 0.021728256225585937, 0.022007104873657226, 0.021329631805419923, 0.021362752914428712, 0.021719968795776368, 0.021622047424316407, 0.0223874568939209, 0.02134422492980957, 0.021354496002197267, 0.021574687957763673, 0.021354496002197267, 0.02143491172790527, 0.021227392196655273, 0.02135856056213379, 0.022089567184448242, 0.02211408042907715, 0.021369823455810545, 0.021227519989013673, 0.021321216583251954, 0.020748287200927733, 0.021513599395751953, 0.021365503311157226, 0.02151849555969238, 0.021288671493530274, 0.021310848236083986, 0.02124982452392578, 0.02133625602722168, 0.021600927352905273, 0.021356544494628905, 0.02130534362792969, 0.02168627166748047, 0.021559295654296876, 0.02152239990234375, 0.021201183319091797, 0.021141248703002928, 0.021384672164916994, 0.02137280082702637, 0.02154697608947754, 0.02151807975769043, 0.021324735641479492, 0.0215285758972168, 0.02145715141296387, 0.02119059181213379, 0.021663040161132813, 0.02120863914489746, 0.021301984786987305, 0.021282560348510744, 0.0214552001953125, 0.021605855941772462, 0.021410655975341798, 0.021880640029907226, 0.02121436882019043, 0.022067583084106446, 0.021426624298095703, 0.021388511657714843, 0.021468255996704103, 0.021379968643188477, 0.021386016845703126, 0.02149718475341797, 0.02154159927368164, 0.021467552185058594, 0.021270111083984376, 0.021350400924682617, 0.021441919326782227, 0.021404287338256837, 0.021623968124389648, 0.021328832626342772, 0.021454751968383787, 0.021506048202514647, 0.021476736068725587, 0.02133465576171875, 0.021522335052490234, 0.021220735549926758, 0.021457696914672853, 0.02118547248840332, 0.021478431701660156, 0.02141302490234375, 0.021302080154418944, 0.021368671417236328, 0.022006111145019533, 0.021515903472900392, 0.021624319076538084, 0.020828351974487305, 0.021493343353271483, 
0.021075679779052735, 0.021252031326293944, 0.021082944869995117, 0.021491680145263672, 0.021733152389526368, 0.021462944030761717, 0.02137273597717285, 0.0213275203704834, 0.021441152572631836, 0.021456895828247072, 0.02124310493469238, 0.021551616668701173, 0.021335424423217772, 0.021697471618652344, 0.023775199890136718, 0.02130454444885254, 0.021468992233276366, 0.02093507194519043, 0.020655712127685546, 0.020620256423950194, 0.021012479782104493, 0.021261600494384764, 0.02134931182861328, 0.020764448165893554, 0.02077471923828125, 0.020713727951049806, 0.020520383834838868, 0.020586751937866212, 0.020471807479858398, 0.020424959182739257, 0.020696224212646483, 0.02059766387939453, 0.020545055389404297, 0.02031407928466797, 0.020310464859008788, 0.02068889617919922, 0.020719615936279297, 0.020942848205566408, 0.020472991943359376, 0.020658016204833984, 0.020974592208862306, 0.020745471954345705, 0.020716287612915038, 0.020602880477905275, 0.02065017509460449, 0.020565824508666994, 0.02090188789367676, 0.020809024810791017, 0.02078611183166504, 0.02086444854736328, 0.020752288818359374, 0.02066473579406738, 0.020699136734008788, 0.02066761589050293, 0.020910879135131837, 0.020783103942871094, 0.021818368911743165, 0.020677631378173827, 0.02065999984741211, 0.021082624435424805, 0.021257951736450197, 0.02172492790222168, 0.020821247100830078, 0.020880384445190428, 0.020645631790161132, 0.020590848922729492, 0.020590591430664062, 0.020568031311035156, 0.02059267234802246, 0.020527423858642577, 0.020491968154907225, 0.020426496505737305, 0.020510944366455078, 0.02045737648010254, 0.020605056762695313, 0.020516864776611327, 0.020414464950561522, 0.020477535247802735, 0.020420703887939453, 0.02044960021972656, 0.02029974365234375, 0.020534719467163086, 0.02074435234069824, 0.020668352127075195, 0.02059110450744629, 0.02031820869445801, 0.02043903923034668, 0.020508447647094728, 0.020527135848999022, 0.020744384765625, 0.020473791122436524, 0.020556928634643555, 0.0208035831451416, 0.02079840087890625, 0.020965599060058595, 0.021144832611083984, 0.02111747169494629, 0.021149696350097655, 0.020932928085327148, 0.020670143127441407, 0.02060310363769531, 0.02060652732849121, 0.020713695526123045, 0.020610624313354493, 0.02048988723754883, 0.02049056053161621, 0.020848960876464845, 0.020555936813354492, 0.020686784744262696, 0.021489728927612306, 0.020854207992553712, 0.02084876823425293, 0.020559968948364257, 0.020574560165405275, 0.02048409652709961, 0.020622528076171875, 0.02087436866760254, 0.02090166473388672, 0.02086697578430176, 0.020725759506225586, 0.02073798370361328, 0.020895807266235352, 0.02106502342224121, 0.020947647094726563, 0.02070944023132324, 0.02120649528503418, 0.021178335189819337, 0.020889408111572267, 0.021060447692871093, 0.02107792091369629, 0.020899456024169923, 0.021325824737548828, 0.020973760604858397, 0.02100764846801758, 0.02105027198791504, 0.020940799713134766, 0.021175615310668944, 0.02108220863342285, 0.021234272003173828, 0.02112870407104492, 0.021393632888793944, 0.021290943145751952, 0.021424480438232422, 0.021429311752319335, 0.021461952209472657, 0.02155897521972656, 0.02169068717956543, 0.022055103302001954, 0.02168608093261719, 0.021540864944458008, 0.02150099182128906, 0.021605152130126953, 0.021299455642700197, 0.021950111389160157, 0.021512447357177736, 0.021747711181640626, 0.021994943618774413, 0.0215695686340332, 0.021791263580322264, 0.021604352951049805, 0.021482688903808594, 0.021674816131591796, 0.02161164855957031, 0.021610847473144533, 
0.021563135147094726, 0.021501728057861328, 0.02143699264526367, 0.0212393913269043, 0.02121353530883789, 0.021434879302978514, 0.021405696868896484, 0.021284063339233397, 0.0213590087890625, 0.021309823989868165, 0.02123980712890625, 0.021471456527709962, 0.02123139190673828, 0.02129871940612793, 0.021580255508422852, 0.02162086486816406, 0.021403871536254882, 0.021153343200683593, 0.021407007217407226, 0.021504831314086915, 0.021606399536132814, 0.021475263595581055, 0.021507551193237304, 0.021364992141723632, 0.0214835205078125, 0.02177801513671875, 0.021333951950073243, 0.02133964729309082, 0.021389280319213867, 0.021443071365356444, 0.021328384399414063, 0.021381120681762695, 0.021364255905151366, 0.02137750434875488, 0.021313119888305664, 0.021379615783691405, 0.021594207763671876, 0.022573856353759764, 0.02155392074584961, 0.021964607238769532, 0.02141788864135742, 0.021598751068115235, 0.024227840423583984, 0.02147532844543457, 0.02142963218688965, 0.021602943420410158, 0.0213668155670166, 0.021561311721801757, 0.02130963134765625, 0.02147884750366211, 0.021606624603271483, 0.02142223930358887, 0.021512351989746093, 0.021364736557006835, 0.021245792388916017, 0.021326143264770506, 0.021402336120605468, 0.02168726348876953, 0.021461151123046876, 0.021763391494750976, 0.02152707290649414, 0.021442720413208008, 0.021391199111938475, 0.02149504089355469, 0.02154572868347168, 0.021428224563598632, 0.021444671630859374, 0.02137833595275879, 0.021508447647094725, 0.02149600028991699, 0.02219343948364258, 0.022824895858764647, 0.02147011184692383, 0.021545024871826173, 0.021876672744750976, 0.021566976547241212, 0.021443071365356444, 0.021389280319213867, 0.021638240814208985, 0.021348831176757812, 0.02166819190979004, 0.021849920272827148, 0.02139686393737793, 0.0225098876953125, 0.021695104598999024, 0.02182150459289551, 0.02127257537841797, 0.021372575759887696, 0.02145961570739746, 0.021460607528686525, 0.021435680389404296, 0.021480031967163086, 0.02154105567932129, 0.021358591079711914, 0.02142790412902832, 0.02150982475280762, 0.021633663177490235, 0.021597408294677736, 0.02147212791442871, 0.02136662483215332, 0.021671743392944337, 0.021250303268432618, 0.02146918487548828, 0.021140512466430665, 0.02132476806640625, 0.02144256019592285, 0.021383167266845703, 0.021319583892822267, 0.021460639953613282, 0.021373376846313477, 0.02156342315673828, 0.021513824462890626, 0.021205568313598634, 0.021806848526000976, 0.02136684799194336, 0.02162073516845703, 0.021503583908081055, 0.021327999114990233, 0.021514623641967774, 0.0216407356262207, 0.021850496292114257, 0.02136579132080078, 0.021385759353637696, 0.021430559158325195, 0.021307552337646484, 0.021348352432250976, 0.02207257652282715, 0.02133865547180176, 0.0216856632232666, 0.022333631515502928, 0.02155174446105957, 0.021573631286621094, 0.021394527435302735, 0.0214715518951416, 0.02161520004272461, 0.021547008514404296, 0.02161267280578613, 0.021473215103149413, 0.02154694366455078, 0.021431743621826174, 0.02127097511291504, 0.021207168579101564, 0.02140355110168457, 0.021423711776733398, 0.021343936920166017, 0.02140665626525879, 0.021526432037353514, 0.021302848815917968, 0.021459199905395507, 0.021001216888427734, 0.021385215759277345, 0.02149171257019043, 0.021695520401000975, 0.02148387145996094, 0.022984895706176758, 0.021383392333984376, 0.021241472244262694, 0.02143270492553711, 0.021274368286132814, 0.021237279891967772, 0.021242111206054688, 0.02128950309753418, 0.02125446319580078, 0.021380607604980468, 
0.021410144805908204, 0.021505088806152345, 0.02137343978881836, 0.021258176803588866, 0.021352960586547853, 0.02156105613708496, 0.02148944091796875, 0.021532447814941406, 0.021850847244262697, 0.021372671127319335, 0.021332223892211913, 0.022409503936767577, 0.021591487884521483, 0.02123196792602539, 0.021325183868408204, 0.02129158401489258, 0.02146713638305664, 0.02127257537841797, 0.02138252830505371, 0.02126464080810547, 0.021284543991088867, 0.021994176864624022, 0.02148316764831543, 0.021433919906616212, 0.02138995170593262, 0.02146905517578125, 0.02152272033691406, 0.021456703186035157, 0.02143244743347168, 0.02259974479675293, 0.022409183502197266, 0.022222496032714843, 0.021505760192871093, 0.02146784019470215, 0.021335744857788087, 0.021192991256713867, 0.021305376052856446, 0.021759040832519533, 0.021418912887573242, 0.021444608688354492, 0.0214936637878418, 0.021321119308471678, 0.02130400085449219, 0.021073919296264648, 0.021362464904785158, 0.021215679168701172, 0.02462211227416992, 0.021505023956298826]",tokens/s,46.836020728174546,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.153152,3354.329088,0.0,2959.081472,2942.567424,s,1,7.5465380859375,7.5465380859375,0.0,7.5465380859375,7.5465380859375,7.5465380859375,7.5465380859375,[7.5465380859375],,kWh,1.0555499091666812e-05,1.1571333836274744e-06,4.908337259995621e-06,1.6620969735289905e-05,,MB,1184.321536,3547.267072,0.0,3139.436544,3105.830912,s,10,0.35119199752807617,0.03511919975280762,0.0013828462358450484,0.03462977600097656,0.03707234077453613,0.037395865821838375,0.037654685859680175,"[0.037719390869140626, 0.03700044631958008, 0.03378755187988281, 0.03475555038452149, 0.03391804885864258, 0.033634750366210935, 0.03446249771118164, 0.034675006866455076, 0.034584545135498045, 0.03665420913696289]",tokens/s,7289.459947888875,kWh,1.2789389064228766e-06,1.4104498809024023e-07,8.523442044366632e-07,2.27232809894978e-06,tokens/kWh,112659787.16643849,MB,1217.888256,3589.210112,0.0,3181.379584,3162.0096,s,10,13.4394306640625,1.3439430664062502,0.00897286271096474,1.3480936889648438,1.3514184814453125,1.3525001953125,1.35336556640625,"[1.3511781005859376, 1.3503336181640626, 1.346485595703125, 1.35000439453125, 1.324740966796875, 1.3322947998046875, 1.3377889404296874, 1.3497017822265625, 1.3535819091796875, 1.343320556640625]",tokens/s,46.87698577028576,kWh,3.866066860732683e-05,4.263842917494485e-06,2.491350974156387e-05,6.783802126638519e-05,tokens/kWh,928682.7478739787,,s,630,13.436337451934827,0.021327519764975898,0.00045949658176490496,0.02128881549835205,0.02156562919616699,0.021789492511749265,0.02252170030593872,"[0.025484832763671875, 0.02147990417480469, 0.021353759765625, 0.021735328674316406, 0.021275455474853516, 0.02188467216491699, 0.021528831481933595, 0.021968896865844727, 0.02162892723083496, 0.021597440719604493, 0.020981536865234376, 0.02120137596130371, 0.02117683219909668, 
0.020975360870361327, 0.021235456466674806, 0.021336576461791993, 0.021432319641113282, 0.021293279647827148, 0.021233728408813476, 0.021144575119018554, 0.02168191909790039, 0.022072288513183595, 0.021098495483398438, 0.021356544494628905, 0.021727231979370116, 0.021579776763916016, 0.021536415100097656, 0.021212959289550783, 0.021146175384521484, 0.021219551086425783, 0.02208745574951172, 0.02122956848144531, 0.02129715156555176, 0.021393760681152344, 0.02120035171508789, 0.021419872283935548, 0.02137481689453125, 0.021351167678833008, 0.02127027130126953, 0.02118560028076172, 0.02111788749694824, 0.021141504287719725, 0.02103500747680664, 0.021503904342651366, 0.022295808792114256, 0.02170128059387207, 0.02116559982299805, 0.021156511306762694, 0.021403200149536134, 0.021295520782470705, 0.02132988739013672, 0.02112828826904297, 0.021343008041381836, 0.021262432098388673, 0.02148467254638672, 0.021503040313720703, 0.02147292709350586, 0.02125644874572754, 0.021250175476074218, 0.02117955207824707, 0.02097545623779297, 0.02098636817932129, 0.021465311050415038, 0.021350112915039063, 0.021180767059326172, 0.021174272537231444, 0.022502527236938477, 0.021385408401489257, 0.021258943557739256, 0.021153791427612305, 0.021301504135131835, 0.021169919967651368, 0.021202943801879884, 0.021227519989013673, 0.021001247406005858, 0.021526655197143554, 0.021387935638427735, 0.02149836730957031, 0.021564960479736328, 0.021033119201660157, 0.02125004768371582, 0.021086208343505858, 0.02129305648803711, 0.021259584426879884, 0.021373632431030274, 0.02117148780822754, 0.021404287338256837, 0.021133407592773438, 0.021807104110717773, 0.027393056869506837, 0.021236703872680663, 0.02124185562133789, 0.021465087890625, 0.021356544494628905, 0.021358591079711914, 0.021346303939819337, 0.021223424911499023, 0.021172224044799806, 0.021239295959472656, 0.02128771209716797, 0.02181011199951172, 0.021377824783325197, 0.021215263366699218, 0.021365760803222656, 0.02114364814758301, 0.02119465637207031, 0.021420160293579103, 0.021414688110351562, 0.02130073547363281, 0.021100191116333007, 0.02103798484802246, 0.02104934310913086, 0.021387264251708983, 0.022521631240844726, 0.021280704498291017, 0.02119708824157715, 0.021329919815063478, 0.02126028823852539, 0.021352479934692383, 0.021065376281738282, 0.021118911743164062, 0.021660032272338866, 0.02143539237976074, 0.021240192413330077, 0.021571647644042968, 0.021176607131958007, 0.021271551132202148, 0.02129180717468262, 0.02112735939025879, 0.021313535690307618, 0.021171680450439455, 0.021158432006835936, 0.021149696350097655, 0.0212108154296875, 0.021391679763793945, 0.021616159439086916, 0.021493728637695313, 0.021182975769042968, 0.021037376403808594, 0.021093503952026367, 0.02125881576538086, 0.021180416107177736, 0.021427711486816405, 0.021248512268066407, 0.021382783889770506, 0.0212392635345459, 0.02144963264465332, 0.021194911956787108, 0.021143392562866212, 0.021522432327270507, 0.02131702423095703, 0.021534303665161132, 0.021300575256347657, 0.021288192749023438, 0.021221792221069336, 0.021159040451049806, 0.02122617530822754, 0.021413408279418945, 0.022235008239746095, 0.02174457550048828, 0.021364223480224608, 0.021289312362670898, 0.021411968231201173, 0.022521728515625, 0.021401599884033205, 0.021296319961547853, 0.02138217544555664, 0.021684160232543947, 0.02135264015197754, 0.021427871704101563, 0.021486879348754883, 0.021301599502563478, 0.021461343765258788, 0.021286943435668945, 0.02123695945739746, 0.021474143981933595, 0.02146268844604492, 
0.021434080123901366, 0.021563968658447265, 0.02125971221923828, 0.02124038314819336, 0.02115782356262207, 0.021493824005126953, 0.021478975296020508, 0.021491743087768553, 0.02124355125427246, 0.02125708770751953, 0.021444255828857423, 0.021243360519409178, 0.021372991561889647, 0.021696672439575196, 0.021475488662719727, 0.02132918357849121, 0.021269216537475585, 0.021438304901123046, 0.021353952407836913, 0.02124991989135742, 0.021574464797973633, 0.021442880630493166, 0.021492927551269532, 0.021164287567138673, 0.021096704483032226, 0.021342496871948242, 0.021960416793823243, 0.022095296859741213, 0.021350976943969726, 0.021279104232788087, 0.021372255325317384, 0.021215360641479494, 0.0212010555267334, 0.020927743911743166, 0.021086208343505858, 0.02243168067932129, 0.024996671676635742, 0.02149772834777832, 0.021477504730224608, 0.021243423461914063, 0.02118489646911621, 0.02129007911682129, 0.021154815673828126, 0.02124575996398926, 0.021246143341064453, 0.021217376708984374, 0.02122742462158203, 0.02125619125366211, 0.021130271911621094, 0.021259040832519532, 0.021081279754638672, 0.020943616867065428, 0.02140595245361328, 0.021392576217651366, 0.021216064453125, 0.021114879608154297, 0.021207136154174806, 0.021168031692504884, 0.021233152389526368, 0.021552928924560545, 0.0212891845703125, 0.021281055450439453, 0.021358367919921874, 0.021203392028808592, 0.021184511184692383, 0.021207040786743164, 0.021108896255493163, 0.021249151229858397, 0.021680864334106445, 0.021292160034179688, 0.021727615356445313, 0.023144128799438477, 0.02140652847290039, 0.02125555229187012, 0.021351039886474608, 0.02146268844604492, 0.021379648208618166, 0.02125619125366211, 0.02111724853515625, 0.021103424072265627, 0.02107436752319336, 0.021096895217895508, 0.021220544815063476, 0.021479583740234374, 0.02125686454772949, 0.02109644889831543, 0.021125152587890626, 0.021059551239013673, 0.021129215240478515, 0.021130239486694336, 0.021334943771362306, 0.02104528045654297, 0.021063039779663086, 0.020996799468994142, 0.020996095657348633, 0.021231103897094726, 0.020893440246582032, 0.020781280517578125, 0.021151519775390624, 0.021311616897583006, 0.020980352401733397, 0.020711423873901368, 0.0204902400970459, 0.02066815948486328, 0.02150601577758789, 0.021673824310302733, 0.0210948486328125, 0.02098771286010742, 0.02088159942626953, 0.02082745552062988, 0.021103071212768554, 0.020856224060058593, 0.02084681510925293, 0.021043424606323243, 0.02115519905090332, 0.02082697677612305, 0.020680864334106444, 0.020709375381469726, 0.020779008865356444, 0.020967424392700194, 0.020954944610595702, 0.020994239807128907, 0.020934656143188478, 0.021114879608154297, 0.02090937614440918, 0.020779712677001953, 0.020747711181640625, 0.020931135177612303, 0.02091007995605469, 0.020915456771850586, 0.020951808929443358, 0.020773887634277344, 0.020715551376342775, 0.020848608016967772, 0.020935775756835938, 0.0212042236328125, 0.02102899169921875, 0.021209632873535156, 0.021141504287719725, 0.021004287719726563, 0.02110643196105957, 0.021180416107177736, 0.020915935516357422, 0.020816415786743165, 0.020875423431396485, 0.020854623794555664, 0.021098495483398438, 0.021014528274536134, 0.020785152435302736, 0.020653055191040038, 0.02087424087524414, 0.02104319953918457, 0.020917823791503906, 0.02381430435180664, 0.02199283218383789, 0.021167007446289063, 0.0211844482421875, 0.021076032638549805, 0.020888927459716797, 0.02103932762145996, 0.020947391510009766, 0.021123071670532227, 0.020875263214111327, 0.020959232330322267, 
0.021251583099365236, 0.021089887619018553, 0.02106883239746094, 0.021215103149414063, 0.021171455383300782, 0.02113817596435547, 0.021303295135498047, 0.021056640625, 0.021151744842529296, 0.021128063201904298, 0.021137407302856445, 0.021041280746459962, 0.02108403205871582, 0.021168127059936523, 0.021198368072509764, 0.021061279296875, 0.021111007690429687, 0.02100822448730469, 0.021144128799438475, 0.02135264015197754, 0.021245599746704102, 0.02160470390319824, 0.021250207901000975, 0.02115990447998047, 0.021174079895019533, 0.021094463348388673, 0.020938880920410158, 0.020995967864990233, 0.021078079223632813, 0.021150976181030273, 0.021535423278808592, 0.021118431091308593, 0.021120960235595704, 0.02106368064880371, 0.02113539123535156, 0.021119552612304686, 0.02101043128967285, 0.021078655242919922, 0.021012224197387696, 0.021068063735961914, 0.020960607528686524, 0.021207391738891603, 0.02115001678466797, 0.02103500747680664, 0.021021728515625, 0.02104412841796875, 0.021184608459472655, 0.021085792541503907, 0.021053247451782227, 0.021000768661499022, 0.020944896697998046, 0.021090303421020508, 0.020946592330932618, 0.020748640060424806, 0.020672704696655272, 0.020896799087524415, 0.020832895278930664, 0.020897951126098633, 0.02079539108276367, 0.020709632873535156, 0.02071731185913086, 0.02093257522583008, 0.020831935882568358, 0.020732255935668947, 0.020905248641967772, 0.021353183746337892, 0.02127984046936035, 0.02163599967956543, 0.021307392120361326, 0.021403039932250977, 0.02133452796936035, 0.02126857566833496, 0.021310560226440428, 0.021337152481079102, 0.02130518341064453, 0.021485376358032226, 0.021440704345703124, 0.02203228759765625, 0.022298976898193358, 0.02212620735168457, 0.02140787124633789, 0.021329919815063478, 0.02137615966796875, 0.021537696838378906, 0.021384767532348633, 0.021494144439697264, 0.021445791244506837, 0.02131622314453125, 0.021353887557983398, 0.02131974411010742, 0.021448448181152345, 0.021441535949707033, 0.021452863693237303, 0.021415552139282226, 0.0213703670501709, 0.021306175231933594, 0.021349536895751954, 0.021469343185424806, 0.021350080490112305, 0.021399744033813478, 0.021516223907470704, 0.021407615661621093, 0.021469375610351563, 0.02136479949951172, 0.021417919158935546, 0.021399551391601563, 0.021366783142089844, 0.021419136047363282, 0.0212488956451416, 0.021303295135498047, 0.021518047332763673, 0.021420032501220702, 0.021899744033813475, 0.02123347282409668, 0.021344255447387696, 0.021286720275878905, 0.021428415298461914, 0.02128895950317383, 0.02143619155883789, 0.021327232360839842, 0.02189731216430664, 0.021391424179077148, 0.021418527603149416, 0.021340320587158203, 0.02155673599243164, 0.02152931213378906, 0.021307167053222657, 0.021301248550415038, 0.02150809669494629, 0.021329439163208008, 0.021453279495239258, 0.021379072189331053, 0.021465087890625, 0.021364736557006835, 0.02141798400878906, 0.02140563201904297, 0.02131059265136719, 0.021433280944824218, 0.021427967071533202, 0.02122572708129883, 0.021431520462036134, 0.021362720489501955, 0.021348127365112303, 0.02151148796081543, 0.021347999572753906, 0.021342208862304687, 0.021361791610717773, 0.02144879913330078, 0.021672735214233397, 0.02126438331604004, 0.02143951988220215, 0.021262367248535155, 0.02124812889099121, 0.021885408401489257, 0.021545312881469728, 0.021303232192993165, 0.021235103607177733, 0.02154969596862793, 0.02138115119934082, 0.021767967224121092, 0.021264608383178712, 0.021370880126953123, 0.02147532844543457, 0.021577823638916017, 
0.02126710319519043, 0.021437984466552734, 0.02164156723022461, 0.021338111877441408, 0.021344160079956053, 0.021549152374267577, 0.021660703659057617, 0.02158896064758301, 0.021397504806518555, 0.021362592697143554, 0.021389408111572264, 0.021489887237548827, 0.02146665573120117, 0.021364992141723632, 0.021227519989013673, 0.021381120681762695, 0.02125619125366211, 0.021521856307983398, 0.021424896240234376, 0.02160416030883789, 0.021456895828247072, 0.021522432327270507, 0.021553152084350585, 0.021831680297851562, 0.021384767532348633, 0.021338560104370116, 0.021425535202026367, 0.021424768447875976, 0.021420032501220702, 0.02146713638305664, 0.02143027114868164, 0.021283136367797852, 0.021311168670654298, 0.021376991271972658, 0.021125152587890626, 0.02144451141357422, 0.02154640007019043, 0.02155926322937012, 0.02156342315673828, 0.02134448051452637, 0.021342079162597657, 0.0216124153137207, 0.02437411117553711, 0.021701536178588866, 0.02130633544921875, 0.02133407974243164, 0.021476703643798827, 0.021541439056396484, 0.021331167221069335, 0.021224159240722656, 0.021302623748779295, 0.0213243522644043, 0.02122083282470703, 0.02181328010559082, 0.02165216064453125, 0.02149580764770508, 0.021630048751831055, 0.02128374481201172, 0.021303295135498047, 0.021321407318115236, 0.02129155158996582, 0.02130668830871582, 0.021362688064575194, 0.02120012855529785, 0.021385728836059572, 0.02110207939147949, 0.021240575790405274, 0.021257663726806642, 0.02102889633178711, 0.021306175231933594, 0.0222696647644043, 0.02151580810546875, 0.021314016342163088, 0.021379072189331053, 0.02126028823852539, 0.02109187126159668, 0.021354719161987303, 0.02109609603881836, 0.021299232482910158, 0.021162559509277343, 0.02127052879333496, 0.021125247955322266, 0.021180288314819336, 0.021178688049316406, 0.021083999633789062, 0.021163040161132813, 0.021089088439941405, 0.02146236801147461, 0.021338783264160156, 0.021349727630615236, 0.021275104522705077, 0.021110847473144533, 0.02122460746765137, 0.021175552368164062, 0.021288671493530274, 0.021370880126953123, 0.02125004768371582, 0.02225107192993164, 0.021290431976318358, 0.02128998374938965, 0.021207040786743164, 0.021319679260253906, 0.021325824737548828, 0.021034112930297853, 0.021149696350097655, 0.02134127998352051, 0.02156224060058594, 0.021418912887573242, 0.021336063385009766, 0.02123366355895996, 0.021237312316894533, 0.021162784576416016, 0.021115776062011718, 0.021231712341308592, 0.02129318428039551, 0.02127145576477051, 0.02251897621154785, 0.021289440155029298, 0.021272319793701172, 0.02138751983642578, 0.021206783294677733, 0.021550432205200195, 0.0216278076171875, 0.02134988784790039, 0.02116441535949707]",tokens/s,46.88777743590243,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report 
= Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 955, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 707, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 707, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 504, in __init__ self.self_attn = MISTRAL_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 196, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 
557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 162, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not 
found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 
557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 164, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,811.667456,12523.077632,0.0,12127.830016,12122.08896,s,1,7.14426513671875,7.14426513671875,0.0,7.14426513671875,7.14426513671875,7.14426513671875,7.14426513671875,[7.14426513671875],,kWh,1.1392105812499171e-05,1.1714825149596346e-06,6.170004935999746e-06,1.8733593263458553e-05,,MB,1160.593408,12697.141248,0.0,12289.31072,12248.586752,s,10,1.8632452697753905,0.1863245269775391,0.0030487712772315654,0.18495963287353517,0.18985733184814454,0.19079213790893554,0.19153998275756837,"[0.18131852722167968, 0.18408697509765626, 0.18430841064453124, 0.18478448486328125, 0.185038818359375, 0.18488044738769532, 0.1880411834716797, 0.18940988159179686, 0.18964959716796875, 0.19172694396972656]",tokens/s,1373.9468665381885,kWh,5.6292487803686115e-06,6.203937768888961e-07,3.7304463604230814e-06,9.98008891768059e-06,tokens/kWh,25651074.064728413,MB,1211.277312,12705.529856,0.0,12297.699328,12248.589312,s,10,33.32199340820313,3.3321993408203125,0.005015908607296032,3.3335286865234375,3.3371913818359373,3.338296496582031,3.3391805883789063,"[3.3286611328125, 3.324007568359375, 3.325651611328125, 3.32798779296875, 3.33694580078125, 3.332620361328125, 3.33443701171875, 3.335543701171875, 3.33673681640625, 3.339401611328125]",tokens/s,18.906431925675495,kWh,9.750761309254812e-05,1.0755634699114251e-05,6.469694171477692e-05,0.0001729601895064393,tokens/kWh,364245.6693634376,,s,630,33.319364505767844,0.05288788016788543,0.0004587038709020352,0.05283310508728027,0.05316370964050293,0.05333808536529541,0.05552307079315186,"[0.05734841537475586, 0.05428630447387695, 0.05302272033691406, 0.052803585052490234, 0.052482048034667966, 0.052830142974853514, 0.052633663177490235, 0.052587711334228515, 0.0524637451171875, 0.05246214294433594, 0.0527402229309082, 0.05251686477661133, 0.05231766510009766, 0.05264028930664062, 0.05259014511108399, 
0.05245491027832031, 0.05253011322021484, 0.05258975982666016, 0.05281465530395508, 0.05302259063720703, 0.05293683242797852, 0.053115966796875, 0.05283321762084961, 0.0528337287902832, 0.052582977294921875, 0.05254348754882812, 0.05244723129272461, 0.05250809478759766, 0.05246828842163086, 0.05251606369018555, 0.05244387054443359, 0.05254716873168945, 0.052545921325683594, 0.052770912170410154, 0.05246976089477539, 0.05261052703857422, 0.052816417694091795, 0.05272934341430664, 0.05267302322387695, 0.052926464080810545, 0.05300841522216797, 0.05309222412109375, 0.05288320159912109, 0.05312752151489258, 0.05267385482788086, 0.05265049743652344, 0.05257161712646485, 0.05254422378540039, 0.05251398468017578, 0.052732223510742186, 0.052750049591064455, 0.05285763168334961, 0.0526192626953125, 0.052596736907958984, 0.05299814224243164, 0.053133312225341796, 0.05301248168945313, 0.053008384704589843, 0.053185569763183595, 0.05312364959716797, 0.052830623626708983, 0.053026817321777345, 0.05299817657470703, 0.05548448181152344, 0.053284832000732425, 0.052547584533691405, 0.05254553604125976, 0.05231206512451172, 0.052410369873046876, 0.0523570556640625, 0.05260003280639648, 0.05244185638427734, 0.052516960144042966, 0.052563488006591795, 0.05261052703857422, 0.05270140838623047, 0.05256070327758789, 0.052402145385742185, 0.05242060852050781, 0.05246944046020508, 0.052914497375488284, 0.05344054412841797, 0.052880638122558596, 0.05283299255371094, 0.052910079956054686, 0.05275568008422851, 0.0525709114074707, 0.052450366973876957, 0.0526894416809082, 0.052579967498779294, 0.052575008392333984, 0.05248819351196289, 0.052523006439208986, 0.052866943359375, 0.052637214660644534, 0.05254819107055664, 0.05251894378662109, 0.05272777557373047, 0.052932193756103516, 0.0526192626953125, 0.052969886779785154, 0.052868415832519534, 0.052938465118408204, 0.05284310531616211, 0.05270771026611328, 0.05265983963012695, 0.05270694351196289, 0.052591297149658205, 0.05292348861694336, 0.05270019149780274, 0.05257209777832031, 0.052678398132324215, 0.052604927062988284, 0.05276847839355469, 0.05260857772827148, 0.052548030853271484, 0.05295363235473633, 0.05277040100097656, 0.05268307113647461, 0.053071582794189456, 0.053051265716552734, 0.05296371078491211, 0.05288560104370117, 0.05303696060180664, 0.05309247970581055, 0.05286502456665039, 0.05526432037353515, 0.05338822555541992, 0.052539390563964845, 0.052602336883544924, 0.05231660842895508, 0.05246166229248047, 0.05253276824951172, 0.0525255355834961, 0.05254947280883789, 0.052652191162109375, 0.05240627288818359, 0.05260198211669922, 0.05257100677490235, 0.05269094467163086, 0.052540447235107424, 0.05246870422363281, 0.052400127410888675, 0.052761791229248046, 0.0529898567199707, 0.053182559967041014, 0.05284249496459961, 0.0528023681640625, 0.05271756744384765, 0.052582401275634766, 0.05264527893066406, 0.05265996932983399, 0.05255254364013672, 0.05270025634765625, 0.05258127975463867, 0.05261423873901367, 0.052507232666015625, 0.052660511016845706, 0.05255964660644531, 0.05250073623657227, 0.052703231811523435, 0.05269676971435547, 0.052642112731933595, 0.05308415985107422, 0.053036865234375, 0.05301264190673828, 0.052967105865478516, 0.052902240753173825, 0.05292851257324219, 0.052802974700927735, 0.05271612930297852, 0.052760318756103514, 0.05275222396850586, 0.05268521499633789, 0.052670463562011716, 0.05284249496459961, 0.05278668975830078, 0.05274211120605469, 0.05279510498046875, 0.05299897766113281, 0.05281766510009766, 0.052949153900146484, 
0.052772960662841796, 0.05285683059692383, 0.05306316757202149, 0.05293423843383789, 0.052929439544677735, 0.05319683074951172, 0.05294895935058594, 0.055698974609375, 0.053991905212402345, 0.0527496337890625, 0.05255782318115235, 0.05243936157226563, 0.05262374496459961, 0.05244313430786133, 0.05244927978515625, 0.05232025527954102, 0.052580352783203124, 0.05256777572631836, 0.052698623657226565, 0.052515201568603516, 0.052680320739746093, 0.05251561737060547, 0.05245868682861328, 0.052410400390625, 0.052539520263671875, 0.052856639862060545, 0.05334102249145508, 0.052950336456298826, 0.052863681793212894, 0.05256806564331055, 0.05262745666503906, 0.05249433517456055, 0.052453216552734376, 0.05257436752319336, 0.05265340805053711, 0.052748958587646486, 0.05285270309448242, 0.05260086441040039, 0.05269094467163086, 0.05295308685302735, 0.052938335418701174, 0.05277328109741211, 0.052655200958251956, 0.052963390350341796, 0.053392223358154293, 0.053134368896484374, 0.053040096282958984, 0.05305059051513672, 0.052873409271240235, 0.052744670867919924, 0.052678783416748046, 0.05265817642211914, 0.05271039962768555, 0.052724735260009765, 0.052717151641845705, 0.05267283248901367, 0.0527279052734375, 0.05267251205444336, 0.05266227340698242, 0.052770816802978515, 0.052940574645996094, 0.05282595062255859, 0.05314156723022461, 0.05293619155883789, 0.052910911560058595, 0.053065727233886716, 0.052992000579833984, 0.052864032745361327, 0.05309065628051758, 0.052932510375976564, 0.05588336181640625, 0.053460990905761716, 0.05261103820800781, 0.05272419357299805, 0.05245897674560547, 0.052644351959228515, 0.05269504165649414, 0.0526940803527832, 0.05314214324951172, 0.05280931091308594, 0.0525700798034668, 0.05292927932739258, 0.052811775207519535, 0.052730911254882815, 0.052653022766113285, 0.05261417770385742, 0.0525401611328125, 0.05300166320800781, 0.05324675369262695, 0.053320766448974606, 0.05295926284790039, 0.05284751892089844, 0.05267254257202148, 0.05276259231567383, 0.053007678985595705, 0.05275689697265625, 0.05264822387695312, 0.05278656005859375, 0.05271958541870117, 0.052789920806884764, 0.05295513534545898, 0.052924320220947264, 0.052908096313476566, 0.052676513671875, 0.052559009552001955, 0.053002849578857425, 0.053031295776367185, 0.053266433715820315, 0.05321900939941406, 0.05324582290649414, 0.05311328125, 0.052893695831298826, 0.052994049072265625, 0.05319680023193359, 0.05288332748413086, 0.052956958770751954, 0.052959583282470704, 0.05292201614379883, 0.05306777572631836, 0.05290422439575195, 0.05296953582763672, 0.05306777572631836, 0.052819103240966794, 0.05301129531860352, 0.053001537322998046, 0.05321798324584961, 0.05326972961425781, 0.05305219268798828, 0.05292230224609375, 0.052881473541259764, 0.05289516830444336, 0.05324857711791992, 0.05313536071777344, 0.05556966400146485, 0.05402220916748047, 0.05306256103515625, 0.052735904693603515, 0.052545440673828124, 0.052830398559570314, 0.05255372619628906, 0.05260287857055664, 0.05239603042602539, 0.05272576141357422, 0.05289779281616211, 0.052977664947509766, 0.052539295196533206, 0.05258393478393555, 0.05250313568115234, 0.05276793670654297, 0.05254022216796875, 0.05291212844848633, 0.052770816802978515, 0.05297078323364258, 0.0529087028503418, 0.05288534545898437, 0.05268502426147461, 0.05296297454833984, 0.052779361724853514, 0.05269094467163086, 0.052571807861328125, 0.052735774993896485, 0.052615745544433594, 0.05274211120605469, 0.05270735931396484, 0.052547584533691405, 0.05253324890136719, 0.05291417694091797, 
0.05284793472290039, 0.05306233596801758, 0.05318000030517578, 0.05310915374755859, 0.05292806243896484, 0.052908031463623044, 0.05298425674438476, 0.05311283111572265, 0.05297151947021484, 0.053040447235107424, 0.052937408447265626, 0.05288729476928711, 0.05290390396118164, 0.0527690544128418, 0.052760574340820314, 0.05289315032958984, 0.0527913932800293, 0.05306320190429688, 0.05277993774414062, 0.05286092758178711, 0.05293670272827149, 0.05294233703613281, 0.05294956970214844, 0.05307795333862304, 0.052864734649658206, 0.053020030975341796, 0.05286800003051758, 0.05316396713256836, 0.05294838333129883, 0.05629574584960938, 0.05414918518066406, 0.052848865509033206, 0.05268035125732422, 0.05243328094482422, 0.052719615936279295, 0.052538654327392575, 0.052570846557617186, 0.05275027084350586, 0.052641281127929686, 0.05257049560546875, 0.05262969589233398, 0.0526376953125, 0.05294480133056641, 0.05270662307739258, 0.05261187362670899, 0.052751937866210935, 0.05268320083618164, 0.05316543960571289, 0.053109375, 0.052817214965820314, 0.05296380615234375, 0.05279151916503906, 0.05297875213623047, 0.052994110107421874, 0.052822593688964845, 0.052657791137695316, 0.052656352996826174, 0.05268473434448242, 0.052685344696044925, 0.05274832153320313, 0.05288140869140625, 0.052623104095458985, 0.052781280517578126, 0.052918270111083986, 0.052780895233154296, 0.05281324768066406, 0.05293129730224609, 0.053026817321777345, 0.05315961456298828, 0.05281824111938477, 0.05294208145141602, 0.05309926223754883, 0.052983070373535154, 0.05284105682373047, 0.05286310577392578, 0.05293260955810547, 0.05304089736938476, 0.05287142562866211, 0.053053279876708985, 0.0529634895324707, 0.05285043334960938, 0.052728065490722655, 0.052836353302001954, 0.052985855102539066, 0.053133312225341796, 0.05303500747680664, 0.053141502380371096, 0.053083774566650394, 0.05291251373291016, 0.052989761352539064, 0.05286076736450195, 0.05306304168701172, 0.055521183013916016, 0.05359487915039062, 0.052647167205810544, 0.05256268692016602, 0.052618240356445314, 0.052716545104980465, 0.05269094467163086, 0.05266230392456055, 0.05250454330444336, 0.05264179229736328, 0.052672126770019534, 0.05286950302124024, 0.05277798461914063, 0.05290256118774414, 0.05316368103027344, 0.05268755340576172, 0.0525926399230957, 0.052875072479248046, 0.0531352653503418, 0.05334249496459961, 0.05294432067871094, 0.05297209548950195, 0.05281587219238281, 0.052803585052490234, 0.052674560546875, 0.05282815933227539, 0.05260902404785156, 0.05268431854248047, 0.05260745620727539, 0.052722686767578124, 0.05300735855102539, 0.05284249496459961, 0.05290111923217773, 0.05269580841064453, 0.0532022705078125, 0.053103263854980466, 0.052872318267822266, 0.05317631912231445, 0.053089153289794924, 0.05310464096069336, 0.05325619125366211, 0.05299932861328125, 0.05287974548339844, 0.05305392074584961, 0.05286896133422852, 0.05291974258422852, 0.053078079223632814, 0.05291689682006836, 0.05288345718383789, 0.05294044876098633, 0.052762943267822264, 0.05281795120239258, 0.05282815933227539, 0.0529714241027832, 0.052956993103027344, 0.0531583023071289, 0.05316908645629883, 0.05302150344848633, 0.05286515045166015, 0.053215232849121094, 0.05301174545288086, 0.05302470397949219, 0.05286515045166015, 0.05575183868408203, 0.053664512634277343, 0.05269855880737305, 0.05265478515625, 0.052402145385742185, 0.0527749137878418, 0.052463390350341796, 0.052744415283203124, 0.05274003219604492, 0.052666046142578124, 0.052636032104492185, 0.0526143684387207, 0.052679454803466794, 
0.052784191131591794, 0.05313017654418945, 0.0530513916015625, 0.05279334259033203, 0.052864864349365236, 0.053299072265625, 0.05311308670043945, 0.053006366729736326, 0.05304115295410156, 0.05284592056274414, 0.05278787231445312, 0.05272371292114258, 0.0527988166809082, 0.0528430404663086, 0.05267836761474609, 0.05274988937377929, 0.0528798713684082, 0.052654430389404296, 0.052762496948242185, 0.0526091537475586, 0.052579647064208986, 0.05280223846435547, 0.05283225631713867, 0.052829345703125, 0.053010623931884764, 0.05293052673339844, 0.05302339172363281, 0.052850719451904296, 0.05307932662963867, 0.05294768142700195, 0.05288345718383789, 0.053000190734863284, 0.053096511840820315, 0.052958847045898434, 0.05302508926391602, 0.052780895233154296, 0.05298601531982422, 0.05307372665405274, 0.05300038528442383, 0.05291811370849609, 0.05297782516479492, 0.05312054443359375, 0.05333449554443359, 0.05311283111572265, 0.05305452728271484, 0.05318547058105469, 0.05353881454467774, 0.053384929656982424, 0.053265697479248045, 0.05296844863891602, 0.05552384185791016, 0.053332321166992186, 0.052687007904052736, 0.052741119384765625, 0.052468734741210936, 0.05258195114135742, 0.052500926971435546, 0.05278307342529297, 0.05263552093505859, 0.052746112823486326, 0.052859169006347656, 0.052926464080810545, 0.05285657501220703, 0.05263385772705078, 0.052692928314208985, 0.052735198974609376, 0.05263241577148438, 0.052844192504882814, 0.05311936187744141, 0.05345593643188477, 0.053144481658935545, 0.052944862365722656, 0.052799518585205076, 0.05299574279785156, 0.05278726577758789, 0.05295270538330078, 0.05262931060791016, 0.05269171142578125, 0.05282972717285156, 0.052861503601074215, 0.05280767822265625, 0.05266636657714844, 0.05276671981811523, 0.05288345718383789, 0.052671775817871094, 0.05339619064331055, 0.05359004974365234, 0.05353606414794922, 0.05343913650512695, 0.05336262512207031, 0.05311699295043945, 0.053176158905029296, 0.05283855819702148, 0.05290979385375977, 0.052926624298095706, 0.05289139175415039, 0.05298732757568359, 0.05321350479125977, 0.053023361206054685, 0.0528438720703125, 0.052752063751220706, 0.05310976028442383, 0.05312211227416992, 0.053234592437744144, 0.05284659194946289, 0.05318041610717773, 0.0533256950378418, 0.05345289611816406, 0.05313049697875977, 0.053160736083984375, 0.053381118774414066, 0.05310259246826172, 0.05291215896606445]",tokens/s,18.90792364575088,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 162, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 982, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 555, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 219, in __init__ self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 
98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.374912,806.289408,0.0,411.041792,391.374848,s,1,7.21324560546875,7.21324560546875,0.0,7.21324560546875,7.21324560546875,7.21324560546875,7.21324560546875,[7.21324560546875],,kWh,5.089960633351135e-06,5.542499003759846e-07,9.938896839908895e-07,6.63810021771801e-06,,MB,1164.005376,881.78688,0.0,473.956352,454.832128,s,15,0.18734287929534915,0.01248952528635661,0.000214234636375093,0.012487648010253906,0.012725933074951172,0.012832464122772218,0.012959427242279054,"[0.012574463844299317, 0.012199999809265137, 0.012470656394958497, 0.012207615852355956, 0.012267423629760741, 0.012245023727416992, 0.012525504112243652, 0.012668160438537598, 0.01249782371520996, 0.012487648010253906, 0.012633376121520996, 0.012485119819641113, 0.012991168022155762, 0.012324447631835938, 0.012764448165893555]",tokens/s,20497.176164065335,kWh,3.6605550480445295e-07,4.0369153673917844e-08,2.2922400883563672e-07,6.356486673140075e-07,tokens/kWh,402738199.8325455,MB,1197.735936,909.049856,0.0,501.219328,454.834688,s,15,10.493724060058597,0.6995816040039063,0.01224672635514582,0.7041112670898437,0.7111708374023438,0.7148606750488281,0.7194729162597656,"[0.6981555786132813, 0.6862251586914062, 0.6848902587890625, 0.6789227294921875, 0.6823243408203125, 0.687145263671875, 0.7206259765625, 0.7078164672851562, 0.7072764892578125, 0.7093423461914062, 0.7089010620117188, 0.7041112670898437, 0.6998896484375, 0.7123898315429688, 0.7057076416015625]",tokens/s,90.05382594315361,kWh,2.0434243422139875e-05,2.2535672322146246e-06,8.911857378097216e-06,3.1599668032451724e-05,tokens/kWh,1993691.830411043,,s,945,10.485675329208373,0.01109595272932103,0.00030792222763291303,0.011096672058105468,0.011409036827087403,0.011485177421569823,0.011998213195800777,"[0.01104911994934082, 0.011024031639099121, 0.011106495857238769, 0.011065312385559082, 0.01126137638092041, 0.011096672058105468, 0.010999808311462403, 0.010935615539550782, 0.010844544410705566, 0.010780351638793945, 0.010936927795410157, 0.010760383605957031, 0.010741888046264648, 0.010798815727233887, 0.01100595188140869, 0.010903552055358886, 0.010816960334777833, 0.010865216255187988, 0.010797408103942871, 0.010756928443908692, 0.010740575790405273, 0.01074995231628418, 0.011136544227600098, 0.011620832443237304, 0.011495424270629882, 0.01131935977935791, 0.011285568237304687, 0.01123136043548584, 0.011076064109802245, 0.011066944122314452, 0.010973119735717773, 0.01094320011138916, 0.010902912139892577, 
0.010864800453186035, 0.010868351936340332, 0.010746784210205078, 0.011253824234008789, 0.011358112335205077, 0.011898271560668945, 0.011962623596191406, 0.011379039764404297, 0.011049087524414063, 0.010927712440490723, 0.01104313564300537, 0.011003904342651367, 0.01093126392364502, 0.011457695960998535, 0.010942432403564453, 0.010966848373413086, 0.011017215728759766, 0.010966015815734862, 0.011309023857116698, 0.011564736366271972, 0.01138268756866455, 0.011343615531921387, 0.011266559600830077, 0.011426015853881836, 0.011126144409179688, 0.011018943786621094, 0.010956831932067871, 0.010886048316955567, 0.010931039810180664, 0.010751168251037597, 0.01044863986968994, 0.010719488143920899, 0.010649056434631347, 0.01068841552734375, 0.010758463859558105, 0.010770751953125, 0.010919936180114746, 0.010789024353027343, 0.01073523235321045, 0.010826016426086426, 0.010846143722534179, 0.010852352142333984, 0.01083801555633545, 0.010760191917419434, 0.0107741117477417, 0.01074022388458252, 0.01088092803955078, 0.011402912139892578, 0.011421759605407715, 0.011421919822692871, 0.011208767890930177, 0.0110447998046875, 0.01108572769165039, 0.011053183555603027, 0.011057184219360352, 0.01083407974243164, 0.010747743606567382, 0.010690560340881347, 0.010719231605529785, 0.01075814437866211, 0.01062502384185791, 0.010637151718139648, 0.010708831787109374, 0.010704256057739258, 0.010912704467773437, 0.010979328155517578, 0.01099078369140625, 0.010918047904968262, 0.010865632057189942, 0.010946368217468262, 0.010929216384887695, 0.010896479606628418, 0.010808256149291992, 0.01098198413848877, 0.010878944396972656, 0.010889439582824708, 0.011057151794433593, 0.011282431602478027, 0.011354111671447753, 0.01139737606048584, 0.011128576278686523, 0.011085215568542481, 0.011079968452453614, 0.010791007995605468, 0.010821663856506347, 0.010729280471801758, 0.010681056022644042, 0.010876031875610351, 0.01062332820892334, 0.010674367904663086, 0.010680319786071778, 0.010654879570007323, 0.010726143836975097, 0.0106397123336792, 0.010995231628417968, 0.010972960472106934, 0.011233632087707519, 0.011010272026062012, 0.011095968246459961, 0.010992863655090331, 0.011216927528381347, 0.011000672340393066, 0.010983424186706543, 0.010877951622009278, 0.010894335746765137, 0.011149312019348144, 0.011224191665649414, 0.011041407585144042, 0.010964703559875489, 0.010746399879455566, 0.010637663841247558, 0.01067155170440674, 0.010698975563049317, 0.010717023849487304, 0.010756256103515625, 0.0108373441696167, 0.010782431602478028, 0.010775487899780273, 0.010851840019226074, 0.010862688064575195, 0.010758560180664062, 0.01086019229888916, 0.010879327774047851, 0.010836095809936523, 0.010879039764404297, 0.010874752044677735, 0.010817472457885741, 0.01081884765625, 0.010820544242858887, 0.010900351524353027, 0.01079798412322998, 0.01074790382385254, 0.01074176025390625, 0.010674176216125488, 0.010721280097961425, 0.010739392280578613, 0.010749759674072266, 0.010780672073364257, 0.01095030403137207, 0.011154272079467774, 0.011109631538391113, 0.011065792083740235, 0.0111560640335083, 0.011046976089477538, 0.010964768409729004, 0.010825152397155762, 0.010770879745483398, 0.010755071640014649, 0.010667008399963379, 0.010804479598999023, 0.010699520111083985, 0.010690048217773437, 0.010725312232971192, 0.010682720184326172, 0.010695199966430664, 0.01063043212890625, 0.010497920036315918, 0.010758272171020508, 0.01084812831878662, 0.01076643180847168, 0.01075334358215332, 0.01064793586730957, 0.010676575660705566, 
0.01073516845703125, 0.010676671981811523, 0.010854432106018067, 0.010921952247619628, 0.010878975868225099, 0.010799263954162598, 0.010745311737060547, 0.010667936325073242, 0.010664416313171386, 0.01071718406677246, 0.010697728157043456, 0.01064633560180664, 0.010694016456604004, 0.010651488304138184, 0.010693568229675293, 0.010694208145141602, 0.01067363166809082, 0.010705951690673829, 0.010690655708312988, 0.010759903907775878, 0.010771807670593261, 0.010814271926879882, 0.010762335777282715, 0.010835871696472169, 0.010841664314270019, 0.011089344024658204, 0.011036767959594726, 0.0108307523727417, 0.010821632385253906, 0.010809632301330566, 0.011261055946350098, 0.01085910415649414, 0.010768383979797362, 0.010727295875549317, 0.01073964786529541, 0.01066966438293457, 0.010711711883544922, 0.010669631958007813, 0.010686847686767578, 0.010788864135742187, 0.010780672073364257, 0.010767904281616211, 0.010731904029846192, 0.010722720146179199, 0.010708767890930176, 0.010791999816894531, 0.010847519874572754, 0.010791296005249024, 0.01079097557067871, 0.010859904289245605, 0.010831968307495117, 0.010840736389160156, 0.010746111869812012, 0.010733311653137206, 0.01075814437866211, 0.010729663848876953, 0.010457759857177734, 0.010731391906738282, 0.010743136405944825, 0.010785375595092773, 0.010763456344604492, 0.010677056312561035, 0.010666048049926758, 0.010682111740112304, 0.010790271759033203, 0.01087168025970459, 0.010755423545837403, 0.010717087745666504, 0.010778719902038575, 0.011233951568603516, 0.010901535987854004, 0.010813407897949219, 0.010947872161865234, 0.010881759643554688, 0.010932448387145997, 0.010948479652404785, 0.010923359870910644, 0.010750240325927734, 0.010725024223327637, 0.011152000427246093, 0.011138272285461426, 0.010686592102050781, 0.01065231990814209, 0.01067024040222168, 0.010700639724731445, 0.011630592346191406, 0.010881024360656738, 0.010842111587524414, 0.010786751747131347, 0.010938431739807129, 0.010811391830444337, 0.010931296348571777, 0.010865887641906739, 0.010866463661193848, 0.01082096004486084, 0.010855072021484375, 0.010894880294799805, 0.010890656471252442, 0.010869728088378907, 0.01070899200439453, 0.010757599830627441, 0.010746208190917969, 0.010696160316467284, 0.010686207771301269, 0.010886207580566406, 0.010743935585021973, 0.010710111618041992, 0.010701567649841308, 0.010696640014648437, 0.01066105556488037, 0.010761024475097657, 0.010789055824279786, 0.011100192070007325, 0.010786591529846191, 0.010820639610290528, 0.010715519905090332, 0.01102460765838623, 0.010737407684326171, 0.010736255645751954, 0.010813952445983887, 0.011146623611450196, 0.010843232154846191, 0.010769824028015136, 0.010809727668762207, 0.010800224304199219, 0.010845151901245118, 0.010896672248840332, 0.010940735816955566, 0.011014495849609375, 0.010987520217895508, 0.01094976043701172, 0.01087993621826172, 0.010794943809509277, 0.010741503715515136, 0.010705151557922364, 0.010786815643310547, 0.01075820827484131, 0.010828736305236817, 0.010698080062866211, 0.010687520027160644, 0.010813632011413575, 0.010749600410461425, 0.01073027229309082, 0.010702848434448242, 0.010732959747314454, 0.010756799697875977, 0.010694239616394043, 0.01068889617919922, 0.011093952178955079, 0.010784640312194824, 0.010803327560424805, 0.010737088203430176, 0.010715904235839844, 0.010644831657409669, 0.010717663764953613, 0.010730719566345215, 0.01083471965789795, 0.010810720443725585, 0.010859487533569336, 0.01081107234954834, 0.010799103736877442, 0.010862144470214843, 
0.010758591651916503, 0.010692831993103027, 0.010692543983459472, 0.010872672080993653, 0.011042240142822266, 0.011163871765136718, 0.011182784080505371, 0.011209792137145996, 0.011177727699279785, 0.011141695976257325, 0.0110862398147583, 0.01116966438293457, 0.011372096061706543, 0.011176416397094727, 0.01115062427520752, 0.01117689609527588, 0.011197312355041504, 0.011204671859741212, 0.011212896347045899, 0.011098719596862794, 0.011572319984436035, 0.011252927780151366, 0.011436511993408203, 0.011271167755126953, 0.011287551879882812, 0.01120076847076416, 0.011609919548034668, 0.01349180793762207, 0.01287987232208252, 0.01152239990234375, 0.01136451244354248, 0.011394911766052247, 0.011286527633666991, 0.011564959526062011, 0.011374688148498536, 0.011431936264038087, 0.011382975578308106, 0.011333279609680176, 0.011485152244567871, 0.011306976318359375, 0.011460096359252929, 0.011399200439453125, 0.011693984031677247, 0.011389951705932617, 0.011428895950317383, 0.011506752014160156, 0.011342559814453125, 0.01187119960784912, 0.011423135757446289, 0.011309599876403809, 0.011347840309143067, 0.011371999740600587, 0.011391488075256348, 0.011335904121398926, 0.01136025619506836, 0.011308320045471191, 0.011335807800292969, 0.011286656379699707, 0.011336095809936523, 0.011331583976745606, 0.011313535690307618, 0.011157183647155762, 0.011183903694152833, 0.011530495643615723, 0.01118553638458252, 0.011270208358764649, 0.011323936462402344, 0.011409279823303222, 0.01146236801147461, 0.011372960090637207, 0.01141875171661377, 0.011410304069519043, 0.011312543869018555, 0.011222880363464355, 0.011207712173461915, 0.011255552291870117, 0.011171520233154297, 0.011385408401489258, 0.011320256233215331, 0.01142249584197998, 0.01143712043762207, 0.011305536270141602, 0.011267840385437012, 0.01099129581451416, 0.011155776023864746, 0.011012096405029297, 0.01113868808746338, 0.011151488304138184, 0.0111843843460083, 0.011106304168701172, 0.011167648315429688, 0.011253376007080079, 0.011305439949035644, 0.011089792251586914, 0.011315327644348144, 0.011237407684326172, 0.011183263778686524, 0.011149312019348144, 0.011125503540039062, 0.011338111877441406, 0.011343296051025391, 0.011342240333557128, 0.011418623924255371, 0.011331775665283202, 0.011313823699951172, 0.011190048217773437, 0.011138463973999023, 0.011123519897460937, 0.011363679885864258, 0.011389599800109864, 0.011231231689453124, 0.011895968437194824, 0.010958815574645996, 0.011047807693481446, 0.011148896217346192, 0.011245984077453614, 0.01125376033782959, 0.011280703544616699, 0.011279552459716798, 0.011327103614807129, 0.011361472129821778, 0.011388031959533692, 0.011409983634948731, 0.011214847564697266, 0.011165151596069336, 0.011143327713012696, 0.011190303802490234, 0.011106559753417969, 0.01111248016357422, 0.011040160179138184, 0.011199135780334473, 0.011200511932373047, 0.011243519783020019, 0.011611328125, 0.011419520378112794, 0.011431039810180664, 0.011313055992126465, 0.011175359725952148, 0.010944992065429688, 0.010928095817565918, 0.010939935684204101, 0.011167679786682129, 0.011417759895324706, 0.011245823860168458, 0.01115561580657959, 0.011192319869995117, 0.010995712280273438, 0.01122441577911377, 0.011407072067260743, 0.011357119560241699, 0.011495743751525879, 0.011495008468627929, 0.011298912048339844, 0.011280384063720703, 0.011234848022460938, 0.011164128303527832, 0.011122943878173829, 0.011105088233947754, 0.011102656364440917, 0.01113548755645752, 0.011032575607299805, 0.011053248405456543, 0.010995519638061524, 
0.010946559906005859, 0.010966560363769532, 0.010967519760131836, 0.010989567756652833, 0.010992799758911133, 0.011024928092956543, 0.01202617645263672, 0.011493375778198242, 0.011376064300537109, 0.011268671989440917, 0.011485183715820312, 0.011239423751831054, 0.0123985595703125, 0.011307040214538574, 0.011469056129455566, 0.011386624336242676, 0.011368608474731446, 0.011378496170043946, 0.011263456344604492, 0.01121951961517334, 0.011192319869995117, 0.011122688293457032, 0.01103667163848877, 0.010975232124328613, 0.010950528144836425, 0.010840448379516601, 0.01084832000732422, 0.010852031707763672, 0.010981023788452149, 0.011083647727966309, 0.011108192443847656, 0.011068032264709473, 0.01114521598815918, 0.011182080268859864, 0.011204607963562012, 0.01136025619506836, 0.011312416076660156, 0.011197152137756348, 0.011326815605163574, 0.011356831550598145, 0.01133561611175537, 0.011237343788146973, 0.011327232360839844, 0.01129916763305664, 0.011276288032531738, 0.011059200286865235, 0.010835712432861327, 0.011186431884765625, 0.011399359703063964, 0.011280192375183105, 0.011257439613342286, 0.011248031616210937, 0.01137664031982422, 0.01139673614501953, 0.011561344146728516, 0.011340031623840332, 0.011351807594299317, 0.011415552139282227, 0.011659263610839844, 0.011695679664611816, 0.011420096397399902, 0.01130726432800293, 0.011189408302307129, 0.011092063903808593, 0.01117852783203125, 0.011042783737182617, 0.01098464012145996, 0.011218879699707031, 0.011351167678833008, 0.011310848236083984, 0.011214847564697266, 0.011167743682861327, 0.011070879936218261, 0.01111017608642578, 0.011307840347290038, 0.01125532817840576, 0.011067872047424316, 0.011081248283386231, 0.011223520278930664, 0.011192319869995117, 0.01101414394378662, 0.010973279953002929, 0.011024288177490234, 0.01115135955810547, 0.011157088279724121, 0.011008416175842285, 0.010968928337097167, 0.010987648010253906, 0.011042847633361816, 0.011169695854187011, 0.011120896339416503, 0.011048640251159668, 0.011135328292846679, 0.01110313606262207, 0.011030559539794921, 0.01101296043395996, 0.010987551689147949, 0.010962944030761718, 0.011157504081726074, 0.011356287956237793, 0.011736031532287597, 0.012051360130310058, 0.011530367851257325, 0.01259712028503418, 0.011542528152465821, 0.011416895866394043, 0.011295424461364747, 0.01118723201751709, 0.011197728157043457, 0.011091551780700683, 0.011229696273803711, 0.011439871788024903, 0.01127609634399414, 0.011262463569641114, 0.011289759635925293, 0.011218751907348633, 0.01128275203704834, 0.011302656173706054, 0.011234399795532226, 0.011259200096130371, 0.011311840057373047, 0.01154201602935791, 0.01136415958404541, 0.011208255767822266, 0.011117600440979003, 0.011123616218566895, 0.011203392028808594, 0.011112000465393066, 0.01105174446105957, 0.011013919830322266, 0.010962271690368652, 0.011043359756469726, 0.010995424270629883, 0.010889856338500977, 0.010909152030944825, 0.011052895545959472, 0.010978015899658203, 0.011380479812622071, 0.011441920280456543, 0.011305343627929688, 0.011347200393676758, 0.011344160079956055, 0.011542880058288574, 0.011118656158447265, 0.011388863563537598, 0.011596927642822266, 0.011412575721740722, 0.011286304473876954, 0.011293760299682617, 0.011409503936767578, 0.011481792449951171, 0.011387040138244629, 0.011245023727416993, 0.011227328300476075, 0.011364704132080078, 0.01145036792755127, 0.01133568000793457, 0.01123737621307373, 0.011194368362426758, 0.011214847564697266, 0.011233247756958008, 0.011237407684326172, 0.011367456436157226, 
0.011307392120361328, 0.011221887588500977, 0.011260992050170898, 0.011263808250427247, 0.011189087867736817, 0.011233280181884766, 0.011116095542907715, 0.011063424110412598, 0.011079104423522949, 0.010767999649047852, 0.010945440292358399, 0.010964223861694336, 0.010966879844665527, 0.01101318359375, 0.011089759826660156, 0.01132953643798828, 0.011138751983642578, 0.011221088409423829, 0.011124959945678711, 0.01103872013092041, 0.01121008014678955, 0.011263903617858886, 0.011186016082763673, 0.011266176223754882, 0.011132960319519042, 0.011192288398742676, 0.011242431640625, 0.011206239700317382, 0.011083935737609863, 0.011008319854736328, 0.01102729606628418, 0.010988479614257812, 0.0109366397857666, 0.011249343872070312, 0.011360128402709961, 0.01129257583618164, 0.011498784065246582, 0.011520959854125977, 0.011290623664855956, 0.01121452808380127, 0.011243807792663573, 0.011202591896057129, 0.011096351623535157, 0.01106710433959961, 0.01099135971069336, 0.010990847587585449, 0.011031104087829589, 0.011129152297973633, 0.01108176040649414, 0.011298912048339844, 0.011208703994750976, 0.011146528244018555, 0.011084863662719727, 0.011103903770446778, 0.011155776023864746, 0.011342975616455078, 0.011397695541381836, 0.011343968391418458, 0.011261856079101563, 0.011204607963562012, 0.011218655586242675, 0.011323360443115235, 0.011333056449890137, 0.011299615859985351, 0.011040575981140137, 0.010975520133972167, 0.01091811180114746, 0.010937503814697266, 0.011178912162780762, 0.011408160209655762, 0.011390111923217773, 0.011337727546691894, 0.011051072120666503, 0.011112031936645507, 0.011070112228393554, 0.011136704444885254, 0.010982720375061036, 0.010898112297058105, 0.010849504470825196, 0.01094643211364746, 0.011166496276855469, 0.01102451229095459, 0.010778431892395019, 0.010739904403686523, 0.01118019199371338, 0.011407199859619141, 0.01139129638671875, 0.01145030403137207, 0.011411199569702149, 0.011441472053527833, 0.011251423835754394, 0.011129823684692383, 0.011181983947753906, 0.011085439682006835, 0.011002559661865235, 0.011032352447509766, 0.010959872245788574, 0.01076863956451416, 0.010812159538269042, 0.010827072143554687, 0.011067808151245117, 0.011442560195922852, 0.01150592041015625, 0.011335328102111816, 0.011306943893432618, 0.011147071838378906, 0.011100159645080567, 0.010934528350830077, 0.010952704429626465, 0.011068639755249023, 0.010965375900268555, 0.010866751670837402, 0.011052415847778321, 0.011061759948730468, 0.010787296295166015, 0.010821632385253906, 0.01073305606842041, 0.010627584457397461, 0.010682368278503418, 0.01123472023010254, 0.011379487991333008, 0.011349823951721192, 0.011260064125061035, 0.011258943557739258, 0.01130780792236328, 0.011544575691223144, 0.011243776321411133, 0.011101951599121094, 0.011242783546447754, 0.01134665584564209, 0.01136844825744629, 0.011286175727844239, 0.01112508773803711, 0.0108721923828125, 0.010924448013305664, 0.010645824432373047, 0.010887104034423828, 0.010950655937194824, 0.011018207550048828, 0.011208736419677734, 0.011189663887023926, 0.011245632171630859, 0.01142630386352539, 0.011228927612304688, 0.011171104431152344, 0.011117568016052246, 0.011016192436218262, 0.011216287612915038, 0.011459168434143066, 0.011472000122070312, 0.011723135948181152, 0.01140287971496582, 0.011362624168395997, 0.011343487739562988, 0.011295680046081544, 0.011384736061096192, 0.011240703582763672, 0.011266912460327148, 0.011290016174316407, 0.01105731201171875, 0.011125280380249023, 0.011456352233886719, 0.012119359970092773, 
0.013842623710632324, 0.011282560348510742, 0.010936448097229004, 0.010832192420959473, 0.010807295799255372, 0.010979104042053222, 0.011178208351135253, 0.011069439888000488, 0.010944512367248535, 0.01096291160583496, 0.01108137607574463, 0.011225024223327636, 0.01112723159790039, 0.01107148838043213, 0.011108448028564453, 0.01094883155822754, 0.010983424186706543, 0.011296544075012207, 0.011419551849365234, 0.011460576057434082, 0.011364383697509766, 0.011425248146057129, 0.011336511611938476, 0.011235039710998535, 0.011241472244262696, 0.011183648109436036, 0.011020959854125977, 0.011261247634887695, 0.011479552268981934, 0.011475008010864257, 0.012947392463684083, 0.012400959968566895, 0.01123078441619873, 0.011155232429504395, 0.011171199798583985, 0.011152480125427246, 0.011137951850891113, 0.011108223915100097, 0.011312416076660156, 0.011361056327819825, 0.011227519989013671, 0.011253439903259277, 0.011395071983337402, 0.011228896141052247, 0.010987808227539062, 0.0109683837890625, 0.010974911689758301, 0.011014431953430175, 0.011070176124572754, 0.011122688293457032, 0.011214912414550781, 0.011094112396240235, 0.011091103553771972, 0.01101484775543213, 0.010966272354125976, 0.011159584045410157, 0.01137235164642334, 0.011465023994445801, 0.011530879974365235, 0.01153657627105713, 0.011378111839294434, 0.011411808013916015, 0.011200480461120606, 0.011111712455749512, 0.011121472358703614, 0.010887200355529786, 0.010950559616088868, 0.011131168365478516, 0.011090911865234376, 0.011148032188415527, 0.01117199993133545, 0.011201696395874023, 0.011233983993530273, 0.011408672332763672, 0.011634464263916016, 0.011401408195495606, 0.01137331199645996, 0.011298208236694337, 0.011332159996032714, 0.011511839866638184, 0.011273568153381347, 0.011416223526000977, 0.011294719696044921, 0.011350048065185548, 0.011157343864440918, 0.011272319793701172, 0.011065119743347169, 0.010928352355957032, 0.010923295974731446, 0.010938431739807129, 0.01119324779510498, 0.011082880020141602, 0.011051872253417968, 0.01114038372039795, 0.011309568405151366, 0.010903136253356933, 0.010908063888549804, 0.01115116786956787]",tokens/s,90.1229506284307,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.378432,3354.329088,0.0,2959.081472,2942.567424,s,1,7.59777783203125,7.59777783203125,0.0,7.59777783203125,7.59777783203125,7.59777783203125,7.59777783203125,[7.59777783203125],,kWh,1.0297370037498391e-05,1.121980236473022e-06,4.496670263996749e-06,1.5916020537968165e-05,,MB,1163.268096,3547.267072,0.0,3139.436544,3105.830912,s,10,0.3488427925109863,0.03488427925109863,0.0011933682254219404,0.034781984329223636,0.03551321678161621,0.036841905021667475,0.037904855613708495,"[0.03817059326171875, 0.03403852844238281, 0.03426652908325195, 0.03401894378662109, 0.03521795272827148, 0.034831169128417966, 0.03476364898681641, 0.03480031967163086, 0.03368713760375976, 
0.035047969818115234]",tokens/s,7338.54921173232,kWh,1.27505811024383e-06,1.406140795955646e-07,8.462379404541586e-07,2.2619101302935532e-06,tokens/kWh,113178678.75094402,MB,1196.326912,3589.210112,0.0,3181.379584,3162.0096,s,10,13.277565917968753,1.327756591796875,0.016408054098027577,1.3364856567382812,1.344683605957031,1.3461151794433592,1.3472604382324218,"[1.3368380126953125, 1.33793359375, 1.344365478515625, 1.33613330078125, 1.3231259765625, 1.30058984375, 1.3064454345703125, 1.3065584716796874, 1.338029052734375, 1.3475467529296874]",tokens/s,47.44845583085455,kWh,3.8813193528505906e-05,4.278620239660743e-06,2.502147719814582e-05,6.811329096631246e-05,tokens/kWh,924929.6151489523,,s,630,13.274697385787961,0.021070948231409464,0.00038994736390604915,0.021117759704589845,0.021469849586486817,0.021614044857025148,0.022120639095306396,"[0.021445440292358398, 0.02127846336364746, 0.02119500732421875, 0.023240703582763672, 0.021192703247070312, 0.021016576766967773, 0.02101158332824707, 0.02126265525817871, 0.02144927978515625, 0.021168127059936523, 0.021344255447387696, 0.02114905548095703, 0.02105926322937012, 0.020975711822509766, 0.020939552307128906, 0.021288671493530274, 0.02179475212097168, 0.021631391525268554, 0.021535808563232423, 0.02113801574707031, 0.020969823837280275, 0.021112255096435547, 0.02108448028564453, 0.021238016128540038, 0.021021984100341798, 0.021002559661865233, 0.021172191619873045, 0.020953439712524415, 0.020791391372680663, 0.020873376846313477, 0.02087299156188965, 0.020998144149780275, 0.021208831787109375, 0.021528736114501953, 0.0214399356842041, 0.021305215835571288, 0.02120585632324219, 0.02116399955749512, 0.021085664749145507, 0.02106985664367676, 0.021425920486450194, 0.021189088821411132, 0.021127328872680665, 0.020967456817626955, 0.020961408615112306, 0.02085273551940918, 0.020821535110473632, 0.020932863235473633, 0.020996192932128906, 0.021123327255249024, 0.021369728088378906, 0.021338560104370116, 0.021809728622436523, 0.02177142333984375, 0.02173551940917969, 0.021191423416137695, 0.021202943801879884, 0.021178112030029297, 0.02120729637145996, 0.021104639053344726, 0.02106268882751465, 0.021034175872802735, 0.02096089553833008, 0.021179424285888673, 0.02136684799194336, 0.02144758415222168, 0.02149305534362793, 0.021473983764648437, 0.02106902313232422, 0.021183135986328126, 0.02105308723449707, 0.02142220878601074, 0.02127702331542969, 0.021182464599609374, 0.021155839920043946, 0.02141798400878906, 0.021146623611450196, 0.02104729652404785, 0.02111110305786133, 0.02112553596496582, 0.020848928451538087, 0.021029951095581055, 0.02156572723388672, 0.021389984130859376, 0.0213319034576416, 0.02118796730041504, 0.021289215087890626, 0.0210948486328125, 0.02120832061767578, 0.02116275215148926, 0.021296319961547853, 0.021080896377563475, 0.021213184356689452, 0.021012224197387696, 0.02101478385925293, 0.02110054397583008, 0.021182464599609374, 0.02101862335205078, 0.021123071670532227, 0.02170172882080078, 0.021394271850585938, 0.021262399673461913, 0.021137407302856445, 0.02130454444885254, 0.021207328796386718, 0.021219839096069337, 0.02138230323791504, 0.02120585632324219, 0.02138256072998047, 0.02109644889831543, 0.021119583129882814, 0.021368671417236328, 0.021295263290405275, 0.02111692810058594, 0.021106399536132813, 0.021463199615478514, 0.021354623794555664, 0.021303295135498047, 0.021209087371826172, 0.02117571258544922, 0.021078624725341798, 0.021149696350097655, 0.02117580795288086, 0.02182809638977051, 0.021103647232055663, 
0.02117875289916992, 0.02110963249206543, 0.021383007049560546, 0.0211396484375, 0.021096416473388672, 0.021251136779785157, 0.021496768951416015, 0.021287967681884765, 0.021165023803710936, 0.021156991958618164, 0.021197696685791016, 0.021365983963012695, 0.021249984741210936, 0.021094655990600585, 0.021262304306030273, 0.021434944152832033, 0.02131155204772949, 0.021184511184692383, 0.021217279434204102, 0.02126643180847168, 0.02118828773498535, 0.021045568466186524, 0.021123071670532227, 0.021474496841430664, 0.02161337661743164, 0.02187264060974121, 0.021370880126953123, 0.021131263732910157, 0.021167903900146483, 0.021584096908569335, 0.021256223678588867, 0.02131350326538086, 0.021555200576782226, 0.021567487716674806, 0.021338111877441408, 0.021514240264892577, 0.02123366355895996, 0.021151744842529296, 0.021223232269287108, 0.021230783462524414, 0.02108435249328613, 0.02133024024963379, 0.0215314884185791, 0.021751455307006836, 0.02123513603210449, 0.022677120208740235, 0.022493280410766602, 0.02135536003112793, 0.02128691291809082, 0.021217279434204102, 0.021288864135742186, 0.021229663848876954, 0.021280960083007814, 0.021273504257202147, 0.021166175842285157, 0.021211936950683595, 0.02112719917297363, 0.02111859130859375, 0.021059968948364257, 0.020928512573242186, 0.021348352432250976, 0.02146099281311035, 0.02167193603515625, 0.021257247924804688, 0.021234464645385743, 0.021421087265014647, 0.02132476806640625, 0.021207040786743164, 0.02122137641906738, 0.021103679656982424, 0.021092735290527343, 0.021264959335327148, 0.021078016281127928, 0.020862464904785157, 0.021094112396240233, 0.020934656143188478, 0.020713663101196288, 0.02063327980041504, 0.021711807250976562, 0.021617759704589845, 0.021344383239746093, 0.021137311935424806, 0.02104591941833496, 0.0213505916595459, 0.021317632675170898, 0.021964351654052736, 0.02103107261657715, 0.02153923225402832, 0.021331199645996092, 0.021450719833374023, 0.02111964797973633, 0.021041023254394532, 0.021153663635253905, 0.021084384918212892, 0.02099407958984375, 0.02088332748413086, 0.021297279357910155, 0.021346303939819337, 0.021220703125, 0.02121708869934082, 0.021093023300170897, 0.021053119659423827, 0.021240320205688477, 0.02106572723388672, 0.021048416137695314, 0.020918560028076173, 0.021196928024291992, 0.021336576461791993, 0.021102592468261717, 0.02110873603820801, 0.021157503128051758, 0.021155935287475586, 0.021007904052734373, 0.02088217544555664, 0.021341920852661133, 0.021491327285766602, 0.021435039520263672, 0.021211135864257814, 0.02112512016296387, 0.021186784744262697, 0.021044063568115234, 0.021622720718383788, 0.021212160110473634, 0.021292192459106445, 0.021223615646362305, 0.02123632049560547, 0.021653568267822266, 0.02138585662841797, 0.021348352432250976, 0.02147123146057129, 0.021354496002197267, 0.02145280075073242, 0.021336063385009766, 0.021436416625976562, 0.02143436813354492, 0.02126643180847168, 0.021192703247070312, 0.021247135162353516, 0.02115078353881836, 0.021067327499389648, 0.021122528076171876, 0.02129996871948242, 0.0211878719329834, 0.02115452766418457, 0.02101862335205078, 0.021102592468261717, 0.021164031982421876, 0.021204992294311522, 0.021137247085571288, 0.021241439819335937, 0.02109903907775879, 0.02092198371887207, 0.020760992050170898, 0.020862016677856445, 0.020826271057128906, 0.020798240661621094, 0.020998144149780275, 0.02087468719482422, 0.02091587257385254, 0.02099635124206543, 0.020820640563964845, 0.020769920349121094, 0.02072006416320801, 0.020707775115966796, 
0.020707328796386718, 0.020762624740600585, 0.02097737693786621, 0.020985599517822265, 0.020823808670043947, 0.020812576293945312, 0.02088140869140625, 0.020736000061035157, 0.02105958366394043, 0.02088742446899414, 0.020717695236206056, 0.020724767684936522, 0.02059676742553711, 0.020926816940307617, 0.02075094413757324, 0.020752384185791017, 0.020864223480224608, 0.020947744369506836, 0.021319679260253906, 0.02101353645324707, 0.020753376007080077, 0.020569440841674804, 0.020750911712646484, 0.02082953643798828, 0.020912191390991212, 0.020945600509643555, 0.020919456481933593, 0.020802431106567383, 0.02071958351135254, 0.02072313690185547, 0.020562496185302735, 0.0205515193939209, 0.020530847549438475, 0.02050444793701172, 0.02077350425720215, 0.020727807998657227, 0.020592607498168946, 0.020575647354125978, 0.020572799682617188, 0.020580352783203124, 0.020647008895874022, 0.020542367935180664, 0.02045916748046875, 0.020492639541625977, 0.020590431213378908, 0.020573503494262697, 0.020677024841308594, 0.020574655532836914, 0.02047385597229004, 0.02051481628417969, 0.021423999786376952, 0.020484224319458007, 0.020592639923095703, 0.02043894386291504, 0.020627552032470704, 0.020736000061035157, 0.020653312683105468, 0.020526880264282225, 0.020646879196166992, 0.02092995262145996, 0.020655839920043946, 0.0206177921295166, 0.02049465560913086, 0.02050614356994629, 0.020902368545532228, 0.020700416564941405, 0.02054956817626953, 0.020578880310058594, 0.02075052833557129, 0.02061235237121582, 0.020673343658447266, 0.020553728103637696, 0.02060697555541992, 0.020692991256713866, 0.020727807998657227, 0.02063279914855957, 0.02056243133544922, 0.0205130558013916, 0.020463008880615235, 0.020629440307617188, 0.020705951690673827, 0.020516319274902342, 0.020511039733886717, 0.0204781436920166, 0.02058448028564453, 0.02067356872558594, 0.021414688110351562, 0.020586368560791015, 0.02069536018371582, 0.02114121627807617, 0.020877599716186523, 0.020788415908813477, 0.020566783905029296, 0.020602943420410157, 0.020592639923095703, 0.020527103424072265, 0.02058995246887207, 0.020757055282592772, 0.020500383377075194, 0.021074079513549806, 0.022122272491455076, 0.02099404716491699, 0.020866847991943358, 0.0208470401763916, 0.02071673583984375, 0.020874048233032228, 0.020932607650756836, 0.020723072052001953, 0.020683391571044922, 0.020619264602661135, 0.021149280548095704, 0.02116383934020996, 0.020619871139526368, 0.020707328796386718, 0.020547584533691408, 0.020711423873901368, 0.020536447525024416, 0.02109324836730957, 0.020639167785644532, 0.02060076713562012, 0.020574047088623048, 0.020548223495483398, 0.02056947135925293, 0.02063567924499512, 0.020515424728393555, 0.020586496353149415, 0.02053036880493164, 0.020601823806762697, 0.020588544845581053, 0.02065932846069336, 0.020699520111083985, 0.020656576156616212, 0.020621055603027343, 0.02063759994506836, 0.02066912078857422, 0.020679967880249023, 0.02057164764404297, 0.02054390335083008, 0.020552223205566406, 0.020550975799560545, 0.020617919921875, 0.020574207305908202, 0.0205963191986084, 0.020729503631591796, 0.020886272430419923, 0.020844127655029295, 0.020903615951538085, 0.020976127624511717, 0.020999679565429686, 0.020793952941894532, 0.0205927677154541, 0.02053046417236328, 0.02078713607788086, 0.02079545593261719, 0.020638879776000978, 0.020562335968017577, 0.0204968318939209, 0.020493375778198243, 0.020574304580688478, 0.020490751266479493, 0.020548959732055665, 0.020580543518066406, 0.020521696090698243, 0.020641056060791016, 
0.020594560623168945, 0.02058950424194336, 0.020614559173583985, 0.02038435173034668, 0.02065203285217285, 0.02049967956542969, 0.02053753662109375, 0.020516576766967772, 0.020607200622558594, 0.021666048049926757, 0.02067292785644531, 0.02049804878234863, 0.020607263565063476, 0.020562015533447265, 0.020594688415527345, 0.02305574417114258, 0.021284479141235352, 0.020751359939575196, 0.020789247512817383, 0.02068889617919922, 0.02061884880065918, 0.020610784530639647, 0.02059436798095703, 0.02063052749633789, 0.02068182373046875, 0.02062393569946289, 0.02066876792907715, 0.020573408126831054, 0.020601535797119142, 0.02051900863647461, 0.0206561279296875, 0.02062303924560547, 0.020816192626953126, 0.021222623825073242, 0.02064259147644043, 0.020923391342163086, 0.021956960678100587, 0.021058208465576173, 0.020938751220703124, 0.020699136734008788, 0.020631103515625, 0.020740543365478516, 0.020968767166137697, 0.02064886474609375, 0.020590368270874022, 0.020702943801879883, 0.020609312057495117, 0.020641792297363282, 0.020584447860717774, 0.020641792297363282, 0.02059878349304199, 0.020643775939941406, 0.02055379295349121, 0.020573919296264648, 0.022116640090942382, 0.02077440071105957, 0.020676704406738283, 0.02059679985046387, 0.020631807327270508, 0.020523103713989257, 0.020547903060913086, 0.020552543640136717, 0.020781919479370116, 0.021319360733032228, 0.02127440071105957, 0.02079497528076172, 0.020624095916748048, 0.020623199462890623, 0.02060326385498047, 0.020566015243530272, 0.020602848052978514, 0.020770847320556642, 0.02072719955444336, 0.020900447845458983, 0.021436416625976562, 0.021220800399780273, 0.021301824569702147, 0.021370880126953123, 0.021463359832763672, 0.022995840072631835, 0.02266828727722168, 0.021261632919311522, 0.021561952590942384, 0.021737375259399415, 0.021401248931884765, 0.02136899185180664, 0.021493919372558595, 0.021702688217163087, 0.021547040939331054, 0.02132905578613281, 0.02117510414123535, 0.021421663284301756, 0.02144118309020996, 0.02173516845703125, 0.021614591598510743, 0.02132921600341797, 0.0214649600982666, 0.021336799621582032, 0.02150614356994629, 0.02145859146118164, 0.021423967361450195, 0.02159872055053711, 0.02167724800109863, 0.021338943481445313, 0.021211135864257814, 0.021292512893676757, 0.021432863235473634, 0.021393407821655275, 0.021398815155029297, 0.02126425552368164, 0.021445472717285155, 0.02123776054382324, 0.02138924789428711, 0.021485631942749023, 0.02138096046447754, 0.021364896774291993, 0.021419103622436524, 0.021246047973632814, 0.02134099197387695, 0.021809152603149414, 0.02146054458618164, 0.021373056411743165, 0.0212457275390625, 0.021385759353637696, 0.021511743545532227, 0.02127302360534668, 0.021307071685791015, 0.02141983985900879, 0.021469696044921875, 0.021266111373901365, 0.021385536193847657, 0.021433408737182618, 0.021534751892089844, 0.02166671943664551, 0.02140332794189453, 0.021264543533325197, 0.021381248474121095, 0.02139753532409668, 0.0212807674407959, 0.021340160369873046, 0.021300928115844726, 0.021461343765258788, 0.02139132881164551, 0.021485183715820314, 0.02143680000305176, 0.02128281593322754, 0.021356544494628905, 0.021147647857666017, 0.021593984603881837, 0.021333536148071288, 0.021547615051269533, 0.02127020835876465, 0.02130758476257324, 0.02129913520812988, 0.021469375610351563, 0.021209087371826172, 0.02138038444519043, 0.021467039108276367, 0.02131769561767578, 0.021439231872558594, 0.021608448028564452, 0.021415935516357423, 0.021550752639770507, 0.021209056854248048, 
0.02158355140686035, 0.021309823989868165, 0.021288320541381835, 0.021318368911743164, 0.02134448051452637, 0.02135856056213379, 0.021313568115234376, 0.021553375244140624, 0.02132086372375488, 0.02134489631652832, 0.021372928619384765, 0.021188608169555666, 0.021279775619506835]",tokens/s,47.458708977764346,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,835.735552,4675.534848,0.0,4280.287232,4115.121152,s,1,7.37269970703125,7.37269970703125,0.0,7.37269970703125,7.37269970703125,7.37269970703125,7.37269970703125,[7.37269970703125],,kWh,9.522534087530706e-06,1.0429521619540363e-06,3.299169305998051e-06,1.3864655555482794e-05,,MB,1202.020352,4983.816192,0.0,4575.985664,4408.408064,s,10,0.42754406356811525,0.04275440635681152,0.0014334045633080338,0.042253551483154295,0.04336371269226074,0.04515391941070556,0.046586084785461425,"[0.04694412612915039, 0.04253737640380859, 0.042242782592773434, 0.042584064483642575, 0.04222710418701172, 0.04203776168823242, 0.04195721435546875, 0.041783424377441404, 0.042264320373535155, 0.04296588897705078]",tokens/s,5987.686926664923,kWh,1.5306460620200298e-06,1.6880301024038254e-07,1.0141325739580902e-06,2.713581646218502e-06,tokens/kWh,94340260.72395776,MB,1230.35648,4983.816192,0.0,4575.985664,4408.410624,s,10,15.602204467773436,1.5602204467773437,0.006893250126550453,1.5605767211914063,1.56579580078125,1.5694571044921877,1.5723861474609375,"[1.573118408203125, 1.564982177734375, 1.558853759765625, 1.5473900146484374, 1.5649759521484374, 1.5629482421875, 1.551452880859375, 1.5591427001953124, 1.5620107421875, 1.55732958984375]",tokens/s,40.37890935869181,kWh,4.494949023089878e-05,4.957548495471422e-06,2.977266983324302e-05,7.967970855961321e-05,tokens/kWh,790665.542568669,,s,630,15.599725788116476,0.024761469504946756,0.000482530303829509,0.024702032089233397,0.0250308967590332,0.02532572202682495,0.02608155612945557,"[0.025272319793701172, 0.02474393653869629, 0.024856576919555663, 0.024817663192749022, 0.024887615203857422, 0.024907455444335938, 0.02490572738647461, 0.024807424545288087, 0.024922111511230468, 0.024801279067993166, 0.025231359481811523, 0.029007648468017577, 0.02487318420410156, 0.025145408630371093, 0.02533328056335449, 0.02497372817993164, 0.02569625663757324, 0.024817663192749022, 0.02489139175415039, 0.02480735969543457, 0.024780096054077147, 0.024849151611328123, 0.024706304550170897, 0.024804096221923828, 0.025481056213378907, 0.02496227264404297, 0.02486777687072754, 0.024742944717407227, 0.02535523223876953, 0.02493017578125, 0.02490108871459961, 0.025690559387207032, 0.024985824584960938, 0.025132192611694335, 0.024863584518432617, 0.024937568664550783, 0.024531871795654296, 0.024639488220214844, 0.02459561538696289, 0.02467024040222168, 0.024514591217041016, 0.024591136932373046, 0.02474332809448242, 0.02485513687133789, 0.024702688217163087, 0.0246080322265625, 0.024656671524047852, 0.024721632003784178, 
0.02487295913696289, 0.025556991577148438, 0.024922111511230468, 0.024682048797607423, 0.025390687942504882, 0.025489599227905273, 0.025444576263427734, 0.024637887954711914, 0.02473756790161133, 0.024766687393188477, 0.024268800735473633, 0.024357152938842774, 0.024272607803344726, 0.024923391342163086, 0.0250263671875, 0.02491769599914551, 0.02536288070678711, 0.024864639282226562, 0.024590560913085938, 0.024579776763916015, 0.024620864868164064, 0.024539424896240235, 0.02451456069946289, 0.02614476776123047, 0.025933792114257812, 0.02469385528564453, 0.024879968643188477, 0.024748128890991213, 0.024713216781616212, 0.02456928062438965, 0.02465439987182617, 0.024530208587646485, 0.025078176498413086, 0.02460268783569336, 0.02439344024658203, 0.024437280654907228, 0.02471219253540039, 0.03300022506713867, 0.024514463424682616, 0.024929983139038086, 0.024777215957641603, 0.024754304885864258, 0.0247193603515625, 0.02462067222595215, 0.024899967193603517, 0.02451241683959961, 0.024567903518676756, 0.024858015060424805, 0.02486537551879883, 0.024532127380371093, 0.02492630386352539, 0.024453887939453123, 0.024649311065673828, 0.024895519256591798, 0.024696575164794923, 0.024507007598876952, 0.024532800674438478, 0.02455571174621582, 0.024593696594238282, 0.024584928512573243, 0.024635391235351564, 0.02460371208190918, 0.024564672470092773, 0.024436128616333007, 0.024719808578491213, 0.02444044876098633, 0.02446611213684082, 0.025082752227783202, 0.024757055282592772, 0.024538591384887697, 0.024580799102783202, 0.024399551391601562, 0.024586559295654297, 0.02441766357421875, 0.02461065673828125, 0.02457859230041504, 0.02485273551940918, 0.0244421443939209, 0.025602975845336915, 0.02497529602050781, 0.02480953598022461, 0.025163103103637695, 0.024743776321411132, 0.02509292793273926, 0.024543231964111328, 0.024841407775878906, 0.024580928802490236, 0.024549375534057616, 0.024526847839355468, 0.024708736419677736, 0.024904064178466797, 0.024614912033081054, 0.024544832229614257, 0.024533439636230468, 0.02451456069946289, 0.024677568435668946, 0.02478323173522949, 0.024795583724975586, 0.02460633659362793, 0.02478323173522949, 0.024467456817626954, 0.024507616043090822, 0.024566560745239257, 0.024886335372924805, 0.024641536712646486, 0.02457491111755371, 0.024448127746582032, 0.024580768585205078, 0.02518448066711426, 0.024700511932373048, 0.02453094482421875, 0.02488265609741211, 0.024746944427490234, 0.0247193603515625, 0.024554975509643556, 0.024615455627441406, 0.024614240646362303, 0.02446790313720703, 0.024783071517944337, 0.024573951721191405, 0.0248603515625, 0.024653823852539062, 0.024557535171508788, 0.02500556755065918, 0.024653696060180665, 0.025676223754882814, 0.026434080123901367, 0.024757471084594727, 0.024647968292236328, 0.024586271286010743, 0.024666175842285157, 0.024594751358032227, 0.024585344314575194, 0.024669151306152343, 0.024431903839111327, 0.024516511917114257, 0.02465670394897461, 0.02469638442993164, 0.024792991638183593, 0.02461955261230469, 0.024585504531860352, 0.024832000732421877, 0.02459769630432129, 0.024451904296875, 0.024227840423583984, 0.024405727386474608, 0.024289247512817382, 0.024198783874511718, 0.024142559051513673, 0.024088544845581053, 0.02425200080871582, 0.024268287658691406, 0.024404895782470702, 0.024434688568115235, 0.024333471298217772, 0.024345439910888673, 0.024423551559448243, 0.02434752082824707, 0.02480892753601074, 0.026811935424804687, 0.025015327453613283, 0.024469472885131835, 0.024649503707885743, 0.02492367935180664, 
0.024615615844726563, 0.024620351791381837, 0.02466476821899414, 0.02467430305480957, 0.024493215560913086, 0.024657760620117188, 0.024515584945678712, 0.02426470375061035, 0.024551424026489257, 0.02428646469116211, 0.024304351806640624, 0.024170080184936524, 0.02418451118469238, 0.024103679656982423, 0.02416758346557617, 0.024298336029052733, 0.0242093448638916, 0.024143936157226563, 0.02408608055114746, 0.024123775482177735, 0.024483680725097656, 0.024641279220581055, 0.02471164894104004, 0.02470035171508789, 0.02472198486328125, 0.024827903747558593, 0.02473574447631836, 0.02485865592956543, 0.02478220748901367, 0.02465020751953125, 0.024610944747924805, 0.024681535720825196, 0.024895999908447267, 0.02466655921936035, 0.024723392486572265, 0.0249815673828125, 0.024846336364746095, 0.02497439956665039, 0.02482387161254883, 0.024947071075439455, 0.02610588836669922, 0.025106399536132813, 0.02497443199157715, 0.024756479263305663, 0.02475484848022461, 0.024829952239990235, 0.024745119094848632, 0.024761184692382813, 0.024720928192138673, 0.024820192337036133, 0.024700319290161133, 0.024731327056884765, 0.02461788749694824, 0.02500819206237793, 0.024852415084838868, 0.024624160766601563, 0.024710111618041992, 0.024849567413330078, 0.024724319458007814, 0.024635391235351564, 0.024796735763549804, 0.02469728088378906, 0.024691743850708006, 0.025012704849243166, 0.02474153518676758, 0.024616832733154296, 0.0247359676361084, 0.024616832733154296, 0.02456787109375, 0.02633401679992676, 0.02523868751525879, 0.024807552337646484, 0.02491663932800293, 0.024694847106933593, 0.024645631790161132, 0.024642879486083985, 0.02481385612487793, 0.024657695770263673, 0.024715904235839845, 0.02449171257019043, 0.024631616592407226, 0.025821184158325194, 0.02486662483215332, 0.024670400619506837, 0.024935455322265626, 0.02493539237976074, 0.024750080108642578, 0.02473574447631836, 0.025233407974243165, 0.024542783737182616, 0.024662464141845704, 0.02469593620300293, 0.02470387268066406, 0.02476406478881836, 0.024768352508544922, 0.024764928817749023, 0.024729120254516603, 0.02485910415649414, 0.025046432495117187, 0.02491763114929199, 0.024765024185180663, 0.02472313690185547, 0.024742591857910157, 0.025319456100463867, 0.024702943801879883, 0.02461676788330078, 0.02463968086242676, 0.024616960525512696, 0.025050239562988283, 0.024968063354492188, 0.024662208557128907, 0.024801088333129884, 0.024993791580200195, 0.02466716766357422, 0.024990688323974608, 0.024672256469726563, 0.025190208435058595, 0.0247891845703125, 0.02486403274536133, 0.02470710372924805, 0.02472755241394043, 0.024746688842773437, 0.02475142478942871, 0.024791391372680664, 0.02470044708251953, 0.02466076850891113, 0.024814783096313478, 0.024734176635742188, 0.02509164810180664, 0.02500227165222168, 0.024835968017578126, 0.024631839752197265, 0.02492560005187988, 0.024689376831054686, 0.024764415740966796, 0.024584192276000977, 0.0247459831237793, 0.02465171241760254, 0.025001440048217773, 0.02465827178955078, 0.02463327980041504, 0.024686208724975588, 0.024512256622314453, 0.024421024322509765, 0.02459676742553711, 0.024604671478271483, 0.024608768463134766, 0.025795936584472656, 0.025664159774780274, 0.02465177536010742, 0.024661855697631838, 0.02466217613220215, 0.024917024612426758, 0.024894432067871095, 0.024559520721435548, 0.02472764778137207, 0.025212928771972655, 0.025330848693847656, 0.024607391357421876, 0.024455263137817384, 0.0245883846282959, 0.024647680282592774, 0.024620864868164064, 0.024698400497436525, 
0.025385087966918945, 0.024795679092407225, 0.02514508819580078, 0.024684480667114258, 0.024807519912719726, 0.024715520858764647, 0.024623680114746093, 0.024798688888549806, 0.024625696182250977, 0.024610815048217775, 0.02453708839416504, 0.024573951721191405, 0.024721408843994142, 0.024525920867919923, 0.024488544464111327, 0.02493881607055664, 0.024805376052856445, 0.024649728775024415, 0.02454528045654297, 0.024569664001464844, 0.02446713638305664, 0.024689151763916017, 0.02468454360961914, 0.02464508819580078, 0.024719903945922852, 0.024638784408569335, 0.024507072448730467, 0.024792255401611327, 0.024557600021362303, 0.024648479461669922, 0.024803327560424804, 0.02511257553100586, 0.025217023849487305, 0.024840192794799806, 0.024591808319091798, 0.024615455627441406, 0.024731679916381834, 0.024760351181030274, 0.024707040786743163, 0.02472755241394043, 0.02481052780151367, 0.024572032928466797, 0.024531808853149414, 0.02470297622680664, 0.024647071838378908, 0.0246112003326416, 0.024611040115356444, 0.024827264785766603, 0.025027200698852538, 0.024589471817016602, 0.024389984130859375, 0.024281600952148437, 0.024061279296875, 0.024044191360473633, 0.024197120666503907, 0.024216800689697265, 0.02432694435119629, 0.0245166072845459, 0.024381439208984376, 0.0246824951171875, 0.02453651237487793, 0.02468307113647461, 0.024319999694824217, 0.02450432014465332, 0.024344575881958007, 0.025718591690063478, 0.02471993637084961, 0.024743904113769533, 0.024411264419555663, 0.026021984100341795, 0.024978271484375, 0.024502208709716797, 0.024256128311157227, 0.024314239501953126, 0.024503551483154296, 0.02456038475036621, 0.024473600387573242, 0.024403295516967773, 0.024300128936767577, 0.025299007415771485, 0.024163328170776367, 0.02412838363647461, 0.024195199966430665, 0.0242475528717041, 0.025168512344360353, 0.02425974464416504, 0.024277984619140627, 0.02426192092895508, 0.02426748847961426, 0.025450496673583983, 0.024823808670043947, 0.02454092788696289, 0.024540544509887696, 0.024410144805908203, 0.02461510467529297, 0.024488607406616212, 0.024993791580200195, 0.024683679580688477, 0.02484659194946289, 0.024809280395507814, 0.024992639541625977, 0.024711008071899413, 0.02493801689147949, 0.02501481628417969, 0.02482585525512695, 0.024838144302368165, 0.02469856071472168, 0.02495929527282715, 0.02496620750427246, 0.024903615951538085, 0.02498252868652344, 0.02503036880493164, 0.024916255950927734, 0.02500934410095215, 0.024713216781616212, 0.024761152267456055, 0.024806976318359375, 0.025087711334228515, 0.025053920745849608, 0.024661439895629883, 0.024684223175048828, 0.025404287338256837, 0.024809343338012695, 0.024625280380249023, 0.02474393653869629, 0.024688640594482423, 0.024786815643310547, 0.024897504806518554, 0.02546499252319336, 0.02475449562072754, 0.024713216781616212, 0.024727264404296876, 0.02485481643676758, 0.024899168014526366, 0.024710752487182616, 0.024687423706054687, 0.024557567596435546, 0.024681503295898438, 0.024599519729614258, 0.024713247299194337, 0.024698175430297852, 0.024619680404663086, 0.024653247833251953, 0.024637567520141603, 0.02470137596130371, 0.024969215393066405, 0.024596479415893553, 0.024845888137817383, 0.025100128173828125, 0.025717344284057617, 0.025024511337280272, 0.02481705665588379, 0.024760576248168947, 0.02465622329711914, 0.024853792190551758, 0.024879007339477538, 0.024832832336425782, 0.024901023864746095, 0.024694911956787108, 0.02466454315185547, 0.02482784080505371, 0.02481772804260254, 0.024863807678222657, 0.024662912368774412, 
0.024751583099365235, 0.025021024703979492, 0.02489257621765137, 0.024705568313598634, 0.024571584701538085, 0.024617599487304687, 0.024625152587890626, 0.024746208190917968, 0.0246474552154541, 0.024661632537841798, 0.024977792739868165, 0.02488444709777832, 0.024776832580566406, 0.024875680923461915, 0.0247459831237793, 0.02455865669250488, 0.02447455978393555, 0.02456166458129883, 0.0248668155670166, 0.024881120681762695, 0.02462713623046875, 0.0247193603515625, 0.02467030334472656, 0.02446950340270996, 0.024961023330688475, 0.025067520141601563, 0.02523494338989258, 0.025391807556152345, 0.02497443199157715, 0.02506012725830078, 0.024961151123046876, 0.024759584426879883, 0.024742591857910157, 0.024658079147338866, 0.024706560134887694, 0.024713600158691406, 0.024727359771728515, 0.024755903244018555, 0.024672544479370118, 0.024641759872436525, 0.024688575744628908, 0.024610559463500978, 0.0245980167388916, 0.024494911193847658, 0.02443878364562988, 0.024774816513061522, 0.025035648345947265, 0.024773120880126953, 0.02466864013671875, 0.024577951431274413, 0.02454252815246582, 0.024893375396728517, 0.024795904159545898, 0.024680543899536132, 0.024534400939941407, 0.024490304946899414, 0.024656192779541015, 0.024726560592651367, 0.02487196731567383, 0.02478220748901367, 0.024955455780029296, 0.024590560913085938, 0.024755775451660158, 0.0251680965423584, 0.02479046440124512, 0.024686304092407227, 0.024695648193359374, 0.024508256912231446, 0.024541215896606447, 0.024561792373657226, 0.024700927734375, 0.024832000732421877, 0.024702720642089844, 0.024535295486450195, 0.024532991409301756, 0.02471731185913086, 0.024936447143554686, 0.024793088912963866, 0.024571903228759767, 0.024648895263671877, 0.02465990447998047, 0.02468908882141113, 0.02468262481689453, 0.02474403190612793, 0.024569503784179686, 0.02456755256652832, 0.02465875244140625, 0.024586368560791015, 0.024676223754882813, 0.02461871910095215]",tokens/s,40.38532526513516,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,837.287936,14689.435648,0.0,14294.188032,14284.158464,s,1,7.4896943359375,7.4896943359375,0.0,7.4896943359375,7.4896943359375,7.4896943359375,7.4896943359375,[7.4896943359375],,kWh,1.4267168679187611e-05,1.560477292521457e-06,7.197227980010323e-06,2.3024873951719392e-05,,MB,1159.585792,14995.61984,0.0,14587.789312,14512.892416,s,10,2.1156253356933594,0.21156253356933594,0.0047781127013005464,0.21339472198486328,0.21463478698730468,0.2157583038330078,0.2166571173095703,"[0.198820068359375, 0.21002482604980469, 0.20845228576660157, 0.21333978271484375, 0.21344966125488282, 0.21438511657714843, 0.2128421173095703, 0.21688182067871092, 0.2136397705078125, 
0.21378988647460936]",tokens/s,1210.0441211444486,kWh,6.3592148394028245e-06,7.012992785076774e-07,4.21556496665201e-06,1.1276079084562512e-05,tokens/kWh,22702926.973124564,MB,1182.142464,15100.47744,0.0,14692.646912,14646.153216,s,10,43.51311914062501,4.351311914062501,0.006574783603022592,4.351512939453125,4.359264599609375,4.360290502929688,4.361111225585938,"[4.33798828125, 4.3467451171875, 4.3467138671875, 4.35394970703125, 4.35903662109375, 4.35664697265625, 4.36131640625, 4.350025390625, 4.3476962890625, 4.35300048828125]",tokens/s,14.47839209053196,kWh,0.00012701867365351295,1.4010532308286438e-05,8.433970032194869e-05,0.00022536890628374807,tokens/kWh,279541.6680980853,,s,630,43.509153587341366,0.0690621485513354,0.0005463833746282887,0.06900400161743164,0.06953564147949219,0.06970079917907715,0.07210129539489747,"[0.07258521270751953, 0.06911984252929687, 0.0683496322631836, 0.06819667053222657, 0.0684606704711914, 0.0682718734741211, 0.06827760314941406, 0.0684728012084961, 0.06824038696289063, 0.06830226898193359, 0.06852806091308594, 0.06832998657226562, 0.06824969482421875, 0.06847283172607421, 0.06864662170410156, 0.06875552368164063, 0.06874960327148437, 0.06874476623535156, 0.06857766723632812, 0.06848863983154296, 0.06853004455566407, 0.06851612854003906, 0.06860022735595703, 0.06840697479248047, 0.06888070678710938, 0.06847923278808593, 0.06859478759765625, 0.06850012969970704, 0.06882118225097657, 0.06887612915039062, 0.06887369537353516, 0.06878873443603516, 0.06865853118896484, 0.0689179229736328, 0.06872064208984376, 0.06883676910400391, 0.06883388519287109, 0.06876096343994141, 0.06901209259033203, 0.06890393829345703, 0.06879676818847656, 0.06892726135253906, 0.06908956909179688, 0.06896256256103515, 0.06917769622802734, 0.06915641784667968, 0.06898531341552734, 0.06902777862548828, 0.06900534057617187, 0.06899001312255859, 0.0690060806274414, 0.06899321746826172, 0.06906620788574219, 0.06912464141845703, 0.0690802230834961, 0.06911430358886719, 0.06930409240722656, 0.06935616302490234, 0.069212158203125, 0.06920588684082031, 0.06909964752197266, 0.06915299224853516, 0.0694188461303711, 0.07203292846679688, 0.06919577789306641, 0.06860800170898437, 0.06844585418701171, 0.06844214630126953, 0.06841999816894531, 0.06843897247314454, 0.06843244934082031, 0.06820735931396485, 0.06861775970458985, 0.06832550048828125, 0.06852352142333984, 0.06844822692871094, 0.06868860626220703, 0.06915782165527344, 0.06906355285644532, 0.06898073577880859, 0.06910140991210938, 0.06886147308349609, 0.06884153747558594, 0.0686370849609375, 0.06852828979492187, 0.06879424285888672, 0.06846227264404296, 0.06878572845458984, 0.06905846405029296, 0.06874018859863282, 0.06907459259033204, 0.06926486206054687, 0.06904096221923828, 0.06891065979003906, 0.06924256134033203, 0.06876982116699219, 0.0689662094116211, 0.06892361450195313, 0.06891500854492187, 0.06879235076904297, 0.06863481903076171, 0.06878585815429687, 0.06879875183105469, 0.06882982635498047, 0.06886547088623046, 0.06926102447509766, 0.06913715362548828, 0.06922988891601563, 0.06908329772949219, 0.06908089447021484, 0.06925804901123046, 0.06916710662841796, 0.06939238739013671, 0.06890086364746094, 0.06916915130615234, 0.06901302337646484, 0.06894854736328125, 0.06946192169189454, 0.0690708465576172, 0.06947020721435547, 0.06950911712646485, 0.0691937255859375, 0.06998226928710938, 0.06947833251953126, 0.0695902099609375, 0.06928272247314453, 0.07193395233154297, 0.06917743682861328, 0.06852294158935547, 0.06883599853515625, 
0.06840780639648437, 0.06867254638671876, 0.06846121978759766, 0.06855897521972656, 0.068523681640625, 0.06881260681152344, 0.06859961700439453, 0.06848313903808594, 0.06858812713623047, 0.06872998046875, 0.06949903869628907, 0.06934796905517578, 0.06880659484863282, 0.06860822296142578, 0.06840729522705079, 0.06855270385742188, 0.0685277099609375, 0.068470947265625, 0.06841545867919922, 0.06864530944824218, 0.06864185333251953, 0.06855500793457031, 0.06872930908203125, 0.06871363067626954, 0.068901123046875, 0.06947235107421874, 0.06902559661865235, 0.06902352142333984, 0.06885270690917969, 0.06883737945556641, 0.06871670532226562, 0.06896419525146484, 0.06873257446289062, 0.06912556457519531, 0.07008528137207032, 0.06873078155517579, 0.06881734466552734, 0.06898854064941407, 0.06909503936767578, 0.06944847869873047, 0.06961724853515625, 0.06898876953125, 0.06894992065429688, 0.06912873840332032, 0.06905606079101563, 0.069421630859375, 0.069119873046875, 0.06924057769775391, 0.06933324432373048, 0.06898489379882812, 0.0689697265625, 0.06883510589599609, 0.0690206069946289, 0.06953705596923829, 0.06995833587646484, 0.06930850982666016, 0.06919158172607422, 0.06910157012939454, 0.06945315551757812, 0.07210189056396485, 0.06947225952148438, 0.06875958251953125, 0.06831919860839844, 0.06831839752197266, 0.06841222381591797, 0.06851939392089844, 0.06883372497558594, 0.06869551849365234, 0.068880126953125, 0.06829740905761719, 0.06849350738525391, 0.0689392318725586, 0.06862681579589844, 0.06912220764160157, 0.06913433837890624, 0.06914252471923828, 0.06882099151611328, 0.06854783630371093, 0.06867775726318359, 0.06864768218994141, 0.06876338958740234, 0.06894127655029297, 0.06849932861328124, 0.06849520111083984, 0.06862726593017578, 0.06875337219238281, 0.06927110290527344, 0.06924931335449219, 0.06924697875976563, 0.06953794860839843, 0.06936914825439452, 0.06947235107421874, 0.06915312194824219, 0.06874940490722656, 0.06896044921875, 0.06890444946289062, 0.0693623046875, 0.07014940643310547, 0.06887702178955078, 0.06887833404541016, 0.06893977355957032, 0.06923375701904297, 0.06997062683105469, 0.06942313385009766, 0.06927788543701172, 0.06965657806396484, 0.06939260864257812, 0.06901023864746093, 0.06932784271240235, 0.06913027191162109, 0.06928598022460937, 0.06939225769042968, 0.06895961761474609, 0.06912882995605468, 0.06899712371826172, 0.06923673248291015, 0.06984060668945312, 0.0696568603515625, 0.06935552215576171, 0.06938777923583984, 0.06957011413574218, 0.06936396789550782, 0.07218790435791016, 0.06934937286376953, 0.06856841278076171, 0.06848310089111329, 0.0684303970336914, 0.06856636810302734, 0.06867449951171875, 0.06885968017578124, 0.06856716918945313, 0.06872013092041016, 0.06890534210205078, 0.06892134094238281, 0.0688046417236328, 0.06892518615722656, 0.06926716613769532, 0.06948095703125, 0.06938361358642578, 0.06893753814697266, 0.06864498901367187, 0.06864959716796876, 0.06873017883300782, 0.06867574310302735, 0.06936547088623046, 0.06878899383544922, 0.0687022705078125, 0.06856729888916016, 0.06907469177246094, 0.06948047637939453, 0.06935343933105469, 0.06959017944335938, 0.06951760101318359, 0.06901529693603516, 0.06941104125976562, 0.0688213119506836, 0.068847900390625, 0.06878617858886718, 0.069010498046875, 0.06945465850830078, 0.06910979461669922, 0.06894831848144531, 0.06884281921386719, 0.06949078369140625, 0.06950252532958984, 0.06938873291015625, 0.0694029769897461, 0.0692384033203125, 0.06931251525878906, 0.06961190032958985, 0.06958080291748046, 
0.06939238739013671, 0.06950508880615235, 0.06946364593505859, 0.06921619415283203, 0.06900777435302734, 0.06914646148681641, 0.06961138916015625, 0.06953548431396485, 0.07015888214111328, 0.06960332489013672, 0.06912614440917969, 0.06954105377197266, 0.06985779571533203, 0.06950739288330078, 0.07209983825683594, 0.06916896057128906, 0.06849513244628906, 0.06857068634033203, 0.0685184326171875, 0.06871059417724609, 0.06898665618896484, 0.06856694030761719, 0.06848966217041015, 0.06862643432617188, 0.06918962860107422, 0.06866102600097657, 0.068609375, 0.0689438705444336, 0.06970681762695312, 0.06940643310546875, 0.06928336334228516, 0.06889859008789062, 0.06880131530761718, 0.06851583862304687, 0.0686913604736328, 0.0691226577758789, 0.06872268676757813, 0.06880255889892578, 0.06857500457763673, 0.06872406768798828, 0.06894297790527344, 0.06898252868652344, 0.06934102630615234, 0.06986972808837891, 0.06929612731933593, 0.06933708953857422, 0.06877318572998047, 0.06916780853271484, 0.06877184295654297, 0.06877597045898437, 0.06901942443847656, 0.06880480194091797, 0.06905840301513672, 0.06901190185546875, 0.06912790679931641, 0.06932892608642578, 0.06959718322753906, 0.06979373168945313, 0.06948067474365234, 0.06926233673095702, 0.06967174530029296, 0.06935942077636718, 0.06891334533691407, 0.06893977355957032, 0.06942720031738281, 0.06931199645996093, 0.06901811218261719, 0.06908236694335937, 0.06926988983154297, 0.06952566528320313, 0.06967874908447266, 0.06972473907470703, 0.0694205093383789, 0.06924931335449219, 0.06936716461181641, 0.06951213073730468, 0.07014988708496094, 0.07210371398925781, 0.06939670562744141, 0.06869606781005859, 0.06883328247070312, 0.06846380615234375, 0.06862726593017578, 0.06890716552734374, 0.0687511978149414, 0.06866051483154297, 0.06872959899902344, 0.0688333740234375, 0.06909939575195312, 0.06861561584472656, 0.06872940826416016, 0.06956646728515625, 0.06904994964599609, 0.06926284790039063, 0.06876060485839844, 0.06875945281982422, 0.06877696228027344, 0.06909836578369141, 0.06889686584472657, 0.06882713317871093, 0.06906674957275391, 0.06891725158691406, 0.06896422576904297, 0.06926051330566406, 0.0696278076171875, 0.0691435546875, 0.06939852905273437, 0.06967203521728516, 0.06924150085449218, 0.0690660171508789, 0.06904729461669921, 0.06904624176025391, 0.06887382507324219, 0.06931670379638671, 0.06922016143798829, 0.06904637145996094, 0.06950691223144531, 0.06945645141601563, 0.0693759994506836, 0.06954598236083985, 0.06931046295166016, 0.06948834991455079, 0.0696404800415039, 0.06928998565673829, 0.06938832092285156, 0.06949600219726562, 0.06947702026367188, 0.06942428588867187, 0.06933193969726563, 0.06949478149414062, 0.0694307861328125, 0.06919379425048829, 0.06925299072265625, 0.06945439910888672, 0.06962995147705078, 0.0695767059326172, 0.06966668701171876, 0.0696562271118164, 0.06934575653076172, 0.0691443862915039, 0.07213875579833984, 0.06927769470214844, 0.06873827362060547, 0.06869478607177734, 0.06858675384521484, 0.068552734375, 0.0691136932373047, 0.06899203491210938, 0.06880863952636719, 0.06870748901367188, 0.06850745391845703, 0.06854550170898438, 0.06864806365966797, 0.0687809295654297, 0.06926937866210937, 0.0690458526611328, 0.06880441284179688, 0.0688486099243164, 0.0687891845703125, 0.06859449768066406, 0.06857933044433594, 0.0688222427368164, 0.06890364837646484, 0.06865312194824219, 0.06865715026855469, 0.06855455780029297, 0.06863276672363282, 0.0689656982421875, 0.06932921600341797, 0.0691756820678711, 0.06864691162109375, 
0.06900505828857421, 0.06882329559326172, 0.06893567657470703, 0.06884259033203124, 0.06910620880126953, 0.06886243438720703, 0.06883932495117187, 0.06887606048583984, 0.06911385345458984, 0.06906082916259766, 0.06924854278564453, 0.06930867004394531, 0.06958512115478516, 0.06919577789306641, 0.06897869110107421, 0.06917113494873046, 0.06912598419189453, 0.06891487884521484, 0.06890147399902344, 0.06912185668945313, 0.06906594848632812, 0.06913046264648437, 0.06896742248535156, 0.06918131256103516, 0.0694879379272461, 0.0697798080444336, 0.0697838363647461, 0.06913148498535156, 0.0690749740600586, 0.0689988784790039, 0.06952384185791016, 0.07013209533691406, 0.07213459014892579, 0.0691500473022461, 0.06855958557128906, 0.06860800170898437, 0.06835318756103516, 0.06856585693359375, 0.06845439910888672, 0.06848102569580078, 0.06839705657958985, 0.06851299285888672, 0.06839785766601562, 0.06854783630371093, 0.06849120330810547, 0.06880662536621093, 0.06949542236328125, 0.06931407928466797, 0.06893949127197266, 0.0685902099609375, 0.06840278625488282, 0.06834867095947265, 0.06881011199951172, 0.06868409729003906, 0.06859715270996093, 0.06859993743896485, 0.06864771270751953, 0.06851789093017578, 0.06871449279785156, 0.06875299072265625, 0.06898902130126953, 0.06978797149658203, 0.06911318206787109, 0.06900726318359375, 0.06903679656982421, 0.06877932739257812, 0.06884528350830078, 0.06915376281738281, 0.06966995239257813, 0.06906771087646485, 0.06887811279296875, 0.06893180847167969, 0.06900294494628906, 0.06915309143066406, 0.06917446136474609, 0.06947840118408204, 0.06940534210205078, 0.06937385559082031, 0.06910182189941406, 0.06899097442626953, 0.06900940704345702, 0.06920716857910156, 0.06940354919433593, 0.06946412658691406, 0.06892329406738282, 0.06885158538818359, 0.06882726287841796, 0.06902547454833985, 0.06933126068115235, 0.06947433471679687, 0.06938950347900391, 0.06922217559814453, 0.06967005157470703, 0.0694557113647461, 0.06923849487304687, 0.07238857269287109, 0.06957615661621094, 0.06867407989501953, 0.06846876525878906, 0.06876156616210938, 0.06865446472167969, 0.06875523376464844, 0.06870425415039062, 0.06840013122558594, 0.06836128234863281, 0.06920829010009766, 0.06871711730957031, 0.06846463775634766, 0.06882508850097656, 0.0694939193725586, 0.06966480255126953, 0.06904710388183594, 0.0686612777709961, 0.0684849624633789, 0.06853644561767579, 0.06894127655029297, 0.06853817749023437, 0.06861692810058594, 0.0684933090209961, 0.06860514831542969, 0.06875398254394531, 0.06891248321533203, 0.06882601928710938, 0.06928173065185547, 0.06976675415039063, 0.06939894104003906, 0.06906473541259765, 0.06881056213378907, 0.06919574737548828, 0.06888169860839843, 0.06892845153808594, 0.06891519927978515, 0.06879567718505859, 0.06899942779541016, 0.06894544219970702, 0.06923769378662109, 0.069165283203125, 0.06920521545410156, 0.0697636489868164, 0.06970780944824219, 0.06938985443115234, 0.06917369842529297, 0.06975897979736329, 0.06891292572021485, 0.06925539398193359, 0.06910157012939454, 0.06902579498291016, 0.06904627227783203, 0.06911590576171875, 0.06919497680664062, 0.06912489318847656, 0.06933299255371093, 0.06981017303466797, 0.06969344329833985, 0.06923878479003906, 0.06908889770507813, 0.06931289672851562, 0.06946931457519531]",tokens/s,14.479711694122551,, 
float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,737.046528,804.192256,0.0,408.94464,387.119104,s,1,7.05956787109375,7.05956787109375,0.0,7.05956787109375,7.05956787109375,7.05956787109375,7.05956787109375,[7.05956787109375],,kWh,6.266552395828739e-06,6.837814345044297e-07,2.0188905040033345e-06,8.969224334336502e-06,,MB,1100.791808,829.35808,0.0,421.527552,354.083328,s,18,0.19807555103302,0.011004197279612223,0.00019689022549573127,0.010912367820739746,0.011292044830322265,0.011378513336181641,0.011422403717041014,"[0.010825087547302246, 0.010987551689147949, 0.011236991882324219, 0.01096713638305664, 0.010945504188537597, 0.010912384033203125, 0.011231648445129394, 0.010849023818969726, 0.010844736099243165, 0.010888416290283203, 0.010819135665893554, 0.01087177562713623, 0.010912351608276367, 0.011368831634521485, 0.010868127822875977, 0.011433376312255859, 0.011259136199951171, 0.010854335784912109]",tokens/s,23263.850464976505,kWh,3.2461778655451273e-07,3.579943586903562e-08,2.1631923126807694e-07,5.767364536916254e-07,tokens/kWh,443876918.75790524,MB,1135.0016,844.038144,0.0,436.207616,354.085888,s,18,10.186482849121093,0.5659157138400608,0.003745610212349325,0.5648192138671875,0.5710327026367187,0.5727525726318359,0.5730854754638672,"[0.5703270874023437, 0.5690594482421875, 0.573168701171875, 0.5664511108398438, 0.5726791381835937, 0.5663496704101563, 0.563966796875, 0.56943896484375, 0.5603001708984375, 0.5606165161132812, 0.562454833984375, 0.565671630859375, 0.5635009765625, 0.5637919921875, 0.5637879638671875, 0.56255029296875, 0.5684378662109375, 0.5639296875]",tokens/s,111.3239983610087,kWh,1.634629071205645e-05,1.8027229825864778e-06,7.3478997830654334e-06,2.5496913477708366e-05,tokens/kWh,2470887.3117163815,,s,1134,10.178151805877684,0.008975442509592316,0.00019099574927967507,0.008945232391357421,0.009071491050720214,0.009170578908920288,0.009812120027542118,"[0.009137824058532714, 0.009097567558288574, 0.00898252773284912, 0.009009023666381837, 0.009011327743530273, 0.008945088386535644, 0.009015071868896484, 0.009052927970886231, 0.009056480407714843, 0.009009152412414552, 0.009011008262634278, 0.009056511878967286, 0.009074591636657714, 0.009189375877380371, 0.009548831939697265, 0.009132831573486329, 0.009123871803283692, 0.00915017604827881, 0.009049632072448731, 0.009048831939697266, 0.009012543678283691, 0.009050815582275391, 0.00902348804473877, 0.009003007888793945, 0.009025216102600098, 0.009080415725708007, 0.009007840156555176, 0.009000960350036622, 0.008997952461242676, 0.009025823593139649, 0.009034303665161133, 0.009019583702087402, 0.009010975837707519, 0.009037856101989747, 0.009035679817199707, 0.009195712089538574, 0.00897875213623047, 0.00897811222076416, 0.009000448226928711, 
0.008964544296264648, 0.008997023582458496, 0.008992447853088378, 0.008976832389831543, 0.008973631858825683, 0.009083359718322755, 0.009020511627197265, 0.008991776466369629, 0.009471360206604005, 0.009014880180358887, 0.008997632026672362, 0.008988479614257812, 0.008968544006347657, 0.009017696380615234, 0.008998335838317871, 0.009035008430480957, 0.009151712417602539, 0.009032671928405761, 0.008999711990356445, 0.008994751930236817, 0.008976608276367188, 0.008974176406860351, 0.009005056381225587, 0.00902143955230713, 0.008769311904907227, 0.009494751930236816, 0.009004159927368164, 0.009003904342651367, 0.009001184463500976, 0.009057056427001954, 0.009067520141601563, 0.008968192100524902, 0.009027392387390136, 0.00905401611328125, 0.008992159843444824, 0.009010144233703613, 0.008992511749267578, 0.0089967679977417, 0.009047648429870606, 0.008999679565429687, 0.00902348804473877, 0.009119744300842286, 0.00898252773284912, 0.009002528190612792, 0.00897276782989502, 0.009006208419799804, 0.008973183631896973, 0.008964096069335938, 0.008923487663269043, 0.009096927642822265, 0.009033087730407716, 0.009003583908081054, 0.009103455543518067, 0.008970047950744629, 0.008980575561523438, 0.009026592254638671, 0.008998944282531738, 0.00913094425201416, 0.009051775932312012, 0.008988991737365722, 0.008951871871948243, 0.00904371166229248, 0.008993247985839843, 0.00904576015472412, 0.009056351661682128, 0.009014528274536132, 0.00898464012145996, 0.009095040321350098, 0.008970944404602051, 0.00900921630859375, 0.009030752182006836, 0.00897321605682373, 0.00898579216003418, 0.009022239685058595, 0.00903593635559082, 0.009080448150634766, 0.008999168395996094, 0.008972288131713867, 0.009151583671569824, 0.008969120025634766, 0.008976479530334473, 0.008951295852661132, 0.009032095909118652, 0.008974047660827636, 0.008980768203735352, 0.00932044792175293, 0.009140352249145507, 0.0087706241607666, 0.009126272201538086, 0.009007072448730469, 0.00904428768157959, 0.009025919914245605, 0.00902143955230713, 0.009004735946655273, 0.009082240104675293, 0.008997856140136718, 0.009000384330749511, 0.009013536453247071, 0.008972288131713867, 0.009103487968444825, 0.008988544464111329, 0.0090316801071167, 0.009035776138305664, 0.009076031684875488, 0.009068767547607421, 0.009346752166748047, 0.008968992233276367, 0.008945664405822755, 0.008984448432922364, 0.009828479766845703, 0.011249855995178222, 0.009828255653381348, 0.00908620834350586, 0.009038144111633301, 0.009036416053771973, 0.008990431785583496, 0.009518495559692383, 0.008997471809387207, 0.00897433567047119, 0.008980480194091797, 0.008976127624511719, 0.008984224319458008, 0.00896236801147461, 0.009088768005371093, 0.009089568138122558, 0.008971263885498047, 0.008946687698364257, 0.00899401569366455, 0.009017248153686524, 0.00898374366760254, 0.008918911933898925, 0.008994624137878418, 0.008944671630859375, 0.008948736190795899, 0.008976320266723632, 0.008967583656311034, 0.009449600219726562, 0.009001312255859375, 0.010072511672973633, 0.008954719543457031, 0.00895680046081543, 0.008951711654663085, 0.008962143898010254, 0.008945856094360351, 0.008910047531127929, 0.008933728218078613, 0.008943776130676269, 0.00888764762878418, 0.008884991645812988, 0.008945664405822755, 0.008706048011779785, 0.009000960350036622, 0.008965279579162598, 0.008974847793579101, 0.008954208374023437, 0.00894976043701172, 0.00901734447479248, 0.009000736236572266, 0.008962271690368652, 0.008951807975769043, 0.008937472343444825, 0.00896777629852295, 0.008984992027282715, 
0.0090665922164917, 0.009185183525085449, 0.0090316801071167, 0.008988672256469727, 0.008967647552490235, 0.008953696250915527, 0.008970399856567383, 0.008978976249694823, 0.008965344429016113, 0.008989472389221192, 0.008951935768127441, 0.00894553565979004, 0.008922240257263184, 0.008962944030761718, 0.008976479530334473, 0.00898185634613037, 0.008960576057434081, 0.008990847587585449, 0.008982399940490722, 0.008973471641540528, 0.008950719833374024, 0.008976287841796875, 0.008943615913391113, 0.008915295600891113, 0.008933024406433105, 0.00898185634613037, 0.008942079544067384, 0.00893331241607666, 0.00893280029296875, 0.00891481590270996, 0.008938015937805176, 0.009398655891418457, 0.009093119621276855, 0.009000960350036622, 0.00908886432647705, 0.009031840324401855, 0.009029664039611817, 0.00901961612701416, 0.009029343605041503, 0.00900710391998291, 0.009029919624328614, 0.009041119575500488, 0.008948543548583984, 0.008942496299743653, 0.008981311798095703, 0.009000960350036622, 0.00898252773284912, 0.008974559783935547, 0.008929023742675781, 0.008986368179321289, 0.008785216331481934, 0.009026176452636718, 0.009002335548400879, 0.00898464012145996, 0.009005727767944336, 0.008969887733459473, 0.009035648345947266, 0.00897596836090088, 0.009030719757080079, 0.00893836784362793, 0.008986656188964843, 0.008960927963256836, 0.008961343765258789, 0.008983103752136231, 0.008978816032409668, 0.009004544258117676, 0.009025279998779296, 0.008970111846923827, 0.009185919761657715, 0.009011199951171875, 0.009117695808410644, 0.009033727645874023, 0.009165120124816895, 0.009333951950073242, 0.009162879943847657, 0.010742207527160645, 0.008967904090881348, 0.009015520095825196, 0.008978431701660156, 0.009088768005371093, 0.00914409637451172, 0.008944095611572266, 0.008947039604187012, 0.009011872291564941, 0.008927231788635253, 0.009019392013549805, 0.008947711944580078, 0.00959705638885498, 0.008957823753356934, 0.008956000328063965, 0.00896985626220703, 0.008967616081237794, 0.008928095817565918, 0.00893337631225586, 0.009177087783813476, 0.00942080020904541, 0.008964096069335938, 0.008973504066467285, 0.008978528022766113, 0.009897024154663085, 0.01069660758972168, 0.00896291160583496, 0.008958175659179687, 0.008913311958312988, 0.008945376396179198, 0.008962080001831055, 0.008947936058044434, 0.008923456192016602, 0.009050111770629882, 0.008951711654663085, 0.008939040184020997, 0.008958271980285645, 0.008921343803405762, 0.00875443172454834, 0.008990880012512206, 0.008987296104431152, 0.009158592224121093, 0.00908841609954834, 0.008999039649963379, 0.008962528228759765, 0.008971936225891113, 0.008991071701049805, 0.008939647674560547, 0.009008959770202636, 0.008978431701660156, 0.00899283218383789, 0.00901529598236084, 0.008965696334838867, 0.008952223777770996, 0.00904924774169922, 0.008999808311462403, 0.008920991897583008, 0.008984607696533203, 0.008973823547363282, 0.008921664237976074, 0.008966015815734864, 0.008990847587585449, 0.009000384330749511, 0.009159232139587402, 0.00917039966583252, 0.009271840095520019, 0.009023679733276366, 0.009033184051513671, 0.009029631614685058, 0.008994943618774414, 0.009064224243164063, 0.009004896163940429, 0.009021280288696289, 0.009006879806518554, 0.00898259162902832, 0.008967071533203126, 0.009000960350036622, 0.008934975624084473, 0.009033696174621582, 0.00897276782989502, 0.00899443244934082, 0.00897267246246338, 0.008947168350219727, 0.008981023788452148, 0.009142144203186035, 0.009027711868286133, 0.009003007888793945, 0.009070079803466797, 
0.008939552307128906, 0.008937536239624023, 0.008933792114257813, 0.008874112129211426, 0.0088472318649292, 0.00887168025970459, 0.00885756778717041, 0.008816927909851073, 0.008893600463867187, 0.008858016014099121, 0.008866239547729492, 0.008845312118530273, 0.00897862434387207, 0.009006815910339356, 0.008960864067077638, 0.00903987216949463, 0.00890880012512207, 0.00890880012512207, 0.008888128280639649, 0.008837311744689942, 0.008914752006530762, 0.008958144187927246, 0.008960000038146973, 0.00888371181488037, 0.008897024154663086, 0.008880127906799316, 0.009155712127685546, 0.00903052806854248, 0.008970111846923827, 0.008937600135803223, 0.008921088218688965, 0.008887519836425781, 0.008849632263183594, 0.008978848457336425, 0.008933247566223145, 0.008890175819396972, 0.008888928413391114, 0.008902560234069825, 0.008902624130249023, 0.00889241600036621, 0.008887871742248536, 0.00893075180053711, 0.008920063972473144, 0.008976384162902832, 0.008914496421813965, 0.008962559700012206, 0.008916864395141602, 0.00895302391052246, 0.008913791656494141, 0.008887295722961425, 0.008957056045532227, 0.008968064308166504, 0.008885855674743653, 0.008903072357177735, 0.00892518424987793, 0.008921088218688965, 0.008968416213989258, 0.008981887817382812, 0.009764863967895507, 0.009005536079406738, 0.008982463836669922, 0.00890675163269043, 0.008900735855102539, 0.008920960426330567, 0.008970239639282226, 0.008943615913391113, 0.008904735565185546, 0.0089169282913208, 0.008929280281066895, 0.00890713596343994, 0.009020352363586425, 0.008877056121826172, 0.008953568458557128, 0.008898112297058105, 0.008870335578918457, 0.008886272430419923, 0.009011967658996582, 0.009527423858642577, 0.010597472190856933, 0.00901734447479248, 0.009564031600952148, 0.00898960018157959, 0.008937472343444825, 0.009236479759216308, 0.008867839813232421, 0.008927231788635253, 0.008934528350830077, 0.008893183708190918, 0.008869983673095704, 0.008902688026428223, 0.008881664276123047, 0.008915455818176269, 0.008934752464294433, 0.008876704216003417, 0.008883487701416016, 0.008926239967346192, 0.008890048027038574, 0.008812543869018554, 0.008889439582824708, 0.008922016143798828, 0.008900768280029297, 0.00902284812927246, 0.009218527793884277, 0.008939519882202148, 0.00893507194519043, 0.00905465602874756, 0.008983551979064941, 0.008872544288635254, 0.008955264091491699, 0.008931488037109374, 0.008887295722961425, 0.008875807762145997, 0.008892031669616699, 0.008971936225891113, 0.009998687744140625, 0.010410112380981445, 0.008892671585083008, 0.00894115161895752, 0.008933216094970704, 0.008880991935729981, 0.008809503555297851, 0.00890505599975586, 0.008877887725830078, 0.008937631607055664, 0.009109919548034667, 0.009170911788940429, 0.009135552406311035, 0.009003487586975098, 0.008915040016174316, 0.008939871788024903, 0.008928192138671876, 0.008884127616882325, 0.008897343635559081, 0.008877087593078613, 0.008876031875610351, 0.009171744346618653, 0.009044159889221191, 0.008893664360046388, 0.008873791694641113, 0.008613887786865235, 0.00901244831085205, 0.008910688400268554, 0.008889280319213867, 0.008882176399230958, 0.008865344047546386, 0.008845760345458984, 0.008863743782043456, 0.008790335655212402, 0.008894399642944336, 0.008988191604614258, 0.008881695747375488, 0.00887177562713623, 0.008928064346313477, 0.00894979190826416, 0.009023679733276366, 0.008904512405395509, 0.008896736145019532, 0.008971936225891113, 0.008865440368652344, 0.008864224433898926, 0.008863455772399902, 0.008865056037902832, 
0.008890591621398925, 0.008915743827819823, 0.008851455688476563, 0.008894463539123536, 0.008861696243286133, 0.008939359664916992, 0.008925344467163086, 0.008896415710449218, 0.008954015731811524, 0.008912832260131835, 0.008880352020263672, 0.008883999824523927, 0.008887935638427734, 0.008909184455871582, 0.00889628791809082, 0.008868032455444336, 0.008837151527404784, 0.008847776412963868, 0.008887904167175293, 0.008857600212097168, 0.008817760467529297, 0.008866432189941407, 0.008881855964660644, 0.008893024444580079, 0.008822303771972656, 0.008804224014282226, 0.00885750389099121, 0.00887059211730957, 0.008857760429382324, 0.008854784011840821, 0.008821215629577637, 0.008871392250061035, 0.008911520004272462, 0.008843263626098634, 0.008863903999328614, 0.009018912315368652, 0.008825311660766602, 0.009039711952209473, 0.009011072158813476, 0.008870016098022461, 0.008655551910400391, 0.008943807601928712, 0.009016384124755859, 0.008961119651794434, 0.00889020824432373, 0.008921279907226562, 0.008935232162475586, 0.008964096069335938, 0.008851455688476563, 0.008873727798461914, 0.008916447639465332, 0.008842047691345214, 0.00887395191192627, 0.00892518424987793, 0.008870176315307617, 0.00890828800201416, 0.00890447998046875, 0.008918784141540527, 0.008917695999145507, 0.008870016098022461, 0.008871071815490723, 0.008870623588562011, 0.008803808212280273, 0.008863840103149414, 0.00891539192199707, 0.008870176315307617, 0.008849120140075684, 0.008910176277160644, 0.008909472465515137, 0.008946975708007813, 0.008862431526184083, 0.008910847663879394, 0.008881888389587402, 0.008933216094970704, 0.008911456108093262, 0.00889241600036621, 0.008841055870056153, 0.008877344131469726, 0.008862015724182128, 0.008817055702209472, 0.008837120056152344, 0.008857695579528809, 0.008853407859802246, 0.008871935844421386, 0.00899071979522705, 0.008895584106445312, 0.008932607650756836, 0.008904224395751953, 0.008887968063354491, 0.008850048065185547, 0.008893280029296876, 0.0089303035736084, 0.008912896156311035, 0.008828736305236817, 0.008876480102539063, 0.008830240249633789, 0.008900992393493653, 0.009200960159301759, 0.008878879547119141, 0.00885372829437256, 0.008891231536865234, 0.008850367546081542, 0.008857664108276367, 0.008593952178955078, 0.008912960052490234, 0.008914336204528809, 0.008884511947631835, 0.00891321563720703, 0.008880127906799316, 0.008840543746948242, 0.008895392417907716, 0.008975296020507813, 0.00886457633972168, 0.008879360198974609, 0.008915840148925781, 0.008854623794555663, 0.008888447761535644, 0.008868736267089844, 0.008899935722351074, 0.00892563247680664, 0.00885536003112793, 0.008855744361877442, 0.008848671913146972, 0.008874496459960938, 0.008904159545898438, 0.008903424263000488, 0.008954943656921387, 0.008942048072814941, 0.008923199653625489, 0.00888259220123291, 0.008871520042419433, 0.008923583984375, 0.008875455856323242, 0.008937824249267578, 0.00887827205657959, 0.008898655891418457, 0.008873824119567872, 0.008931391716003418, 0.008912608146667481, 0.008882080078125, 0.008919424057006835, 0.008906720161437988, 0.008916416168212891, 0.008938079833984374, 0.009110976219177246, 0.008884703636169434, 0.008951904296875, 0.008867839813232421, 0.009244671821594238, 0.009105024337768554, 0.008950143814086915, 0.00891926383972168, 0.009200608253479004, 0.00897878360748291, 0.008982975959777833, 0.00885587215423584, 0.008951519966125488, 0.008898303985595704, 0.008964192390441895, 0.008887807846069335, 0.008917792320251465, 0.008879615783691406, 0.008987168312072753, 
0.008916671752929688, 0.009005215644836426, 0.008991071701049805, 0.008660991668701172, 0.008982208251953125, 0.00905247974395752, 0.008912896156311035, 0.008900927543640138, 0.008951199531555177, 0.00891932773590088, 0.008934847831726074, 0.008946592330932618, 0.00893507194519043, 0.008929280281066895, 0.008894463539123536, 0.008898591995239258, 0.008879872322082519, 0.009021663665771484, 0.008986559867858887, 0.009072095870971679, 0.008870847702026367, 0.008940320014953613, 0.008896832466125488, 0.008882623672485351, 0.008933024406433105, 0.008882656097412109, 0.00894547176361084, 0.0088721923828125, 0.008957887649536133, 0.00931174373626709, 0.008978943824768066, 0.008910976409912109, 0.00893939208984375, 0.00892751979827881, 0.00926796817779541, 0.009779359817504883, 0.009499456405639648, 0.009337183952331542, 0.00898739242553711, 0.0089934720993042, 0.008973952293395995, 0.008955743789672852, 0.008946528434753418, 0.008875103950500488, 0.008930432319641112, 0.008897631645202637, 0.00886025619506836, 0.008954879760742187, 0.008954463958740234, 0.008958368301391602, 0.008867839813232421, 0.008910847663879394, 0.008943615913391113, 0.009089152336120605, 0.00913599967956543, 0.00899891185760498, 0.008937472343444825, 0.0089169921875, 0.008888319969177246, 0.008931327819824218, 0.008923135757446288, 0.008894335746765137, 0.008899999618530273, 0.00883187198638916, 0.008865856170654296, 0.008875136375427245, 0.008702752113342285, 0.009015232086181641, 0.008942655563354492, 0.008967103958129883, 0.008927552223205567, 0.00901088047027588, 0.008879551887512207, 0.00890937614440918, 0.008877568244934082, 0.008948415756225585, 0.008918975830078125, 0.008908672332763672, 0.00892518424987793, 0.008926560401916504, 0.008905376434326172, 0.008888319969177246, 0.008920512199401855, 0.008886240005493163, 0.008903583526611329, 0.008860960006713867, 0.008895296096801758, 0.008916576385498047, 0.008983903884887696, 0.00915715217590332, 0.008957823753356934, 0.008972479820251464, 0.00899897575378418, 0.008980480194091797, 0.008881312370300293, 0.009073504447937012, 0.00926540756225586, 0.008988415718078614, 0.009005056381225587, 0.008964096069335938, 0.008959967613220215, 0.008938624382019042, 0.008940223693847657, 0.008904000282287598, 0.008949664115905762, 0.0089303035736084, 0.008893856048583984, 0.008921088218688965, 0.008923744201660156, 0.008893440246582032, 0.008874591827392577, 0.009052895545959472, 0.008894144058227539, 0.008923135757446288, 0.0088853759765625, 0.00894809627532959, 0.008890239715576172, 0.008945504188537597, 0.009025856018066407, 0.008859935760498048, 0.008933568000793457, 0.008946911811828613, 0.008891519546508788, 0.008926495552062989, 0.008922816276550293, 0.008927935600280762, 0.00887929630279541, 0.008893024444580079, 0.00888371181488037, 0.008677696228027344, 0.009243807792663573, 0.009261919975280761, 0.009233823776245117, 0.008949376106262208, 0.009182175636291505, 0.008947232246398925, 0.009021696090698242, 0.008952159881591797, 0.008926176071166992, 0.009024319648742675, 0.009607263565063476, 0.009040096282958984, 0.008916768074035644, 0.008856896400451661, 0.008910592079162599, 0.009089983940124512, 0.008924927711486817, 0.00889680004119873, 0.008931296348571777, 0.008935423851013183, 0.008884223937988281, 0.008861632347106933, 0.008851519584655762, 0.008857600212097168, 0.008900959968566895, 0.008877728462219238, 0.008855263710021972, 0.00892137622833252, 0.00896828842163086, 0.009012864112854005, 0.0089901123046875, 0.00901414394378662, 0.008975584030151368, 
0.008956704139709473, 0.00889241600036621, 0.008953791618347168, 0.008878144264221191, 0.008941727638244629, 0.00889020824432373, 0.008905759811401367, 0.008969183921813964, 0.008934816360473634, 0.008964703559875489, 0.008918304443359375, 0.008935968399047851, 0.008923328399658204, 0.008893471717834472, 0.00885654354095459, 0.008951359748840333, 0.008855135917663574, 0.008889023780822753, 0.008849056243896484, 0.008833760261535645, 0.008855648040771484, 0.008879743576049804, 0.008839391708374023, 0.008855392456054687, 0.008802111625671388, 0.008835264205932617, 0.008874176025390625, 0.008844703674316405, 0.00886240005493164, 0.008654848098754882, 0.008904704093933105, 0.008857600212097168, 0.008908479690551759, 0.008855968475341798, 0.009045439720153809, 0.008837183952331544, 0.008886688232421875, 0.008870240211486816, 0.008867487907409669, 0.008876031875610351, 0.008877568244934082, 0.008885791778564453, 0.008889632225036621, 0.009145952224731446, 0.009100864410400391, 0.00895849609375, 0.008951680183410644, 0.008949888229370118, 0.008971839904785157, 0.008976832389831543, 0.008951359748840333, 0.008934880256652832, 0.008987615585327149, 0.008943615913391113, 0.008972288131713867, 0.008902655601501466, 0.008965472221374511, 0.009087615966796875, 0.008990752220153808, 0.008942720413208007, 0.00890345573425293, 0.009000288009643555, 0.008995903968811034, 0.008918560028076172, 0.008921567916870116, 0.008883904457092286, 0.008912320137023927, 0.008898528099060058, 0.008861824035644532, 0.008899264335632324, 0.008894368171691895, 0.008910400390625, 0.008853183746337891, 0.008902527809143067, 0.008889439582824708, 0.008984224319458008, 0.00899071979522705, 0.008861696243286133, 0.008943488121032715, 0.009140352249145507, 0.009281439781188965, 0.008859744071960449, 0.00883619213104248, 0.008895392417907716, 0.008873311996459961, 0.008854016304016114, 0.00914243221282959, 0.00891881561279297, 0.009480192184448242, 0.00888649559020996, 0.008845312118530273, 0.008931551933288574, 0.008653663635253906, 0.008993856430053711, 0.009318719863891602, 0.00901734447479248, 0.008932000160217285, 0.008916768074035644, 0.008904800415039063, 0.008928352355957032, 0.008895392417907716, 0.00886796760559082, 0.00889408016204834, 0.00894092845916748, 0.008914912223815918, 0.008922016143798828, 0.008896672248840332, 0.008914079666137695, 0.008946623802185059, 0.009038975715637207, 0.008854047775268555, 0.008953951835632324, 0.008939519882202148, 0.00900710391998291, 0.008919039726257324, 0.00894761562347412, 0.009020928382873536, 0.008893024444580079, 0.008897695541381836, 0.008866047859191895, 0.008956512451171876, 0.008896512031555176, 0.008896608352661133, 0.008878080368041993, 0.008919072151184083, 0.008946623802185059, 0.008887231826782227, 0.008843647956848144, 0.008887455940246583, 0.008895135879516602, 0.008898367881774902, 0.008935423851013183, 0.00890060806274414, 0.00894713592529297, 0.008972127914428711, 0.00894819164276123, 0.008915264129638672, 0.00891487979888916, 0.008912320137023927, 0.008911423683166504, 0.00890880012512207, 0.008895520210266114, 0.008903583526611329, 0.008941632270812988, 0.008899871826171874, 0.008886752128601074, 0.008877951622009278, 0.009083264350891114, 0.008898816108703612, 0.008826656341552734, 0.008880096435546875, 0.008889375686645508, 0.008906815528869629, 0.008925151824951173, 0.00888044834136963, 0.008795007705688477, 0.008994720458984374, 0.008986144065856934, 0.008937952041625976, 0.008996479988098145, 0.008992704391479492, 0.00893177604675293, 
0.008969280242919922, 0.008973247528076172, 0.008919039726257324, 0.009000767707824707, 0.00890454387664795, 0.008946016311645507, 0.008954143524169922, 0.008999967575073242, 0.008999615669250489, 0.008962143898010254, 0.008976287841796875, 0.008996864318847657, 0.008955424308776855, 0.009003487586975098, 0.00898470401763916, 0.008962271690368652, 0.008916319847106934, 0.008885951995849609, 0.008923775672912597, 0.0089169921875, 0.00890988826751709, 0.008919551849365234, 0.008908224105834962, 0.00909823989868164, 0.009174176216125488, 0.00900972843170166, 0.008935359954833985, 0.009021792411804199, 0.009041440010070801, 0.009005215644836426, 0.008977888107299804, 0.008967167854309082, 0.008914719581604004, 0.008906463623046874, 0.011077887535095214, 0.01050767993927002, 0.009095871925354004, 0.008998432159423829, 0.009001152038574219, 0.009000639915466308, 0.009007072448730469, 0.009011967658996582, 0.008905856132507325, 0.008966912269592285, 0.008970432281494141, 0.008959263801574706, 0.00892182445526123, 0.008931136131286621, 0.008917280197143555, 0.008926943778991699, 0.008873215675354004, 0.008895551681518555, 0.008826560020446777, 0.008836735725402832, 0.008888575553894043, 0.008872063636779785, 0.008608927726745605, 0.008928095817565918, 0.008939616203308106, 0.008893600463867187, 0.008931679725646973, 0.008914560317993163, 0.008863615989685058, 0.008928159713745117, 0.008906559944152832, 0.008829119682312012, 0.008929120063781739, 0.008972448348999023, 0.008923135757446288, 0.008939616203308106, 0.008962176322937011, 0.008949343681335448, 0.008931520462036133, 0.008904704093933105, 0.008867487907409669, 0.008892191886901855, 0.008954239845275878, 0.008945631980895995, 0.009013471603393554, 0.00924079990386963, 0.009044960021972656, 0.009075136184692382, 0.009023872375488282, 0.009025216102600098, 0.009023615837097168, 0.008994400024414062, 0.008991328239440918, 0.008929632186889648, 0.009197216033935546, 0.008946016311645507, 0.009021087646484375, 0.008965279579162598, 0.008894911766052246, 0.009099072456359863, 0.008882783889770507, 0.008843168258666993, 0.008895711898803711, 0.008893312454223633, 0.008922975540161132, 0.008878496170043946, 0.008840255737304688, 0.008901311874389648, 0.008878080368041993, 0.009174528121948243, 0.009058943748474121, 0.008927295684814453, 0.00887715244293213, 0.008926943778991699, 0.008909824371337891, 0.0089169921875, 0.00892080020904541, 0.008907039642333985, 0.008970047950744629, 0.009015487670898438, 0.00889260768890381, 0.008906815528869629, 0.008901663780212402, 0.008934111595153808, 0.00890287971496582]",tokens/s,111.41511952544634,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = 
launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1068, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 634, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 230, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 560.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in 
__init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,835.82976,3354.329088,0.0,2959.081472,2957.493248,s,1,7.60441064453125,7.60441064453125,0.0,7.60441064453125,7.60441064453125,7.60441064453125,7.60441064453125,[7.60441064453125],,kWh,1.005276507085379e-05,1.1015301663254118e-06,4.153892212005461e-06,1.5308187449184663e-05,,MB,1128.615936,3423.535104,0.0,3017.801728,2552.885248,s,10,0.5859242820739747,0.058592428207397476,0.002362301742255165,0.05809035110473633,0.05996758232116699,0.06253839855194092,0.06459505153656006,"[0.06510921478271485, 0.0585645751953125, 0.05581372833251953, 0.05712700653076172, 0.05939628982543945, 0.058537055969238284, 0.057635486602783205, 0.05768281555175781, 0.05849788665771485, 0.05756022262573242]",tokens/s,4369.1652288900905,kWh,2.154151239307669e-06,2.3743067485282434e-07,1.4369149567204602e-06,3.828496870880954e-06,tokens/kWh,66866973.8108192,MB,1138.286592,3423.535104,0.0,3017.801728,2552.887808,s,10,12.852337768554687,1.285233776855469,0.012224236546737058,1.289853271484375,1.2956299560546873,1.2969684204101561,1.298039191894531,"[1.298306884765625, 1.29533251953125, 1.294235595703125, 1.293533203125, 1.2904178466796874, 1.2892886962890624, 1.275995849609375, 1.2716734619140626, 1.2858623046875, 1.25769140625]",tokens/s,49.01831957306602,kWh,3.715348410902399e-05,4.097759460257526e-06,2.1655742406280632e-05,6.290698597556217e-05,tokens/kWh,1001478.5961049536,,s,630,12.84996529006958,0.020396770301697745,0.00037085017510198466,0.020403215408325195,0.020714876365661623,0.020847481441497805,0.02165227920532227,"[0.021066112518310545, 0.02083488082885742, 0.02062531280517578, 0.02076054382324219, 0.020461727142333984, 0.02046553611755371, 0.020426752090454102, 0.020465215682983397, 0.02067840003967285, 0.02053590393066406, 0.020598527908325195, 0.020515167236328125, 0.020353023529052734, 0.02046953582763672, 0.020455135345458984, 0.020515327453613282, 0.020817920684814452, 0.020729312896728514, 0.02049430465698242, 0.020668800354003907, 0.020670047760009767, 0.020471839904785155, 0.02052355194091797, 0.020469696044921874, 0.020738143920898438, 0.020580352783203124, 0.020473407745361327, 0.020440927505493166, 0.020510751724243163, 0.020568639755249023, 0.0203505916595459, 0.02026697540283203, 0.020344831466674804, 0.02058608055114746, 0.02115839958190918, 0.02113260841369629, 0.022361087799072265, 0.02051584053039551, 0.020597152709960938, 0.020817695617675783, 0.021066560745239257, 0.0205614070892334, 0.02032614326477051, 0.02053606414794922, 
0.020586496353149415, 0.02038374328613281, 0.020475839614868162, 0.02048543930053711, 0.020498592376708983, 0.020699743270874024, 0.020553728103637696, 0.02041651153564453, 0.020514463424682616, 0.020670816421508788, 0.020668415069580077, 0.020525056838989256, 0.020358400344848634, 0.02049305534362793, 0.020557823181152343, 0.020657760620117187, 0.020686464309692384, 0.020398880004882814, 0.020418560028076172, 0.02129913520812988, 0.02044937515258789, 0.02044313621520996, 0.020438335418701173, 0.020499135971069334, 0.020596736907958983, 0.020344831466674804, 0.020529151916503906, 0.020545536041259766, 0.02068070411682129, 0.020444320678710937, 0.020466527938842773, 0.020559871673583984, 0.020563968658447264, 0.020537343978881836, 0.020682752609252928, 0.02056188774108887, 0.020637727737426757, 0.021139455795288087, 0.020524543762207033, 0.02073855972290039, 0.02047385597229004, 0.0210984001159668, 0.020659360885620117, 0.02035807991027832, 0.02040403175354004, 0.02050886344909668, 0.020812864303588866, 0.02082419204711914, 0.020410879135131836, 0.020457855224609377, 0.02056185531616211, 0.020596736907958983, 0.020531200408935548, 0.02075823974609375, 0.020402368545532228, 0.020547679901123047, 0.02059587287902832, 0.020553728103637696, 0.02042678451538086, 0.020369535446166993, 0.0203885440826416, 0.020549631118774413, 0.02048793601989746, 0.020549888610839843, 0.020465375900268555, 0.020435136795043947, 0.020608768463134766, 0.020525407791137696, 0.020568063735961914, 0.020458656311035155, 0.0205296630859375, 0.020626943588256837, 0.02035593605041504, 0.02057027244567871, 0.020567264556884766, 0.020607616424560545, 0.020518911361694335, 0.02039193534851074, 0.020479999542236327, 0.020534751892089843, 0.020465503692626952, 0.02038240051269531, 0.02088243293762207, 0.020664575576782227, 0.02041484832763672, 0.020543872833251955, 0.020746240615844725, 0.020398080825805662, 0.020849727630615236, 0.02044380760192871, 0.020334880828857423, 0.020512767791748047, 0.020389888763427736, 0.021529727935791016, 0.020369760513305663, 0.020412960052490235, 0.020612096786499022, 0.020242719650268554, 0.020423391342163085, 0.02060633659362793, 0.020472448348999025, 0.020578304290771485, 0.020781248092651368, 0.020649791717529297, 0.02050048065185547, 0.02042265510559082, 0.020436607360839843, 0.020599168777465822, 0.020567039489746093, 0.02037820816040039, 0.02026460838317871, 0.02044326400756836, 0.02032633590698242, 0.020087135314941405, 0.02034320068359375, 0.020228031158447266, 0.02033203125, 0.020213760375976563, 0.021139968872070314, 0.020799488067626954, 0.020573631286621093, 0.020531776428222657, 0.020448671340942384, 0.02056867218017578, 0.020701183319091796, 0.02047385597229004, 0.02041609573364258, 0.020422624588012694, 0.020314815521240235, 0.02025651168823242, 0.02041651153564453, 0.020320287704467775, 0.0203504638671875, 0.020083168029785155, 0.020351999282836913, 0.02077289581298828, 0.020705408096313476, 0.020458335876464843, 0.020699392318725585, 0.022826751708984374, 0.020645696640014647, 0.02057360076904297, 0.020406911849975586, 0.020424032211303712, 0.020321088790893553, 0.02088960075378418, 0.020375520706176757, 0.020336639404296874, 0.020813535690307618, 0.020738336563110353, 0.02062745666503906, 0.02045283126831055, 0.020380191802978516, 0.020395488739013673, 0.02068675231933594, 0.020378240585327147, 0.02040415954589844, 0.020387903213500976, 0.02037881660461426, 0.020322240829467774, 0.02026723289489746, 0.020404064178466796, 0.020703712463378907, 0.020490591049194335, 
0.020297727584838866, 0.02039948844909668, 0.020417152404785158, 0.020733951568603515, 0.020858816146850586, 0.020471872329711913, 0.02045747184753418, 0.02034876823425293, 0.020319807052612306, 0.02026691246032715, 0.02046636772155762, 0.02065203285217285, 0.020466815948486327, 0.020539264678955078, 0.020494335174560546, 0.02041548728942871, 0.020412479400634766, 0.02055776023864746, 0.0204466552734375, 0.02082259178161621, 0.020447231292724608, 0.020410367965698242, 0.020369407653808593, 0.02038374328613281, 0.020798847198486327, 0.020638336181640626, 0.0203855037689209, 0.020336927413940428, 0.020356832504272462, 0.02049849510192871, 0.020762847900390624, 0.020410367965698242, 0.0204977912902832, 0.020458112716674803, 0.020467039108276366, 0.020286111831665038, 0.020400224685668947, 0.020483999252319335, 0.020504159927368162, 0.020430816650390624, 0.020422975540161134, 0.020510719299316405, 0.021272031784057618, 0.022427616119384767, 0.020874624252319337, 0.02043996810913086, 0.020322303771972656, 0.020555776596069338, 0.020563968658447264, 0.020361215591430663, 0.02037555122375488, 0.02037555122375488, 0.020426816940307617, 0.020391744613647463, 0.02035036849975586, 0.020415199279785155, 0.02046883201599121, 0.020540447235107423, 0.020559743881225587, 0.020361120223999024, 0.02067875289916992, 0.020927743911743166, 0.02074025535583496, 0.020564479827880858, 0.02065417671203613, 0.020415552139282228, 0.020495296478271485, 0.02044313621520996, 0.020551263809204103, 0.020318784713745118, 0.020387104034423828, 0.020437568664550782, 0.020215808868408205, 0.020375455856323242, 0.020284799575805663, 0.020392671585083007, 0.020576255798339844, 0.020168319702148437, 0.02047433662414551, 0.020367040634155273, 0.020367584228515624, 0.020745311737060547, 0.020517791748046875, 0.02040233612060547, 0.02052079963684082, 0.020455423355102538, 0.020440927505493166, 0.020528703689575194, 0.020400480270385744, 0.020340959548950197, 0.02033433532714844, 0.020318496704101564, 0.021116895675659178, 0.020486175537109377, 0.02063564872741699, 0.020516511917114257, 0.020412832260131835, 0.020492223739624022, 0.020350976943969725, 0.020206975936889648, 0.020854560852050782, 0.020509536743164063, 0.02059676742553711, 0.02047792053222656, 0.020379520416259764, 0.020434240341186523, 0.020489023208618163, 0.02088640022277832, 0.020379520416259764, 0.020236415863037108, 0.020536352157592773, 0.020687135696411132, 0.020585119247436525, 0.020477983474731447, 0.020431968688964845, 0.020341663360595702, 0.02047385597229004, 0.020628639221191406, 0.020433536529541017, 0.020408384323120116, 0.02032451248168945, 0.020545440673828123, 0.02067465591430664, 0.020461856842041017, 0.02064067268371582, 0.020521631240844728, 0.02047369575500488, 0.020277568817138672, 0.020316160202026368, 0.020928512573242186, 0.020516159057617188, 0.020480287551879882, 0.020394176483154298, 0.020347103118896485, 0.02036636734008789, 0.02031622314453125, 0.0204705924987793, 0.0208155517578125, 0.020304288864135742, 0.020307968139648438, 0.020391424179077147, 0.02025507164001465, 0.020297887802124024, 0.020365312576293947, 0.020338687896728515, 0.020197376251220703, 0.02053638458251953, 0.020701887130737305, 0.020522335052490234, 0.020585023880004882, 0.020396095275878906, 0.0204716796875, 0.02042255973815918, 0.02035353660583496, 0.02069708824157715, 0.020436992645263673, 0.020387840270996094, 0.02026905632019043, 0.020415552139282228, 0.02056604766845703, 0.020157344818115236, 0.02027724838256836, 0.020641183853149413, 0.02080214309692383, 
0.020518911361694335, 0.0205897274017334, 0.02038256072998047, 0.02049238395690918, 0.020385696411132814, 0.02022604751586914, 0.02081875228881836, 0.020418336868286133, 0.02046175956726074, 0.020223648071289062, 0.02026473617553711, 0.02034262466430664, 0.020339424133300782, 0.02002943992614746, 0.020116607666015626, 0.01990950393676758, 0.020244768142700195, 0.020332256317138673, 0.02036735916137695, 0.020353023529052734, 0.020308063507080077, 0.020321279525756835, 0.020239263534545898, 0.020141439437866213, 0.02008127975463867, 0.020015104293823242, 0.019920896530151368, 0.019986080169677733, 0.020010879516601562, 0.01982512092590332, 0.021702335357666015, 0.02112544059753418, 0.020215808868408205, 0.020246143341064452, 0.02060736083984375, 0.02007859230041504, 0.020531200408935548, 0.02005731201171875, 0.020046144485473632, 0.019919008255004884, 0.020033855438232422, 0.02020966339111328, 0.020714559555053712, 0.020298688888549805, 0.020106592178344727, 0.02007107162475586, 0.020136959075927736, 0.020173824310302735, 0.02011039924621582, 0.020013311386108398, 0.02024323272705078, 0.020260768890380858, 0.020298912048339845, 0.020278112411499023, 0.020484256744384765, 0.020156160354614257, 0.020084320068359376, 0.020166976928710938, 0.019982143402099608, 0.020182432174682616, 0.020291872024536133, 0.020503231048583984, 0.020256767272949217, 0.020186656951904296, 0.020084768295288085, 0.020695232391357423, 0.02007046318054199, 0.019983936309814453, 0.020088544845581056, 0.020717727661132813, 0.020153375625610353, 0.02018604850769043, 0.020221439361572266, 0.0199616641998291, 0.019862016677856444, 0.020035776138305664, 0.020262912750244142, 0.020312000274658203, 0.020231231689453125, 0.020261215209960937, 0.02011408042907715, 0.019976192474365235, 0.019995744705200196, 0.02014668846130371, 0.020264991760253905, 0.01996633529663086, 0.01989017677307129, 0.02005638313293457, 0.02026652717590332, 0.019949216842651368, 0.019876224517822266, 0.01999488067626953, 0.019996543884277344, 0.019900224685668946, 0.019879552841186525, 0.019929664611816406, 0.01988096046447754, 0.019993024826049803, 0.020518943786621092, 0.019972192764282228, 0.02005990409851074, 0.019971839904785155, 0.02012460708618164, 0.02036729621887207, 0.02080508804321289, 0.02080419158935547, 0.020658079147338866, 0.020574304580688478, 0.020661279678344725, 0.02052102470397949, 0.020721759796142578, 0.02085487937927246, 0.020675296783447265, 0.02048409652709961, 0.020155967712402342, 0.02002579116821289, 0.02020672035217285, 0.01999875259399414, 0.019850080490112304, 0.01988582420349121, 0.019799808502197265, 0.0199869441986084, 0.019927040100097656, 0.019869632720947265, 0.020035648345947267, 0.020189184188842774, 0.020275136947631837, 0.020230207443237305, 0.0202128963470459, 0.020095775604248047, 0.020451391220092773, 0.020184320449829103, 0.021155391693115234, 0.02065043258666992, 0.02047529602050781, 0.020656864166259767, 0.020289535522460937, 0.020059808731079102, 0.020040031433105468, 0.02002707290649414, 0.020166976928710938, 0.020197376251220703, 0.020105215072631837, 0.020153440475463868, 0.02010745620727539, 0.02019606399536133, 0.0200392951965332, 0.020093311309814454, 0.020641759872436525, 0.021180448532104493, 0.020551679611206054, 0.020414464950561522, 0.02025494384765625, 0.020300575256347656, 0.020220928192138672, 0.019963903427124022, 0.01983692741394043, 0.019812351226806642, 0.020319520950317384, 0.022896383285522463, 0.020020191192626952, 0.020242240905761717, 0.020376928329467775, 0.02020796775817871, 
0.020017759323120117, 0.020375200271606445, 0.020583776473999022, 0.020418495178222657, 0.02068070411682129, 0.021214176177978515, 0.02055276870727539, 0.020251583099365235, 0.020213056564331054, 0.021889759063720704, 0.023609024047851562, 0.020303808212280273, 0.02025904083251953, 0.020203039169311522, 0.020273632049560546, 0.020357248306274413, 0.020189184188842774, 0.019868736267089845, 0.020318464279174806, 0.020626111984252928, 0.02025267219543457, 0.0202259521484375, 0.0200479679107666, 0.01987993621826172, 0.019869695663452147, 0.019916479110717773, 0.0198590087890625, 0.020060928344726562, 0.020844736099243165, 0.020325439453125, 0.020484384536743165, 0.020815040588378905, 0.020240703582763673, 0.020048383712768555, 0.020106719970703124, 0.01999523162841797, 0.019959487915039063, 0.01983513641357422, 0.019748544692993163, 0.019869888305664062, 0.020027103424072264, 0.0198756160736084, 0.01991468811035156, 0.019755712509155275, 0.01980191993713379, 0.01981273651123047, 0.019928159713745116, 0.0199769287109375, 0.01981439971923828, 0.019974143981933593, 0.01995110321044922, 0.01984355163574219, 0.019891616821289062, 0.020498176574707032, 0.019849279403686523, 0.019995456695556642, 0.01989414405822754, 0.020402399063110352, 0.01985526466369629, 0.01974675178527832, 0.020035648345947267, 0.02002124786376953, 0.019998655319213868, 0.019843135833740234, 0.01981955146789551, 0.02006524848937988, 0.020015104293823242, 0.020245983123779298, 0.019849760055541992, 0.01994710350036621, 0.019967744827270508, 0.02000543975830078, 0.01996735954284668, 0.019847904205322266, 0.019860992431640623, 0.019906335830688477, 0.019872095108032225, 0.019827072143554687, 0.019736576080322265, 0.01998361587524414, 0.019966720581054687, 0.01983283233642578, 0.019861631393432617, 0.019925952911376953, 0.01978191947937012, 0.01995779228210449, 0.020001312255859376, 0.019867551803588866, 0.019775680541992188, 0.020377023696899414, 0.01984979248046875, 0.020045440673828126, 0.01998476791381836, 0.019994623184204103]",tokens/s,49.027369784948945,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", 
line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.47264,11724.06272,0.0,11328.815104,11314.254848,s,1,7.47275341796875,7.47275341796875,0.0,7.47275341796875,7.47275341796875,7.47275341796875,7.47275341796875,[7.47275341796875],,kWh,1.2082180279158667e-05,1.3250766185224244e-06,4.895559471987387e-06,1.830281636966848e-05,,MB,1093.189632,12166.561792,0.0,11760.828416,11713.906688,s,10,4.051521362304687,0.40515213623046875,0.008479341193698546,0.40764956665039065,0.4102277038574219,0.4112820343017578,0.4121254986572266,"[0.3810806579589844, 0.4031009521484375, 0.40672128295898435, 0.4038128662109375, 0.409993408203125, 0.4085778503417969, 0.406339111328125, 0.409971435546875, 0.41233636474609375, 
0.40958743286132815]",tokens/s,631.8614098442657,kWh,1.1751291458666704e-05,1.295934292239087e-06,7.827084039440102e-06,2.087430979034589e-05,tokens/kWh,12263878.545981761,MB,1097.920512,12271.419392,0.0,11865.686016,11828.952576,s,10,30.397986328125,3.0397986328125,0.004010404397557613,3.0387587890625003,3.044893994140625,3.045714453125,3.0463708203125,"[3.033953125, 3.03819580078125, 3.038205810546875, 3.040492919921875, 3.039311767578125, 3.03443017578125, 3.03797314453125, 3.046534912109375, 3.044711669921875, 3.044177001953125]",tokens/s,20.725057021856337,kWh,8.89646126238327e-05,9.81164368429653e-06,5.905577502235942e-05,0.00015783203133048863,tokens/kWh,399158.51978159393,,s,630,30.39387789535523,0.048244250627547974,0.00035791633083775157,0.0481997127532959,0.048510780334472654,0.048626995277404786,0.050413185539245606,"[0.05025820922851563, 0.04848777770996094, 0.04789548873901367, 0.04802080154418945, 0.04798847961425781, 0.04777388763427735, 0.047736351013183596, 0.047752128601074216, 0.047713409423828124, 0.04776025772094727, 0.048244735717773435, 0.04807475280761719, 0.047892478942871096, 0.04774092864990234, 0.04773023986816406, 0.047827262878417966, 0.04793363189697265, 0.04795180892944336, 0.04808499145507812, 0.048080894470214845, 0.048648094177246096, 0.04863945770263672, 0.048427486419677736, 0.048218273162841795, 0.04809081649780273, 0.048199745178222654, 0.04799929428100586, 0.047961215972900394, 0.04790345764160156, 0.04789052963256836, 0.04802886581420898, 0.04800518417358399, 0.04820156860351563, 0.048116416931152345, 0.04798396682739258, 0.048184192657470704, 0.048148574829101565, 0.04809104156494141, 0.04803379058837891, 0.04824198532104492, 0.04828639984130859, 0.0483061752319336, 0.048287391662597656, 0.04832092666625976, 0.04823139190673828, 0.04823139190673828, 0.04825398254394531, 0.04825187301635742, 0.04818700790405273, 0.04809971237182617, 0.04806646347045898, 0.048070209503173825, 0.048142879486083985, 0.04819968032836914, 0.048336894989013675, 0.04815462493896484, 0.04833484649658203, 0.04807884979248047, 0.04811174392700195, 0.04833443069458008, 0.04824092864990234, 0.04854988861083984, 0.04851007843017578, 0.05042982482910156, 0.04864176177978516, 0.04799939346313477, 0.04797030258178711, 0.048057376861572264, 0.04798358535766602, 0.04780441665649414, 0.047728641510009766, 0.04773231887817383, 0.048008865356445315, 0.048036830902099606, 0.04782057571411133, 0.04802377700805664, 0.04803152084350586, 0.047826335906982424, 0.04792995071411133, 0.04795619201660156, 0.048086814880371094, 0.04801945495605469, 0.0485478401184082, 0.048543262481689456, 0.0483988151550293, 0.048291519165039064, 0.048360992431640625, 0.04814934539794922, 0.048083038330078126, 0.048151424407958984, 0.04804022216796875, 0.04797328186035156, 0.047903648376464845, 0.04806335830688477, 0.048060352325439454, 0.04801337432861328, 0.0482529296875, 0.048395423889160155, 0.04804473495483398, 0.04804828643798828, 0.048263168334960936, 0.04834643173217774, 0.04841036987304687, 0.04859795379638672, 0.04849868774414062, 0.04835737609863281, 0.04838195037841797, 0.0483061752319336, 0.04830003356933594, 0.048146430969238284, 0.048390209197998045, 0.04827078247070313, 0.04807727813720703, 0.04813827133178711, 0.048252609252929686, 0.048222526550292966, 0.04818473434448242, 0.04814089584350586, 0.04831027221679687, 0.04818329620361328, 0.04819148635864258, 0.04832771301269531, 0.04837295913696289, 0.048623390197753906, 0.04854508972167969, 0.04850960159301758, 0.050522113800048826, 
0.048534591674804686, 0.047973312377929685, 0.048004798889160157, 0.04782070541381836, 0.04812035369873047, 0.04784332656860352, 0.04790361785888672, 0.04788326263427734, 0.04785561752319336, 0.04790288162231445, 0.04789807891845703, 0.048116031646728515, 0.04795779037475586, 0.04791116714477539, 0.04797407913208008, 0.048148448944091794, 0.0480852165222168, 0.04799488067626953, 0.0485233268737793, 0.04866044616699219, 0.04848633575439453, 0.04825644683837891, 0.04825369644165039, 0.04816691207885742, 0.048121471405029294, 0.04831907272338867, 0.048086273193359376, 0.04797289657592774, 0.04831155014038086, 0.04797443389892578, 0.04801327896118164, 0.04802227020263672, 0.04821724700927734, 0.04820780944824219, 0.0481901741027832, 0.048156768798828124, 0.04820195388793945, 0.048178752899169924, 0.04844169616699219, 0.048465599060058595, 0.048425247192382816, 0.04848035049438477, 0.04834703826904297, 0.04825702285766602, 0.04835091018676758, 0.0483515510559082, 0.04815462493896484, 0.04808246231079102, 0.04814806365966797, 0.048208545684814454, 0.04814172744750977, 0.04812883377075195, 0.04803596878051758, 0.04808832168579102, 0.048171615600585936, 0.04814448165893555, 0.04833708953857422, 0.04837964630126953, 0.04840240097045898, 0.048459808349609376, 0.04846092987060547, 0.04851801681518555, 0.050372447967529294, 0.048853408813476565, 0.048236286163330075, 0.04806051254272461, 0.048004192352294923, 0.04792822265625, 0.047925247192382815, 0.04789433670043945, 0.04807228851318359, 0.04803039932250976, 0.04795587158203125, 0.047951873779296876, 0.047927520751953126, 0.047914783477783204, 0.048173057556152345, 0.04814438247680664, 0.04819686508178711, 0.04829782485961914, 0.0480285758972168, 0.048346656799316406, 0.04845001602172851, 0.048482017517089845, 0.048433441162109375, 0.0484983024597168, 0.04829363250732422, 0.04826995086669922, 0.04801126480102539, 0.048099552154541016, 0.04808272171020508, 0.048009407043457034, 0.04827936172485352, 0.048205825805664064, 0.04827545547485351, 0.04812595367431641, 0.04803583908081055, 0.048205825805664064, 0.048121856689453124, 0.048356830596923826, 0.048200225830078124, 0.04826726531982422, 0.04843110275268555, 0.04854988861083984, 0.04842291259765625, 0.0484453125, 0.048400062561035156, 0.04843376159667969, 0.0481769905090332, 0.04807680130004883, 0.04816252899169922, 0.04822428894042969, 0.04807500839233399, 0.048254718780517576, 0.04803631973266602, 0.04817049789428711, 0.04822367858886719, 0.0481247673034668, 0.04855398559570313, 0.04826521682739258, 0.04849868774414062, 0.04833884811401367, 0.04844348907470703, 0.048367038726806644, 0.04840300750732422, 0.05048524856567383, 0.04894502258300781, 0.04827971267700195, 0.047988895416259766, 0.04782489776611328, 0.04800511932373047, 0.04788169479370117, 0.048143009185791015, 0.04801715087890625, 0.0479725456237793, 0.04782483291625977, 0.047875358581542966, 0.04790140914916992, 0.04807884979248047, 0.047876094818115236, 0.048228351593017575, 0.048078655242919925, 0.04811747360229492, 0.04812169647216797, 0.048390785217285154, 0.04864521789550781, 0.04863001632690429, 0.04846454238891602, 0.04832665634155273, 0.048140289306640625, 0.0481954231262207, 0.04821539306640625, 0.04807267379760742, 0.04822512054443359, 0.0481743049621582, 0.047988639831542966, 0.04798137664794922, 0.048029281616210937, 0.04796057510375976, 0.04806409454345703, 0.048068191528320314, 0.04826598358154297, 0.04811779022216797, 0.04839984130859375, 0.04847875213623047, 0.04861539077758789, 0.04860742568969727, 0.04852035140991211, 
0.04838576126098633, 0.048399326324462894, 0.04830003356933594, 0.04820326232910156, 0.048236446380615236, 0.048192031860351564, 0.04823046493530273, 0.04803379058837891, 0.04808201599121094, 0.04823855972290039, 0.04810028839111328, 0.048196895599365235, 0.04811849594116211, 0.048162849426269534, 0.048162849426269534, 0.04822537612915039, 0.04838896179199219, 0.048347137451171876, 0.048402431488037106, 0.048363521575927736, 0.050638721466064456, 0.048599422454833986, 0.04799897766113281, 0.04786175918579102, 0.04782815933227539, 0.04777558517456055, 0.04788528060913086, 0.047783935546875, 0.047869697570800784, 0.0477861442565918, 0.04775740814208984, 0.04782227325439453, 0.04783161544799805, 0.04800307083129883, 0.04794572830200195, 0.047925247192382815, 0.048080192565917966, 0.0480447998046875, 0.04801059341430664, 0.0482861442565918, 0.048424991607666015, 0.04845375823974609, 0.04836284637451172, 0.04806630325317383, 0.04800723266601563, 0.047936065673828125, 0.047908958435058595, 0.04790707015991211, 0.0479536018371582, 0.0481794548034668, 0.04801945495605469, 0.04806054306030273, 0.047998847961425783, 0.04797030258178711, 0.047916961669921876, 0.048162368774414065, 0.04808348846435547, 0.048246784210205076, 0.048130046844482424, 0.048323841094970704, 0.048487327575683595, 0.04846115112304687, 0.048374080657958986, 0.04828387069702148, 0.04832662582397461, 0.048508926391601564, 0.04816876983642578, 0.048025440216064454, 0.04806009674072265, 0.0480951042175293, 0.0481280632019043, 0.04808319854736328, 0.04814281463623047, 0.04811980819702148, 0.048113662719726565, 0.048233470916748046, 0.04822732925415039, 0.04841632080078125, 0.04840275192260742, 0.04828387069702148, 0.04851004791259766, 0.04840284729003906, 0.04841292953491211, 0.05049379348754883, 0.048779422760009766, 0.048067008972167966, 0.04790182495117187, 0.047860607147216794, 0.047811969757080075, 0.04779052734375, 0.047917247772216794, 0.04789657592773437, 0.04801887893676758, 0.04807123184204101, 0.04798463821411133, 0.04791484832763672, 0.04782505416870117, 0.047905792236328126, 0.04792348861694336, 0.047943519592285155, 0.04802150344848633, 0.04800806427001953, 0.04818467330932617, 0.04860550308227539, 0.04842940902709961, 0.04838809585571289, 0.04822630310058594, 0.04813833618164062, 0.04801865768432617, 0.0480959358215332, 0.0481300163269043, 0.048265056610107424, 0.04803193664550781, 0.04801126480102539, 0.04798681640625, 0.04807462310791016, 0.04804118347167969, 0.04814313507080078, 0.04811980819702148, 0.048277408599853515, 0.04822844696044922, 0.0482979850769043, 0.04827344131469727, 0.04846588897705078, 0.048494590759277346, 0.04838195037841797, 0.048428192138671874, 0.04827827072143555, 0.04829193496704102, 0.0481710090637207, 0.048247806549072264, 0.04824335861206055, 0.04820556640625, 0.04811008071899414, 0.0483328971862793, 0.04821212768554688, 0.048166751861572266, 0.048132095336914066, 0.04828521728515625, 0.04823731231689453, 0.04836748886108398, 0.048371551513671875, 0.04851507186889648, 0.048429153442382813, 0.048607135772705076, 0.04847206497192383, 0.05049971389770508, 0.04876697540283203, 0.048233566284179685, 0.048142398834228516, 0.04804390335083008, 0.04808156967163086, 0.0479747200012207, 0.048138240814208984, 0.048121631622314455, 0.04812412643432617, 0.04811775970458984, 0.04814438247680664, 0.04803193664550781, 0.048213024139404294, 0.048183361053466794, 0.048186080932617184, 0.048129470825195315, 0.04823068618774414, 0.04825494384765625, 0.04849407958984375, 0.048651073455810545, 
0.048637664794921875, 0.048498977661132814, 0.04840179061889648, 0.04821868896484375, 0.048213760375976564, 0.04818483352661133, 0.048081600189208984, 0.04815679931640625, 0.0481341438293457, 0.04832172775268555, 0.04818937683105469, 0.04828351974487305, 0.04837273788452148, 0.04830374526977539, 0.048293888092041014, 0.04819782257080078, 0.048363712310791014, 0.04839833450317383, 0.04849478530883789, 0.04867862319946289, 0.04858848190307617, 0.048578144073486325, 0.048476993560791014, 0.04839846420288086, 0.0484466552734375, 0.04835187149047852, 0.048363582611083984, 0.0483279037475586, 0.04829990386962891, 0.04817523193359375, 0.048481056213378906, 0.04835548782348633, 0.04829964828491211, 0.04829177474975586, 0.04821430587768555, 0.04818124771118164, 0.048388031005859374, 0.04844345474243164, 0.04855795288085937, 0.048600383758544925, 0.048608062744140625, 0.048465503692626956, 0.05048566436767578, 0.04885488128662109, 0.04823484802246094, 0.04803023910522461, 0.048045921325683597, 0.04798463821411133, 0.04814233779907227, 0.048112735748291016, 0.04800604629516601, 0.047994686126708985, 0.048115455627441406, 0.04803414535522461, 0.04786118316650391, 0.048036128997802734, 0.047978527069091795, 0.04826556777954102, 0.04811161422729492, 0.04812799835205078, 0.048151775360107424, 0.048239391326904295, 0.04866457748413086, 0.048568286895751954, 0.04847824096679688, 0.04830003356933594, 0.04833280181884766, 0.048220321655273436, 0.04810736083984375, 0.04820787048339844, 0.04817824172973633, 0.04807980728149414, 0.048154048919677735, 0.04814652633666992, 0.04809366226196289, 0.04816486358642578, 0.04820787048339844, 0.048271358489990236, 0.04827859115600586, 0.04832147216796875, 0.04843270492553711, 0.048372161865234374, 0.04862118530273438, 0.04849638366699219, 0.04850956726074219, 0.04838358306884766, 0.048449951171875, 0.04844748687744141, 0.04832406234741211, 0.048364063262939454, 0.04831350326538086, 0.04825548934936524, 0.048195934295654295, 0.04824476623535156, 0.0483430061340332, 0.04852345657348633, 0.048365375518798825, 0.048467967987060545, 0.04841267013549805, 0.048457534790039065, 0.04852134323120117, 0.04848646545410156, 0.048502880096435545, 0.04852918243408203, 0.048648319244384765, 0.050347999572753904, 0.04873532867431641, 0.048140865325927734, 0.04796627044677734, 0.047962432861328126, 0.0479021110534668, 0.04787891387939453, 0.04796921539306641, 0.04789715194702148, 0.04791289520263672, 0.04789712142944336, 0.04788611221313477, 0.04799065780639648, 0.04790879821777344, 0.04801523208618164, 0.0479378547668457, 0.04794169616699219, 0.048041248321533205, 0.04804867172241211, 0.04844972610473633, 0.0486297607421875, 0.04842623901367187, 0.048445758819580076, 0.04827932739257813, 0.04811439895629883, 0.04824825668334961, 0.048187904357910157, 0.04820787048339844, 0.04819046401977539, 0.04812083053588867, 0.04815420913696289, 0.04818179321289062, 0.04826217651367187, 0.04825174331665039, 0.04816864013671875, 0.048322879791259765, 0.04829100799560547, 0.04830495834350586, 0.04839136123657226, 0.04845036697387695, 0.048642047882080076, 0.04893199920654297, 0.04854460906982422, 0.04861030578613281, 0.04846899032592773, 0.048500415802001956, 0.04844326400756836, 0.04832505416870117, 0.048358497619628904, 0.04830915069580078, 0.048256992340087894, 0.04828131103515625, 0.04831468963623047, 0.04848796844482422, 0.04844182586669922, 0.04843110275268555, 0.0483749771118164, 0.048509281158447264, 0.04844182586669922, 0.04851030349731445, 0.04864886474609375, 0.048623615264892575, 
0.04879359817504883]",tokens/s,20.72785849074811,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1153, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 918, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 918, in [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 696, in __init__ self.mlp = Qwen2MoeSparseMoeBlock(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 634, in __init__ [Qwen2MoeMLP(config, intermediate_size=config.moe_intermediate_size) for _ in range(self.num_experts)] File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 634, in [Qwen2MoeMLP(config, intermediate_size=config.moe_intermediate_size) for _ in range(self.num_experts)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 240, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 12.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.362624,718.209024,0.0,322.961408,314.743808,s,1,7.12930908203125,7.12930908203125,0.0,7.12930908203125,7.12930908203125,7.12930908203125,7.12930908203125,[7.12930908203125],,kWh,4.444330741659997e-06,4.831028945992709e-07,9.975007979889927e-07,5.92493443424826e-06,,MB,1093.103616,810.483712,0.0,404.750336,391.119872,s,33,0.2363698244094849,0.007162721951802572,0.00012241536813872747,0.007113791942596435,0.007291059112548828,0.007445760154724121,0.007545480937957764,"[0.007535232067108154, 0.0070817599296569824, 0.0070991358757019046, 0.007550303936004639, 0.00729257583618164, 0.007106944084167481, 0.007197567939758301, 0.007075488090515137, 0.0072849922180175785, 0.007142528057098389, 0.007201119899749756, 0.007128448009490967, 0.007084000110626221, 0.007113791942596435, 0.007054240226745606, 0.007386112213134765, 0.007106272220611573, 0.00710313606262207, 0.007114751815795899, 0.007225279808044434, 0.007111711978912354, 0.007164415836334229, 0.007107615947723389, 0.0071018881797790525, 0.007068384170532227, 0.007080031871795654, 0.007123487949371338, 0.007161632061004639, 0.007251999855041504, 0.007114175796508789, 0.007083936214447022, 0.007061151981353759, 
0.0070557122230529784]",tokens/s,35740.60276562529,kWh,2.3613727125809281e-07,2.6041602155796568e-08,1.5620697501942162e-07,4.1838584843331104e-07,tokens/kWh,611875380.007757,MB,1103.044608,825.163776,0.0,419.4304,391.122432,s,33,9.878880798339848,0.2993600241921165,0.024886758566057144,0.29488934326171873,0.30042205810546874,0.3044090698242187,0.3966516906738281,"[0.3088450927734375, 0.4379724426269531, 0.2999659729003906, 0.30038250732421873, 0.29915423583984374, 0.30145172119140623, 0.30043194580078125, 0.29284234619140626, 0.2969441223144531, 0.297340087890625, 0.29619937133789065, 0.2939107666015625, 0.29546102905273436, 0.2950823974609375, 0.29575534057617187, 0.29818655395507815, 0.29488934326171873, 0.2978695983886719, 0.2899470825195313, 0.29282321166992187, 0.29323248291015624, 0.29367416381835937, 0.2936785888671875, 0.2941048278808594, 0.29431427001953125, 0.29127127075195314, 0.29567034912109375, 0.29308224487304685, 0.29094442749023436, 0.28820068359375, 0.29039175415039065, 0.2868695068359375, 0.28799105834960936]",tokens/s,210.44894076962422,kWh,8.313185671241867e-06,9.168013610817225e-07,3.856812108409e-06,1.3086799140732585e-05,tokens/kWh,4814011.3806677805,,s,2079,9.863568037509914,0.004744380970423241,0.002985095676152631,0.00461740779876709,0.0049251838684082035,0.005016031885147094,0.005280369215011594,"[0.0054271998405456545, 0.00501145601272583, 0.005853248119354248, 0.004840735912322998, 0.004856544017791748, 0.004894815921783448, 0.004773024082183838, 0.00497705602645874, 0.005105343818664551, 0.004977344036102295, 0.004967455863952637, 0.005010528087615967, 0.005041728019714356, 0.00487446403503418, 0.004938943862915039, 0.004944704055786133, 0.004922976016998291, 0.004884031772613526, 0.004895391941070556, 0.005371776103973389, 0.004817344188690185, 0.004728415966033936, 0.004704192161560059, 0.004755807876586914, 0.0048558077812194825, 0.004992320060729981, 0.0050817918777465824, 0.004976640224456787, 0.004951231956481934, 0.0050020480155944826, 0.00499507188796997, 0.005085184097290039, 0.005083392143249512, 0.005064703941345215, 0.004931327819824219, 0.0049268479347229005, 0.004891039848327637, 0.004878560066223145, 0.004812640190124512, 0.004823200225830078, 0.0047309122085571285, 0.004699456214904785, 0.004698783874511719, 0.00466534423828125, 0.0046976637840271, 0.004804800033569336, 0.00509772777557373, 0.004915520191192627, 0.0050438718795776365, 0.004933055877685547, 0.004915711879730225, 0.004849023818969726, 0.004784800052642822, 0.004679999828338623, 0.004675392150878906, 0.004644192218780517, 0.004754015922546387, 0.004597760200500488, 0.004640768051147461, 0.004640768051147461, 0.004712448120117187, 0.004659488201141358, 0.004640480041503906, 0.004346464157104492, 0.004624256134033203, 0.004651008129119873, 0.0046629438400268555, 0.004768095970153809, 0.004857215881347656, 0.0050267200469970704, 0.0048947839736938475, 0.004871327877044678, 0.004870336055755615, 0.004734720230102539, 0.004693568229675293, 0.0046622719764709475, 0.004664735794067383, 0.140567138671875, 0.005075200080871582, 0.004854847908020019, 0.004851456165313721, 0.004910272121429443, 0.0050009598731994625, 0.004939072132110596, 0.004868224143981934, 0.004806335926055908, 0.00479091215133667, 0.0047801599502563475, 0.004822432041168213, 0.004737567901611328, 0.0047391681671142575, 0.004729951858520508, 0.004794943809509278, 0.004886047840118408, 0.004967296123504638, 0.004894336223602295, 0.004933055877685547, 0.004927999973297119, 0.004934112071990967, 0.005045536041259765, 
0.005038815975189209, 0.004927135944366455, 0.004842016220092773, 0.004803487777709961, 0.00483571195602417, 0.0048781437873840336, 0.004803296089172363, 0.004674784183502197, 0.004645760059356689, 0.004601759910583496, 0.00459980821609497, 0.004632607936859131, 0.004689888000488281, 0.004840479850769043, 0.004864992141723633, 0.004793983936309814, 0.004798079967498779, 0.004684544086456298, 0.00467683219909668, 0.004678207874298096, 0.004645088195800781, 0.004609951972961426, 0.004647264003753662, 0.004709695816040039, 0.004702720165252685, 0.004698016166687012, 0.00436633586883545, 0.004623360157012939, 0.004641791820526123, 0.004632575988769531, 0.004603903770446777, 0.004593311786651611, 0.004701695919036865, 0.004627295970916748, 0.004640768051147461, 0.004750720024108887, 0.004719232082366943, 0.004966015815734863, 0.005053919792175293, 0.0050861120223999025, 0.005030111789703369, 0.005089216232299805, 0.005037951946258545, 0.005050528049468994, 0.005042272090911865, 0.004902400016784668, 0.004804831981658935, 0.0047223038673400875, 0.004736767768859863, 0.004696127891540528, 0.004724607944488525, 0.004743872165679931, 0.004793856143951416, 0.004770304203033447, 0.004709983825683594, 0.004679999828338623, 0.004659296035766602, 0.004631711959838867, 0.004653952121734619, 0.00465503978729248, 0.004622208118438721, 0.004608160018920899, 0.004673056125640869, 0.004641248226165772, 0.004605247974395752, 0.0046906242370605465, 0.0046564159393310545, 0.004661983966827392, 0.0046284799575805665, 0.004689792156219483, 0.004696191787719727, 0.004817152023315429, 0.004884223937988281, 0.004910816192626953, 0.004743743896484375, 0.004673408031463623, 0.004665184020996094, 0.004640384197235108, 0.004741504192352295, 0.004614143848419189, 0.004610047817230224, 0.004609504222869873, 0.004592160224914551, 0.004642303943634033, 0.0047779521942138675, 0.004925439834594727, 0.005044767856597901, 0.005058623790740967, 0.005099455833435059, 0.004639071941375732, 0.00496614408493042, 0.00498803186416626, 0.004862143993377686, 0.004855904102325439, 0.004809311866760254, 0.004765567779541016, 0.0047842559814453125, 0.0046910080909729, 0.004682688236236572, 0.004636672019958496, 0.00473302412033081, 0.0047634878158569335, 0.004751423835754395, 0.004689919948577881, 0.004644864082336426, 0.004615488052368164, 0.004622208118438721, 0.0045773439407348635, 0.004598176002502442, 0.004591968059539795, 0.004663296222686767, 0.004601856231689453, 0.004599232196807862, 0.004616767883300781, 0.004734079837799072, 0.004846720218658447, 0.004667136192321777, 0.004623360157012939, 0.004663936138153076, 0.004721280097961426, 0.004754303932189941, 0.004710944175720215, 0.004663392066955567, 0.004663648128509522, 0.004683680057525634, 0.00467142391204834, 0.0046633281707763674, 0.0046633281707763674, 0.004810751914978028, 0.004825183868408203, 0.004755360126495361, 0.004773183822631836, 0.004721343994140625, 0.004726784229278564, 0.004724736213684082, 0.0048204798698425295, 0.0050078721046447755, 0.004905055999755859, 0.004833183765411377, 0.0048446722030639645, 0.004829696178436279, 0.004823616027832031, 0.004742847919464111, 0.00477836799621582, 0.0048371200561523435, 0.004908703804016114, 0.004950143814086914, 0.004929183959960937, 0.004976672172546387, 0.005000927925109863, 0.004964896202087402, 0.0048941121101379395, 0.0045896959304809574, 0.004904160022735596, 0.004854112148284912, 0.004882688045501709, 0.004801631927490235, 0.004753568172454834, 0.004736959934234619, 0.004745279788970947, 0.004731135845184326, 
0.004684447765350342, 0.0046386241912841795, 0.004667136192321777, 0.004665599822998047, 0.004724063873291016, 0.0048196158409118655, 0.004843776226043701, 0.0048596482276916505, 0.004803967952728271, 0.0049251198768615724, 0.004864448070526123, 0.004831744194030762, 0.004773312091827393, 0.004749824047088623, 0.004779679775238037, 0.004709983825683594, 0.004707136154174805, 0.004679200172424317, 0.00466377592086792, 0.004620287895202637, 0.004640768051147461, 0.004643167972564697, 0.004730207920074463, 0.004595424175262451, 0.004658783912658691, 0.00464793586730957, 0.004785408020019531, 0.004794112205505371, 0.004975264072418213, 0.004994815826416016, 0.004855455875396729, 0.004934783935546875, 0.004947328090667725, 0.004945536136627197, 0.004858047962188721, 0.004784992218017578, 0.004834784030914307, 0.004784448146820068, 0.004727968215942383, 0.004674399852752685, 0.004639935970306396, 0.004651936054229736, 0.004636352062225342, 0.004661471843719483, 0.004644864082336426, 0.004644768238067627, 0.0046449599266052246, 0.00466326379776001, 0.004628064155578614, 0.00462278413772583, 0.004571231842041015, 0.004614048004150391, 0.004579328060150147, 0.0046769919395446774, 0.0043779520988464354, 0.004636384010314941, 0.004629248142242431, 0.0046386241912841795, 0.0045825281143188476, 0.004565887928009034, 0.004624383926391602, 0.004632575988769531, 0.004611839771270752, 0.00461030387878418, 0.004620160102844238, 0.004642111778259277, 0.004612448215484619, 0.004655295848846435, 0.00466921615600586, 0.005441823959350586, 0.0048949441909790035, 0.00497049617767334, 0.005312511920928955, 0.005462016105651855, 0.005805312156677246, 0.004981503963470459, 0.00506060791015625, 0.005040256023406983, 0.005023359775543213, 0.004972383975982666, 0.00495465612411499, 0.0049847040176391605, 0.004952095985412598, 0.004825056076049805, 0.0047964158058166504, 0.004769536018371582, 0.004860447883605957, 0.0046566081047058104, 0.0046369280815124515, 0.004639776229858399, 0.0046080961227416995, 0.004614496231079101, 0.004757535934448242, 0.004717152118682861, 0.004672544002532959, 0.004721536159515381, 0.004693088054656983, 0.004660128116607666, 0.004730879783630371, 0.004724991798400879, 0.0046787199974060055, 0.004856063842773438, 0.004811264038085937, 0.004687424182891846, 0.004661632061004639, 0.004579328060150147, 0.004597760200500488, 0.004663296222686767, 0.004599232196807862, 0.004588096141815185, 0.004589824199676513, 0.004573984146118164, 0.004588511943817139, 0.004657120227813721, 0.0050802559852600095, 0.004856448173522949, 0.004821216106414795, 0.00451584005355835, 0.005016608238220215, 0.005290527820587158, 0.005105855941772461, 0.004919551849365235, 0.004872191905975342, 0.004913023948669434, 0.004888703823089599, 0.004745215892791748, 0.004716639995574951, 0.004701759815216065, 0.004692512035369873, 0.0046590080261230465, 0.004670656204223632, 0.004676415920257569, 0.00461033582687378, 0.004638175964355468, 0.004595967769622802, 0.004566976070404053, 0.004601920127868652, 0.004869728088378906, 0.004843999862670898, 0.004688864231109619, 0.004625376224517822, 0.00459884786605835, 0.004724800109863281, 0.004631423950195312, 0.004614143848419189, 0.004603903770446777, 0.004618336200714111, 0.004582784175872803, 0.004571135997772217, 0.004546271800994873, 0.004567935943603516, 0.004583360195159912, 0.004636672019958496, 0.0046382398605346676, 0.004717023849487305, 0.004863967895507812, 0.00497983980178833, 0.0049919037818908696, 0.0050087041854858395, 0.005182144165039063, 0.005055615901947022, 
0.004999135971069336, 0.004893599987030029, 0.004996511936187744, 0.0050653119087219235, 0.004980735778808594, 0.0049409279823303225, 0.004889760017395019, 0.00478384017944336, 0.004758624076843262, 0.004677599906921387, 0.004656064033508301, 0.004601984024047851, 0.0047717118263244625, 0.004777984142303467, 0.004662975788116455, 0.004692287921905518, 0.004598944187164306, 0.004541279792785645, 0.004572800159454346, 0.0043023681640625, 0.00457366418838501, 0.004571135997772217, 0.004585023880004883, 0.0045879678726196286, 0.004567039966583252, 0.004579328060150147, 0.00460364818572998, 0.004598015785217285, 0.0045640959739685055, 0.004565599918365478, 0.004593215942382813, 0.004567264080047607, 0.004572800159454346, 0.00457532787322998, 0.00461407995223999, 0.0047829442024230956, 0.004877664089202881, 0.004852384090423584, 0.004838496208190918, 0.004744095802307129, 0.004800320148468018, 0.00466323184967041, 0.0046657600402832036, 0.004662303924560547, 0.00474399995803833, 0.004709983825683594, 0.0046739521026611325, 0.004646560192108154, 0.004661600112915039, 0.0047894401550292965, 0.005268288135528564, 0.004748288154602051, 0.004684544086456298, 0.004663008213043213, 0.004622464179992676, 0.004601823806762695, 0.004565440177917481, 0.004597760200500488, 0.004596799850463867, 0.004586143970489502, 0.004583712100982666, 0.004595104217529297, 0.004618847846984863, 0.0046178560256958006, 0.004640927791595459, 0.004601151943206787, 0.004554687976837158, 0.004932576179504394, 0.004599199771881103, 0.004554463863372803, 0.004598656177520752, 0.004681727886199951, 0.004655104160308838, 0.004554751873016357, 0.004577280044555664, 0.00456492805480957, 0.004556384086608887, 0.004553184032440185, 0.0045240321159362796, 0.004540319919586182, 0.004554848194122315, 0.0046284799575805665, 0.004928127765655517, 0.0050787200927734375, 0.005093696117401123, 0.005097472190856934, 0.00505241584777832, 0.0050769920349121095, 0.005005311965942383, 0.004978303909301758, 0.005055007934570313, 0.004829279899597168, 0.00483622407913208, 0.004807295799255371, 0.00471011209487915, 0.004667200088500976, 0.0046286721229553225, 0.0046044478416442875, 0.004605984210968018, 0.004560863971710205, 0.00459980821609497, 0.00459555196762085, 0.004581151962280273, 0.004577600002288819, 0.004581408023834228, 0.004581247806549072, 0.0045610561370849605, 0.004582431793212891, 0.0045577921867370605, 0.004575232028961182, 0.004552256107330322, 0.004608448028564453, 0.0049946560859680175, 0.004942399978637695, 0.004848671913146973, 0.004757952213287353, 0.004659167766571045, 0.0046063680648803715, 0.00457040023803711, 0.004754303932189941, 0.004581215858459472, 0.00459980821609497, 0.0045649919509887695, 0.004591616153717041, 0.00466534423828125, 0.004846752166748047, 0.0047329277992248535, 0.004711391925811767, 0.0046854400634765625, 0.004663551807403564, 0.004612095832824707, 0.004593215942382813, 0.004595295906066894, 0.00464572811126709, 0.004607840061187744, 0.004580575942993164, 0.004642879962921143, 0.004647039890289307, 0.0046128640174865725, 0.0045875201225280765, 0.00457260799407959, 0.004559423923492432, 0.004570432186126709, 0.004625088214874268, 0.004681727886199951, 0.004541567802429199, 0.004821887969970703, 0.004742591857910156, 0.004809216022491455, 0.004694079875946045, 0.004673535823822021, 0.004632063865661621, 0.004636159896850586, 0.004684160232543946, 0.004589983940124512, 0.004579552173614502, 0.004616447925567627, 0.00465670394897461, 0.004628320217132568, 0.0048925762176513675, 0.00461568021774292, 
0.004567391872406006, 0.0049725441932678225, 0.004581471920013428, 0.00457369613647461, 0.004627871990203858, 0.0046962881088256836, 0.005204351902008056, 0.005133440017700195, 0.0050672321319580075, 0.005015967845916748, 0.005072127819061279, 0.005046527862548828, 0.005003776073455811, 0.005041728019714356, 0.004941887855529785, 0.004917632102966309, 0.004840767860412597, 0.0047942399978637695, 0.004639039993286133, 0.004611968040466308, 0.004579967975616455, 0.004606304168701172, 0.004592607975006104, 0.00458409595489502, 0.004607935905456543, 0.00458351993560791, 0.004605184078216553, 0.004647679805755615, 0.004593440055847168, 0.004546783924102783, 0.004562335968017578, 0.004546144008636475, 0.004565695762634278, 0.004577216148376465, 0.004555136203765869, 0.004701791763305664, 0.004660639762878418, 0.004631552219390869, 0.004589568138122559, 0.004646719932556153, 0.004720287799835205, 0.004699935913085937, 0.004646815776824951, 0.004621151924133301, 0.004624383926391602, 0.004769792079925537, 0.004640768051147461, 0.00467145586013794, 0.004927487850189209, 0.0049205441474914555, 0.004856095790863037, 0.0047291841506958004, 0.004773952007293701, 0.004746431827545166, 0.004704736232757569, 0.004736447811126709, 0.004672287940979004, 0.004614016056060791, 0.004632927894592285, 0.004577280044555664, 0.004575168132781983, 0.004649024009704589, 0.004585472106933594, 0.004589600086212158, 0.004653056144714355, 0.004753376007080078, 0.0046696319580078124, 0.004597568035125733, 0.004582752227783203, 0.004571904182434082, 0.0045559039115905765, 0.004685855865478515, 0.00460649585723877, 0.004581600189208984, 0.004589568138122559, 0.0046284799575805665, 0.004685184001922607, 0.004621151924133301, 0.004572319984436035, 0.004553343772888184, 0.004581344127655029, 0.004630144119262695, 0.0046473278999328614, 0.004576704025268555, 0.0045809922218322755, 0.004616608142852783, 0.0045873279571533206, 0.004676320075988769, 0.004573152065277099, 0.004579360008239746, 0.004534272193908692, 0.004564064025878906, 0.0045454401969909665, 0.004533984184265137, 0.00454915189743042, 0.004673279762268066, 0.004794367790222168, 0.005001215934753418, 0.005238751888275147, 0.00516918420791626, 0.005040128231048584, 0.004908063888549805, 0.004940767765045166, 0.00499507188796997, 0.004900320053100586, 0.004837600231170655, 0.004718368053436279, 0.004676127910614013, 0.00459065580368042, 0.0046150717735290525, 0.0043361282348632815, 0.0046284799575805665, 0.0045853757858276365, 0.004814112186431884, 0.004553120136260986, 0.004599711894989014, 0.004581888198852539, 0.004589568138122559, 0.004612127780914307, 0.004566207885742187, 0.004561696052551269, 0.004554751873016357, 0.0045424637794494625, 0.004668831825256348, 0.004602079868316651, 0.0049155840873718265, 0.00501910400390625, 0.00490777587890625, 0.0049641280174255375, 0.004929535865783692, 0.004743167877197266, 0.00471449613571167, 0.004614304065704346, 0.004636127948760987, 0.004603456020355225, 0.004616960048675537, 0.004583487987518311, 0.00457696008682251, 0.004554143905639648, 0.0047093119621276856, 0.004579616069793701, 0.004566400051116943, 0.004594240188598633, 0.004620351791381836, 0.004588255882263184, 0.004592512130737305, 0.00468828821182251, 0.0045965762138366695, 0.004561759948730469, 0.004562943935394287, 0.004550367832183838, 0.004550943851470948, 0.0045240321159362796, 0.004532383918762207, 0.004581344127655029, 0.004583295822143555, 0.004595263957977295, 0.004676032066345215, 0.004668416023254395, 0.004669536113739014, 0.004693120002746582, 
0.004761375904083252, 0.004667712211608887, 0.0046711678504943845, 0.004655104160308838, 0.004683775901794434, 0.004618239879608154, 0.0046694397926330565, 0.004603616237640381, 0.004569375991821289, 0.0055808000564575196, 0.004869503974914551, 0.004661888122558594, 0.004327424049377441, 0.004560544013977051, 0.004563168048858642, 0.004561024188995361, 0.004534272193908692, 0.004536320209503173, 0.004567039966583252, 0.004562367916107177, 0.004577760219573975, 0.004596831798553466, 0.004673823833465576, 0.004940415859222412, 0.005058656215667725, 0.005117951869964599, 0.00506060791015625, 0.0050566082000732425, 0.004956096172332764, 0.0049417920112609865, 0.004884479999542236, 0.0049541440010070804, 0.004840703964233398, 0.004762527942657471, 0.004677248001098633, 0.004622528076171875, 0.004585536003112793, 0.004567999839782715, 0.004577727794647217, 0.0045994877815246586, 0.004639840126037598, 0.004595424175262451, 0.004586559772491455, 0.004570079803466797, 0.004581408023834228, 0.004652895927429199, 0.004716703891754151, 0.004593887805938721, 0.004595136165618897, 0.004589087963104248, 0.004600607872009278, 0.004567423820495606, 0.004904607772827149, 0.0047288317680358885, 0.0047636480331420894, 0.004603903770446777, 0.004582592010498047, 0.0045617280006408694, 0.004564544200897217, 0.004599743843078613, 0.004871679782867432, 0.004821407794952393, 0.004911712169647217, 0.004806848049163819, 0.004814112186431884, 0.004620607852935791, 0.004585887908935547, 0.004589568138122559, 0.004584671974182129, 0.00457747220993042, 0.004612512111663818, 0.004568480014801026, 0.004611711978912353, 0.004604159832000732, 0.0045718722343444824, 0.004315904140472412, 0.004589791774749756, 0.004595776081085205, 0.004628416061401367, 0.004621568202972412, 0.004604671955108643, 0.004611264228820801, 0.004642975807189941, 0.004596384048461914, 0.004591296195983887, 0.004580927848815918, 0.0046806402206420895, 0.004656960010528564, 0.004595967769622802, 0.004564383983612061, 0.004626783847808838, 0.004583424091339112, 0.004688960075378418, 0.004572095870971679, 0.0045875201225280765, 0.004631743907928467, 0.004618368148803711, 0.004593887805938721, 0.00455072021484375, 0.0045428800582885745, 0.004558976173400879, 0.0045443840026855465, 0.004546559810638427, 0.004581600189208984, 0.004562719821929932, 0.005248672008514405, 0.004553055763244629, 0.004534207820892334, 0.004572927951812744, 0.004554719924926758, 0.004683231830596924, 0.005129407882690429, 0.0048512001037597655, 0.004702400207519531, 0.004882368087768554, 0.005003007888793945, 0.0051653761863708495, 0.005128191947937012, 0.0050421757698059086, 0.005015583992004394, 0.004923359870910644, 0.004878335952758789, 0.004821280002593994, 0.004742847919464111, 0.00471395206451416, 0.00467628812789917, 0.0046826558113098144, 0.004608992099761963, 0.004593664169311523, 0.004562431812286377, 0.0045593600273132326, 0.0045281281471252445, 0.00455679988861084, 0.004550496101379395, 0.004610208034515381, 0.004713535785675049, 0.004615104198455811, 0.004581376075744629, 0.004315199851989746, 0.004556032180786132, 0.004541183948516846, 0.004544544219970703, 0.004671743869781494, 0.0049927358627319335, 0.004935808181762695, 0.0047628159523010255, 0.004868800163269043, 0.004576543807983399, 0.004557472229003906, 0.004773087978363037, 0.004786399841308594, 0.004589471817016601, 0.004573056221008301, 0.004684639930725098, 0.00731056022644043, 0.0051495041847229, 0.00467964792251587, 0.004637887954711914, 0.004588607788085937, 0.004558591842651367, 0.004574240207672119, 
0.004803743839263916, 0.004593247890472412, 0.0046061758995056155, 0.004589568138122559, 0.004601600170135498, 0.004609568119049072, 0.004611008167266846, 0.004570496082305908, 0.004565408229827881, 0.004546271800994873, 0.0045632319450378414, 0.004537568092346191, 0.004543263912200928, 0.004556447982788086, 0.004570943832397461, 0.004586016178131104, 0.004871520042419433, 0.0048393278121948245, 0.004804416179656983, 0.0047441282272338865, 0.004744575977325439, 0.004663936138153076, 0.004630527973175049, 0.004590720176696777, 0.0045866560935974125, 0.00461740779876709, 0.004629119873046875, 0.004603231906890869, 0.004563360214233398, 0.0045610561370849605, 0.004540224075317383, 0.004533472061157226, 0.00454911994934082, 0.004530655860900879, 0.0045240321159362796, 0.0045281281471252445, 0.004523488044738769, 0.004566559791564942, 0.004754144191741944, 0.004999199867248535, 0.005045760154724121, 0.0050488319396972655, 0.00496127986907959, 0.00496127986907959, 0.004919072151184082, 0.004845791816711425, 0.004921599864959717, 0.0048865280151367185, 0.004755008220672608, 0.004653151988983154, 0.00461033582687378, 0.004642623901367188, 0.0053779840469360355, 0.006050015926361084, 0.00570527982711792, 0.004651296138763428, 0.004655104160308838, 0.004640768051147461, 0.004585696220397949, 0.004599584102630615, 0.004593760013580322, 0.00457692813873291, 0.004542943954467774, 0.0046113600730896, 0.004665855884552002, 0.004634463787078857, 0.004582719802856445, 0.004561791896820069, 0.0045977277755737305, 0.004597439765930176, 0.004583327770233154, 0.004731296062469482, 0.004689919948577881, 0.004642816066741944, 0.004591263771057129, 0.004575583934783935, 0.00454860782623291, 0.0045500478744506835, 0.004534880161285401, 0.004541888236999512, 0.004575808048248291, 0.0047205758094787595, 0.004916768074035644, 0.004830783843994141, 0.004729951858520508, 0.004783999919891357, 0.004700096130371094, 0.004667232036590576, 0.004618463993072509, 0.004571135997772217, 0.004591360092163086, 0.004618495941162109, 0.004614143848419189, 0.00460368013381958, 0.00471289587020874, 0.004770912170410156, 0.0046926078796386715, 0.004621471881866455, 0.004571616172790527, 0.004548863887786865, 0.004608191967010498, 0.004641791820526123, 0.004570112228393554, 0.004348639965057373, 0.004595263957977295, 0.004600128173828125, 0.004592127799987793, 0.004575071811676025, 0.004599711894989014, 0.004631648063659668, 0.004609024047851563, 0.004593599796295166, 0.004578976154327392, 0.004575647830963135, 0.004562367916107177, 0.0045874881744384765, 0.004545119762420655, 0.004554751873016357, 0.004579328060150147, 0.004550655841827392, 0.004584640026092529, 0.00466377592086792, 0.004841824054718017, 0.004941535949707031, 0.005077151775360107, 0.004902304172515869, 0.0048585920333862305, 0.004970176219940186, 0.0050302081108093265, 0.005031199932098389, 0.005069536209106445, 0.0049500160217285155, 0.0049235520362854, 0.004853536128997802, 0.004826655864715576, 0.004848159790039062, 0.004761792182922363, 0.004699647903442383, 0.0047288317680358885, 0.004651328086853027, 0.004632575988769531, 0.004601215839385987, 0.0045756158828735355, 0.004563168048858642, 0.00455404806137085, 0.004601664066314697, 0.004756383895874024, 0.004734975814819336, 0.00466534423828125, 0.004599199771881103, 0.0045840320587158204, 0.004576735973358155, 0.0045716800689697265, 0.004587456226348877, 0.004562560081481933, 0.004548575878143311, 0.004530655860900879, 0.0045649919509887695, 0.004583392143249512, 0.004560351848602295, 0.0045717120170593265, 
0.004562943935394287, 0.0045418238639831545, 0.0045512962341308595, 0.004648320198059082, 0.004676224231719971, 0.004329631805419922, 0.004621920108795166, 0.004598112106323242, 0.004618175983428955, 0.004728096008300781, 0.0048873920440673825, 0.004847616195678711, 0.00473199987411499, 0.004670368194580078, 0.004631807804107666, 0.004604671955108643, 0.0046592001914978025, 0.00475551986694336, 0.004724063873291016, 0.004672095775604248, 0.004712448120117187, 0.0046542401313781736, 0.004627295970916748, 0.004650176048278809, 0.00467142391204834, 0.004598656177520752, 0.004632575988769531, 0.004652383804321289, 0.004632575988769531, 0.004581503868103028, 0.0045716800689697265, 0.004551712036132812, 0.00456928014755249, 0.004606751918792724, 0.004583168029785156, 0.004587584018707276, 0.0045651841163635255, 0.004557951927185059, 0.0045577921867370605, 0.005312255859375, 0.005375487804412842, 0.00538316822052002, 0.0046501121520996095, 0.0047803521156311035, 0.004970719814300537, 0.00495033597946167, 0.004912831783294678, 0.004779007911682129, 0.0047562880516052244, 0.004681280136108398, 0.004700352191925048, 0.004682047843933106, 0.004632031917572022, 0.004587264060974121, 0.004565919876098633, 0.004583424091339112, 0.0046267518997192385, 0.004806335926055908, 0.0049780158996582035, 0.004942431926727295, 0.004894527912139892, 0.004835775852203369, 0.004759359836578369, 0.004777120113372803, 0.004789087772369385, 0.004775296211242676, 0.004678271770477295, 0.004624383926391602, 0.004498591899871826, 0.004705215930938721, 0.004601856231689453, 0.004549888134002686, 0.004571904182434082, 0.004562655925750733, 0.004561183929443359, 0.004583263874053955, 0.004554912090301514, 0.0045359678268432615, 0.004528480052947998, 0.004527200222015381, 0.004561823844909668, 0.004627840042114258, 0.004659840106964111, 0.004663584232330323, 0.004622399806976318, 0.004564640045166016, 0.004583712100982666, 0.00457862377166748, 0.004569119930267334, 0.00454204797744751, 0.004534656047821045, 0.004569375991821289, 0.0045686402320861816, 0.004592351913452149, 0.004556640148162842, 0.00456444787979126, 0.004571904182434082, 0.004648736000061035, 0.0046459841728210445, 0.0046843838691711425, 0.004701759815216065, 0.004647744178771973, 0.004575168132781983, 0.004569087982177734, 0.004562943935394287, 0.004579135894775391, 0.004798655986785889, 0.004659135818481445, 0.004644927978515625, 0.004611680030822754, 0.0045931520462036135, 0.004577824115753174, 0.004585279941558838, 0.0045799040794372555, 0.004585472106933594, 0.004579328060150147, 0.004601024150848389, 0.004573760032653809, 0.004574783802032471, 0.004610752105712891, 0.004579328060150147, 0.0046459841728210445, 0.00458025598526001, 0.004560031890869141, 0.0045392317771911625, 0.0045519680976867675, 0.004617152214050293, 0.004699935913085937, 0.004634624004364014, 0.004562943935394287, 0.004601535797119141, 0.004674015998840332, 0.005090752124786377, 0.005066912174224853, 0.005022496223449707, 0.004954976081848144, 0.004960800170898437, 0.004839839935302734, 0.004761568069458008, 0.004729919910430909, 0.004731455802917481, 0.004665631771087647, 0.004618080139160157, 0.004608255863189697, 0.0045649919509887695, 0.004793407917022705, 0.004585792064666748, 0.004679999828338623, 0.004575551986694336, 0.004536320209503173, 0.004571135997772217, 0.00460316801071167, 0.004637407779693604, 0.004596928119659424, 0.004569920063018798, 0.004579328060150147, 0.004550591945648193, 0.004556863784790039, 0.0045640959739685055, 0.004553408145904541, 0.0045426559448242185, 
0.004603903770446777, 0.004585472106933594, 0.0045378880500793455, 0.004581855773925781, 0.004552095890045166, 0.004560959815979004, 0.004588064193725586, 0.004726784229278564, 0.00466534423828125, 0.004615968227386475, 0.004591392040252685, 0.004573472023010254, 0.004656832218170166, 0.004660768032073975, 0.004611008167266846, 0.004595647811889649, 0.0046590399742126466, 0.004609248161315918, 0.004590591907501221, 0.004583424091339112, 0.004567359924316406, 0.004570015907287598, 0.004555552005767822, 0.00456876802444458, 0.004540736198425293, 0.0045567359924316405, 0.004545983791351319, 0.004694015979766846, 0.004577919960021972, 0.004554240226745605, 0.00459007978439331, 0.004567039966583252, 0.0045763840675354, 0.004323328018188476, 0.004667327880859375, 0.004732895851135254, 0.004680064201354981, 0.004619999885559082, 0.0045649919509887695, 0.004558847904205322, 0.004530176162719727, 0.0045240001678466795, 0.004555967807769776, 0.0045389761924743656, 0.004554880142211914, 0.004558976173400879, 0.004552703857421875, 0.004562784194946289, 0.004552224159240723, 0.004579967975616455, 0.004530176162719727, 0.004673535823822021, 0.004545567989349365, 0.0045349440574646, 0.004520256042480469, 0.004517888069152832, 0.004558847904205322, 0.00459980821609497, 0.004868192195892334, 0.00491100788116455, 0.005000895977020263, 0.004931903839111328, 0.004906432151794433, 0.004871903896331787, 0.0048607678413391114, 0.004890016078948975, 0.0049560642242431644, 0.004829887866973877, 0.0047381119728088375, 0.004650944232940673, 0.004677663803100586, 0.004672800064086914, 0.004675263881683349, 0.004613632202148437, 0.004591104030609131, 0.004572192192077637, 0.004558815956115723, 0.004552127838134766, 0.004601888179779053, 0.0046003518104553226, 0.004591839790344238, 0.0045504322052001955, 0.004553760051727295, 0.004555744171142578, 0.004552864074707032, 0.0046525120735168455, 0.004730559825897217, 0.004639423847198486, 0.004675583839416504, 0.004574687957763672, 0.004563488006591797, 0.004580416202545166, 0.00456390380859375, 0.004689919948577881, 0.004777984142303467, 0.004757503986358643, 0.004428671836853027, 0.004574592113494873, 0.004565631866455078, 0.004580863952636719, 0.004653567790985107, 0.004648255825042725, 0.004612607955932617, 0.004663487911224365, 0.004642816066741944, 0.0046451201438903805, 0.004636223793029785, 0.004581567764282227, 0.004593664169311523, 0.004664480209350586, 0.004829535961151123, 0.00470633602142334, 0.004632895946502686, 0.004759712219238281, 0.004705535888671875, 0.004550879955291748, 0.004604127883911133, 0.004560927867889405, 0.004565279960632324, 0.004576511859893799, 0.004567808151245118, 0.004539455890655518, 0.0045598077774047854, 0.0046319360733032226, 0.0045905599594116215, 0.00460972785949707, 0.004583104133605957, 0.004648447990417481, 0.00460041618347168, 0.004589151859283447, 0.004763743877410889, 0.004561408042907715, 0.004569087982177734, 0.004571455955505371, 0.00456057596206665, 0.004535776138305664, 0.004565087795257569, 0.004571584224700928, 0.004538368225097656, 0.004566207885742187, 0.004540544033050537, 0.0045203838348388675, 0.004524288177490235, 0.004537568092346191, 0.004569888114929199, 0.004556479930877685, 0.004664639949798584, 0.004975615978240967, 0.005093376159667969, 0.005156864166259765, 0.0050094079971313476, 0.004920576095581055, 0.004865920066833496, 0.005006207942962647, 0.004878015995025635, 0.004720831871032715, 0.0046859197616577145, 0.004632607936859131, 0.004661087989807129, 0.004303264141082764, 0.00460364818572998, 
0.004556320190429688, 0.004555776119232178, 0.004532192230224609, 0.004535808086395264, 0.004620800018310547, 0.0046731200218200685, 0.004602272033691407, 0.004566559791564942, 0.004540895938873291, 0.00456492805480957, 0.004565055847167969, 0.004543903827667236, 0.004561183929443359, 0.004573503971099854, 0.004612095832824707, 0.004677375793457031, 0.004582816123962402, 0.004559679985046387, 0.004552735805511475, 0.004595392227172852, 0.004543968200683594, 0.004565536022186279, 0.004575039863586426, 0.00454915189743042, 0.004661119937896728, 0.004867455959320068, 0.004848351955413818, 0.0048692159652709965, 0.004738239765167236, 0.004691679954528808, 0.00458735990524292, 0.004778143882751465, 0.004667391777038574, 0.004684095859527588, 0.004644544124603272, 0.0046547198295593265, 0.004725344181060791, 0.004617440223693848, 0.004634624004364014, 0.004630815982818603, 0.004613471984863281, 0.005613823890686035, 0.004693759918212891, 0.004651743888854981, 0.0047017278671264645, 0.0049749441146850586, 0.005277503967285156, 0.004643360137939453, 0.004682975769042969, 0.004692255973815918, 0.0046228480339050295, 0.004577280044555664, 0.004605823993682861, 0.004640672206878662, 0.004632800102233887, 0.004607999801635742, 0.004591584205627441, 0.004558144092559814, 0.004565728187561035, 0.004585472106933594, 0.004664415836334228, 0.0043786239624023435, 0.00466534423828125, 0.004576704025268555, 0.004544960021972656, 0.004538496017456054, 0.004550399780273437, 0.004560448169708252, 0.0045303359031677246, 0.0045483198165893555, 0.004528160095214844, 0.004540607929229736, 0.004687776088714599, 0.004887296199798584, 0.00517523193359375, 0.005143648147583008, 0.005184447765350342, 0.005152991771697998, 0.005060351848602295, 0.0048558077812194825, 0.004846752166748047, 0.004794303894042969, 0.004700255870819092, 0.004663871765136719, 0.004604095935821533, 0.004588863849639893, 0.004565087795257569, 0.004530399799346924, 0.004571584224700928, 0.004554944038391113, 0.004613952159881592, 0.0045236158370971676, 0.004536736011505127, 0.004545760154724121, 0.004660192012786865, 0.004655136108398438, 0.004629951953887939, 0.004661600112915039, 0.004575104236602783, 0.004556287765502929, 0.004544223785400391, 0.004571392059326172, 0.004545184135437011, 0.004619584083557129, 0.004598176002502442, 0.004615647792816162, 0.004639008045196533, 0.004577824115753174, 0.004596960067749023, 0.0045939841270446775, 0.004676064014434814, 0.004759552001953125, 0.004709568023681641, 0.004663871765136719, 0.0045775361061096195, 0.004558784008026123, 0.0045784001350402835, 0.004551648139953613, 0.004568128108978271, 0.004627232074737549, 0.004806816101074219, 0.004821248054504394, 0.004718400001525879, 0.004651072025299072, 0.004319583892822266, 0.004555103778839112, 0.004560512065887451, 0.004581503868103028, 0.004583295822143555, 0.004632575988769531, 0.004626431941986084, 0.004602176189422607, 0.004554431915283203, 0.004558847904205322, 0.004562111854553222, 0.004685952186584473, 0.00478275203704834, 0.004697984218597412, 0.0045998401641845705, 0.004577407836914063, 0.004613120079040528, 0.004659679889678955, 0.004716959953308106, 0.004710495948791504, 0.00470198392868042, 0.004858304023742676, 0.004687679767608643, 0.004648096084594726, 0.00463753604888916, 0.0045874881744384765, 0.004582655906677246, 0.004579967975616455, 0.004710559844970703, 0.004664512157440186, 0.004662208080291748, 0.004592735767364502, 0.004584224224090576, 0.004560224056243897, 0.0045511040687561035, 0.004542751789093018, 0.004572896003723145, 
0.004544735908508301, 0.004577280044555664, 0.004538464069366455, 0.004530079841613769, 0.00454204797744751, 0.004724512100219727, 0.004912831783294678, 0.005063072204589843, 0.0051140799522399905, 0.005103040218353272, 0.004967296123504638, 0.004891871929168701, 0.004944287776947022, 0.004966400146484375, 0.004835072040557861, 0.004722847938537598, 0.004653535842895508, 0.004638720035552979, 0.004630527973175049, 0.004609312057495117, 0.0045677762031555175, 0.004611839771270752, 0.004548863887786865, 0.0045649919509887695, 0.00458128023147583, 0.004579423904418945, 0.00434003210067749, 0.004636672019958496, 0.004583104133605957, 0.0045914239883422855, 0.004566592216491699, 0.004520319938659668, 0.0045749440193176266, 0.004722879886627197, 0.004649631977081299, 0.004572319984436035, 0.004577695846557617, 0.004547008037567139, 0.0045483198165893555, 0.004550943851470948, 0.004550655841827392, 0.004577216148376465, 0.004535744190216065, 0.004567552089691162, 0.004626560211181641, 0.0047226881980895995, 0.004663296222686767, 0.0046267518997192385, 0.004599040031433106, 0.004596096038818359, 0.004571199893951416, 0.004561984062194825, 0.004587584018707276, 0.0048336639404296874, 0.004863679885864258, 0.0047890558242797854, 0.004681727886199951, 0.004628064155578614, 0.004630623817443847, 0.004650464057922363, 0.004616799831390381, 0.004616447925567627, 0.004596928119659424, 0.004618080139160157, 0.004589856147766113, 0.004565055847167969, 0.0046143999099731445, 0.004671103954315185, 0.004791391849517822, 0.004652416229248047, 0.0045632319450378414, 0.00455679988861084, 0.00455452823638916, 0.004587615966796875, 0.004613376140594482, 0.004632575988769531, 0.004656000137329102, 0.00459980821609497, 0.0045994877815246586, 0.004624671936035156, 0.004581408023834228, 0.0046592001914978025, 0.004636159896850586, 0.004661375999450683, 0.0046391038894653324, 0.004741087913513183, 0.004600992202758789, 0.004589759826660157, 0.004565695762634278, 0.004333568096160889, 0.004584544181823731, 0.00456387186050415, 0.004560895919799805, 0.004675136089324951, 0.004827744007110596, 0.004904255867004394, 0.004909408092498779, 0.00498092794418335, 0.004904160022735596, 0.004987328052520752, 0.005027711868286133, 0.004995552062988282, 0.004902912139892578, 0.0048364481925964355, 0.004758431911468506, 0.004761536121368409, 0.004775392055511475, 0.004686431884765625, 0.00466534423828125, 0.004612095832824707, 0.004667744159698486, 0.004599199771881103, 0.004583680152893067, 0.004585472106933594, 0.004533472061157226, 0.004563744068145752, 0.004577280044555664, 0.004570752143859863, 0.004574816226959228, 0.004547359943389892, 0.004583424091339112, 0.004595071792602539, 0.004593472003936768, 0.004792640209197998, 0.004699935913085937, 0.00462499189376831, 0.004632031917572022, 0.00458409595489502, 0.004644576072692871, 0.00481331205368042, 0.004923168182373047, 0.004837376117706299, 0.004709824085235596, 0.004678336143493653, 0.004651999950408935, 0.004596735954284668, 0.004635680198669434, 0.004762495994567871, 0.004780032157897949, 0.004705887794494629, 0.00468620777130127, 0.004642848014831543, 0.004569087982177734, 0.004572512149810791, 0.00457206392288208, 0.004570943832397461, 0.004639999866485596, 0.004633279800415039, 0.004634624004364014, 0.004591360092163086, 0.004581632137298584, 0.004627679824829102, 0.004416031837463379, 0.004639039993286133, 0.004542367935180664, 0.004536608219146729, 0.0045483198165893555, 0.004565343856811523, 0.004576863765716553, 0.004526144027709961, 0.004538368225097656, 
0.004646912097930909, 0.004631775856018067, 0.004655903816223145, 0.0047185921669006346, 0.004628736019134522, 0.004629248142242431, 0.00460214376449585, 0.004604640007019043, 0.004554751873016357, 0.004550655841827392, 0.004591616153717041, 0.004593664169311523, 0.004621376037597656, 0.004570047855377197, 0.004569024085998535, 0.004536384105682373, 0.004539743900299072, 0.004563551902770996, 0.004524159908294678, 0.004577216148376465, 0.0045424637794494625, 0.0045829439163208, 0.004624864101409912, 0.004812352180480957, 0.00506873607635498, 0.005022496223449707, 0.004994783878326416, 0.004984831809997559, 0.004946176052093506, 0.0049697279930114744, 0.004861855983734131, 0.00473967981338501, 0.004707712173461914, 0.004971136093139648, 0.004643104076385498, 0.0046293439865112305, 0.004589632034301758, 0.004592383861541748, 0.00461027193069458, 0.004581215858459472, 0.004578976154327392, 0.004560927867889405, 0.004567359924316406, 0.004553791999816895, 0.004539328098297119, 0.004595200061798096, 0.0045409278869628904, 0.004568448066711426, 0.004663936138153076, 0.004671487808227539, 0.004590879917144775, 0.004581727981567383, 0.004596096038818359, 0.004673535823822021, 0.00449894380569458, 0.004710912227630615, 0.004632160186767578, 0.004581215858459472, 0.004620863914489746, 0.004616191864013672, 0.004593408107757569, 0.004614016056060791, 0.004610432147979736, 0.004603392124176026, 0.004563488006591797, 0.00462230396270752, 0.004640992164611816, 0.0046529917716979985, 0.0046262722015380855, 0.004652800083160401, 0.004675839900970459, 0.00462662410736084, 0.004611904144287109, 0.004611104011535644, 0.00479695987701416, 0.004804287910461426, 0.00465177583694458, 0.004591872215270996, 0.004599552154541015, 0.004567039966583252, 0.00451584005355835, 0.004532224178314209, 0.004544511795043945, 0.004544159889221191, 0.004567391872406006, 0.004543968200683594, 0.0045081920623779295, 0.004557151794433594, 0.0045519680976867675, 0.004626560211181641, 0.004606207847595215, 0.00462063980102539, 0.004600927829742431, 0.004563519954681397, 0.004579135894775391, 0.004546751976013183, 0.004571135997772217, 0.004570591926574707, 0.004547008037567139, 0.004573472023010254, 0.004544320106506347, 0.004554656028747559, 0.004527967929840088, 0.004566336154937744, 0.004533215999603272, 0.00457260799407959, 0.004565536022186279, 0.004534304141998291, 0.004581344127655029, 0.004624576091766358, 0.004674975872039795, 0.004759967803955078, 0.0048355841636657714, 0.004765088081359863, 0.004735199928283691, 0.0047079682350158695, 0.004731264114379883, 0.004346047878265381, 0.004556159973144531, 0.004554495811462402, 0.004553919792175293, 0.004539072036743164, 0.004508992195129394, 0.004544064044952393, 0.004507232189178467, 0.004518432140350342, 0.004522336006164551, 0.005264832019805908, 0.004890848159790039, 0.0045730237960815425, 0.0045361919403076175, 0.0045857601165771485, 0.004593664169311523, 0.004761151790618897, 0.00456112003326416, 0.004579552173614502, 0.004591616153717041, 0.004550655841827392, 0.004583263874053955, 0.004560128211975097, 0.004576159954071045, 0.004536320209503173, 0.004545983791351319, 0.004542399883270263, 0.00455452823638916, 0.004563807964324951, 0.004569087982177734, 0.004529376029968262, 0.00463318395614624, 0.004556992053985596, 0.004550848007202148, 0.0045353279113769535, 0.004573344230651855, 0.004553567886352539, 0.0045606718063354494, 0.0045305280685424805, 0.004525599956512451, 0.00452243185043335, 0.004550335884094238, 0.0045788798332214355, 0.004561344146728516, 
0.004533440113067627, 0.0045574398040771485, 0.004542240142822266, 0.004551072120666504, 0.0045281281471252445, 0.004517183780670166, 0.004524096012115479, 0.004512383937835694, 0.004517888069152832, 0.004566783905029297, 0.004608255863189697, 0.004519328117370606, 0.004522592067718506, 0.004615808010101318, 0.004565375804901123, 0.004534272193908692, 0.004567359924316406, 0.004531904220581055, 0.004536543846130371, 0.0042871999740600585, 0.004513792037963867, 0.004525087833404541, 0.00452732801437378, 0.004529920101165772, 0.00451584005355835, 0.004546559810638427, 0.004539999961853027, 0.00452239990234375, 0.004533919811248779, 0.0045138559341430665, 0.0058017921447753905, 0.0058475518226623535, 0.0048090238571167, 0.004603551864624023, 0.004546559810638427, 0.004534272193908692, 0.004534272193908692, 0.004544511795043945, 0.004560512065887451, 0.004570752143859863, 0.004526527881622314, 0.004546527862548828, 0.004548960208892823, 0.004542751789093018, 0.004519648075103759, 0.004536128044128418, 0.004581823825836182, 0.004574975967407227, 0.004539711952209473, 0.004573887825012207, 0.004530176162719727, 0.004569087982177734, 0.004534016132354737, 0.004628736019134522, 0.004628767967224121, 0.004533215999603272, 0.004552864074707032, 0.0045493760108947755, 0.004517280101776123, 0.0045101442337036135, 0.004530367851257325, 0.004536128044128418, 0.004533728122711181, 0.004550271987915039, 0.00452291202545166, 0.004532320022583008, 0.0045259838104248044, 0.0045240321159362796, 0.0045281281471252445, 0.00454207992553711, 0.00453056001663208, 0.00456057596206665, 0.004512063980102539, 0.004542111873626709, 0.004528831958770752, 0.004548128128051758, 0.004573311805725098, 0.005467840194702148, 0.004655392169952393, 0.004575263977050781, 0.004538176059722901, 0.00466921615600586, 0.004292704105377197, 0.004561823844909668, 0.004674560070037842, 0.00455679988861084, 0.004554751873016357, 0.0045424637794494625, 0.004545951843261719, 0.004541088104248047, 0.004569024085998535, 0.00455679988861084, 0.004554751873016357, 0.004541759967803955, 0.004526783943176269, 0.004530176162719727, 0.004534560203552246, 0.004582304000854492, 0.004533055782318115, 0.004546559810638427, 0.004537375926971435, 0.004524288177490235, 0.004532639980316162, 0.004552800178527832, 0.004567488193511963, 0.004521759986877441, 0.004575232028961182, 0.0045424637794494625, 0.004522143840789795, 0.0045586562156677245, 0.004517920017242431, 0.004519807815551758, 0.004557087898254395, 0.004543392181396484, 0.004542623996734619, 0.00453331184387207, 0.004534080028533936, 0.004528031826019287, 0.004517888069152832, 0.004558559894561768, 0.004548895835876465, 0.004563199996948242, 0.004555744171142578, 0.004538591861724854, 0.004547135829925537, 0.004518112182617187, 0.004569920063018798, 0.004541408061981201, 0.004571135997772217, 0.004567039966583252, 0.004544511795043945, 0.004558847904205322, 0.0045424637794494625, 0.004546559810638427, 0.004588575839996338, 0.004548736095428467, 0.004563615798950195, 0.004549920082092285, 0.004549536228179931, 0.004577280044555664, 0.004567039966583252, 0.004571135997772217, 0.004588960170745849, 0.0045447998046875, 0.004542784214019775, 0.004333631992340088, 0.004554368019104004, 0.0045344319343566895, 0.004541920185089111, 0.004579584121704102, 0.004540800094604492, 0.004561024188995361, 0.004538047790527344, 0.0045382399559021, 0.004538815975189209, 0.004547808170318603, 0.004552735805511475, 0.004543231964111328, 0.004571455955505371, 0.004549312114715576, 0.004533247947692871, 
0.004552703857421875, 0.004527584075927734, 0.00456876802444458, 0.004618624210357666, 0.004559648036956787, 0.004581056118011475, 0.004577216148376465, 0.004571424007415771, 0.0045463361740112306, 0.004564671993255615, 0.004573503971099854, 0.004552703857421875, 0.004568384170532226, 0.004547264099121094, 0.004536320209503173, 0.004533919811248779, 0.004553055763244629, 0.004577280044555664, 0.004530176162719727, 0.004554751873016357, 0.004538271903991699, 0.004598144054412842, 0.004548384189605713, 0.0050236802101135255, 0.004588863849639893, 0.004729536056518555, 0.0045892162322998045, 0.0045994877815246586, 0.004569503784179687, 0.004617887973785401, 0.004566751956939698, 0.0045710082054138184, 0.004580063819885254, 0.004573472023010254, 0.0045649919509887695, 0.004533792018890381, 0.004594143867492676, 0.004571135997772217, 0.004552703857421875, 0.004568607807159424, 0.004520415782928467, 0.0045240321159362796, 0.004552703857421875, 0.0045223040580749515, 0.0045864639282226565, 0.004512479782104492, 0.004530176162719727]",tokens/s,210.77565360666875,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", 
line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. 
Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ 
self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.235648,2127.495168,0.0,1732.247552,1728.316416,s,1,7.494126953125,7.494126953125,0.0,7.494126953125,7.494126953125,7.494126953125,7.494126953125,[7.494126953125],,kWh,9.68461827082289e-06,1.0611642310189389e-06,4.302503442007999e-06,1.5048285943849828e-05,,MB,1107.70176,2328.82176,0.0,1923.088384,1891.2,s,11,0.4685140800476074,0.042592189095237036,0.003926996687913523,0.04137561416625977,0.04316851043701172,0.04874103927612305,0.05319906234741212,"[0.05431356811523438, 0.03801161575317383, 0.040621376037597655, 0.04316851043701172, 0.04131808090209961, 0.04131238555908203, 0.04130863952636719, 0.04286995315551758, 0.041873439788818356, 0.042340896606445313, 0.04137561416625977]",tokens/s,6010.491722498193,kWh,1.7661555776105405e-06,1.940468376263582e-07,1.16911405443373e-06,3.1293164696706284e-06,tokens/kWh,81807002.41767012,MB,1117.642752,2328.82176,0.0,1923.088384,1895.80032,s,11,10.273246093749998,0.9339314630681816,0.004601488466255629,0.93250439453125,0.939381103515625,0.9407118835449219,0.9417765075683594,"[0.9289093627929688, 0.93250439453125, 0.939381103515625, 0.9280496826171875, 0.9309269409179688, 0.9381678466796874, 0.9389963989257812, 0.9330540771484375, 0.9307843627929687, 0.9304292602539063, 0.9420426635742187]",tokens/s,67.45677010712369,kWh,2.6926773923902357e-05,2.9703234128768476e-06,1.599403098920306e-05,4.589112832598225e-05,tokens/kWh,1372814.3608169074,,s,693,10.270677665710455,0.014820602692222868,0.00037578824727001534,0.014731040000915527,0.015048230171203613,0.015191097450256347,0.016418733444213875,"[0.015498559951782226, 0.015063743591308593, 0.014892288208007812, 0.01481833553314209, 0.01478012752532959, 0.014735360145568848, 0.014867487907409669, 0.014663968086242676, 0.014787263870239258, 0.014648703575134278, 0.014684639930725097, 0.014583135604858398, 0.01581753635406494, 0.015015935897827149, 0.014782464027404785, 0.01469644832611084, 0.01491763210296631, 0.014620672225952149, 0.014581952095031738, 0.014794495582580567, 0.014727231979370117, 0.014692352294921876, 0.014645376205444337, 0.014677248001098632, 0.014613375663757325, 0.014599295616149902, 0.014619263648986816, 
0.014577664375305176, 0.014755328178405762, 0.014710432052612304, 0.014722111701965333, 0.014655263900756835, 0.01466534423828125, 0.014629247665405274, 0.014617664337158203, 0.01479526424407959, 0.01461683177947998, 0.014622912406921387, 0.014985216140747071, 0.014940159797668457, 0.014796799659729003, 0.014663680076599121, 0.014673439979553223, 0.01463548755645752, 0.014587840080261231, 0.014778431892395019, 0.014650752067565918, 0.014575231552124023, 0.01461350440979004, 0.014645248413085938, 0.014620672225952149, 0.01459404754638672, 0.014607839584350586, 0.01471951961517334, 0.014577504158020019, 0.01457577610015869, 0.014611935615539551, 0.014742048263549804, 0.01476198387145996, 0.014876319885253906, 0.014836064338684082, 0.014735360145568848, 0.014689599990844727, 0.014716352462768555, 0.014578240394592285, 0.014592000007629394, 0.014694399833679199, 0.014569472312927247, 0.014691328048706055, 0.014739520072937011, 0.01469644832611084, 0.01472003173828125, 0.014605600357055664, 0.01851798439025879, 0.014973600387573243, 0.014905344009399414, 0.014911487579345703, 0.01552128028869629, 0.014874464035034179, 0.014854016304016114, 0.014725919723510741, 0.014745183944702148, 0.014657952308654786, 0.014601728439331055, 0.014766592025756836, 0.014609472274780273, 0.014563679695129395, 0.014537311553955079, 0.014645248413085938, 0.014650495529174804, 0.014691200256347656, 0.014681568145751953, 0.014647839546203613, 0.014704416275024414, 0.014622943878173828, 0.014581567764282226, 0.014588095664978028, 0.014667967796325683, 0.014759743690490722, 0.01464748764038086, 0.014624575614929199, 0.014622079849243163, 0.014627455711364746, 0.014720128059387208, 0.014791551589965821, 0.014905344009399414, 0.014962688446044922, 0.014825247764587402, 0.014799072265625, 0.014699935913085938, 0.01470524787902832, 0.014561280250549317, 0.014733311653137206, 0.014657535552978516, 0.01470464038848877, 0.01463100814819336, 0.014720864295959472, 0.014832799911499023, 0.014930848121643067, 0.014774463653564452, 0.014648960113525391, 0.01515443229675293, 0.015434687614440917, 0.01466163158416748, 0.014608384132385254, 0.014684160232543946, 0.014941887855529785, 0.01491811180114746, 0.014723072052001953, 0.014741344451904297, 0.014659487724304199, 0.014606592178344726, 0.014649344444274901, 0.014655488014221191, 0.014751744270324708, 0.01489305591583252, 0.014804991722106933, 0.01467801570892334, 0.014683520317077637, 0.014592639923095703, 0.01461017608642578, 0.014699775695800782, 0.014664287567138673, 0.01495081615447998, 0.01474732780456543, 0.014873920440673828, 0.014982144355773925, 0.01477222442626953, 0.015073280334472656, 0.014868415832519532, 0.015034432411193848, 0.014731264114379883, 0.01475984001159668, 0.014876607894897462, 0.014890368461608888, 0.014884703636169434, 0.014758848190307617, 0.014720416069030762, 0.014807135581970214, 0.014834176063537598, 0.014928895950317383, 0.01477734375, 0.015099040031433105, 0.014801440238952636, 0.014649632453918458, 0.01473459243774414, 0.014773247718811035, 0.014685055732727052, 0.014818207740783691, 0.014790656089782715, 0.014638976097106933, 0.01463923168182373, 0.014618623733520507, 0.014606240272521973, 0.014977375984191894, 0.017809152603149414, 0.01989017677307129, 0.015123647689819336, 0.014771007537841796, 0.014790656089782715, 0.014600192070007324, 0.014711039543151855, 0.014597887992858887, 0.014559231758117675, 0.014872575759887695, 0.014771360397338867, 0.014676992416381835, 0.014686047554016113, 0.014897151947021485, 0.015046336174011231, 
0.014784000396728515, 0.014740480422973632, 0.014780223846435547, 0.01460223960876465, 0.015560704231262207, 0.01657823944091797, 0.014774592399597167, 0.014700511932373046, 0.014555040359497071, 0.014641280174255372, 0.014798272132873535, 0.014623104095458984, 0.014600383758544921, 0.014612480163574219, 0.0145830078125, 0.014584128379821776, 0.014715007781982422, 0.014640959739685058, 0.014842399597167969, 0.01477129554748535, 0.01472383975982666, 0.014670207977294921, 0.01468393611907959, 0.014548992156982422, 0.014618111610412597, 0.01464575958251953, 0.014619903564453125, 0.014664447784423829, 0.014673919677734374, 0.014796640396118164, 0.01468227195739746, 0.014929920196533204, 0.014632960319519044, 0.014648991584777832, 0.01456982421875, 0.014573568344116212, 0.014663680076599121, 0.014632896423339843, 0.014594112396240234, 0.014582880020141601, 0.014637984275817872, 0.014655488014221191, 0.01465664005279541, 0.015078399658203125, 0.014714431762695313, 0.014611935615539551, 0.014667648315429688, 0.01458790397644043, 0.014695327758789062, 0.014727456092834473, 0.014665504455566407, 0.014702591896057129, 0.014730879783630371, 0.014680447578430176, 0.014589952468872071, 0.014643487930297851, 0.014649056434631347, 0.014727168083190918, 0.01469644832611084, 0.014682111740112304, 0.014706687927246094, 0.014624544143676758, 0.014831616401672363, 0.014730208396911621, 0.014741503715515136, 0.014724767684936524, 0.014689888000488281, 0.014727295875549317, 0.014957183837890626, 0.01470809555053711, 0.014700672149658204, 0.01476863956451416, 0.01479916763305664, 0.01523475170135498, 0.014870176315307617, 0.014926176071166992, 0.014736800193786622, 0.014673983573913574, 0.014704895973205566, 0.014723360061645508, 0.014720128059387208, 0.015069664001464844, 0.014917759895324708, 0.015108384132385255, 0.014791999816894531, 0.014798912048339843, 0.014836352348327636, 0.014687487602233887, 0.014852864265441895, 0.01480294418334961, 0.014663680076599121, 0.014628864288330079, 0.014628288269042968, 0.014692416191101074, 0.014713376045227052, 0.01487664031982422, 0.014839936256408691, 0.014665599822998046, 0.014694399833679199, 0.014665727615356445, 0.014691840171813965, 0.014782976150512696, 0.01481935977935791, 0.014659487724304199, 0.014710335731506348, 0.014707200050354004, 0.01459404754638672, 0.01465334415435791, 0.01511843204498291, 0.01482652759552002, 0.014750656127929687, 0.01485580825805664, 0.014780832290649413, 0.014679231643676759, 0.0147542724609375, 0.014711135864257813, 0.014652576446533203, 0.014715423583984375, 0.014698816299438476, 0.014684160232543946, 0.014696576118469239, 0.014636672019958497, 0.014678272247314453, 0.014796192169189454, 0.015120991706848145, 0.01546656036376953, 0.015187359809875489, 0.0149302396774292, 0.015048447608947755, 0.01488700771331787, 0.014885343551635743, 0.014739456176757813, 0.014681568145751953, 0.01460860824584961, 0.014584128379821776, 0.014897151947021485, 0.014731072425842285, 0.01477552032470703, 0.014667872428894044, 0.0147010555267334, 0.01490777587890625, 0.014693440437316895, 0.014705120086669922, 0.014797280311584473, 0.014809087753295898, 0.014917183876037597, 0.015276288032531738, 0.01499289608001709, 0.014787263870239258, 0.015539551734924316, 0.01778755187988281, 0.015519743919372558, 0.014776320457458495, 0.014895071983337402, 0.014782496452331543, 0.014692352294921876, 0.014671872138977051, 0.014749695777893066, 0.01472697639465332, 0.014683903694152832, 0.014746047973632813, 0.01519820785522461, 0.01469983959197998, 
0.014809632301330566, 0.014779840469360352, 0.01479139232635498, 0.014702591896057129, 0.014673919677734374, 0.014656671524047852, 0.01471564769744873, 0.01472707176208496, 0.014672063827514648, 0.015119744300842284, 0.015082112312316895, 0.014788607597351074, 0.014712575912475587, 0.014852352142333984, 0.014710559844970703, 0.014702176094055176, 0.015079104423522949, 0.015038463592529297, 0.014687359809875488, 0.014681920051574707, 0.014794015884399414, 0.01468489646911621, 0.014658687591552734, 0.014814080238342285, 0.014856191635131836, 0.014937984466552734, 0.014757375717163086, 0.014722751617431641, 0.015066047668457032, 0.014750847816467285, 0.01461952018737793, 0.01459404754638672, 0.014735360145568848, 0.014730527877807617, 0.014848608016967774, 0.015125696182250976, 0.014913951873779297, 0.014783007621765137, 0.01498259162902832, 0.014770751953125, 0.015138815879821778, 0.015023551940917968, 0.016580863952636717, 0.016404863357543945, 0.015110079765319823, 0.014892095565795899, 0.01483193588256836, 0.015051103591918946, 0.015292703628540039, 0.014703840255737304, 0.014749567985534668, 0.014644000053405761, 0.014759455680847168, 0.014762592315673829, 0.014968832015991211, 0.015050751686096191, 0.01470620822906494, 0.014612544059753417, 0.014690367698669434, 0.014712608337402344, 0.014612192153930665, 0.014715744018554687, 0.014743552207946778, 0.014728287696838378, 0.014719903945922852, 0.01491977596282959, 0.01506704044342041, 0.014935839653015136, 0.015369536399841309, 0.014918496131896972, 0.014718463897705078, 0.015339776039123535, 0.014767999649047852, 0.014821696281433105, 0.015124608039855957, 0.015025152206420898, 0.01480191993713379, 0.01478649616241455, 0.014687616348266602, 0.014731040000915527, 0.014723999977111817, 0.014757696151733399, 0.01494035243988037, 0.014726719856262206, 0.014757951736450196, 0.014858176231384277, 0.014704768180847167, 0.014727487564086914, 0.014962656021118164, 0.01484553623199463, 0.014786975860595703, 0.01475699234008789, 0.014899488449096679, 0.014897279739379883, 0.014789088249206543, 0.014858240127563477, 0.014880767822265625, 0.014774271965026856, 0.014749695777893066, 0.014723072052001953, 0.014702079772949218, 0.014688480377197265, 0.014767904281616211, 0.014966336250305176, 0.014889920234680176, 0.015042112350463868, 0.01531481647491455, 0.014983327865600585, 0.01514742374420166, 0.014841856002807617, 0.014904895782470702, 0.014888863563537597, 0.015196703910827636, 0.014999551773071289, 0.01470464038848877, 0.014673727989196778, 0.01463929557800293, 0.014675840377807618, 0.014787903785705566, 0.014777152061462403, 0.014993535995483398, 0.014759807586669922, 0.014551039695739745, 0.01469587230682373, 0.014703167915344238, 0.01485209560394287, 0.015103039741516113, 0.014960672378540039, 0.015010592460632324, 0.0147640323638916, 0.014640607833862306, 0.014639776229858398, 0.014710783958435059, 0.014773856163024903, 0.014629280090332031, 0.014735360145568848, 0.014667776107788086, 0.014784511566162109, 0.01493507194519043, 0.01462166404724121, 0.014671872138977051, 0.014796799659729003, 0.014661408424377442, 0.014702560424804688, 0.014624128341674805, 0.0148090238571167, 0.014683072090148925, 0.014669280052185058, 0.014708959579467773, 0.014761311531066895, 0.014689472198486328, 0.014790111541748047, 0.014873120307922363, 0.014700544357299805, 0.0147640323638916, 0.01468825626373291, 0.014669568061828613, 0.014659839630126953, 0.014712832450866698, 0.014643199920654297, 0.014722623825073243, 0.014659551620483399, 0.014628735542297364, 
0.014715167999267578, 0.014686528205871583, 0.014632543563842774, 0.014666144371032714, 0.014654687881469727, 0.014635807991027832, 0.014630911827087402, 0.014721023559570312, 0.015084575653076172, 0.014773216247558594, 0.01469155216217041, 0.01477507209777832, 0.01466982364654541, 0.014735360145568848, 0.014698495864868164, 0.01527990436553955, 0.014732576370239258, 0.014726079940795899, 0.014654687881469727, 0.014756159782409668, 0.01479043197631836, 0.014762687683105468, 0.014680064201354981, 0.0148602876663208, 0.014804991722106933, 0.014725119590759277, 0.014796480178833008, 0.014729087829589843, 0.014690752029418946, 0.014825471878051758, 0.014854080200195312, 0.014673536300659179, 0.01470736026763916, 0.014780032157897948, 0.014764287948608398, 0.014671775817871094, 0.014731264114379883, 0.014790656089782715, 0.01472111988067627, 0.014710304260253906, 0.014829952239990235, 0.014716320037841797, 0.014742112159729004, 0.014753791809082031, 0.014892831802368164, 0.015126272201538086, 0.015077823638916016, 0.015091744422912598, 0.01495257568359375, 0.014948224067687988, 0.014907391548156738, 0.014872544288635254, 0.014878848075866698, 0.014788736343383789, 0.014872447967529297, 0.014742752075195313, 0.014779168128967285, 0.01460204792022705, 0.01468230438232422, 0.014610431671142577, 0.014695712089538574, 0.014678688049316406, 0.014669343948364258, 0.014643551826477051, 0.014600383758544921, 0.014827520370483398, 0.01459609603881836, 0.014698016166687011, 0.014633440017700195, 0.014583807945251465, 0.01464089584350586, 0.014704895973205566, 0.015078623771667481, 0.01504736042022705, 0.014864480018615723, 0.014800127983093261, 0.01464361572265625, 0.014682463645935059, 0.014636863708496093, 0.014825823783874512, 0.014767264366149902, 0.014744256019592285, 0.014841407775878905, 0.014785120010375977, 0.014831456184387207, 0.014732640266418457, 0.01470736026763916, 0.014714879989624024, 0.014730239868164062, 0.014707615852355957, 0.014675264358520507, 0.014635807991027832, 0.014733311653137206, 0.014774271965026856, 0.014855487823486328, 0.014703295707702636, 0.014728192329406739, 0.014655872344970703, 0.014623231887817383, 0.014604415893554688, 0.01467369556427002, 0.014705056190490723, 0.014767935752868652, 0.01473737621307373, 0.014743583679199219, 0.015027392387390136, 0.014891712188720704, 0.015210304260253906, 0.01563811206817627, 0.014886943817138672, 0.014805439949035645, 0.014782719612121582, 0.014687295913696288, 0.014713983535766601, 0.015320480346679688, 0.015183775901794434, 0.015125184059143067, 0.01496678352355957, 0.01495587158203125, 0.014879391670227051, 0.014899200439453125, 0.015009792327880859, 0.017104352951049805, 0.015395232200622559, 0.014792192459106445, 0.014868800163269042, 0.014770496368408203, 0.015015935897827149, 0.015674816131591798, 0.016312351226806642, 0.014946368217468262, 0.015055328369140626, 0.014984736442565918, 0.015106528282165528, 0.014891008377075195, 0.014927871704101562, 0.01556275177001953, 0.015281279563903809, 0.015017120361328126, 0.014738176345825195, 0.01463593578338623, 0.014647359848022462, 0.015914912223815917, 0.014717023849487304, 0.01547379207611084, 0.014780608177185059, 0.01478112030029297, 0.014693408012390137, 0.014664640426635742, 0.014716959953308106, 0.014716927528381347, 0.014733311653137206, 0.014640159606933593, 0.01467619228363037, 0.014680031776428223, 0.014713184356689453, 0.014993856430053711, 0.014791680335998534, 0.01481328010559082, 0.0146844482421875, 0.014867391586303711, 0.014716480255126953, 
0.014591808319091798, 0.01465775966644287, 0.014661120414733888, 0.014701151847839355, 0.01466982364654541, 0.014695615768432617, 0.014711935997009278, 0.014687007904052734, 0.015067968368530273, 0.014706080436706542, 0.01479695987701416, 0.014688799858093262, 0.014696703910827637, 0.01465727996826172, 0.014972800254821778]",tokens/s,67.47363928221026,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.08352,6174.998528,0.0,5779.750912,5773.960192,s,1,7.7395419921875,7.7395419921875,0.0,7.7395419921875,7.7395419921875,7.7395419921875,7.7395419921875,[7.7395419921875],,kWh,9.994903391674369e-06,1.0953041804092718e-06,3.4397249740053537e-06,1.4529932546088994e-05,,MB,1095.151616,6491.66848,0.0,6085.935104,6038.345728,s,10,2.1448580932617185,0.2144858093261719,0.0026452678682763126,0.21521057891845702,0.21694376525878906,0.21700848846435547,0.2170602670288086,"[0.2076636505126953, 0.21692938232421874, 0.21582981872558593, 0.2149775390625, 0.21544361877441406, 0.21384474182128907, 0.2147804412841797, 0.21227523803710938, 0.21707321166992188, 0.21604045104980468]",tokens/s,1193.5521552882635,kWh,6.232499810460658e-06,6.873281304016718e-07,4.159891034765951e-06,1.107971897562828e-05,tokens/kWh,23105279.16485205,MB,1099.776,6512.64,0.0,6106.906624,6086.544896,s,10,16.34459362792969,1.6344593627929687,0.005604136420545625,1.6363401489257812,1.6394989379882812,1.6408669006347656,1.641961270751953,"[1.6247216796875, 1.636993408203125, 1.6390897216796876, 1.6391949462890625, 1.6359698486328125, 1.634955322265625, 1.627091064453125, 1.62763232421875, 1.63671044921875, 1.64223486328125]",tokens/s,38.54485552479287,kWh,4.784965462412298e-05,5.277586740818806e-06,3.1805276626234714e-05,8.493251799117652e-05,tokens/kWh,741765.3625498888,,s,630,16.341073549270607,0.02593821198296925,0.0004003684958193192,0.02587648010253906,0.02615531234741211,0.02629103021621704,0.028321831398010255,"[0.028504127502441405, 0.026898399353027343, 0.026089408874511718, 0.02574118423461914, 0.025610015869140624, 0.02552422332763672, 0.025497119903564455, 0.02549238395690918, 0.025530527114868164, 0.02555001640319824, 0.02552511978149414, 0.025526016235351563, 0.025544704437255858, 0.02552217674255371, 0.02555084800720215, 0.025491455078125, 0.025591455459594726, 0.025532768249511718, 0.025536512374877928, 0.025540607452392578, 0.02569148826599121, 0.025616992950439454, 0.02561840057373047, 0.025754751205444334, 0.0256212158203125, 0.025647424697875978, 0.025589696884155272, 0.025595327377319337, 0.025629247665405273, 0.0256856632232666, 0.025683935165405274, 0.025722848892211915, 0.025692575454711913, 0.025634815216064453, 0.025727136611938477, 0.02568284797668457, 0.025804864883422853, 0.025985919952392578, 0.026102975845336916, 0.02615113639831543, 0.026013376235961914, 0.026021024703979493, 0.025937536239624023, 0.02591961669921875, 0.025876352310180664, 0.02587392044067383, 0.025856767654418945, 
0.02578761672973633, 0.025791135787963868, 0.02573311996459961, 0.025728607177734376, 0.025786783218383787, 0.025790464401245116, 0.025784320831298828, 0.025776128768920898, 0.025827392578125, 0.025849023818969728, 0.025817951202392577, 0.025812671661376952, 0.025790847778320313, 0.025710432052612305, 0.025745407104492187, 0.025769279479980468, 0.028258304595947265, 0.026853376388549805, 0.026139808654785157, 0.025758560180664063, 0.02566124725341797, 0.025710527420043944, 0.025781856536865235, 0.025791135787963868, 0.02577987289428711, 0.025882976531982422, 0.025825279235839844, 0.02570444869995117, 0.02587238311767578, 0.025820608139038085, 0.025772127151489257, 0.025956832885742187, 0.02571628761291504, 0.0257043514251709, 0.025907264709472657, 0.025809375762939454, 0.0258306884765625, 0.025844448089599608, 0.02579462432861328, 0.025888032913208008, 0.025910112380981447, 0.02596147155761719, 0.02587116813659668, 0.025858047485351563, 0.025937824249267577, 0.025958719253540038, 0.025874208450317383, 0.02575564765930176, 0.025968639373779297, 0.02588073539733887, 0.025882463455200195, 0.025862144470214843, 0.02609891128540039, 0.026186527252197264, 0.026232831954956053, 0.026042272567749023, 0.02619196891784668, 0.025995647430419922, 0.02600297546386719, 0.025996992111206055, 0.02592767906188965, 0.025993824005126953, 0.02593168067932129, 0.026070943832397463, 0.02613212776184082, 0.025948511123657227, 0.026040319442749024, 0.02595430374145508, 0.02598422431945801, 0.026015680313110353, 0.026076095581054688, 0.025995168685913086, 0.026041343688964845, 0.026024959564208985, 0.02612838363647461, 0.02586537551879883, 0.02601046371459961, 0.026011648178100585, 0.025948160171508788, 0.028299264907836914, 0.027129056930541993, 0.026391040802001952, 0.026112287521362305, 0.02572457695007324, 0.025762144088745116, 0.025656320571899413, 0.025699392318725586, 0.025771520614624024, 0.025801151275634766, 0.025634815216064453, 0.025831424713134765, 0.025807008743286133, 0.02568191909790039, 0.02580463981628418, 0.02586400032043457, 0.025733312606811522, 0.025832639694213868, 0.02566022491455078, 0.02572287940979004, 0.02587238311767578, 0.025968608856201173, 0.02602191925048828, 0.025835519790649415, 0.025784320831298828, 0.025738815307617187, 0.025850303649902345, 0.026075263977050782, 0.02616511917114258, 0.025911296844482422, 0.025979936599731444, 0.025959455490112304, 0.025847679138183595, 0.025878591537475584, 0.026015743255615235, 0.026089471817016603, 0.026183135986328127, 0.026545888900756837, 0.02611078453063965, 0.02634880065917969, 0.026229503631591797, 0.026226688385009765, 0.02604252815246582, 0.02597052764892578, 0.0260316162109375, 0.026122560501098634, 0.025904895782470704, 0.02593631935119629, 0.026060991287231446, 0.026068864822387697, 0.025902496337890626, 0.02609724807739258, 0.026115007400512695, 0.02601945686340332, 0.02591097640991211, 0.025891519546508788, 0.02591744041442871, 0.025957759857177735, 0.026071680068969726, 0.026137599945068358, 0.025967872619628907, 0.025921279907226563, 0.026077280044555663, 0.02840553665161133, 0.027213951110839844, 0.02630441665649414, 0.026099903106689453, 0.02587353515625, 0.02575961685180664, 0.0259102725982666, 0.025827520370483397, 0.025784128189086913, 0.02561561584472656, 0.025879295349121093, 0.025785472869873045, 0.02571558380126953, 0.02593388748168945, 0.025875680923461913, 0.025754335403442383, 0.025845760345458983, 0.025822751998901366, 0.02577574348449707, 0.025911584854125976, 0.02576646423339844, 0.02595756721496582, 
0.025946975708007813, 0.02580886459350586, 0.026021600723266602, 0.025749439239501952, 0.025960800170898437, 0.02575334358215332, 0.025772287368774415, 0.025951616287231444, 0.025858688354492187, 0.025964544296264647, 0.025861568450927734, 0.02592972755432129, 0.025958335876464844, 0.025971328735351563, 0.026123743057250976, 0.026065439224243165, 0.026187776565551758, 0.02625846481323242, 0.026104799270629885, 0.026171072006225586, 0.02627382469177246, 0.026079519271850586, 0.026087104797363283, 0.026245664596557618, 0.026100576400756834, 0.026006080627441405, 0.025921920776367187, 0.026044639587402343, 0.02590492820739746, 0.026089471817016603, 0.025990848541259767, 0.02596281623840332, 0.026066144943237304, 0.025803552627563477, 0.025836736679077148, 0.02607823944091797, 0.025966367721557616, 0.02588572883605957, 0.026189823150634766, 0.025929952621459963, 0.02612505531311035, 0.0286167049407959, 0.027272960662841798, 0.026245376586914063, 0.02609561538696289, 0.025882623672485353, 0.02592064094543457, 0.025670175552368165, 0.025641311645507814, 0.025651391983032228, 0.025636512756347655, 0.025614688873291016, 0.025890623092651367, 0.025790111541748047, 0.025695615768432618, 0.025656095504760744, 0.025692352294921873, 0.02572902488708496, 0.02593507194519043, 0.026022111892700196, 0.025846336364746095, 0.025710048675537108, 0.025600223541259765, 0.025647424697875978, 0.02583296012878418, 0.025762304306030274, 0.02592767906188965, 0.025724159240722657, 0.025682687759399414, 0.025806848526000976, 0.02580611228942871, 0.02602057647705078, 0.025757696151733397, 0.025997312545776367, 0.02575564765930176, 0.025806848526000976, 0.025958208084106444, 0.026136831283569337, 0.026128320693969725, 0.02615500831604004, 0.02612428855895996, 0.02615910339355469, 0.026038528442382813, 0.02606787109375, 0.026158048629760743, 0.02597260856628418, 0.02589695930480957, 0.025932992935180664, 0.02603091239929199, 0.026149120330810547, 0.02593142318725586, 0.02588035202026367, 0.025922143936157226, 0.02586595153808594, 0.02587388801574707, 0.025873023986816405, 0.02586614418029785, 0.026094911575317382, 0.02614143943786621, 0.026158912658691406, 0.02587660789489746, 0.025918527603149413, 0.025977792739868163, 0.0259051513671875, 0.028317695617675782, 0.02716806411743164, 0.02651215934753418, 0.026007455825805666, 0.025974239349365234, 0.02576643180847168, 0.02564240074157715, 0.0257030086517334, 0.0256777286529541, 0.025659488677978515, 0.0257475528717041, 0.02586614418029785, 0.0256777286529541, 0.025908319473266602, 0.025847999572753907, 0.025786176681518554, 0.025965503692626953, 0.025910655975341798, 0.025733823776245116, 0.025941951751708985, 0.025769567489624022, 0.02566806411743164, 0.025636863708496094, 0.025860095977783205, 0.026001407623291017, 0.02590105628967285, 0.026011743545532227, 0.02604003143310547, 0.025884000778198243, 0.025789024353027344, 0.02601907157897949, 0.025882783889770507, 0.025807712554931642, 0.02579654312133789, 0.025901119232177736, 0.02584351921081543, 0.025964736938476562, 0.026060800552368164, 0.026232831954956053, 0.0260928955078125, 0.026402496337890626, 0.026239967346191405, 0.025970111846923827, 0.02584783935546875, 0.025794944763183593, 0.025792863845825194, 0.02580851173400879, 0.02585737609863281, 0.02587923240661621, 0.025862144470214843, 0.02590086364746094, 0.02597702407836914, 0.025931167602539062, 0.02600956726074219, 0.02582966423034668, 0.025795072555541993, 0.025796607971191408, 0.025855552673339843, 0.02577043151855469, 0.025816543579101563, 
0.025844255447387696, 0.025849119186401367, 0.025848543167114258, 0.028672000885009766, 0.027183103561401366, 0.026251264572143555, 0.02586604881286621, 0.025708736419677733, 0.02574950408935547, 0.02556620788574219, 0.02550886344909668, 0.025683967590332032, 0.025622528076171876, 0.025734975814819337, 0.025660608291625978, 0.025647647857666017, 0.025546592712402345, 0.02551849555969238, 0.02553059196472168, 0.025544704437255858, 0.025561088562011718, 0.025613920211791992, 0.025638431549072267, 0.025668479919433593, 0.025665536880493164, 0.025634111404418944, 0.025596128463745118, 0.025610719680786133, 0.025747583389282225, 0.02568383979797363, 0.025675775527954102, 0.02558118438720703, 0.02560576057434082, 0.025622720718383788, 0.025607744216918946, 0.025689088821411132, 0.025708511352539064, 0.025778207778930664, 0.02576383972167969, 0.026013696670532226, 0.02612633514404297, 0.02600137519836426, 0.025987104415893556, 0.026062847137451172, 0.02599068832397461, 0.025960735321044922, 0.025888160705566408, 0.02579862403869629, 0.025823328018188478, 0.025795263290405275, 0.025769983291625977, 0.025759487152099608, 0.025751583099365233, 0.02582966423034668, 0.025830911636352538, 0.026020191192626954, 0.02585523223876953, 0.025770143508911134, 0.025720863342285155, 0.02580768013000488, 0.025728864669799803, 0.02577952003479004, 0.025787103652954103, 0.02580672073364258, 0.02580454444885254, 0.025872768402099608, 0.028495744705200197, 0.027072288513183593, 0.026278240203857422, 0.02599443244934082, 0.025718687057495117, 0.025629600524902343, 0.025589792251586915, 0.025525535583496094, 0.025537216186523437, 0.02551535987854004, 0.02561724853515625, 0.0256135368347168, 0.02559619140625, 0.025547071456909178, 0.025600000381469725, 0.02564233589172363, 0.0256529598236084, 0.025649728775024413, 0.025663616180419922, 0.02569651222229004, 0.025657344818115234, 0.0256646728515625, 0.025614368438720704, 0.025671552658081055, 0.02563987159729004, 0.025645055770874024, 0.025638912200927736, 0.025716703414916994, 0.02572496032714844, 0.025741119384765625, 0.025744640350341796, 0.025713600158691407, 0.025774080276489256, 0.025694208145141603, 0.02570240020751953, 0.025734495162963868, 0.02592425537109375, 0.02604035186767578, 0.026072256088256834, 0.026082080841064455, 0.025980768203735353, 0.025905311584472655, 0.025993215560913087, 0.02590105628967285, 0.025832511901855468, 0.02584419250488281, 0.025744863510131836, 0.025789440155029295, 0.025778175354003906, 0.025826719284057616, 0.025789024353027344, 0.025852928161621092, 0.02584582328796387, 0.02581190490722656, 0.025829376220703124, 0.025792512893676758, 0.025781696319580077, 0.025999807357788087, 0.025897087097167967, 0.025841312408447267, 0.025860448837280274, 0.025802751541137696, 0.025827327728271485, 0.028323520660400392, 0.027041791915893554, 0.026205184936523438, 0.025771936416625976, 0.025613983154296874, 0.025598207473754884, 0.025546335220336915, 0.025534975051879884, 0.025536672592163086, 0.025793567657470703, 0.025671968460083006, 0.02557792091369629, 0.025574880599975584, 0.0256231689453125, 0.02572902488708496, 0.025783903121948244, 0.025860511779785156, 0.025847583770751952, 0.02596441650390625, 0.025874784469604492, 0.025960447311401368, 0.025960447311401368, 0.025784320831298828, 0.025927072525024415, 0.025969247817993164, 0.02581667137145996, 0.025909664154052735, 0.0260032958984375, 0.026056863784790038, 0.025993215560913087, 0.025960447311401368, 0.025917600631713868, 0.02595737648010254, 0.025731231689453123, 0.025838272094726562, 
0.025939968109130858, 0.026060895919799806, 0.026015647888183592, 0.026161151885986327, 0.026101024627685546, 0.026034271240234375, 0.026212991714477538, 0.026013696670532226, 0.026281984329223632, 0.025974079132080077, 0.02599740791320801, 0.02590166473388672, 0.02613603210449219, 0.026298431396484374, 0.026027904510498048, 0.02613279914855957, 0.025962783813476564, 0.02590105628967285, 0.02592576026916504, 0.026054399490356445, 0.025946239471435546, 0.026108928680419922, 0.026035200119018553, 0.02595756721496582, 0.026013792037963866, 0.025859807968139647, 0.0261146240234375, 0.025868255615234374, 0.028612672805786134, 0.027265024185180665, 0.02637151908874512, 0.026057279586791993, 0.025996959686279297, 0.025694784164428712, 0.02592131233215332, 0.025842784881591797, 0.02577440071105957, 0.025743967056274415, 0.025887807846069335, 0.02587676811218262, 0.0259072322845459, 0.025735807418823243, 0.02571820831298828, 0.025987232208251953, 0.025881216049194335, 0.0259434871673584, 0.025950559616088865, 0.025857152938842772, 0.026002304077148437, 0.026009599685668947, 0.02599920082092285, 0.02601308822631836, 0.025946495056152343, 0.026027616500854493, 0.0260053768157959, 0.02588083267211914, 0.02580956840515137, 0.0258602237701416, 0.025824256896972656, 0.025835712432861327, 0.026052480697631837, 0.02590755271911621, 0.025847871780395507, 0.0259465274810791, 0.02638047981262207, 0.026145792007446288, 0.02649078369140625, 0.026422176361083984, 0.026275840759277344, 0.026229856491088867, 0.026230752944946287, 0.02623583984375, 0.025980287551879883, 0.02598361587524414, 0.026034175872802736, 0.02609971237182617, 0.025837568283081053, 0.026021888732910156, 0.026193920135498046, 0.02589004707336426, 0.02596735954284668, 0.025970239639282227, 0.02589753532409668, 0.026238847732543945, 0.02634774398803711, 0.026011423110961916, 0.02611814308166504, 0.026003456115722655, 0.025941984176635742, 0.025982208251953125, 0.02591209602355957]",tokens/s,38.5531585853562,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 955, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 707, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 707, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 506, in __init__ self.mlp = MistralMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 147, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in 
__init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 555, in __init__ self.self_attn = 
QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 216, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the 
following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 
557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 164, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.50 GiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 
1068, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 632, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 300, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 64.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 555, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 216, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 162, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, 
in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 536.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,811.966464,1129.250816,0.0,734.0032,709.336064,s,1,7.4012548828125,7.4012548828125,0.0,7.4012548828125,7.4012548828125,7.4012548828125,7.4012548828125,[7.4012548828125],,kWh,5.018227141677774e-06,5.464397925686503e-07,1.0041674700089764e-06,6.568834404255401e-06,,MB,1117.376512,1276.051456,0.0,870.31808,809.960448,s,19,0.2845489587783813,0.01497626098833586,0.000721354646664666,0.01482044792175293,0.014997305488586425,0.01537096910476684,0.017479082736968996,"[0.018006111145019533, 0.014694175720214843, 0.014658592224121093, 0.014844799995422363, 0.014743328094482422, 0.014737407684326171, 0.01477455997467041, 0.014862208366394044, 0.01482044792175293, 0.014907584190368653, 0.01497708797454834, 0.014729791641235352, 0.015078175544738769, 0.014839903831481933, 0.014872063636779785, 0.014734848022460938, 0.014711680412292481, 0.01483948802947998, 0.014716704368591309]",tokens/s,17093.719199964766,kWh,5.896171635690091e-07,6.502351689316556e-08,3.9073656399191194e-07,1.0453772444540865e-06,tokens/kWh,244887672.2332783,MB,1127.317504,1309.605888,0.0,905.969664,809.963008,s,19,9.883693817138672,0.5201944114283512,0.007087630848022105,0.52034814453125,0.5285178955078126,0.5290438171386719,0.5304746325683594,"[0.5003568115234375, 0.5235473022460938, 0.5206721801757812, 0.518958984375, 0.5252100219726562, 0.5308323364257812, 0.527358642578125, 0.5236458740234375, 0.5288450927734375, 0.5238328247070313, 0.5284360961914063, 0.5189959106445312, 0.5198388061523438, 0.5151084594726563, 0.52034814453125, 0.5106662902832031, 0.5172429809570313, 0.5129843139648438, 0.516812744140625]",tokens/s,121.10856752000551,kWh,1.4459999236431325e-05,1.594679690931747e-06,7.439825256008322e-06,2.3494504183371386e-05,tokens/kWh,2681478.166480707,,s,1197,9.87563265323639,0.008250319676889214,0.0002649751535341334,0.008231072425842286,0.008551398658752441,0.008621772575378418,0.008864796257019043,"[0.007893343925476075, 0.007861472129821778, 0.007827648162841797, 0.0078115520477294925, 0.007755648136138916, 0.007863840103149415, 0.007809216022491455, 0.007858719825744629, 0.007843423843383789, 0.007976895809173584, 0.007849631786346435, 0.007805344104766846, 0.007824992179870606, 0.007926464080810547, 0.007839263916015625, 0.007803487777709961, 0.007813119888305664, 0.007804927825927735, 0.0079584641456604, 0.007798208236694336, 0.007791232109069824, 0.00779859209060669, 0.007766208171844482, 0.0077844481468200685, 0.00777785587310791, 0.007813600063323975, 0.007848991870880126, 0.007807936191558838, 
0.007755360126495361, 0.00779699182510376, 0.007785920143127441, 0.0077749438285827635, 0.0077844481468200685, 0.007790048122406006, 0.0078032960891723634, 0.007833727836608887, 0.007806015968322754, 0.007857279777526855, 0.007810880184173584, 0.007821568012237549, 0.007825151920318603, 0.007868319988250732, 0.007917664051055907, 0.007870463848114014, 0.007855519771575928, 0.007848896026611329, 0.007898784160614014, 0.008034175872802734, 0.008136832237243652, 0.008252575874328613, 0.008133472442626953, 0.00814675235748291, 0.008493247985839844, 0.008154208183288575, 0.008208992004394532, 0.008403264045715331, 0.008435711860656739, 0.008345600128173827, 0.008331263542175293, 0.00829644775390625, 0.008318976402282715, 0.008258975982666016, 0.008069727897644043, 0.007967040061950684, 0.008228927612304688, 0.008381855964660645, 0.008514080047607422, 0.008398847579956055, 0.008409088134765624, 0.008484383583068848, 0.008565216064453126, 0.008537343978881836, 0.008313599586486817, 0.008328895568847656, 0.008457728385925293, 0.008162112236022949, 0.008193792343139649, 0.008510784149169922, 0.008345919609069824, 0.008247072219848633, 0.008231391906738281, 0.00831116771697998, 0.008439647674560546, 0.008395071983337402, 0.008331104278564453, 0.008305952072143555, 0.00849782371520996, 0.008475744247436523, 0.008368800163269043, 0.008397120475769043, 0.008384511947631837, 0.008136704444885253, 0.008265855789184571, 0.008190848350524903, 0.008176639556884765, 0.008426912307739258, 0.008436320304870605, 0.008615936279296875, 0.008404255867004395, 0.008436127662658692, 0.008482687950134278, 0.008280447959899902, 0.008165184020996094, 0.008310720443725586, 0.008370112419128418, 0.008553888320922852, 0.008442848205566407, 0.008437760353088379, 0.008281696319580078, 0.008396448135375977, 0.0084933443069458, 0.008259679794311523, 0.008149408340454101, 0.008048895835876466, 0.008050399780273438, 0.007923295974731445, 0.008352160453796387, 0.007975264072418213, 0.0080480318069458, 0.008184063911437988, 0.008363519668579102, 0.0083189115524292, 0.008245823860168457, 0.008046048164367676, 0.007946591854095459, 0.007879039764404298, 0.007845503807067871, 0.007882847785949706, 0.007893856048583985, 0.007968128204345703, 0.008161439895629882, 0.008217056274414063, 0.008203519821166991, 0.008290495872497559, 0.008127391815185547, 0.00802569580078125, 0.008048992156982421, 0.008435423851013184, 0.008085344314575195, 0.008283712387084962, 0.00825551986694336, 0.008249216079711914, 0.008295104026794434, 0.008364031791687012, 0.008305983543395996, 0.008198431968688965, 0.008036767959594727, 0.007948287963867188, 0.008017056465148926, 0.007942048072814942, 0.007928768157958984, 0.00800767993927002, 0.00848796844482422, 0.008551360130310059, 0.00869379234313965, 0.008538111686706543, 0.008468416213989258, 0.0085665283203125, 0.008523872375488281, 0.008462559700012208, 0.008455360412597656, 0.008371007919311524, 0.008291680335998535, 0.0082008638381958, 0.008112159729003907, 0.008010880470275879, 0.008014687538146973, 0.008093695640563964, 0.008437760353088379, 0.008528127670288085, 0.008328960418701173, 0.008343551635742187, 0.008365504264831543, 0.008278592109680176, 0.008408512115478516, 0.008274496078491211, 0.008278016090393067, 0.008184831619262695, 0.008113375663757324, 0.008284031867980957, 0.008564640045166015, 0.008697855949401855, 0.00841318416595459, 0.008391776084899903, 0.0085097599029541, 0.008290528297424316, 0.00822105598449707, 0.008324831962585448, 0.008116064071655273, 0.00847436809539795, 
0.008275967597961426, 0.00808291244506836, 0.008046751976013183, 0.008059264183044434, 0.008085503578186035, 0.007964672088623047, 0.007993343830108643, 0.008001055717468262, 0.008038368225097656, 0.008157695770263672, 0.008253439903259278, 0.008138751983642578, 0.008196096420288086, 0.008193087577819825, 0.00810694408416748, 0.008030207633972167, 0.007966720104217529, 0.00798467206954956, 0.007971360206604003, 0.008165311813354493, 0.008267200469970704, 0.008436287879943848, 0.008556223869323731, 0.008446271896362306, 0.00830835247039795, 0.008552831649780273, 0.008658304214477539, 0.00851417636871338, 0.008351743698120117, 0.008291647911071778, 0.008297183990478515, 0.00825712013244629, 0.008360320091247559, 0.008189824104309082, 0.008130496025085449, 0.008180224418640136, 0.008197823524475097, 0.008269824028015137, 0.008309920310974122, 0.008174528121948242, 0.008161375999450684, 0.008107487678527831, 0.008431743621826172, 0.008214752197265624, 0.00818825626373291, 0.00803395175933838, 0.00808140754699707, 0.008119808197021485, 0.008130559921264649, 0.00838912010192871, 0.008478719711303711, 0.008484864234924316, 0.008355839729309082, 0.008289695739746094, 0.008280672073364258, 0.008333312034606934, 0.008519680023193359, 0.00829212760925293, 0.00808777618408203, 0.00809779167175293, 0.008170783996582031, 0.008368864059448242, 0.00807372760772705, 0.008327168464660644, 0.008491007804870606, 0.008148991584777832, 0.008132287979125977, 0.008038080215454102, 0.00799014377593994, 0.008025856018066406, 0.008228863716125488, 0.008476256370544433, 0.00838419246673584, 0.008294976234436036, 0.008198304176330566, 0.008206175804138184, 0.00829974365234375, 0.008547264099121094, 0.008525504112243652, 0.008403264045715331, 0.008376607894897461, 0.008361408233642578, 0.008531455993652343, 0.008562975883483887, 0.00856713581085205, 0.00840886402130127, 0.008364671707153321, 0.008375200271606445, 0.008358495712280273, 0.008333567619323731, 0.008259455680847167, 0.008468544006347656, 0.008330656051635741, 0.008266400337219239, 0.008178943634033204, 0.00832579231262207, 0.008216671943664551, 0.00854412841796875, 0.008306112289428711, 0.008290719985961915, 0.008437952041625977, 0.00832316780090332, 0.008240480422973633, 0.00841590404510498, 0.008347935676574707, 0.008347040176391601, 0.00865328025817871, 0.008306528091430664, 0.008287232398986816, 0.008217599868774414, 0.008336735725402831, 0.008370528221130371, 0.008427712440490722, 0.008331392288208008, 0.008293888092041016, 0.00841983985900879, 0.008529919624328614, 0.008481056213378906, 0.008519200325012207, 0.008364224433898925, 0.008295680046081543, 0.008223487854003906, 0.008154111862182617, 0.00812339210510254, 0.008159135818481445, 0.008236448287963867, 0.008600128173828126, 0.008493056297302246, 0.008465503692626953, 0.008273088455200195, 0.008203007698059083, 0.008550463676452636, 0.008588191986083984, 0.010002752304077149, 0.00867296028137207, 0.008240287780761718, 0.008157471656799316, 0.008440320014953612, 0.008505279541015626, 0.008552576065063476, 0.008591168403625489, 0.008485055923461915, 0.008316448211669922, 0.008450528144836426, 0.008347647666931152, 0.00828006362915039, 0.008299903869628906, 0.008428319931030274, 0.008572256088256836, 0.008470623970031739, 0.00844979190826416, 0.008491616249084472, 0.008536383628845215, 0.008495903968811036, 0.00866198444366455, 0.00853219223022461, 0.008431391716003418, 0.008480832099914551, 0.008421119689941406, 0.008550592422485351, 0.008783552169799804, 0.008365440368652343, 0.008335871696472168, 
0.00822316837310791, 0.008155136108398438, 0.008136704444885253, 0.008103167533874512, 0.008094464302062988, 0.008187904357910156, 0.008322079658508301, 0.00836297607421875, 0.008487135887145995, 0.008410016059875488, 0.008301440238952637, 0.008353792190551757, 0.008359456062316894, 0.008275456428527832, 0.00817046356201172, 0.008167136192321777, 0.008337696075439454, 0.008477760314941405, 0.008567744255065918, 0.008835071563720704, 0.008524160385131836, 0.00831436824798584, 0.008167360305786133, 0.008154560089111328, 0.008239647865295411, 0.007981023788452148, 0.00805337619781494, 0.008075424194335937, 0.007984352111816407, 0.0080250244140625, 0.008019807815551758, 0.008181759834289551, 0.008272928237915039, 0.008317503929138183, 0.008543935775756836, 0.008490719795227052, 0.008501279830932617, 0.008325471878051757, 0.008342144012451171, 0.008417280197143554, 0.008378368377685547, 0.008416864395141601, 0.008936863899230957, 0.008348671913146973, 0.008507391929626466, 0.008493056297302246, 0.008456000328063965, 0.008521984100341797, 0.008660927772521972, 0.008773856163024903, 0.008524736404418946, 0.008498016357421876, 0.008807583808898926, 0.008489824295043946, 0.008476287841796874, 0.008496671676635743, 0.008440192222595216, 0.008616543769836426, 0.008331135749816894, 0.008194047927856446, 0.008173567771911621, 0.00827625560760498, 0.008230112075805663, 0.008634880065917968, 0.008355392456054687, 0.00826358413696289, 0.008108575820922852, 0.008154751777648926, 0.008067456245422364, 0.008034272193908691, 0.00803228759765625, 0.008056832313537597, 0.008468480110168456, 0.008479935646057129, 0.008289216041564941, 0.00835142421722412, 0.008232640266418458, 0.008229120254516602, 0.008286687850952148, 0.008476448059082032, 0.008456192016601562, 0.008443903923034669, 0.00850483226776123, 0.008499520301818848, 0.008613311767578125, 0.008526080131530762, 0.008360447883605958, 0.008357888221740722, 0.008232928276062012, 0.0083853759765625, 0.008403008460998534, 0.008356863975524903, 0.008250368118286134, 0.008292351722717285, 0.008486559867858887, 0.008591135978698731, 0.00847110366821289, 0.008386560440063476, 0.008230912208557128, 0.008144895553588867, 0.008285696029663087, 0.008130144119262696, 0.00812662410736084, 0.008098591804504394, 0.008072383880615234, 0.00818665599822998, 0.008347455978393554, 0.008509056091308594, 0.008597824096679687, 0.008417535781860351, 0.008417280197143554, 0.008645888328552245, 0.008354559898376464, 0.008396127700805663, 0.00839033603668213, 0.008549344062805176, 0.008593119621276856, 0.00849948787689209, 0.00844934368133545, 0.008407744407653808, 0.008566783905029298, 0.008455231666564941, 0.008331775665283203, 0.008417599678039551, 0.008460384368896484, 0.008302623748779298, 0.008165375709533691, 0.008308159828186034, 0.008319135665893554, 0.008290975570678712, 0.008187647819519042, 0.008077312469482421, 0.008122400283813476, 0.00819315242767334, 0.00810649585723877, 0.008220352172851563, 0.00801587200164795, 0.008063167572021484, 0.0080164155960083, 0.008062911987304687, 0.007990816116333007, 0.007985631942749023, 0.008064000129699708, 0.00842240047454834, 0.008576319694519044, 0.00855519962310791, 0.008228992462158203, 0.008252511978149414, 0.008172287940979003, 0.008324735641479493, 0.008303071975708008, 0.008181599617004395, 0.008525376319885253, 0.008513728141784667, 0.0084235200881958, 0.008639840126037598, 0.008487903594970703, 0.008484864234924316, 0.008605695724487305, 0.008460288047790527, 0.008486847877502441, 0.008362048149108887, 
0.008273920059204102, 0.00830463981628418, 0.008208383560180664, 0.008187456130981445, 0.008864128112792968, 0.00864998435974121, 0.00861676788330078, 0.009211999893188477, 0.008236960411071777, 0.008302592277526855, 0.008456192016601562, 0.008475872039794921, 0.00861673641204834, 0.00847606372833252, 0.008650367736816406, 0.008737983703613282, 0.008650015830993653, 0.008571264266967773, 0.008437888145446777, 0.008568256378173829, 0.008350111961364747, 0.008177824020385742, 0.008120223999023438, 0.00813417625427246, 0.008196576118469238, 0.008304991722106934, 0.008441216468811035, 0.008232895851135253, 0.00831942367553711, 0.00852121639251709, 0.008184384346008301, 0.008292287826538085, 0.008134655952453614, 0.008112128257751466, 0.008132448196411133, 0.008179295539855956, 0.008274496078491211, 0.008304320335388183, 0.008236800193786621, 0.008333632469177246, 0.008198399543762207, 0.008365823745727538, 0.00832921600341797, 0.008423616409301758, 0.008585056304931641, 0.008304032325744629, 0.008250176429748535, 0.008218175888061523, 0.008159680366516113, 0.008155136108398438, 0.008089599609375, 0.008546303749084473, 0.008078528404235839, 0.008096927642822266, 0.008194496154785156, 0.007995007991790771, 0.008026495933532714, 0.008001440048217774, 0.008232864379882812, 0.00834598445892334, 0.008450048446655273, 0.008591391563415528, 0.008654815673828125, 0.008589311599731446, 0.008564736366271973, 0.008566271781921387, 0.008468159675598145, 0.008812383651733398, 0.008422368049621582, 0.008356127738952637, 0.008275679588317871, 0.008263360023498536, 0.008216896057128907, 0.00828758430480957, 0.00841983985900879, 0.008331423759460449, 0.00828611183166504, 0.008232416152954102, 0.008276063919067383, 0.00833795166015625, 0.008241151809692383, 0.008034303665161132, 0.007924799919128417, 0.007915552139282227, 0.008047136306762695, 0.008191519737243653, 0.008090047836303712, 0.008049056053161622, 0.008033760070800781, 0.008239359855651855, 0.008652928352355958, 0.008412991523742676, 0.008470175743103027, 0.008372063636779786, 0.008317184448242188, 0.00832380771636963, 0.008177536010742188, 0.008122367858886719, 0.008002943992614746, 0.007998079776763915, 0.007978367805480956, 0.008300224304199218, 0.008360256195068359, 0.0081844482421875, 0.00820019245147705, 0.009277440071105958, 0.008279871940612794, 0.00808569622039795, 0.008163328170776368, 0.008333312034606934, 0.008546431541442872, 0.008609663963317872, 0.008595680236816406, 0.008615488052368165, 0.008722559928894043, 0.008186847686767577, 0.008368127822875977, 0.00819200038909912, 0.008048192024230957, 0.008068896293640137, 0.008684255599975587, 0.008110015869140626, 0.008671232223510742, 0.008306112289428711, 0.009097151756286621, 0.008798848152160645, 0.008244288444519044, 0.00821446418762207, 0.008372575759887695, 0.008743583679199219, 0.008548352241516113, 0.008304032325744629, 0.008849856376647949, 0.008466591835021972, 0.008455391883850097, 0.008193920135498047, 0.008168352127075196, 0.008011743545532227, 0.008216608047485351, 0.00807913589477539, 0.008062687873840332, 0.008203071594238281, 0.008142144203186034, 0.008132991790771484, 0.008484864234924316, 0.00833743953704834, 0.008362175941467286, 0.008283967971801759, 0.008486880302429198, 0.008244447708129883, 0.008339327812194824, 0.008360447883605958, 0.008481184005737304, 0.008665087699890137, 0.008570879936218261, 0.00840294361114502, 0.008417280197143554, 0.008826848030090331, 0.0086212158203125, 0.008470720291137695, 0.008880831718444825, 0.011236672401428222, 
0.00931059169769287, 0.008454463958740234, 0.008196096420288086, 0.00810153579711914, 0.008171456336975098, 0.008034111976623536, 0.007997727870941163, 0.008010047912597656, 0.00807919979095459, 0.00840719985961914, 0.008341504096984862, 0.008046208381652832, 0.007915616035461426, 0.007931968212127686, 0.00785814380645752, 0.00784819221496582, 0.00840499210357666, 0.008392543792724609, 0.008231072425842286, 0.008404352188110352, 0.00820844841003418, 0.008217151641845704, 0.008535264015197754, 0.008381216049194336, 0.008258624076843262, 0.008239680290222168, 0.008194432258605958, 0.008219903945922851, 0.008194815635681153, 0.008148991584777832, 0.007991551876068115, 0.007947264194488525, 0.007922080039978028, 0.007887167930603027, 0.007928224086761474, 0.00800921630859375, 0.008417407989501953, 0.008666624069213867, 0.008603424072265624, 0.008616095542907714, 0.00857363224029541, 0.008460160255432128, 0.008380415916442872, 0.00828166389465332, 0.008448543548583985, 0.008463616371154786, 0.008170080184936524, 0.008065088272094727, 0.007980576038360596, 0.007960608005523681, 0.007971263885498047, 0.008167424201965333, 0.00831488037109375, 0.008226816177368163, 0.008178688049316407, 0.008449024200439453, 0.008407039642333984, 0.008070624351501465, 0.008073760032653808, 0.008359935760498047, 0.008291711807250976, 0.00817625617980957, 0.008065376281738281, 0.008107680320739746, 0.00825260829925537, 0.008116479873657226, 0.008038975715637208, 0.008048992156982421, 0.007956128120422363, 0.008148063659667968, 0.008534879684448242, 0.008304703712463378, 0.008199551582336425, 0.008147295951843261, 0.008075263977050781, 0.008067487716674804, 0.008081119537353515, 0.008183296203613282, 0.00852560043334961, 0.008423359870910644, 0.008521920204162597, 0.008351552009582519, 0.008386655807495117, 0.008320704460144043, 0.008333632469177246, 0.008140800476074218, 0.008011775970458984, 0.008249343872070313, 0.007929855823516846, 0.007964672088623047, 0.007979008197784423, 0.00820633602142334, 0.008935423851013183, 0.00862003231048584, 0.008289823532104492, 0.008305215835571289, 0.0084683837890625, 0.00825654411315918, 0.008097920417785644, 0.008061216354370118, 0.008040384292602539, 0.008004511833190918, 0.008230624198913574, 0.008390175819396972, 0.008348128318786622, 0.008115424156188964, 0.008161312103271484, 0.008471296310424805, 0.008167327880859375, 0.008062080383300781, 0.008191136360168457, 0.008146719932556152, 0.008073247909545899, 0.00833676815032959, 0.008116352081298827, 0.007987711906433105, 0.007921664237976075, 0.007921664237976075, 0.008173824310302735, 0.008551456451416016, 0.008560832023620606, 0.008646719932556153, 0.00862399959564209, 0.008491616249084472, 0.00848908805847168, 0.008255359649658203, 0.00830787181854248, 0.008190784454345703, 0.008233023643493651, 0.008042624473571777, 0.00812399959564209, 0.008027711868286132, 0.007998144149780274, 0.007976960182189942, 0.008002911567687988, 0.00812713623046875, 0.008550016403198243, 0.00856112003326416, 0.00820576000213623, 0.008137184143066406, 0.008199423789978027, 0.008426112174987793, 0.00812880039215088, 0.008356063842773438, 0.008142848014831543, 0.008099136352539062, 0.00818239974975586, 0.00812992000579834, 0.008083744049072266, 0.008203840255737305, 0.008388928413391113, 0.008256031990051269, 0.008214112281799316, 0.008565152168273926, 0.008335200309753418, 0.008150527954101563, 0.008063936233520507, 0.008044256210327148, 0.007970464229583741, 0.007915872097015381, 0.00790502405166626, 0.008302783966064453, 0.008859231948852539, 
0.008552927970886231, 0.008525216102600097, 0.008458847999572755, 0.008341407775878907, 0.008337504386901855, 0.00820019245147705, 0.008064800262451172, 0.00802019214630127, 0.008025728225708007, 0.008016256332397461, 0.007983104228973388, 0.007948287963867188, 0.007999743938446045, 0.007988096237182617, 0.008270272254943848, 0.008196543693542481, 0.007996992111206054, 0.00811411190032959, 0.008144895553588867, 0.00809177589416504, 0.008313216209411622, 0.008396544456481934, 0.008177472114562987, 0.00800812816619873, 0.007985151767730713, 0.007974080085754395, 0.008003487586975097, 0.007978208065032959, 0.007951648235321044, 0.008077247619628906, 0.00804911994934082, 0.00791756820678711, 0.007942143917083741, 0.008171520233154296, 0.008089632034301757, 0.007984384059906006, 0.008079551696777343, 0.008093695640563964, 0.008130208015441894, 0.008411328315734863, 0.008708800315856934, 0.008506367683410645, 0.008454143524169922, 0.00865884780883789, 0.008448127746582032, 0.00828822422027588, 0.00819200038909912, 0.008239104270935058, 0.008218624114990235, 0.008334495544433594, 0.008170175552368163, 0.008070816040039062, 0.008052255630493164, 0.008004608154296875, 0.007892960071563721, 0.00820633602142334, 0.008373503684997558, 0.008169568061828614, 0.008089599609375, 0.008493696212768555, 0.00831491184234619, 0.008093695640563964, 0.008099840164184571, 0.008072511672973633, 0.008121024131774903, 0.008099519729614257, 0.007995359897613526, 0.007963263988494873, 0.008063839912414552, 0.008090208053588867, 0.008007807731628418, 0.008288000106811523, 0.008364447593688965, 0.008232704162597656, 0.008270079612731934, 0.008054783821105957, 0.007943967819213867, 0.00787663984298706, 0.007902751922607422, 0.007883423805236817, 0.007914783954620362, 0.008302335739135741, 0.008564864158630371, 0.008680224418640137, 0.008549951553344726, 0.008594143867492675, 0.008519455909729004, 0.008575072288513183, 0.008613759994506837, 0.00842959976196289, 0.008267775535583496, 0.008349920272827148, 0.008248448371887207, 0.008104063987731934, 0.008280608177185058, 0.008378656387329101, 0.00829206371307373, 0.008140800476074218, 0.008396800041198731, 0.008746623992919922, 0.008175775527954102, 0.008378591537475586, 0.008978431701660156, 0.008488960266113281, 0.007985151767730713, 0.008249279975891113, 0.008201727867126465, 0.008133184432983399, 0.008124416351318359, 0.008108256340026855, 0.008061984062194823, 0.008010496139526367, 0.007982624053955079, 0.007936480045318604, 0.007929408073425293, 0.007891200065612792, 0.007823552131652832, 0.008043999671936036, 0.008509984016418457, 0.008179455757141114, 0.008226207733154297, 0.008029024124145507, 0.008134655952453614, 0.008047840118408204, 0.008176416397094726, 0.008130559921264649, 0.008011199951171876, 0.007973440170288086, 0.007966495990753174, 0.008197759628295899, 0.008629983901977539, 0.008559488296508789, 0.008609919548034669, 0.008513216018676759, 0.008305952072143555, 0.008380319595336914, 0.008340352058410645, 0.008226271629333495, 0.008026080131530762, 0.0080513916015625, 0.007966720104217529, 0.007896992206573486, 0.008033632278442383, 0.008243295669555664, 0.008204959869384765, 0.008069120407104492, 0.008114175796508789, 0.008463744163513184, 0.008304448127746581, 0.008116448402404785, 0.007993408203125, 0.008055359840393066, 0.008008735656738281, 0.00798528003692627, 0.007956384181976318, 0.007905759811401368, 0.007999584197998047, 0.008100064277648925, 0.008003583908081055, 0.007946368217468261, 0.007911424160003662, 0.008095711708068847, 
0.007929887771606445, 0.007823359966278077, 0.007791711807250977, 0.007845920085906982, 0.007817440032958985, 0.008020832061767577, 0.008016448020935058, 0.008191264152526856, 0.008037088394165039, 0.007976960182189942, 0.00798089599609375, 0.007878431797027588, 0.007895423889160156, 0.007921664237976075, 0.008054783821105957, 0.008519680023193359, 0.008683775901794434, 0.008574720382690429, 0.00854422378540039, 0.008513567924499511, 0.008310784339904785, 0.00821350383758545, 0.008184831619262695, 0.008077280044555664, 0.00793398380279541, 0.00801091194152832, 0.008084320068359374, 0.00799948787689209, 0.007918784141540527, 0.007942751884460449, 0.008067584037780762, 0.00801529598236084, 0.009736063957214356, 0.010015135765075683, 0.008671232223510742, 0.008746272087097167, 0.008314784049987794, 0.008313023567199707, 0.00828604793548584, 0.008320863723754884, 0.00812947177886963, 0.008040224075317383, 0.008100159645080566, 0.008004960060119628, 0.008047264099121093, 0.008218015670776367, 0.00814095973968506, 0.008052767753601075, 0.007964735984802246, 0.007909887790679932, 0.007898880004882812, 0.007847936153411865, 0.00786787223815918, 0.008053088188171386, 0.008134016036987305, 0.008071904182434081, 0.008062047958374024, 0.007986239910125733, 0.008013216018676758, 0.007948639869689942, 0.00790118408203125, 0.007892288208007813, 0.007910272121429444, 0.007943999767303467, 0.008232224464416503, 0.00857795238494873, 0.0085731840133667, 0.008666303634643555, 0.008530495643615722, 0.00810086441040039, 0.008224767684936523, 0.008120256423950195, 0.008083104133605957, 0.0080797758102417, 0.008028287887573243, 0.00818284797668457, 0.008061984062194823, 0.008080256462097169, 0.008491359710693359, 0.008476799964904785, 0.00821292781829834, 0.008077312469482421, 0.008114175796508789, 0.008019935607910157, 0.008079392433166504, 0.007921664237976075, 0.007870816230773925, 0.007877823829650878, 0.007954271793365478, 0.00816316795349121, 0.0081081600189209, 0.00799180793762207, 0.007944352149963379, 0.007925439834594726, 0.007937471866607666, 0.00790822410583496, 0.008116543769836426, 0.008072319984436036, 0.008022591590881348, 0.007999167919158936, 0.00786198377609253, 0.007845536231994629, 0.007854047775268555, 0.008104415893554687, 0.008836768150329589, 0.00863702392578125, 0.00854860782623291, 0.00856454372406006, 0.008382528305053712, 0.008247424125671386, 0.008433664321899414, 0.00832431983947754, 0.008137503623962402, 0.008054847717285157, 0.008040063858032227, 0.007966432094573974, 0.007985023975372315, 0.00820911979675293, 0.007979008197784423, 0.008433759689331055, 0.008546015739440919, 0.008310815811157227, 0.008165696144104003, 0.00819388771057129, 0.008173567771911621, 0.008044544219970704, 0.00827187156677246, 0.008140800476074218, 0.007962175846099854, 0.007987552165985108, 0.008020064353942872, 0.008130559921264649, 0.007943999767303467, 0.008036607742309571, 0.007968512058258057, 0.007925280094146728, 0.00794159984588623, 0.008189151763916016, 0.008091423988342285, 0.008025631904602051, 0.007999328136444092, 0.007933631896972656, 0.008051648139953613, 0.00819974422454834, 0.008421152114868165, 0.008501343727111817, 0.0084137601852417, 0.00841318416595459, 0.008475872039794921, 0.008506048202514648, 0.008560480117797852, 0.008425727844238281, 0.008361087799072266, 0.008130496025085449, 0.008211392402648926, 0.008130304336547852, 0.00799564790725708, 0.007981056213378907, 0.007982624053955079, 0.008571136474609374, 0.008288479804992676, 0.008104127883911133, 0.008101471900939941, 
0.008231167793273925, 0.008155103683471679, 0.008447999954223634, 0.008294400215148925, 0.008199839591979981, 0.008261088371276856, 0.008442239761352539, 0.008205120086669921, 0.008139552116394043, 0.008012096405029296, 0.007983712196350098, 0.008044544219970704, 0.008105376243591308, 0.007918176174163818, 0.008075136184692383, 0.008065152168273925, 0.008039423942565918, 0.007978303909301758, 0.008029888153076172, 0.008065024375915527, 0.007975135803222656, 0.007892159938812256, 0.008155743598937988, 0.007886847972869874, 0.007906911849975586, 0.00841801643371582, 0.008842944145202637, 0.00868556785583496, 0.00871014404296875, 0.00861184024810791, 0.0085032958984375, 0.008278016090393067]",tokens/s,121.20742458031037,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.878144,6174.998528,0.0,5779.750912,5773.960192,s,1,7.529166015625,7.529166015625,0.0,7.529166015625,7.529166015625,7.529166015625,7.529166015625,[7.529166015625],,kWh,9.614256604165423e-06,1.0533241153822993e-06,3.4061138360005905e-06,1.4073694555548313e-05,,MB,1105.555456,6491.66848,0.0,6085.935104,6038.345728,s,10,2.1597591094970703,0.21597591094970703,0.0036829909604721837,0.21662503814697265,0.21944928131103517,0.21986086349487305,0.22019012924194337,"[0.2079114227294922, 0.22027244567871093, 0.21362310791015626, 0.21935781860351564, 0.21618031311035157, 0.21913043212890626, 0.21372621154785157, 0.21706976318359375, 0.2131793975830078, 0.21930819702148438]",tokens/s,1185.3173757864743,kWh,6.226532404787322e-06,6.86674918086594e-07,4.1377515371491e-06,1.1050958860023016e-05,tokens/kWh,23165410.643784337,MB,1110.44608,6512.64,0.0,6106.906624,6086.544896,s,10,16.33959387207031,1.633959387207031,0.0051381663713000875,1.6338790893554687,1.6407128662109374,1.640982470703125,1.641198154296875,"[1.63083251953125, 1.6313094482421875, 1.62900927734375, 1.6296138916015626, 1.636846923828125, 1.6254072265625, 1.63644873046875, 1.6412520751953126, 1.6406529541015624, 1.6382208251953125]",tokens/s,38.556649873463215,kWh,4.7937475727713365e-05,5.287167085089536e-06,3.1846027249850915e-05,8.50706700626538e-05,tokens/kWh,740560.7591147579,,s,630,16.336016490936288,0.025930184906248065,0.0004121603329365757,0.02585750389099121,0.02618778533935547,0.026375424003601076,0.02839885004043579,"[0.028554208755493166, 0.02700819206237793, 0.02650809669494629, 0.025849376678466797, 0.026167327880859376, 0.02565996742248535, 0.025659263610839842, 0.02553856086730957, 0.02552182388305664, 0.025471328735351562, 0.02551807975769043, 0.02555084800720215, 0.025553983688354494, 0.025490367889404297, 0.025875808715820313, 0.025686559677124025, 0.025657472610473634, 0.02591744041442871, 0.025706144332885743, 0.02560047912597656, 0.025612159729003905, 0.025671680450439452, 0.025697887420654295, 0.025631135940551757, 0.02568191909790039, 0.025675519943237305, 0.025996992111206055, 0.025712608337402344, 0.025807455062866212, 
0.025778175354003906, 0.02591139221191406, 0.02567977523803711, 0.025651199340820312, 0.025674816131591796, 0.025709503173828124, 0.025656671524047853, 0.025999839782714845, 0.026155231475830078, 0.026337087631225584, 0.02622480010986328, 0.026170560836791992, 0.02613667106628418, 0.025944543838500977, 0.025921152114868163, 0.025809535980224608, 0.025774080276489256, 0.0258121280670166, 0.02577008056640625, 0.026023712158203125, 0.025981632232666016, 0.025785856246948242, 0.025925600051879882, 0.025856447219848634, 0.025973119735717774, 0.02585740852355957, 0.02588035202026367, 0.025958560943603517, 0.025852319717407226, 0.02582044792175293, 0.025735488891601564, 0.025868288040161135, 0.02577631950378418, 0.026087392807006837, 0.028281280517578125, 0.02699679946899414, 0.026230783462524415, 0.025883647918701173, 0.026042848587036132, 0.025707040786743164, 0.025827007293701174, 0.025631040573120118, 0.025598016738891602, 0.025558464050292967, 0.025530879974365234, 0.025577472686767577, 0.025629888534545897, 0.025606048583984374, 0.025574304580688476, 0.025511072158813475, 0.02555788803100586, 0.02591542434692383, 0.02561836814880371, 0.025786304473876955, 0.025772096633911133, 0.02561193656921387, 0.025598304748535156, 0.02568560028076172, 0.025692575454711913, 0.025655616760253908, 0.025673023223876955, 0.025727359771728516, 0.02566774368286133, 0.02599510383605957, 0.025839616775512695, 0.02592086410522461, 0.025905759811401367, 0.025720767974853516, 0.025736640930175782, 0.025845855712890626, 0.026042816162109374, 0.026171552658081056, 0.026214527130126952, 0.026075008392333985, 0.0261529598236084, 0.026364927291870118, 0.026214527130126952, 0.02597318458557129, 0.025978912353515626, 0.025825696945190428, 0.025835647583007812, 0.025876352310180664, 0.025810943603515626, 0.025823232650756835, 0.025884096145629882, 0.025770559310913085, 0.02588387107849121, 0.026081151962280273, 0.025921472549438475, 0.025901952743530274, 0.0259400634765625, 0.02574336051940918, 0.025851743698120117, 0.02586025619506836, 0.025772031784057618, 0.025788415908813478, 0.02607251167297363, 0.02840150451660156, 0.026957984924316405, 0.0263372802734375, 0.0258306884765625, 0.025880319595336914, 0.026266143798828124, 0.02564358329772949, 0.025802112579345702, 0.025628480911254883, 0.025569280624389647, 0.02564908790588379, 0.02578505516052246, 0.02563484764099121, 0.025533567428588866, 0.025705343246459962, 0.025609695434570312, 0.025878591537475584, 0.025913408279418945, 0.025803167343139647, 0.0257860164642334, 0.02572118377685547, 0.025740991592407225, 0.025637184143066406, 0.025599264144897462, 0.02575200080871582, 0.025769535064697265, 0.025713184356689452, 0.025652671813964845, 0.0259520320892334, 0.02580886459350586, 0.02580534362792969, 0.02564143943786621, 0.025701759338378906, 0.025709184646606445, 0.025677824020385744, 0.02571392059326172, 0.025899776458740233, 0.025980031967163086, 0.026053279876708985, 0.026046688079833985, 0.026025215148925782, 0.025963520050048827, 0.02591414451599121, 0.025856992721557618, 0.025817087173461914, 0.025803808212280274, 0.025800735473632812, 0.025758047103881836, 0.025769887924194337, 0.025745664596557617, 0.025753664016723632, 0.025756160736083986, 0.025872447967529296, 0.025802560806274414, 0.02573107147216797, 0.0257126407623291, 0.025830751419067384, 0.025850175857543945, 0.025792863845825194, 0.02572287940979004, 0.02572697639465332, 0.025778175354003906, 0.025771039962768555, 0.028329984664916992, 0.026820608139038086, 0.026093568801879883, 0.0257574405670166, 
0.025628992080688476, 0.025628511428833007, 0.025511455535888673, 0.0254715518951416, 0.0255633602142334, 0.025519039154052733, 0.025509792327880858, 0.02546988868713379, 0.0255644474029541, 0.0256212158203125, 0.025630720138549806, 0.025587263107299803, 0.025622047424316407, 0.025605056762695314, 0.02558153533935547, 0.025534080505371093, 0.025586048126220704, 0.025558464050292967, 0.025516607284545897, 0.025593856811523437, 0.025677824020385744, 0.025632383346557618, 0.025646848678588866, 0.02560233688354492, 0.025637216567993164, 0.025594112396240234, 0.025655040740966795, 0.025659040451049806, 0.025633119583129884, 0.025612384796142577, 0.025595808029174806, 0.025696287155151366, 0.025866207122802735, 0.025892288208007812, 0.026063104629516602, 0.02611801528930664, 0.025981184005737304, 0.026070880889892577, 0.02657417678833008, 0.026955968856811525, 0.025932607650756837, 0.0259421443939209, 0.026011520385742188, 0.025829376220703124, 0.0261724796295166, 0.025999807357788087, 0.025917919158935546, 0.02618704032897949, 0.026061727523803712, 0.0261363525390625, 0.02583763122558594, 0.026006975173950196, 0.026034751892089845, 0.02612633514404297, 0.025894912719726562, 0.02585759925842285, 0.025887071609497072, 0.025996448516845704, 0.0259399356842041, 0.02850204849243164, 0.027066335678100586, 0.026425344467163086, 0.025944063186645508, 0.025855167388916016, 0.02579756736755371, 0.0259050235748291, 0.0258950080871582, 0.025751455307006836, 0.025608192443847655, 0.025875999450683595, 0.025665407180786134, 0.025721439361572264, 0.025896383285522462, 0.02572496032714844, 0.025782976150512695, 0.025859935760498047, 0.026048511505126954, 0.02581711959838867, 0.02576585578918457, 0.025765663146972657, 0.025781856536865235, 0.02578019142150879, 0.025774751663208008, 0.025761119842529295, 0.025901695251464844, 0.025933088302612303, 0.025743104934692382, 0.02583843231201172, 0.025792608261108397, 0.025810272216796874, 0.025844480514526365, 0.02579452705383301, 0.02575971221923828, 0.02599020767211914, 0.025943008422851563, 0.02605241584777832, 0.02610736083984375, 0.026150880813598634, 0.026276063919067384, 0.026128927230834962, 0.026125600814819336, 0.026063392639160157, 0.025907392501831054, 0.02632089614868164, 0.02609916877746582, 0.02595484733581543, 0.025851743698120117, 0.026177183151245117, 0.025895423889160156, 0.02585379219055176, 0.025914880752563478, 0.025897632598876952, 0.026113567352294923, 0.026114751815795898, 0.02588035202026367, 0.025965984344482423, 0.025907487869262696, 0.025905471801757812, 0.026064895629882814, 0.025882623672485353, 0.02586934471130371, 0.02584227180480957, 0.028392351150512696, 0.02694313621520996, 0.026177984237670898, 0.025800575256347658, 0.025653472900390627, 0.025547840118408202, 0.02547804832458496, 0.02550092887878418, 0.02551456069946289, 0.025540607452392578, 0.02547711944580078, 0.02566713523864746, 0.025676223754882814, 0.02555904006958008, 0.025543872833251952, 0.025508192062377928, 0.02557084846496582, 0.025559999465942382, 0.025638912200927736, 0.02568191909790039, 0.025673728942871094, 0.025636863708496094, 0.025579008102416992, 0.02556889533996582, 0.025627519607543944, 0.025656959533691407, 0.02564499282836914, 0.025665983200073243, 0.025601408004760743, 0.02561724853515625, 0.025614112854003907, 0.025638912200927736, 0.025686016082763673, 0.025665536880493164, 0.02570240020751953, 0.02574950408935547, 0.02592767906188965, 0.02609561538696289, 0.026201631546020506, 0.026130655288696288, 0.026071199417114256, 0.02599888038635254, 
0.02594054412841797, 0.02585795211791992, 0.025870431900024415, 0.02573846435546875, 0.025715328216552733, 0.025793792724609375, 0.025772287368774415, 0.02576358413696289, 0.02576278305053711, 0.02586310386657715, 0.025833759307861328, 0.025803487777709962, 0.02579020881652832, 0.0258272647857666, 0.025801183700561524, 0.025748735427856446, 0.025941631317138673, 0.02579555130004883, 0.025767936706542968, 0.02570569610595703, 0.025832223892211913, 0.028497695922851562, 0.0270296630859375, 0.026333248138427735, 0.025767936706542968, 0.025632896423339845, 0.025534175872802736, 0.025747615814208983, 0.02611609649658203, 0.025558303833007813, 0.025531103134155273, 0.025566783905029297, 0.025541343688964845, 0.025577184677124023, 0.02558998489379883, 0.025657312393188476, 0.025593215942382813, 0.025573471069335937, 0.025653600692749023, 0.02572697639465332, 0.025681535720825197, 0.025627103805541993, 0.025661312103271484, 0.02572496032714844, 0.025613632202148438, 0.025596704483032227, 0.025683456420898438, 0.026134944915771483, 0.025990560531616212, 0.02590985679626465, 0.02599504089355469, 0.026085535049438478, 0.02596665573120117, 0.02602947235107422, 0.025977439880371093, 0.025800703048706054, 0.02611404800415039, 0.026351680755615236, 0.026268768310546874, 0.026346303939819335, 0.02637775993347168, 0.026368192672729492, 0.026153280258178712, 0.026177536010742186, 0.026241024017333983, 0.026055744171142578, 0.026186687469482422, 0.026197887420654296, 0.025946239471435546, 0.025994815826416016, 0.025949888229370117, 0.025839935302734374, 0.025913791656494142, 0.02595020866394043, 0.02611177635192871, 0.026031999588012694, 0.025866592407226562, 0.026093568801879883, 0.025956352233886718, 0.026082752227783203, 0.025913568496704103, 0.025943904876708983, 0.02597337532043457, 0.025966688156127928, 0.02856547164916992, 0.027140127182006837, 0.026556127548217772, 0.02602217674255371, 0.02590732765197754, 0.025788480758666993, 0.025646751403808593, 0.025686176300048828, 0.025915456771850587, 0.025747264862060547, 0.025820352554321288, 0.025879648208618163, 0.025964384078979493, 0.025719871520996095, 0.025795391082763672, 0.025703840255737305, 0.02575174331665039, 0.02566352081298828, 0.025834016799926758, 0.025976160049438476, 0.02579315185546875, 0.025982271194458006, 0.02581724739074707, 0.025866783142089844, 0.025955711364746094, 0.02593791961669922, 0.02600204849243164, 0.025987071990966795, 0.025809024810791014, 0.0260053768157959, 0.025792032241821288, 0.025943872451782226, 0.025879199981689454, 0.025845760345458983, 0.02602556800842285, 0.0261976318359375, 0.026444608688354493, 0.02642521667480469, 0.02637628746032715, 0.02632908821105957, 0.026306144714355467, 0.02617305564880371, 0.02617545509338379, 0.02625619125366211, 0.02608252716064453, 0.026158143997192383, 0.026058240890502928, 0.02601558494567871, 0.02581747245788574, 0.02611155128479004, 0.026071487426757814, 0.025965856552124023, 0.025998048782348633, 0.026004608154296876, 0.026076032638549806, 0.025914783477783202, 0.025903615951538086, 0.02615920066833496, 0.026163040161132814, 0.025933631896972655, 0.02593564796447754, 0.02605708885192871, 0.02601900863647461, 0.028486623764038085, 0.027301759719848634, 0.02667747116088867, 0.026177440643310547, 0.025956352233886718, 0.02584547233581543, 0.025848352432250976, 0.02577791976928711, 0.025794559478759766, 0.025776128768920898, 0.025659391403198242, 0.02570444869995117, 0.02582032012939453, 0.025903968811035155, 0.025986976623535156, 0.025792608261108397, 0.025741024017333983, 
0.026015775680541992, 0.025765695571899415, 0.026016191482543947, 0.025963680267333984, 0.02584441566467285, 0.025974943161010743, 0.025956031799316406, 0.02572447967529297, 0.025847936630249025, 0.025872127532958984, 0.025827999114990233, 0.025835744857788084, 0.025923072814941408, 0.026091775894165038, 0.026136831283569337, 0.025914623260498048, 0.02598784065246582, 0.025886720657348632, 0.025878528594970703, 0.026306560516357422, 0.026330463409423827, 0.026077856063842775, 0.026118303298950197, 0.026105312347412108, 0.02629465675354004, 0.025995264053344725, 0.02608332824707031, 0.026060991287231446, 0.026003263473510743, 0.025853952407836913, 0.025916704177856444, 0.025940223693847655, 0.02584009552001953, 0.026173152923583985, 0.02605036735534668, 0.026021408081054687, 0.025934783935546876, 0.02595840072631836, 0.02618742370605469, 0.025977344512939454, 0.026177215576171874, 0.025952415466308595, 0.025972736358642577, 0.02648828887939453, 0.025903648376464843, 0.025839616775512695, 0.028528608322143555, 0.027115264892578126, 0.026390783309936522, 0.026191871643066408, 0.025841663360595703, 0.02581817626953125, 0.02572319984436035, 0.025821535110473633, 0.02594793510437012, 0.025824928283691408, 0.025737119674682618, 0.025678783416748046, 0.02570444869995117, 0.025841663360595703, 0.02568806457519531, 0.025815040588378906, 0.025708608627319336, 0.025767871856689453, 0.025783519744873046, 0.02588912010192871, 0.025936128616333008, 0.025722528457641603, 0.025920032501220703, 0.025869983673095703, 0.025952512741088868, 0.025785791397094728, 0.0257542724609375, 0.025703584671020508, 0.025892831802368163, 0.02575971221923828, 0.025790687561035155, 0.026021951675415038, 0.02575833511352539, 0.02591974449157715, 0.02577961540222168, 0.02623910331726074, 0.026374368667602538, 0.026347232818603517, 0.02615247917175293, 0.02613324737548828, 0.026042367935180662, 0.026042367935180662, 0.026071008682250978, 0.02627743911743164, 0.02599977684020996, 0.026169408798217775, 0.0261910400390625, 0.02604047966003418, 0.025903615951538086, 0.02603843116760254, 0.02609766387939453, 0.025853952407836913, 0.02588057518005371, 0.025878528594970703, 0.025912832260131836, 0.025872896194458008, 0.02585420799255371, 0.02593356704711914, 0.026064895629882814, 0.025911296844482422, 0.02593187141418457, 0.026150175094604492, 0.026067583084106446]",tokens/s,38.56509329245249,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,835.735552,8760.786944,0.0,8365.539328,8230.228992,s,1,7.5355458984375,7.5355458984375,0.0,7.5355458984375,7.5355458984375,7.5355458984375,7.5355458984375,[7.5355458984375],,kWh,1.1459933887461678e-05,1.2341689698212937e-06,3.4725027780041495e-06,1.616660563528712e-05,,MB,1149.796352,8951.627776,0.0,8545.8944,8499.295232,s,10,2.679052764892578,0.2679052764892578,0.010860641831617654,0.2712129821777344,0.27328599548339844,0.2738173904418945,0.2742425064086914,"[0.2356790771484375, 
0.27014712524414064, 0.27174835205078124, 0.27090145874023436, 0.26872930908203124, 0.2731679077148437, 0.2715245056152344, 0.2729480285644531, 0.27434878540039065, 0.26985821533203125]",tokens/s,955.5616199678128,kWh,7.3679221284370055e-06,8.121571587166401e-07,4.902892811200002e-06,1.3082972098353649e-05,tokens/kWh,19567419.243538313,MB,1154.797568,8953.724928,0.0,8547.991552,8499.297792,s,10,18.936921508789062,1.8936921508789062,0.005111415589837481,1.89559033203125,1.8990034301757812,1.8996725524902343,1.900207850341797,"[1.8848922119140625, 1.8963909912109376, 1.894943603515625, 1.892427734375, 1.885875732421875, 1.898854736328125, 1.8976529541015625, 1.896237060546875, 1.8893048095703124, 1.9003416748046875]",tokens/s,33.26834299374386,kWh,5.593598999989695e-05,6.169981117973483e-06,3.6891668402201307e-05,9.899763952007175e-05,tokens/kWh,636378.8096909802,,s,630,18.934200822830196,0.030054287020365396,0.000419725913338335,0.029986000061035155,0.030328757858276368,0.03050526542663574,0.0327036884689331,"[0.03242416000366211, 0.030667360305786134, 0.030141727447509765, 0.02985203170776367, 0.029776224136352537, 0.02967897605895996, 0.02964748764038086, 0.02971238327026367, 0.029749248504638674, 0.02972390365600586, 0.029741855621337892, 0.029601503372192382, 0.02968310356140137, 0.029616992950439455, 0.029657087326049804, 0.02959974479675293, 0.02976518440246582, 0.029729215621948243, 0.029764671325683594, 0.02981091117858887, 0.029688543319702148, 0.029853696823120116, 0.02996428871154785, 0.02980454444885254, 0.029784063339233398, 0.02978816032409668, 0.029814016342163085, 0.029776639938354492, 0.0298024959564209, 0.029878271102905272, 0.029820928573608397, 0.029859840393066408, 0.03020595169067383, 0.03010527992248535, 0.030054719924926757, 0.02996553611755371, 0.030034719467163087, 0.02991923141479492, 0.029914335250854494, 0.02989481544494629, 0.02987446403503418, 0.029851999282836914, 0.029911039352416992, 0.030033119201660158, 0.030029951095581056, 0.029911712646484376, 0.029937664031982423, 0.029884416580200194, 0.029866016387939454, 0.0299233283996582, 0.029976287841796876, 0.02992767906188965, 0.030025279998779297, 0.029931968688964843, 0.02997248077392578, 0.02997452735900879, 0.029962175369262694, 0.029883615493774413, 0.029870559692382812, 0.029886848449707033, 0.02982809638977051, 0.029907264709472657, 0.029999807357788087, 0.03290230560302734, 0.03099091148376465, 0.03013043212890625, 0.02987932777404785, 0.029702239990234375, 0.029717536926269533, 0.029621728897094725, 0.029688192367553712, 0.029620223999023438, 0.029624319076538085, 0.02974652862548828, 0.029745311737060548, 0.029775455474853517, 0.029754495620727538, 0.02989641571044922, 0.029706144332885744, 0.02975257682800293, 0.029741376876831056, 0.029698047637939453, 0.029844127655029296, 0.02996832084655762, 0.030048255920410157, 0.03000934410095215, 0.030011392593383788, 0.030007167816162108, 0.029967487335205076, 0.030071807861328126, 0.030019584655761718, 0.029916351318359374, 0.030004032135009767, 0.03016703987121582, 0.03041279983520508, 0.030543872833251953, 0.030498559951782227, 0.03054751968383789, 0.03041299247741699, 0.030374176025390626, 0.030316768646240236, 0.03024460792541504, 0.029997312545776367, 0.03013222312927246, 0.030242816925048828, 0.02996643257141113, 0.03014851188659668, 0.030123008728027343, 0.030043136596679686, 0.029932863235473634, 0.03013497543334961, 0.030253055572509766, 0.029961536407470703, 0.030270015716552735, 0.03019379234313965, 0.030119935989379884, 0.030228479385375977, 
0.03021993637084961, 0.03014486312866211, 0.03008064079284668, 0.03000556755065918, 0.029921279907226563, 0.03004195213317871, 0.0303287353515625, 0.030200128555297853, 0.030318431854248047, 0.03271088027954101, 0.030930816650390627, 0.030321407318115234, 0.029898752212524415, 0.02976483154296875, 0.02975823974609375, 0.029944896697998047, 0.0296847038269043, 0.02999465560913086, 0.03007689666748047, 0.0297903995513916, 0.030206111907958983, 0.029803871154785156, 0.029818559646606447, 0.029751903533935548, 0.029932960510253907, 0.029704704284667968, 0.029819488525390625, 0.029714303970336912, 0.02979840087890625, 0.02978201675415039, 0.030007295608520508, 0.029859840393066408, 0.029837312698364257, 0.02976153564453125, 0.029734912872314452, 0.030007295608520508, 0.029826400756835937, 0.03021683120727539, 0.02993769645690918, 0.03018060874938965, 0.030399168014526367, 0.030398527145385743, 0.030492671966552733, 0.030305791854858398, 0.030429695129394533, 0.030385215759277343, 0.030120351791381835, 0.030062911987304687, 0.029926847457885743, 0.030183263778686523, 0.03015776062011719, 0.030044160842895507, 0.030097408294677733, 0.030070783615112305, 0.030005247116088866, 0.02993152046203613, 0.02992265510559082, 0.030134944915771483, 0.030225791931152345, 0.030245216369628906, 0.030140703201293945, 0.030291967391967774, 0.030013439178466796, 0.029976160049438476, 0.030105472564697266, 0.02993404769897461, 0.030019039154052733, 0.030037631988525392, 0.029876319885253907, 0.02994470405578613, 0.030137760162353516, 0.030101503372192383, 0.032686080932617184, 0.03074835205078125, 0.03022265625, 0.029915391921997072, 0.029895679473876953, 0.029819648742675783, 0.029845312118530275, 0.029944255828857423, 0.029928895950317384, 0.02967977523803711, 0.029718687057495117, 0.0296342716217041, 0.03002191925048828, 0.02975948715209961, 0.02978358459472656, 0.029948383331298827, 0.029847103118896483, 0.029849151611328124, 0.03005939292907715, 0.029984256744384766, 0.029870464324951173, 0.030093439102172853, 0.029841407775878907, 0.02986400032043457, 0.030054336547851564, 0.029869504928588867, 0.029872703552246093, 0.029870080947875976, 0.02983526420593262, 0.030052352905273437, 0.030082944869995118, 0.030328960418701173, 0.03038617515563965, 0.03034217643737793, 0.030172096252441407, 0.030150047302246095, 0.03005504035949707, 0.030192703247070313, 0.03009836769104004, 0.030039552688598634, 0.030101024627685546, 0.02992438316345215, 0.029999040603637697, 0.030089216232299806, 0.030052352905273437, 0.02998240089416504, 0.02999942398071289, 0.02992505645751953, 0.029970048904418945, 0.02995065689086914, 0.0299005126953125, 0.029832576751708983, 0.030040512084960936, 0.030007360458374023, 0.03006096076965332, 0.02997452735900879, 0.029969919204711915, 0.029929983139038087, 0.030109695434570313, 0.030003200531005858, 0.029990943908691406, 0.03002774429321289, 0.02998681640625, 0.032603233337402344, 0.030684064865112305, 0.03004140853881836, 0.029840063095092774, 0.02968329620361328, 0.029661600112915038, 0.02966281509399414, 0.02962499237060547, 0.029615583419799803, 0.02959347152709961, 0.02964521598815918, 0.029741056442260744, 0.029663103103637695, 0.029699935913085937, 0.029720863342285155, 0.029724128723144533, 0.02969215965270996, 0.02972217559814453, 0.029764320373535155, 0.029749248504638674, 0.029683391571044923, 0.029750783920288085, 0.02972329521179199, 0.02967302322387695, 0.029723039627075197, 0.029726911544799804, 0.029710336685180663, 0.029766975402832033, 0.02976633644104004, 
0.029838560104370117, 0.030062400817871093, 0.03022332763671875, 0.030457632064819336, 0.030341344833374022, 0.03026460838317871, 0.03008995246887207, 0.03002572822570801, 0.030078975677490235, 0.029988224029541016, 0.029864576339721678, 0.029861440658569337, 0.02985004806518555, 0.02983103942871094, 0.029830400466918944, 0.02986073684692383, 0.029867488861083983, 0.029866527557373047, 0.029865983963012696, 0.029999103546142578, 0.029935039520263673, 0.02998963165283203, 0.029988319396972656, 0.029974880218505858, 0.02994175910949707, 0.029963903427124024, 0.029892192840576173, 0.029883167266845704, 0.030040063858032227, 0.029994047164916993, 0.03010825538635254, 0.030009183883666992, 0.030086816787719725, 0.03007369613647461, 0.03281955337524414, 0.03074662399291992, 0.030135583877563477, 0.029820959091186525, 0.02976838493347168, 0.029669376373291017, 0.029577215194702147, 0.029666784286499024, 0.02970889663696289, 0.029832927703857422, 0.030029504776000977, 0.029959903717041016, 0.030077760696411132, 0.030029823303222656, 0.029988704681396486, 0.030019744873046875, 0.02986537551879883, 0.03053219223022461, 0.030080320358276368, 0.029802431106567384, 0.0299400634765625, 0.03007263946533203, 0.029846176147460938, 0.030010911941528322, 0.03023094367980957, 0.029868032455444334, 0.03013327980041504, 0.02998179244995117, 0.030080896377563476, 0.030143680572509764, 0.03019830322265625, 0.030246463775634766, 0.030637983322143555, 0.030320671081542967, 0.03040336036682129, 0.03040870475769043, 0.030295040130615233, 0.030166015625, 0.030208000183105467, 0.030064640045166017, 0.03004787254333496, 0.029991296768188475, 0.030097408294677733, 0.030220287322998047, 0.030228479385375977, 0.030211231231689454, 0.03019206428527832, 0.03019817543029785, 0.030150400161743165, 0.030161151885986327, 0.030173248291015625, 0.03008505630493164, 0.030076927185058593, 0.030192640304565428, 0.03001241683959961, 0.03015065574645996, 0.03026915168762207, 0.03019411277770996, 0.03006857681274414, 0.029982591629028322, 0.030068864822387697, 0.030271488189697264, 0.03012777519226074, 0.03287305450439453, 0.0310064640045166, 0.030153087615966797, 0.030005247116088866, 0.029834400177001952, 0.029749536514282228, 0.029909568786621092, 0.02979430389404297, 0.029931167602539062, 0.029920927047729494, 0.029889215469360353, 0.030170591354370117, 0.029907487869262697, 0.02982649612426758, 0.029801023483276366, 0.029863199234008788, 0.029909727096557617, 0.029963327407836915, 0.029929695129394532, 0.03004899215698242, 0.029968095779418946, 0.029905055999755858, 0.029780096054077148, 0.029824447631835938, 0.02997920036315918, 0.029880319595336914, 0.03091654396057129, 0.030005216598510742, 0.02981488037109375, 0.03011686325073242, 0.030108671188354492, 0.0304202880859375, 0.03053228759765625, 0.03054128074645996, 0.030340639114379883, 0.03015920066833496, 0.030106239318847657, 0.030010976791381837, 0.02999545669555664, 0.029995008468627928, 0.030051776885986328, 0.029990976333618163, 0.03012444877624512, 0.029941408157348633, 0.030095008850097655, 0.03025315284729004, 0.03013907241821289, 0.02993561553955078, 0.030212095260620117, 0.030166816711425782, 0.029878496170043945, 0.03001651191711426, 0.030071456909179686, 0.030226783752441408, 0.030082975387573242, 0.030163040161132814, 0.030067840576171876, 0.029979103088378905, 0.030257568359375, 0.030275583267211914, 0.030216192245483397, 0.03019161605834961, 0.03012403106689453, 0.03308038330078125, 0.03097078323364258, 0.03037593650817871, 0.029882368087768556, 
0.029797439575195313, 0.03003049659729004, 0.030408992767333984, 0.029917184829711913, 0.029773279190063475, 0.029830911636352538, 0.02995631980895996, 0.030059072494506837, 0.029949951171875, 0.030040063858032227, 0.029937599182128908, 0.02988425636291504, 0.02980067253112793, 0.030151775360107422, 0.02994883155822754, 0.029895967483520507, 0.029985504150390627, 0.0301527042388916, 0.029839359283447265, 0.030089216232299806, 0.030035968780517577, 0.02997657585144043, 0.029869407653808595, 0.029860511779785156, 0.029824256896972656, 0.029934335708618164, 0.03021004867553711, 0.030216192245483397, 0.03031804847717285, 0.030484672546386718, 0.030242399215698244, 0.030431999206542968, 0.030390335083007813, 0.030268928527832032, 0.030029951095581056, 0.029960512161254883, 0.030228479385375977, 0.029945951461791992, 0.029902624130249023, 0.029919359207153322, 0.029949951171875, 0.029965599060058593, 0.02992201614379883, 0.030099456787109374, 0.030189535140991212, 0.029957792282104493, 0.03001910400390625, 0.029997919082641603, 0.02994528007507324, 0.029986495971679687, 0.030022527694702147, 0.029952096939086913, 0.0299703369140625, 0.029988000869750977, 0.029958976745605468, 0.0299704647064209, 0.030044000625610353, 0.03005766487121582, 0.030186464309692383, 0.03293356704711914, 0.031051231384277345, 0.030247711181640626, 0.029863967895507812, 0.029722015380859376, 0.02969251251220703, 0.029677568435668947, 0.029628416061401368, 0.029765695571899415, 0.02968364715576172, 0.029885759353637697, 0.029809343338012696, 0.029818431854248047, 0.029802944183349608, 0.029773567199707033, 0.029773759841918945, 0.029729087829589843, 0.02975103950500488, 0.029899072647094727, 0.02981216049194336, 0.029796512603759765, 0.02971683120727539, 0.029829120635986327, 0.02978611183166504, 0.029841312408447264, 0.029800287246704103, 0.029804319381713868, 0.029765504837036133, 0.029790815353393556, 0.029845504760742186, 0.029930559158325196, 0.030208959579467773, 0.030510751724243165, 0.03034761619567871, 0.030334304809570313, 0.030233247756958008, 0.030117151260375976, 0.03006057548522949, 0.030079679489135744, 0.029976415634155273, 0.02997395133972168, 0.029997791290283203, 0.029951007843017578, 0.029905311584472655, 0.02990342330932617, 0.029892032623291015, 0.029827648162841797, 0.029855743408203125, 0.03000044822692871, 0.0298187198638916, 0.029948415756225585, 0.02991958427429199, 0.02997039985656738, 0.029968416213989258, 0.029906944274902345, 0.02991823959350586, 0.029927839279174806, 0.029976543426513673, 0.030095840454101564, 0.029976703643798827, 0.030035295486450196, 0.030077472686767578, 0.030111295700073242, 0.03284787368774414, 0.030930944442749023, 0.030246496200561523, 0.029925119400024413, 0.029825696945190428, 0.02974048042297363, 0.029772287368774415, 0.029750879287719727, 0.02976358413696289, 0.029780288696289063, 0.029792192459106446, 0.029762943267822264, 0.029709152221679688, 0.02976972770690918, 0.02976563262939453, 0.02976972770690918, 0.02977996826171875, 0.029838399887084963, 0.029847679138183595, 0.029764608383178712, 0.02987615966796875, 0.029941631317138673, 0.03006073570251465, 0.0300664005279541, 0.030038112640380858, 0.030145856857299806, 0.03008787155151367, 0.03019161605834961, 0.030012639999389648, 0.030761760711669923, 0.030089216232299806, 0.03032035255432129, 0.030791967391967774, 0.030568384170532228, 0.030522687911987305, 0.030348255157470704, 0.030376928329467773, 0.030362432479858398, 0.030300159454345704, 0.030242816925048828, 0.0303636474609375, 0.030300159454345704, 
0.029994848251342774, 0.030375648498535156, 0.03017568016052246, 0.030074527740478516, 0.030249311447143556, 0.030121984481811522, 0.030176448822021484, 0.030276416778564453, 0.03013222312927246, 0.030121376037597656, 0.03034396743774414, 0.03032806396484375, 0.03026531219482422, 0.030003583908081055, 0.030066911697387694, 0.03020185661315918, 0.030023263931274413, 0.030156383514404295, 0.03029203224182129, 0.03025177574157715, 0.0302259521484375]",tokens/s,33.273123375789275,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1068, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 845, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 634, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 230, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.90272,14274.199552,0.0,13878.951936,13865.632768,s,1,7.55004736328125,7.55004736328125,0.0,7.55004736328125,7.55004736328125,7.55004736328125,7.55004736328125,[7.55004736328125],,kWh,1.3800054829179469e-05,1.5147876523830924e-06,6.2275049819982e-06,2.1542347463560763e-05,,MB,1129.074688,14695.727104,0.0,14289.993728,14241.298944,s,10,13.345639648437501,1.3345639648437502,0.003894462219437641,1.3360365600585937,1.3384409790039062,1.3385873718261718,1.3387044860839843,"[1.32904296875, 1.3282088623046875, 1.335257568359375, 1.3383267822265625, 1.3303668212890625, 1.3368155517578124, 1.3375191650390625, 1.332959716796875, 1.338408447265625, 1.3387337646484374]",tokens/s,191.82295247269943,kWh,3.896184749041557e-05,4.297043286655047e-06,2.5829770663800265e-05,6.908866144087088e-05,tokens/kWh,3705383.7006104984,MB,1144.2176,14863.499264,0.0,14457.765888,14413.156352,s,10,40.96315966796875,4.096315966796874,0.004376815909226851,4.095991577148437,4.10388447265625,4.10389853515625,4.10390978515625,"[4.0968349609375, 4.09696240234375, 4.0912841796875, 4.091384033203125, 4.091685791015625, 4.095148193359375, 4.10391259765625, 4.10388134765625, 4.0973232421875, 4.094742919921875]",tokens/s,15.379672981931378,kWh,0.00011971858115291828,1.3205879698689224e-05,7.97050915417999e-05,0.00021262955239340745,tokens/kWh,296289.952599991,,s,630,40.95959750366215,0.065015234132797,0.00030070849411744234,0.06500059127807617,0.06537307205200196,0.06550978622436524,0.06580866020202637,"[0.06566841888427734, 0.0649345932006836, 0.06491478729248047, 0.064529052734375, 0.06437792205810547, 0.06460717010498047, 0.06472637176513672, 0.06464761352539063, 0.06466377258300782, 0.06464022064208984, 0.06524393463134766, 0.06474956512451172, 0.06488063812255859, 0.06469017791748047, 0.06489087677001953, 0.06500761413574219, 0.06507929229736328, 0.06488790130615234, 0.06477302551269531, 0.06478643035888672, 0.06467791748046875, 0.06478128051757813, 0.06495948791503907, 0.06486016082763672, 0.06484114837646485, 0.06477062225341797, 0.06485327911376954, 0.06519471740722656, 0.06498918151855469, 0.06497280120849609, 0.06513423919677734, 0.06511446380615235, 0.06509158325195312, 0.0651878433227539, 0.06503424072265625, 0.06495027160644531, 0.06492147064208985, 0.06489920043945313, 0.06504227447509765, 0.06505487823486328, 0.06510944366455078, 0.06513481903076172, 
0.06512834930419922, 0.06528361511230468, 0.06507778930664063, 0.06528355407714843, 0.06535820770263671, 0.06523753356933594, 0.06533468627929688, 0.06531251525878906, 0.06503919982910156, 0.0651673583984375, 0.06505677032470703, 0.06529228973388672, 0.06516524505615234, 0.06512646484375, 0.06538822174072266, 0.06524905395507813, 0.06524777221679688, 0.06541516876220703, 0.06530246734619141, 0.06541289520263673, 0.06531100463867187, 0.06574205017089843, 0.06474626922607422, 0.06458573150634765, 0.06454886627197266, 0.06448902130126953, 0.06465171051025391, 0.06456521606445312, 0.06463081359863282, 0.06571759796142577, 0.06469599914550782, 0.06491776275634766, 0.06506569671630859, 0.06485417938232421, 0.06487229156494141, 0.0650997085571289, 0.06497187042236328, 0.06491168212890625, 0.0650533447265625, 0.06488678741455078, 0.06479257965087891, 0.06527699279785157, 0.06478943634033203, 0.06479052734375, 0.06486630249023438, 0.06473628997802734, 0.06518867492675781, 0.0647927017211914, 0.06483766174316406, 0.06511766052246094, 0.06510582733154296, 0.06502873229980469, 0.06505401611328125, 0.06495712280273437, 0.06498713684082032, 0.0649085464477539, 0.06493177795410156, 0.0649019546508789, 0.06482128143310546, 0.06498095703125, 0.06516121673583984, 0.06492569732666016, 0.06497593688964844, 0.06514579010009766, 0.06516310119628907, 0.065087646484375, 0.06524889373779297, 0.0652721939086914, 0.0653148193359375, 0.06516918182373047, 0.06506495666503906, 0.06531462097167968, 0.06532870483398437, 0.0650514907836914, 0.06499327850341798, 0.06496611022949218, 0.06510441589355469, 0.06534963226318359, 0.06541004943847656, 0.06521753692626953, 0.06518918609619141, 0.06565516662597656, 0.06534751892089843, 0.06533113861083985, 0.06566092681884765, 0.06467581176757813, 0.06447232055664062, 0.06449842834472656, 0.06442598724365234, 0.06454025268554688, 0.06453414154052735, 0.06457955169677734, 0.06463369750976562, 0.06469990539550781, 0.06478844451904296, 0.06468659210205079, 0.06478643035888672, 0.0648419189453125, 0.06505452728271484, 0.06485919952392578, 0.06507965087890626, 0.06497955322265625, 0.06466934204101563, 0.0644807357788086, 0.06452438354492188, 0.06451689910888672, 0.0645889892578125, 0.06477606201171875, 0.0646902084350586, 0.06488771057128906, 0.0649031982421875, 0.0649785614013672, 0.06503663635253906, 0.06511325073242187, 0.06509859466552734, 0.06510173034667968, 0.06499452972412109, 0.0649708480834961, 0.06495465850830077, 0.06484585571289063, 0.06518614196777343, 0.06482752227783203, 0.06474752044677734, 0.06481327819824219, 0.06491932678222656, 0.06494822692871094, 0.06515238189697266, 0.0650183334350586, 0.06523222351074219, 0.06515766143798828, 0.06512777709960937, 0.06517775726318359, 0.06519254302978515, 0.06522492980957031, 0.06504230499267578, 0.06504457855224609, 0.06518342590332031, 0.06499263763427734, 0.06497571563720703, 0.06496265411376953, 0.06517350769042969, 0.06516896057128906, 0.06528173065185547, 0.06538246154785156, 0.0652008285522461, 0.06550131225585938, 0.06537583923339843, 0.06551789093017578, 0.06466127777099609, 0.0644920654296875, 0.06452019500732421, 0.06456082916259766, 0.06460572814941407, 0.0644820785522461, 0.06446694183349609, 0.06463488006591797, 0.06458573150634765, 0.06469427490234375, 0.06481436920166016, 0.06486093139648437, 0.0648636474609375, 0.06487635040283203, 0.06506896209716796, 0.064768798828125, 0.06471481323242187, 0.0646983642578125, 0.06454393768310547, 0.06450873565673829, 0.06484134674072266, 0.06481139373779297, 
0.06474137878417968, 0.06478438568115234, 0.06505052947998047, 0.06504867553710937, 0.06495641326904297, 0.06489702606201173, 0.06500748443603516, 0.06494834899902344, 0.06498303985595703, 0.06494783782958985, 0.06490560150146485, 0.06471065521240234, 0.06470764923095704, 0.06493280029296875, 0.06489839935302734, 0.06490589141845703, 0.06488665771484375, 0.0650769271850586, 0.06490550231933594, 0.06500969696044921, 0.06506050872802735, 0.06521001434326172, 0.06527378845214844, 0.06508332824707032, 0.06537862396240235, 0.06519257354736328, 0.06497689819335938, 0.06497261047363281, 0.06490509033203125, 0.06509305572509766, 0.06494502258300781, 0.06516534423828126, 0.06528406524658203, 0.06514076995849609, 0.06523286437988281, 0.06554332733154297, 0.06534381103515625, 0.06538409423828125, 0.06563276672363282, 0.06530496215820313, 0.06576019287109375, 0.06473725128173828, 0.06455296325683593, 0.06439094543457032, 0.06444000244140625, 0.06440399932861328, 0.06449254608154296, 0.06460288238525391, 0.06458745574951172, 0.06467027282714843, 0.06464921569824218, 0.06488063812255859, 0.06479027557373047, 0.0646371841430664, 0.06480681610107422, 0.06494998168945312, 0.06486463928222656, 0.06469631958007813, 0.06464717102050781, 0.06469120025634766, 0.06469516754150391, 0.06468732452392578, 0.06474127960205078, 0.06512108612060546, 0.06474320220947266, 0.06476432037353516, 0.06485603332519531, 0.06473017883300781, 0.0649920654296875, 0.06499094390869141, 0.06499971008300781, 0.0649483871459961, 0.06502178955078125, 0.06498524475097656, 0.06486573028564453, 0.06489555358886719, 0.06516941070556641, 0.06506665802001953, 0.06490509033203125, 0.06499702453613282, 0.06496342468261719, 0.06498505401611328, 0.0651304931640625, 0.06514265441894532, 0.06519331359863281, 0.06515106964111328, 0.06518841552734375, 0.06523932647705079, 0.06521590423583984, 0.06518422698974609, 0.06502397155761719, 0.06500479888916015, 0.06505955505371094, 0.06500662231445313, 0.06506598663330078, 0.06529843139648438, 0.0652042236328125, 0.06524684906005859, 0.06536844635009766, 0.06525917053222656, 0.0653028793334961, 0.06538240051269531, 0.06530662536621094, 0.06581043243408204, 0.06480486297607421, 0.06455059051513672, 0.06436873626708985, 0.06460643005371093, 0.06458338928222657, 0.06468172454833984, 0.06466614532470703, 0.0645670394897461, 0.06468633270263671, 0.06464832305908202, 0.06487273406982422, 0.06517158508300781, 0.06481970977783204, 0.06497071838378907, 0.06490499114990235, 0.06494025421142578, 0.06484371185302734, 0.0646075210571289, 0.064768798828125, 0.06466371154785157, 0.06477811431884765, 0.06478224182128907, 0.06464112091064453, 0.06476188659667968, 0.0647628173828125, 0.06468918609619141, 0.0646368637084961, 0.0648908462524414, 0.06497901153564453, 0.06499062347412109, 0.06511676788330079, 0.06512640380859375, 0.06505401611328125, 0.06510047912597657, 0.06476515197753906, 0.0647524185180664, 0.06487449645996093, 0.0650240020751953, 0.06510387420654297, 0.06495549011230468, 0.06512060546875, 0.06526009368896485, 0.06518374633789062, 0.06514864349365235, 0.0651918716430664, 0.06524307250976563, 0.06530457305908204, 0.06527843475341796, 0.06501558685302734, 0.06514895629882812, 0.06534770965576171, 0.06509372711181641, 0.06512630462646485, 0.06607872009277344, 0.06522982025146484, 0.06520524597167969, 0.06540691375732421, 0.06531897735595703, 0.06524313354492188, 0.06541455841064453, 0.06537276458740235, 0.06572978973388671, 0.06578253173828125, 0.06482681274414062, 0.06451795196533203, 0.06444729614257813, 
0.06442393493652344, 0.0647352294921875, 0.06463078308105469, 0.06457158660888672, 0.06472013092041015, 0.06484639739990235, 0.06554192352294921, 0.06488086700439454, 0.06483148956298829, 0.06485807800292968, 0.06518172454833984, 0.06533939361572266, 0.06498303985595703, 0.06475775909423828, 0.06483148956298829, 0.06468402862548828, 0.06485148620605469, 0.06499488067626953, 0.06501673889160156, 0.06486937713623046, 0.06485017395019531, 0.0650881576538086, 0.06509986877441407, 0.06499046325683594, 0.06515174102783203, 0.06541280364990235, 0.06516515350341796, 0.06511459350585938, 0.06521199798583985, 0.06526403045654297, 0.06499942779541015, 0.06506633758544922, 0.06502057647705078, 0.06510793304443359, 0.0655848617553711, 0.06494611358642578, 0.06521040344238281, 0.06518972778320313, 0.06546809387207031, 0.06522962951660156, 0.06529843139648438, 0.06558035278320312, 0.06542546844482422, 0.06523958587646485, 0.06530876922607422, 0.06528739166259766, 0.0652415008544922, 0.06523664093017578, 0.06572048187255859, 0.06552841949462891, 0.06527516937255859, 0.06544044494628906, 0.06539878082275391, 0.06531276702880859, 0.06557901000976563, 0.0655579833984375, 0.06586000061035156, 0.06545830535888672, 0.06546227264404297, 0.06596784210205078, 0.06479612731933594, 0.06460905456542969, 0.06471068572998047, 0.06447625732421874, 0.06458822631835938, 0.06484835052490234, 0.06476534271240235, 0.06479318237304688, 0.06462646484375, 0.06482150268554687, 0.06494409942626952, 0.06486019134521484, 0.06479049682617187, 0.06489292907714844, 0.0651608657836914, 0.06488272094726563, 0.06495587158203125, 0.0648642578125, 0.06471561431884766, 0.06484992218017578, 0.06548585510253906, 0.0651785888671875, 0.06488665771484375, 0.06478451538085937, 0.06500713348388672, 0.06502857971191406, 0.06495577239990234, 0.06512191772460937, 0.06566194915771484, 0.06539453125, 0.06518390655517578, 0.06499737548828124, 0.06515302276611327, 0.06500147247314453, 0.06496208190917968, 0.06532867431640625, 0.065274658203125, 0.06513426971435547, 0.06526000213623047, 0.06526976013183594, 0.06536396789550782, 0.0654725112915039, 0.06538400268554688, 0.06539103698730468, 0.06556854248046876, 0.0655912322998047, 0.06531206512451172, 0.0658043212890625, 0.0649942398071289, 0.0652260513305664, 0.06528844451904296, 0.06515699005126953, 0.06513107299804688, 0.06537830352783203, 0.06532051086425782, 0.06523494720458985, 0.065285888671875, 0.06517420959472656, 0.06545817565917969, 0.06550527954101562, 0.06596604919433594, 0.06548694610595703, 0.06600908660888671, 0.06495177459716797, 0.06456761932373047, 0.06459986877441407, 0.06448931121826172, 0.06461702728271485, 0.06476595306396485, 0.06465535736083984, 0.0648253402709961, 0.06484150695800782, 0.06476620483398438, 0.06488060760498048, 0.06486611175537109, 0.06500985717773437, 0.06498303985595703, 0.06528614044189453, 0.06506700897216797, 0.06486195373535156, 0.06490345764160156, 0.0647775650024414, 0.0647399673461914, 0.06466969299316407, 0.06493596649169922, 0.06477616119384766, 0.06496665954589843, 0.06510387420654297, 0.06502809906005859, 0.06503132629394531, 0.06511638641357421, 0.06524524688720704, 0.06500204467773438, 0.06512614440917969, 0.06497100830078124, 0.06493103790283203, 0.06491801452636718, 0.06514495849609375, 0.06500982666015626, 0.06499737548828124, 0.06487452697753907, 0.06493746948242188, 0.06505110168457032, 0.06507705688476563, 0.06498249816894532, 0.06516400146484375, 0.06539469146728516, 0.06531890869140625, 0.0652390365600586, 0.0651878433227539, 
0.06531053161621093, 0.06518739318847656, 0.06508515167236328, 0.06504108428955079, 0.06505622100830079, 0.06520665740966797, 0.06515340423583985, 0.06516643524169922, 0.06523792266845703, 0.06529638671875, 0.06522415924072265, 0.06541327667236328, 0.0653438720703125, 0.06532495880126953, 0.06526764678955078, 0.06558719635009766, 0.06475142669677734, 0.06442540740966797, 0.06439762878417969, 0.06428511810302734, 0.06423757171630859, 0.06430105590820312, 0.06452569580078125, 0.06485874938964843, 0.06492364501953125, 0.06457094573974609, 0.06479625701904297, 0.06475145721435546, 0.06480377960205078, 0.06479055786132812, 0.06503427124023438, 0.06476799774169922, 0.06475737762451172, 0.06491808319091796, 0.06468716430664062, 0.06462335968017578, 0.06455625915527344, 0.06453305816650391, 0.06464739227294922, 0.06489907073974609, 0.06476390075683594, 0.06473728179931641, 0.06612966156005859, 0.06490310668945312, 0.06496697235107422, 0.06522035217285156, 0.06524543762207032, 0.06548592376708984, 0.06495938873291016, 0.06513017272949219, 0.0651185302734375, 0.06496463775634766, 0.06490723419189454, 0.06497484588623047, 0.06505062103271485, 0.06496230316162109, 0.06503977966308594, 0.06507504272460937, 0.06514585876464844, 0.0652779541015625, 0.06534915161132812, 0.06519602966308594, 0.06534601593017578, 0.06513375854492187, 0.06514265441894532, 0.06531305694580078, 0.06525199890136718, 0.0651855697631836, 0.06507894134521484, 0.06510854339599609, 0.06512230682373046, 0.06523407745361329, 0.06532592010498046, 0.06526361846923828, 0.06537149047851562, 0.06567388916015625, 0.06551347351074219, 0.06529452514648437]",tokens/s,15.381010517588047,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,836.268032,1903.099904,0.0,1507.852288,1469.840384,s,1,7.54557958984375,7.54557958984375,0.0,7.54557958984375,7.54557958984375,7.54557958984375,7.54557958984375,[7.54557958984375],,kWh,9.880303533331396e-06,1.08249113978262e-06,4.201947805995698e-06,1.5164742479109715e-05,,MB,1141.8624,1942.945792,0.0,1537.212416,1426.272256,s,10,0.9013761444091797,0.09013761444091797,0.0016001539587170693,0.08954978942871095,0.09113024673461914,0.09279881935119628,0.09413367744445801,"[0.09446739196777344, 0.0894210205078125, 0.08925401306152343, 0.08892195129394531, 0.08967855834960937, 0.09071209716796876, 0.09034918212890625, 0.08910758209228516, 0.09075945281982421, 0.08870489501953124]",tokens/s,2840.1017886689133,kWh,3.001324800595198e-06,3.309910265364032e-07,1.9376659492245198e-06,5.269981776356121e-06,tokens/kWh,48577018.074056566,MB,1163.390976,1953.431552,0.0,1547.698176,1426.274816,s,10,15.022645507812502,1.5022645507812498,0.01031252672261329,1.50402294921875,1.5139361694335938,1.5142273620605469,1.5144603161621093,"[1.5039227294921875, 1.480812255859375, 1.4960413818359375, 1.490408935546875, 1.5138714599609375, 1.5145185546875, 1.512547119140625, 1.506765869140625, 1.5041231689453125, 
1.499634033203125]",tokens/s,41.93668816004276,kWh,4.3225946373989707e-05,4.767480243062044e-06,2.0830685598775315e-05,6.882411221582706e-05,tokens/kWh,915376.8638880063,,s,630,15.020117973327645,0.023841457100520057,0.00040457175319861285,0.023801039695739747,0.024200953865051267,0.02438948554992676,0.02525592248916626,"[0.024852064132690428, 0.02422825622558594, 0.02429481506347656, 0.024156768798828124, 0.024006656646728516, 0.023957504272460937, 0.024294912338256838, 0.024455680847167968, 0.02365763282775879, 0.023536415100097657, 0.02394291114807129, 0.023818559646606445, 0.023776319503784178, 0.0238351993560791, 0.02362406349182129, 0.023688383102416992, 0.023827264785766602, 0.024055328369140625, 0.023734272003173826, 0.02416819190979004, 0.023736127853393553, 0.02375369644165039, 0.023872608184814452, 0.026456415176391603, 0.02418943977355957, 0.024086528778076172, 0.02422915267944336, 0.026542816162109375, 0.023611488342285155, 0.023537664413452147, 0.023500064849853515, 0.023700096130371093, 0.023640064239501952, 0.02365235137939453, 0.02338128089904785, 0.023447872161865235, 0.02354422378540039, 0.023480319976806642, 0.023801023483276368, 0.023806783676147462, 0.02365644836425781, 0.023564287185668945, 0.02348646354675293, 0.023533567428588868, 0.023565343856811524, 0.023419008255004883, 0.02376585578918457, 0.023390111923217775, 0.025808992385864257, 0.023408063888549803, 0.023433792114257813, 0.023416831970214845, 0.023480319976806642, 0.02348441505432129, 0.023752704620361328, 0.023813119888305666, 0.02369193649291992, 0.023557567596435548, 0.023382720947265626, 0.02354198455810547, 0.023463935852050782, 0.02376412773132324, 0.02343612861633301, 0.024520671844482422, 0.0238985595703125, 0.024315488815307616, 0.02368284797668457, 0.023501184463500975, 0.023414047241210937, 0.023410879135131835, 0.023374624252319336, 0.02328335952758789, 0.023163232803344726, 0.023373376846313475, 0.023466432571411133, 0.02323865509033203, 0.023355392456054686, 0.023635551452636717, 0.023787935256958007, 0.023557376861572266, 0.023819007873535157, 0.02378447914123535, 0.0237291202545166, 0.02350694465637207, 0.02350809669494629, 0.023184255599975587, 0.02323187255859375, 0.023411199569702147, 0.023406047821044922, 0.023384735107421874, 0.023341056823730468, 0.0236910400390625, 0.023423200607299806, 0.0230830078125, 0.023504896163940428, 0.023980031967163085, 0.02371174430847168, 0.02355200004577637, 0.0231910400390625, 0.023260799407958985, 0.023427967071533204, 0.02324662399291992, 0.023134431838989257, 0.023355392456054686, 0.023549312591552733, 0.023511680603027343, 0.023525375366210938, 0.023721536636352538, 0.023814592361450195, 0.023623680114746092, 0.023513088226318358, 0.02369126319885254, 0.023572479248046875, 0.023447519302368165, 0.023271455764770507, 0.023130111694335938, 0.02314035224914551, 0.023244096755981446, 0.02318582344055176, 0.023439647674560547, 0.023447551727294923, 0.023470048904418946, 0.023547103881835937, 0.023543775558471678, 0.023669599533081054, 0.023670783996582033, 0.024637439727783202, 0.023605247497558594, 0.023558143615722657, 0.023611391067504883, 0.023580671310424805, 0.023615488052368162, 0.023574399948120116, 0.02502822494506836, 0.024299072265625, 0.023546016693115235, 0.023716032028198244, 0.023717727661132813, 0.023950271606445313, 0.02384671974182129, 0.023476224899291992, 0.023442527770996095, 0.02343619155883789, 0.02434048080444336, 0.023770591735839845, 0.023618303298950195, 0.023850784301757813, 0.023573759078979493, 0.023341312408447265, 
0.023451776504516603, 0.02421798324584961, 0.023754751205444336, 0.02405311965942383, 0.023831008911132812, 0.023714975357055666, 0.02383091163635254, 0.023883424758911132, 0.023797887802124024, 0.023755615234375, 0.02386124801635742, 0.02396675109863281, 0.02373516845703125, 0.02371183967590332, 0.023813663482666017, 0.023732704162597658, 0.02359059143066406, 0.023607616424560548, 0.02348646354675293, 0.024089696884155274, 0.0238703670501709, 0.023610431671142577, 0.023579584121704102, 0.023779327392578126, 0.02372812843322754, 0.023740415573120118, 0.023631872177124022, 0.023785472869873047, 0.02352332878112793, 0.02352895927429199, 0.023427391052246095, 0.023637760162353517, 0.02370787239074707, 0.02410927963256836, 0.023607295989990236, 0.02349465560913086, 0.02360316848754883, 0.023409696578979493, 0.02344403266906738, 0.02356268882751465, 0.02451046371459961, 0.02348646354675293, 0.02364601516723633, 0.02332896041870117, 0.023513088226318358, 0.023595008850097656, 0.023392255783081056, 0.023474111557006835, 0.02335955238342285, 0.023558143615722657, 0.02341993522644043, 0.023361759185791017, 0.0233110408782959, 0.02361555290222168, 0.023412736892700195, 0.023357440948486328, 0.023371776580810546, 0.02367660713195801, 0.023428800582885743, 0.0237574405670166, 0.023330816268920897, 0.023387584686279297, 0.023500383377075194, 0.02341747283935547, 0.023462015151977537, 0.023574176788330077, 0.02367340850830078, 0.023562240600585937, 0.023963455200195313, 0.02351532745361328, 0.023694559097290038, 0.024437311172485352, 0.023382495880126954, 0.02346143913269043, 0.023541952133178713, 0.02384048080444336, 0.02358278465270996, 0.023361759185791017, 0.023644128799438478, 0.0233984317779541, 0.023512800216674803, 0.023385536193847655, 0.023598207473754882, 0.023793376922607423, 0.023789567947387694, 0.023418880462646483, 0.023448991775512695, 0.02467286491394043, 0.023975936889648438, 0.02371075248718262, 0.02362246322631836, 0.0238573112487793, 0.02373222351074219, 0.02360483169555664, 0.02414224052429199, 0.024260608673095704, 0.02524985694885254, 0.023766016006469725, 0.02343212890625, 0.023394304275512694, 0.023553024291992186, 0.024290559768676757, 0.023668479919433594, 0.024482336044311524, 0.023914016723632813, 0.027047840118408203, 0.023898880004882814, 0.023870559692382814, 0.02419580841064453, 0.024005983352661135, 0.02396022415161133, 0.023855104446411132, 0.024151744842529296, 0.023976255416870117, 0.024004608154296874, 0.023877504348754883, 0.024145183563232423, 0.023827295303344726, 0.02377628707885742, 0.02399945640563965, 0.02387331199645996, 0.02398579216003418, 0.02385686492919922, 0.02392563247680664, 0.02392278480529785, 0.023838623046875, 0.02445516777038574, 0.02374790382385254, 0.023749311447143553, 0.02394316864013672, 0.023844192504882813, 0.023968416213989256, 0.023911680221557617, 0.023823104858398437, 0.02391244888305664, 0.02400592041015625, 0.023876319885253905, 0.023961599349975587, 0.02394726371765137, 0.024170303344726564, 0.024234176635742188, 0.02388172721862793, 0.023807327270507814, 0.023994720458984375, 0.0238799991607666, 0.023715328216552735, 0.023926336288452147, 0.023968095779418944, 0.023877792358398438, 0.024109504699707032, 0.024223552703857423, 0.02459872055053711, 0.023932928085327147, 0.023901695251464843, 0.024283008575439455, 0.024006784439086912, 0.023795679092407228, 0.02388956832885742, 0.024142496109008788, 0.023959775924682618, 0.023911840438842775, 0.023972448348999024, 0.023941024780273438, 0.023838432312011718, 0.024108608245849608, 
0.023964672088623046, 0.024815231323242187, 0.02404390335083008, 0.02390425682067871, 0.02394726371765137, 0.02401241683959961, 0.02406630325317383, 0.024004608154296874, 0.024166528701782226, 0.023903263092041015, 0.02394620704650879, 0.024391040802001954, 0.024068735122680665, 0.024619007110595705, 0.02387334442138672, 0.023967296600341796, 0.02399091148376465, 0.023862464904785156, 0.023937536239624024, 0.023828096389770508, 0.023898015975952147, 0.02416924858093262, 0.023879680633544922, 0.024196256637573244, 0.023876287460327147, 0.023916704177856445, 0.02386147117614746, 0.02435465621948242, 0.024108991622924805, 0.025121984481811525, 0.024613536834716798, 0.02463759994506836, 0.024009952545166014, 0.023895872116088866, 0.02431059265136719, 0.02419910430908203, 0.02419728088378906, 0.023920703887939453, 0.02392064094543457, 0.02393087959289551, 0.024018495559692384, 0.023988672256469726, 0.02383839988708496, 0.023943264007568358, 0.02383673667907715, 0.024028448104858397, 0.023714656829833983, 0.02396112060546875, 0.02376156806945801, 0.024032127380371093, 0.02378031921386719, 0.023965120315551758, 0.023955680847167968, 0.023797344207763672, 0.023781152725219728, 0.024282175064086912, 0.023967552185058593, 0.023963743209838868, 0.024039520263671874, 0.024047456741333007, 0.02406118392944336, 0.023698240280151366, 0.023760896682739258, 0.023672096252441405, 0.024681407928466795, 0.024115232467651366, 0.023858848571777343, 0.02416579246520996, 0.023919551849365235, 0.024352767944335937, 0.02417020797729492, 0.02393641662597656, 0.023804800033569336, 0.02378982353210449, 0.02382179260253906, 0.02501251220703125, 0.026570751190185548, 0.024061952590942383, 0.024307296752929686, 0.025258399963378905, 0.024263776779174805, 0.02410179138183594, 0.023623680114746092, 0.02373222351074219, 0.023875583648681642, 0.023836671829223634, 0.023827615737915038, 0.02402390480041504, 0.02388582420349121, 0.023736160278320314, 0.023898271560668944, 0.024002464294433593, 0.02389948844909668, 0.024437503814697267, 0.024023040771484375, 0.024176704406738282, 0.02396972846984863, 0.024006496429443358, 0.023765439987182616, 0.023778112411499023, 0.02389289665222168, 0.023818239212036133, 0.023859199523925782, 0.023777280807495117, 0.023965696334838867, 0.023721567153930666, 0.023912031173706053, 0.023712223052978515, 0.02369366455078125, 0.024250207901000978, 0.023914560317993164, 0.02408563232421875, 0.023845439910888673, 0.02373878479003906, 0.023910400390625, 0.02393427276611328, 0.023683712005615233, 0.023912511825561523, 0.023856416702270507, 0.023716447830200195, 0.023648384094238282, 0.02368297576904297, 0.023778879165649414, 0.02411369514465332, 0.0236723518371582, 0.023711328506469728, 0.02374336051940918, 0.024385087966918944, 0.024039968490600586, 0.02404902458190918, 0.023900800704956055, 0.023787071228027343, 0.0243306884765625, 0.023918048858642578, 0.02387334442138672, 0.02386403274536133, 0.02363363265991211, 0.02376252746582031, 0.023724767684936525, 0.02370351982116699, 0.02353971290588379, 0.02388787269592285, 0.024100223541259767, 0.023925535202026366, 0.023867231369018554, 0.024225791931152343, 0.02379132843017578, 0.023914335250854492, 0.023699264526367187, 0.02392947196960449, 0.02382361602783203, 0.023841535568237305, 0.02379724884033203, 0.023675392150878907, 0.023879680633544922, 0.023834047317504884, 0.023862112045288087, 0.02382204818725586, 0.023778400421142577, 0.024135679244995118, 0.024091552734375, 0.0241395206451416, 0.023902463912963866, 0.023770559310913087, 
0.02389846420288086, 0.02373244857788086, 0.023746559143066406, 0.02369068717956543, 0.02456550407409668, 0.02382931137084961, 0.023764768600463868, 0.02385532760620117, 0.023801055908203125, 0.023825183868408203, 0.024217599868774413, 0.02380307197570801, 0.02385001564025879, 0.024059680938720702, 0.024037376403808593, 0.023756799697875978, 0.023996416091918944, 0.023778432846069335, 0.02398624038696289, 0.024126367568969728, 0.023967647552490236, 0.023852096557617188, 0.02424233627319336, 0.02382863998413086, 0.02396019172668457, 0.024086528778076172, 0.024357471466064453, 0.024000511169433594, 0.024387584686279298, 0.024080671310424805, 0.02372371292114258, 0.023669824600219727, 0.023708639144897462, 0.02393087959289551, 0.02371993637084961, 0.02388991928100586, 0.023814144134521483, 0.023762016296386718, 0.02376563262939453, 0.023764511108398438, 0.02354457664489746, 0.023879680633544922, 0.023705408096313475, 0.023664831161499023, 0.02354956817626953, 0.023871231079101562, 0.024068735122680665, 0.023864543914794922, 0.023745023727416992, 0.023709983825683595, 0.023799871444702147, 0.026411136627197265, 0.024299167633056642, 0.024096927642822265, 0.02387331199645996, 0.023748735427856445, 0.02396995162963867, 0.024084415435791016, 0.02379292869567871, 0.0238189754486084, 0.02380326461791992, 0.024281728744506837, 0.02356393623352051, 0.023928159713745116, 0.024118207931518556, 0.023997600555419923, 0.023585695266723633, 0.023752511978149413, 0.023719232559204103, 0.023589759826660155, 0.02395955276489258, 0.02369126319885254, 0.02364825630187988, 0.02368230438232422, 0.0237739200592041, 0.023377952575683595, 0.02385696029663086, 0.024391807556152344, 0.023620864868164063, 0.023446432113647463, 0.023719072341918945, 0.023997184753417968, 0.023813343048095702, 0.023834815979003908, 0.023660671234130858, 0.02369174385070801, 0.023635456085205078, 0.023947776794433592, 0.023732095718383788, 0.024270847320556642, 0.0237455997467041, 0.023673791885375977, 0.023611391067504883, 0.023603200912475586, 0.024223264694213868, 0.02384124755859375, 0.023747583389282227, 0.023540351867675783, 0.023878015518188477, 0.023856639862060547, 0.023692991256713865, 0.023608287811279296, 0.02364419174194336, 0.02389094352722168, 0.02349888038635254, 0.02364076805114746, 0.023780832290649413, 0.024172479629516602, 0.02366329574584961, 0.023625247955322264, 0.02356617546081543, 0.023954015731811523, 0.02391152000427246, 0.02376380729675293, 0.023777280807495117, 0.023817695617675782, 0.02358255958557129, 0.02390399932861328, 0.023786432266235353, 0.02373017692565918, 0.023841856002807617, 0.02381737518310547, 0.02405353546142578, 0.023707008361816405, 0.02385174369812012, 0.02389731216430664, 0.023720640182495117, 0.023650144577026366, 0.02403299140930176, 0.023998912811279298, 0.02388528060913086, 0.023849664688110353, 0.02370908737182617, 0.023589311599731447, 0.023934112548828126, 0.023666719436645507, 0.023733055114746094, 0.025133056640625, 0.02352332878112793, 0.02332467269897461, 0.02354380798339844, 0.024034400939941407, 0.023745439529418946, 0.023605247497558594, 0.023530752182006835, 0.024808191299438478, 0.023941152572631835, 0.023577632904052733, 0.023743423461914062, 0.02357801628112793, 0.02376969528198242, 0.023597055435180665]",tokens/s,41.94374512362278,, 
bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 582, in __init__ self.transformer = CodeGenModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 400, in __init__ self.h = nn.ModuleList([CodeGenBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 400, in self.h = nn.ModuleList([CodeGenBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 258, in __init__ self.mlp = CodeGenMLP(inner_dim, config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 236, in __init__ self.fc_in = nn.Linear(embed_dim, intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,841.879552,12640.518144,0.0,12245.270528,12234.440192,s,1,7.38060107421875,7.38060107421875,0.0,7.38060107421875,7.38060107421875,7.38060107421875,7.38060107421875,[7.38060107421875],,kWh,1.417804526249521e-05,1.5536159205005573e-06,7.2327835639977855e-06,2.2964444746993552e-05,,MB,1094.017024,12923.633664,0.0,12517.900288,12440.744448,s,10,11.752084594726563,1.1752084594726564,0.005107282186621974,1.1762398071289062,1.1798683715820313,1.1802225158691406,1.1805058312988281,"[1.1633475341796875, 1.1689735107421875, 1.17531640625, 1.173837158203125, 1.1766422119140625, 1.178609375, 1.1791546630859375, 1.1797896728515624, 1.17583740234375, 1.18057666015625]",tokens/s,217.83369404511708,kWh,3.425772850499953e-05,3.778108879748517e-06,2.2671157025799803e-05,6.070699441054785e-05,tokens/kWh,4216977.013698441,MB,1109.85216,12986.548224,0.0,12580.814848,12543.681024,s,10,37.15011059570313,3.7150110595703127,0.0038259498988241136,3.7155910644531254,3.717879052734375,3.72025166015625,3.7221497460937503,"[3.707107421875, 3.715757568359375, 3.7126572265625, 3.712315185546875, 3.715424560546875, 3.71710791015625, 3.71376318359375, 3.717351806640625, 3.71600146484375, 3.722624267578125]",tokens/s,16.958226769663167,kWh,0.00010866937428958418,1.1985650922047147e-05,7.236253011220009e-05,0.0001930175553238314,tokens/kWh,326395.18148648704,,s,630,37.14671617889402,0.05896304155380006,0.0003156827940853869,0.05896268844604492,0.05928697166442871,0.05940313873291016,0.060383857650756836,"[0.05999932861328125, 0.058727359771728514, 0.05837321472167969, 0.05853577423095703, 0.05843865585327149, 0.05848601531982422, 0.05831142425537109, 0.0584376335144043, 0.05838838577270508, 0.058574337005615235, 0.058396385192871096, 0.05852979278564453, 0.05912623977661133, 0.058511775970458986, 0.05869977569580078, 0.05861500930786133, 0.05880207824707031, 0.05899766540527344, 0.05897830581665039, 0.05879600143432617, 0.058531841278076174, 0.05841632080078125, 0.05851532745361328, 0.058525760650634764, 0.05855731201171875, 0.0586608657836914, 0.05935103988647461, 0.05890252685546875, 0.05878121566772461, 0.05884934234619141, 0.05882102584838867, 0.05874835205078125, 0.05885599899291992, 0.058933246612548826, 0.059031585693359374, 0.05919334411621094, 0.05912547302246094, 0.05888230514526367, 0.05868364715576172, 0.05875686264038086, 0.05893257522583008, 0.058915489196777346, 0.05899625778198242, 0.05898211288452149, 0.05889510345458984, 0.058889919281005856, 0.058732864379882815, 0.058738689422607425, 0.05881241607666016, 0.05911129760742188, 0.05926105499267578, 
0.059041534423828125, 0.0590195198059082, 0.05901091384887695, 0.059146400451660155, 0.058916862487792966, 0.05890380859375, 0.05901324844360351, 0.05903424072265625, 0.05938995361328125, 0.05917612838745117, 0.05891372680664062, 0.05913177490234375, 0.06052249526977539, 0.05883084869384766, 0.05861171340942383, 0.05854412841796875, 0.058423297882080075, 0.05864857482910156, 0.058515296936035154, 0.058488094329833984, 0.05856752014160156, 0.05899814224243164, 0.0586267204284668, 0.05878579330444336, 0.05865382385253906, 0.05896268844604492, 0.058953697204589844, 0.058808353424072264, 0.058925182342529296, 0.0588043212890625, 0.05885737609863281, 0.05882195281982422, 0.05873487854003906, 0.0586346549987793, 0.058673152923583986, 0.05860966491699219, 0.058710014343261716, 0.058916160583496094, 0.058813121795654295, 0.05889596939086914, 0.05879235076904297, 0.05901091384887695, 0.059078815460205075, 0.05912128067016602, 0.05942499160766602, 0.059232574462890625, 0.05945737457275391, 0.05916649627685547, 0.05919356918334961, 0.05924863815307617, 0.059025409698486325, 0.05893939208984375, 0.058703872680664064, 0.058866943359375, 0.05896268844604492, 0.058840320587158206, 0.05895423889160156, 0.05923455810546875, 0.05900294494628906, 0.059319393157958984, 0.059335296630859374, 0.05948819351196289, 0.05927350234985351, 0.05917055892944336, 0.059074817657470705, 0.059172863006591796, 0.05908070373535156, 0.05954150390625, 0.059089984893798825, 0.05897312164306641, 0.0589571533203125, 0.059084449768066406, 0.05915075302124023, 0.0589543342590332, 0.05915852737426758, 0.060614654541015625, 0.05891900634765625, 0.05849827194213867, 0.058653278350830076, 0.05840300750732422, 0.058479713439941405, 0.058436511993408206, 0.058570751190185545, 0.05862771224975586, 0.05852812957763672, 0.05885257720947266, 0.05851567840576172, 0.058576702117919925, 0.058906688690185546, 0.05884998321533203, 0.05869583892822266, 0.05945942306518555, 0.059150337219238285, 0.058910591125488285, 0.05881622314453125, 0.058589599609375, 0.05854719924926758, 0.058618881225585937, 0.05868342590332031, 0.058755039215087894, 0.05885974502563476, 0.05876508712768555, 0.058759166717529294, 0.05881961441040039, 0.05871020889282227, 0.05907904052734375, 0.05906790542602539, 0.059105663299560546, 0.05914064025878906, 0.05915238571166992, 0.05899776077270508, 0.05904793548583984, 0.058893310546875, 0.058950847625732425, 0.058988574981689454, 0.058854175567626954, 0.058963966369628903, 0.05892095947265625, 0.05890614318847656, 0.05889686584472656, 0.05892902374267578, 0.059039169311523435, 0.05892323303222656, 0.05900511932373047, 0.05905155181884766, 0.05939878463745117, 0.05923443222045898, 0.05934044647216797, 0.05924508666992188, 0.05915590286254883, 0.05906265640258789, 0.05895491027832031, 0.059093791961669924, 0.05922617721557617, 0.05904723358154297, 0.05897507095336914, 0.05900886535644531, 0.05911072158813477, 0.06039766311645508, 0.05884249496459961, 0.05860611343383789, 0.058449920654296876, 0.058342910766601565, 0.05838681411743164, 0.05829235076904297, 0.05855136108398438, 0.05854832077026367, 0.05859209442138672, 0.05850243377685547, 0.058581729888916016, 0.058492416381835936, 0.05890646362304688, 0.05859804916381836, 0.05860761642456055, 0.058947582244873044, 0.059099136352539064, 0.05869772720336914, 0.05866495895385742, 0.0585904312133789, 0.05875996780395508, 0.05871615982055664, 0.058654720306396485, 0.058834720611572265, 0.058895614624023436, 0.05874784088134766, 0.05896527862548828, 0.05874560165405274, 
0.05895161437988281, 0.05891897583007812, 0.05894553756713867, 0.05907046508789063, 0.05906556701660156, 0.059284351348876954, 0.05925183868408203, 0.05929040145874023, 0.05918467330932617, 0.058964000701904294, 0.05883964920043945, 0.058724193572998046, 0.05890662384033203, 0.05888204956054687, 0.05890252685546875, 0.05897216033935547, 0.05907046508789063, 0.05916617584228516, 0.05912793731689453, 0.05901558303833008, 0.05918467330932617, 0.05950716781616211, 0.05940825653076172, 0.05910134506225586, 0.059031105041503905, 0.05905039978027344, 0.05907455825805664, 0.05907660675048828, 0.0591278076171875, 0.059084800720214846, 0.05939302444458008, 0.059020286560058595, 0.05916672134399414, 0.05920134353637695, 0.0606530876159668, 0.058823200225830076, 0.058709087371826174, 0.058522464752197266, 0.05850896072387695, 0.05868988800048828, 0.05851264190673828, 0.05855104064941406, 0.05850124740600586, 0.05857238388061523, 0.05884700775146484, 0.05874431991577148, 0.05870265579223633, 0.05878406524658203, 0.05881948852539062, 0.05868233489990234, 0.05884108734130859, 0.05897750473022461, 0.05888694381713867, 0.05900697708129883, 0.05915238571166992, 0.05862400054931641, 0.05874470520019531, 0.058721630096435544, 0.0586391372680664, 0.05880569458007812, 0.05894406509399414, 0.059041793823242185, 0.058931102752685545, 0.05909097671508789, 0.059004222869873044, 0.05897702407836914, 0.05887171173095703, 0.059041278839111325, 0.05937622451782226, 0.05941862487792969, 0.05895539093017578, 0.05897452926635742, 0.05880223846435547, 0.0590643196105957, 0.05903974533081055, 0.058964126586914065, 0.058910560607910153, 0.058969825744628904, 0.05904412841796875, 0.059145790100097656, 0.05919996643066406, 0.05919247817993164, 0.05913888168334961, 0.05918124771118164, 0.05942252731323242, 0.05936304092407227, 0.05921820831298828, 0.05916204833984375, 0.05918572616577149, 0.05895270538330078, 0.05884415817260742, 0.05904592132568359, 0.059027423858642576, 0.05907455825805664, 0.059084800720214846, 0.059084800720214846, 0.05931792068481445, 0.06030950546264648, 0.05882470321655273, 0.05849702453613281, 0.05855417633056641, 0.058599006652832034, 0.05862051010131836, 0.05859328079223633, 0.05885504150390625, 0.05869417572021484, 0.05868288040161133, 0.05870627212524414, 0.058552223205566405, 0.05859542465209961, 0.058867263793945315, 0.05890220642089844, 0.058727169036865236, 0.059305633544921875, 0.05950860977172852, 0.05886614227294922, 0.05884438323974609, 0.058815265655517576, 0.0591071662902832, 0.058643936157226566, 0.058663616180419924, 0.05893280029296875, 0.05895212936401367, 0.05898438262939453, 0.05884460830688477, 0.058887809753417966, 0.05897727966308594, 0.058961376190185544, 0.058837535858154294, 0.05906985473632813, 0.059127521514892575, 0.059114368438720706, 0.05911286544799805, 0.05910179138183594, 0.05921791839599609, 0.05891401672363281, 0.059018016815185544, 0.05902054214477539, 0.05905075073242187, 0.059041793823242185, 0.058977886199951174, 0.05892275238037109, 0.05909324645996094, 0.05914870452880859, 0.059022815704345706, 0.058951423645019534, 0.05896271896362305, 0.05929081726074219, 0.05931500625610352, 0.05932457733154297, 0.05928537750244141, 0.059444862365722655, 0.0590912971496582, 0.059035873413085936, 0.05917059326171875, 0.059150337219238285, 0.059186656951904296, 0.059087390899658206, 0.05949033737182617, 0.059286590576171874, 0.0603873291015625, 0.05882032012939453, 0.05850809478759766, 0.058455615997314456, 0.05847833633422852, 0.05841561508178711, 0.05851359939575195, 
0.05853152084350586, 0.05855059051513672, 0.05873481750488281, 0.05867507171630859, 0.05864233779907226, 0.05863401412963867, 0.058646751403808595, 0.05874892807006836, 0.05875299072265625, 0.05881043243408203, 0.05888988876342773, 0.058813953399658205, 0.058772480010986325, 0.05858915328979492, 0.05866684722900391, 0.05880217742919922, 0.05890572738647461, 0.05871731185913086, 0.058746238708496094, 0.058880287170410155, 0.05901116943359375, 0.058851329803466794, 0.058963966369628903, 0.05899468612670898, 0.05923395156860352, 0.05903721618652344, 0.058966846466064454, 0.05894553756713867, 0.05912985610961914, 0.05921692657470703, 0.05906940841674805, 0.05890457534790039, 0.05937561416625976, 0.058910720825195315, 0.05897340774536133, 0.058937793731689454, 0.0590250244140625, 0.05904662322998047, 0.05904572677612305, 0.058931358337402345, 0.05895100784301758, 0.05917379379272461, 0.05944895935058594, 0.05917241668701172, 0.05885923385620117, 0.05895999908447266, 0.059173473358154295, 0.05949248123168945, 0.05969305419921875, 0.05897571182250977, 0.05902582550048828, 0.05908230209350586, 0.059011646270751957, 0.05916057586669922, 0.05917302322387695, 0.05940806579589844, 0.0604653434753418, 0.05899248123168945, 0.0585230712890625, 0.058399105072021486, 0.05863004684448242, 0.058402305603027345, 0.058577312469482425, 0.05851299285888672, 0.058552959442138675, 0.058638526916503904, 0.05881241607666016, 0.05895782470703125, 0.05872956848144531, 0.058839969635009766, 0.05877110290527344, 0.05877590560913086, 0.05916057586669922, 0.05910502243041992, 0.05878195190429687, 0.058670143127441406, 0.05908486557006836, 0.05890089416503906, 0.058861793518066405, 0.05884259033203125, 0.058753631591796876, 0.05890848159790039, 0.058849662780761716, 0.05899017715454102, 0.0589881591796875, 0.05894595336914062, 0.05898073577880859, 0.0591234245300293, 0.059138336181640626, 0.05913977432250977, 0.059144512176513675, 0.05912566375732422, 0.05905215835571289, 0.059076576232910155, 0.05907660675048828, 0.059006175994873046, 0.05908153533935547, 0.05903926467895508, 0.058971969604492185, 0.05924025726318359, 0.05894636917114258, 0.059025409698486325, 0.05919049453735352, 0.05906438446044922, 0.05905481719970703, 0.05940224075317383, 0.05937062454223633, 0.059267967224121094, 0.059230335235595705, 0.059275009155273437, 0.059410430908203124, 0.059312255859375, 0.05907436752319336, 0.05913417434692383, 0.05901689529418945, 0.05919772720336914, 0.05901276779174805, 0.059216224670410156, 0.05917625427246094, 0.06037535858154297, 0.0588590087890625, 0.05867366409301758, 0.0584169921875, 0.05848489761352539, 0.05843379211425781, 0.05856972885131836, 0.058538753509521486, 0.05853567886352539, 0.058566177368164066, 0.05868207931518555, 0.058789249420166015, 0.05885948944091797, 0.05870249557495117, 0.05881856155395508, 0.05890457534790039, 0.059229248046875, 0.05913491058349609, 0.05901039886474609, 0.058872032165527347, 0.05869612884521484, 0.058842910766601565, 0.05880416107177734, 0.05881679916381836, 0.05888332748413086, 0.05888691329956055, 0.058915969848632815, 0.059035968780517575, 0.05883142471313477, 0.05897206497192383, 0.05910537719726563, 0.05902748870849609, 0.05908067321777344, 0.0590909423828125, 0.05910528182983398, 0.05920153427124023, 0.05922633743286133, 0.05908662414550781, 0.05886777496337891, 0.05884921646118164, 0.05886361694335938, 0.058893600463867185, 0.05892499160766602, 0.05933747100830078, 0.05896809768676758, 0.059066497802734375, 0.05911337661743164, 0.05917283248901367, 
0.05921177673339844, 0.059391551971435544, 0.05934038543701172, 0.059275360107421876, 0.05929616165161133, 0.05922649765014648, 0.059231296539306644, 0.059274143218994144, 0.058981472015380856, 0.05895414352416992, 0.05913782501220703, 0.059120384216308594, 0.05887382507324219, 0.058992641448974606, 0.05922812652587891, 0.0604139518737793, 0.05894710540771484, 0.05870230484008789, 0.05864572906494141, 0.058583839416503906, 0.05867833709716797, 0.05860857772827149, 0.058738689422607425, 0.058761215209960936, 0.05891481781005859, 0.05876041412353516, 0.05862684631347656, 0.05875487899780273, 0.05874710464477539, 0.05874070358276367, 0.05892252731323242, 0.05909142303466797, 0.05925273513793945, 0.059383167266845706, 0.05929638290405274, 0.059057697296142575, 0.05873916625976563, 0.05876435089111328, 0.058886592864990234, 0.05922870254516602, 0.05901836776733398, 0.05902016067504883, 0.058919967651367186, 0.05904838562011719, 0.0590300178527832, 0.05894569778442383, 0.05902320098876953, 0.05931008148193359, 0.05937753677368164, 0.05957235336303711, 0.05927084732055664, 0.058992641448974606, 0.05901708984375, 0.059003326416015626, 0.059030975341796875, 0.05907638549804688, 0.0591798095703125, 0.0590643196105957, 0.059039295196533205, 0.05907500839233398, 0.05910732650756836, 0.05925628662109375, 0.05908124923706055, 0.05931622314453125, 0.05945257568359375, 0.05925084686279297, 0.059219905853271484, 0.05917567825317383, 0.05940150451660156, 0.05947055816650391, 0.059061504364013674, 0.05922278213500977, 0.05940387344360352, 0.05949276733398438, 0.05931008148193359, 0.059138046264648435, 0.05922332763671875, 0.05939683151245117]",tokens/s,16.95977638954671,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.238592,6223.233024,0.0,5827.985408,5712.718848,s,1,7.54695166015625,7.54695166015625,0.0,7.54695166015625,7.54695166015625,7.54695166015625,7.54695166015625,[7.54695166015625],,kWh,1.0930174412499126e-05,1.1981091724824766e-06,4.701948206002615e-06,1.683023179098422e-05,,MB,1111.724032,6451.822592,0.0,6046.089216,5989.425664,s,10,5.286087219238281,0.5286087219238281,0.002321793649104793,0.5290133361816407,0.5308089782714844,0.531027963256836,0.5312031512451172,"[0.5225958251953124, 0.52860546875, 0.5275892333984376, 0.5301387939453125, 0.5295997314453125, 0.5275242309570313, 0.5293984375, 0.5286282348632813, 0.5307603149414063, 
0.5312469482421875]",tokens/s,484.29015523676753,kWh,1.5522173169080033e-05,1.7117404982930236e-06,1.0341382542105052e-05,2.757529620947811e-05,tokens/kWh,9283671.807376934,MB,1137.668096,6514.737152,0.0,6109.003776,6090.851328,s,10,20.916714111328126,2.0916714111328125,0.006449218288308434,2.09066162109375,2.1005560058593753,2.101516650390625,2.102285166015625,"[2.08726904296875, 2.081265380859375, 2.084882080078125, 2.09225341796875, 2.08906982421875, 2.088210693359375, 2.096529296875, 2.09441455078125, 2.102477294921875, 2.100342529296875]",tokens/s,30.119453593277495,kWh,6.068127666216896e-05,6.6933406774990235e-06,4.018646343569547e-05,0.00010756108077536346,tokens/kWh,585713.7130443371,,s,630,20.91351463317871,0.03319605497329954,0.0003447476206661443,0.03314238357543945,0.033563927841186524,0.03374415397644043,0.03473555046081543,"[0.0346644172668457, 0.03373174285888672, 0.03311088180541992, 0.032956447601318356, 0.03288854217529297, 0.03291366577148438, 0.03288684844970703, 0.032904319763183594, 0.03283145523071289, 0.033006431579589844, 0.033435329437255856, 0.03340934371948242, 0.03290457534790039, 0.032905887603759766, 0.03291545486450195, 0.03283990478515625, 0.03289583969116211, 0.0328480339050293, 0.03286297607421875, 0.03293398284912109, 0.03310291290283203, 0.03321331024169922, 0.03304035186767578, 0.033058815002441407, 0.03306905746459961, 0.033058559417724606, 0.0330590705871582, 0.03303395080566406, 0.0330937614440918, 0.033147041320800784, 0.03329228973388672, 0.033277950286865234, 0.03304857635498047, 0.03302918243408203, 0.03296761703491211, 0.033058815002441407, 0.03318374252319336, 0.0332485122680664, 0.03301862335205078, 0.033132190704345706, 0.033091934204101565, 0.03320217514038086, 0.033159168243408206, 0.03303593444824219, 0.03306710433959961, 0.033245311737060544, 0.03321219253540039, 0.033144416809082033, 0.033140830993652344, 0.033102527618408206, 0.03308540725708008, 0.03312639999389649, 0.03320217514038086, 0.03317708969116211, 0.0331514892578125, 0.03316659164428711, 0.03310464096069336, 0.03313180923461914, 0.03322544097900391, 0.0334552001953125, 0.03326249694824219, 0.03326092910766602, 0.03325750350952148, 0.03412057495117188, 0.03347763061523437, 0.03413993453979492, 0.03299964904785156, 0.032835582733154296, 0.03278652954101562, 0.03289075088500976, 0.03295849609375, 0.032958335876464843, 0.03279430389404297, 0.032841793060302736, 0.03278041458129883, 0.0328111686706543, 0.03270207977294922, 0.032647647857666016, 0.032704479217529295, 0.03279289627075195, 0.03269398498535156, 0.03277004623413086, 0.032815006256103514, 0.03281110382080078, 0.03290697479248047, 0.032792865753173826, 0.03286227035522461, 0.03290924835205078, 0.03297484970092773, 0.032958240509033204, 0.03303433609008789, 0.03300979232788086, 0.03334707260131836, 0.033323360443115235, 0.03318742370605469, 0.033185760498046876, 0.03328224182128906, 0.033251777648925784, 0.03302601623535156, 0.03299123382568359, 0.03292559814453125, 0.032845920562744144, 0.03288063812255859, 0.03284172821044922, 0.03291664123535156, 0.032885601043701175, 0.03296390533447266, 0.0329911994934082, 0.033027935028076175, 0.032981246948242185, 0.03294476699829101, 0.03302134323120117, 0.03292745590209961, 0.03291545486450195, 0.03314777755737305, 0.03300131225585937, 0.03313036727905273, 0.03328841781616211, 0.03325049591064453, 0.033121150970458986, 0.03308240127563476, 0.03323344039916992, 0.033579456329345704, 0.033261566162109374, 0.033186912536621094, 0.03325337600708008, 0.03483504104614258, 
0.03366851043701172, 0.033057376861572264, 0.03282876968383789, 0.03278876876831055, 0.032866687774658204, 0.0327086067199707, 0.03273523330688476, 0.03268150329589844, 0.0326517448425293, 0.03269222259521484, 0.03263488006591797, 0.03274947357177734, 0.03284182357788086, 0.032866302490234374, 0.03283679962158203, 0.03276227188110352, 0.032766368865966795, 0.03275980758666992, 0.032785633087158206, 0.03287033462524414, 0.032783199310302734, 0.032958110809326174, 0.033443519592285156, 0.0329152946472168, 0.03288761520385742, 0.03294617462158203, 0.0329986572265625, 0.03314163208007812, 0.033107616424560546, 0.03309769439697265, 0.03311008071899414, 0.033199905395507816, 0.03368387222290039, 0.033027294158935544, 0.033142623901367185, 0.033164222717285155, 0.03310182571411133, 0.033157119750976564, 0.03319807815551758, 0.03292940902709961, 0.03304486465454102, 0.03298009490966797, 0.03306489562988281, 0.033063201904296874, 0.032997440338134766, 0.033065567016601564, 0.03308745574951172, 0.03315100860595703, 0.03362815856933594, 0.03298303985595703, 0.03314688110351562, 0.033392574310302736, 0.03329017639160156, 0.03316044616699219, 0.03317644882202148, 0.03328409576416016, 0.03356991958618164, 0.03341721725463867, 0.03358972930908203, 0.033339710235595704, 0.03355420684814453, 0.03321001434326172, 0.034756607055664065, 0.033670238494873043, 0.03304262542724609, 0.03287424087524414, 0.03284435272216797, 0.032865985870361325, 0.03284860610961914, 0.033023998260498046, 0.03377910232543945, 0.03296112060546875, 0.03295846557617187, 0.03299942398071289, 0.03294972610473633, 0.03289961624145508, 0.03284915161132813, 0.03289891052246094, 0.03287542343139648, 0.0331038703918457, 0.03307136154174805, 0.033083393096923826, 0.03305036926269531, 0.032903167724609376, 0.03285942459106445, 0.03283222579956055, 0.03294345474243164, 0.03314748764038086, 0.033062976837158205, 0.03313049697875976, 0.033232383728027344, 0.033417598724365234, 0.03349331283569336, 0.03329625701904297, 0.03362575912475586, 0.03313059234619141, 0.03301007843017578, 0.03293683242797851, 0.0330269775390625, 0.03323494338989258, 0.033130016326904294, 0.033098369598388674, 0.03313443374633789, 0.03319718551635742, 0.03323788833618164, 0.033345535278320314, 0.03339571380615235, 0.033205249786376956, 0.03314467239379883, 0.03329244613647461, 0.03317750549316406, 0.03317891311645508, 0.03314771270751953, 0.03317724609375, 0.0336448974609375, 0.0333496322631836, 0.033263614654541016, 0.03339433670043945, 0.03337027359008789, 0.03347270584106445, 0.03363772964477539, 0.03380438232421875, 0.033538623809814455, 0.033495040893554685, 0.0334194221496582, 0.034786399841308595, 0.0339851188659668, 0.033372478485107424, 0.03309568023681641, 0.03301375961303711, 0.03279673767089844, 0.033261505126953125, 0.03282944107055664, 0.03278031921386719, 0.03283305740356445, 0.03292745590209961, 0.03279536056518555, 0.0328089599609375, 0.03299327850341797, 0.03306291198730469, 0.03294617462158203, 0.03289702224731445, 0.03279872131347656, 0.032763904571533206, 0.03296041488647461, 0.03303004837036133, 0.033175167083740235, 0.033101406097412106, 0.03356361770629883, 0.033253246307373044, 0.0330937614440918, 0.032892929077148435, 0.03299532699584961, 0.03361740875244141, 0.03319039916992188, 0.03499359893798828, 0.0331454086303711, 0.03320627212524414, 0.03334572982788086, 0.03323782348632812, 0.03299430465698242, 0.032997184753417966, 0.03290361785888672, 0.032906017303466796, 0.032850528717041014, 0.032833919525146485, 0.032890911102294924, 
0.03291952133178711, 0.03292364883422851, 0.0329411849975586, 0.03311094284057617, 0.0332369613647461, 0.03330035018920898, 0.03319411087036133, 0.033062400817871096, 0.03306108856201172, 0.03304476928710937, 0.03313071823120117, 0.03314467239379883, 0.03315008163452148, 0.033395294189453126, 0.0331286735534668, 0.03320832061767578, 0.03345427322387695, 0.033400863647460935, 0.03343337631225586, 0.03335699081420898, 0.033232769012451174, 0.034336734771728515, 0.033591327667236326, 0.03292127990722656, 0.032930110931396486, 0.03280691146850586, 0.0331893424987793, 0.03295929718017578, 0.03282304000854492, 0.03278041458129883, 0.03273043060302734, 0.0327685432434082, 0.03267900848388672, 0.03280579376220703, 0.03274342346191406, 0.03281305694580078, 0.03268972778320312, 0.03284566497802734, 0.03278908920288086, 0.03286969757080078, 0.03294892883300781, 0.032882686614990234, 0.03295795059204101, 0.03296307373046875, 0.03303219223022461, 0.03301686477661133, 0.03299407958984375, 0.033005470275878905, 0.03311030578613281, 0.03322995376586914, 0.03330342483520508, 0.033371326446533206, 0.03308217620849609, 0.033076351165771484, 0.03305971145629883, 0.0333251838684082, 0.03315727996826172, 0.033125312805175784, 0.033118305206298826, 0.033239742279052735, 0.0332327995300293, 0.03335382461547851, 0.03323260879516601, 0.0331280632019043, 0.0330667839050293, 0.033315296173095706, 0.03325788879394531, 0.03325155258178711, 0.03319305419921875, 0.03341110229492188, 0.03338499069213867, 0.03320230484008789, 0.03342326354980469, 0.03337839889526367, 0.03309088134765625, 0.03315987014770508, 0.03323075103759766, 0.03348489761352539, 0.033277950286865234, 0.03352492904663086, 0.0332927360534668, 0.033720703125, 0.033380352020263675, 0.033819904327392576, 0.03460748672485352, 0.0335906867980957, 0.033067615509033206, 0.03298918533325195, 0.03309590530395508, 0.033021728515625, 0.03294131088256836, 0.03297148895263672, 0.03291120147705078, 0.032970943450927735, 0.03298099136352539, 0.03295353698730469, 0.03295743942260742, 0.03300070571899414, 0.032879169464111326, 0.03287859344482422, 0.03301299285888672, 0.03294486236572266, 0.03323635101318359, 0.03301033782958984, 0.03294585418701172, 0.03303456115722656, 0.033019134521484375, 0.03312460708618164, 0.03317097473144531, 0.033003616333007815, 0.03300748825073242, 0.033062015533447266, 0.033216350555419924, 0.033552417755126955, 0.03336739349365234, 0.03354000091552734, 0.03331078338623047, 0.033333953857421876, 0.03337814331054687, 0.034025630950927734, 0.03321855926513672, 0.03313423919677734, 0.033083518981933596, 0.03327772903442383, 0.03452972793579102, 0.033226303100585934, 0.0331569595336914, 0.03317583847045898, 0.03330847930908203, 0.033219070434570314, 0.03390195083618164, 0.033979007720947266, 0.03343155288696289, 0.03343900680541992, 0.03319881439208985, 0.033181697845458984, 0.033268993377685546, 0.033344257354736326, 0.0332410888671875, 0.033588958740234376, 0.033322975158691405, 0.0332817268371582, 0.03349798583984375, 0.033328575134277345, 0.033382015228271486, 0.03340924835205078, 0.03392777633666992, 0.035089534759521486, 0.03408512115478515, 0.03329500961303711, 0.033158878326416015, 0.03313488006591797, 0.03288883209228516, 0.032917503356933595, 0.032919551849365236, 0.0329150390625, 0.03288310241699219, 0.03299532699584961, 0.033068416595458984, 0.03313257598876953, 0.033122081756591794, 0.03312022399902344, 0.03299728012084961, 0.03302406311035156, 0.033046817779541014, 0.033043041229248046, 0.033121952056884764, 0.033046558380126954, 
0.032906719207763675, 0.032965473175048825, 0.03297484970092773, 0.03308700942993164, 0.03296099090576172, 0.03305814361572266, 0.0331168327331543, 0.03309097671508789, 0.03307785415649414, 0.03375059127807617, 0.033184192657470704, 0.033208511352539063, 0.03359955215454102, 0.033259456634521484, 0.03313782501220703, 0.03305539321899414, 0.033087646484375, 0.03297264099121094, 0.033132545471191405, 0.03312188720703125, 0.03308585739135742, 0.03317964935302734, 0.03309363174438477, 0.033124351501464845, 0.03314604949951172, 0.033194080352783206, 0.03323958587646485, 0.03342729568481445, 0.03336431884765625, 0.03326342391967774, 0.03319561767578125, 0.03322531127929688, 0.033236927032470706, 0.033386878967285157, 0.03318751907348633, 0.03322265625, 0.03344998550415039, 0.03339878463745117, 0.03411558532714844, 0.03395779037475586, 0.03424854278564453, 0.03361753463745117, 0.03513507080078125, 0.0338436164855957, 0.03355263900756836, 0.0332710075378418, 0.03310441589355469, 0.03313840103149414, 0.03324860763549805, 0.03308745574951172, 0.03301196670532226, 0.03307136154174805, 0.033692127227783204, 0.03321395111083984, 0.03307980728149414, 0.033355777740478515, 0.033191776275634764, 0.03306089782714844, 0.03326959991455078, 0.03310316848754883, 0.03320876693725586, 0.033161758422851566, 0.03307855987548828, 0.03309641647338867, 0.03321241760253906, 0.03364044952392578, 0.033142143249511716, 0.033245918273925784, 0.033255329132080076, 0.03332207870483399, 0.03332150268554687, 0.033656993865966794, 0.03338671875, 0.03347478485107422, 0.03352143859863281, 0.03347455978393555, 0.033306625366210936, 0.033261600494384765, 0.033381534576416017, 0.03324601745605469, 0.033102977752685545, 0.03315299224853516, 0.03331961441040039, 0.03344406509399414, 0.03333232116699219, 0.03336431884765625, 0.03333587265014649, 0.03325337600708008, 0.03329600143432617, 0.03331216049194336, 0.03319087982177735, 0.03312633514404297, 0.03323052978515625, 0.03359577560424805, 0.033304576873779294, 0.033363006591796876, 0.034683998107910154, 0.03353785705566406, 0.033157024383544925, 0.033277057647705076, 0.033653759002685545, 0.033465438842773435, 0.0336530876159668, 0.03373072052001953, 0.033401153564453126, 0.03485955047607422, 0.03357500839233398, 0.033286048889160154, 0.03316454315185547, 0.0330043830871582, 0.03305628967285156, 0.03305305480957031, 0.032881759643554685, 0.03303926467895508, 0.03318560028076172, 0.03313273620605469, 0.032985088348388675, 0.03300742340087891, 0.033027359008789066, 0.032956832885742186, 0.03291350555419922, 0.03288025665283203, 0.032946975708007815, 0.03299711990356445, 0.03320857620239258, 0.033189888000488284, 0.03327350234985352, 0.03318790435791016, 0.03309372711181641, 0.03320969772338867, 0.033164127349853516, 0.03320627212524414, 0.033274078369140626, 0.03347135925292969, 0.033491008758544924, 0.033501697540283204, 0.03369152069091797, 0.03331343841552734, 0.03340268707275391, 0.03335907363891601, 0.03317225646972656, 0.03317379379272461, 0.033508190155029295, 0.03332185745239258, 0.03330223846435547, 0.03336220932006836, 0.03324860763549805, 0.03318236923217773, 0.03330192184448242, 0.03348080062866211, 0.034129886627197265, 0.03324067306518555, 0.03355244827270508, 0.033483905792236326, 0.03344563293457031, 0.033271808624267575, 0.03326736068725586, 0.03365923309326172, 0.0332677116394043, 0.03356671905517578, 0.0333656005859375, 0.033736286163330076, 0.033407905578613284, 0.033771327972412106, 0.03390047836303711, 0.03364988708496094, 0.033455039978027346, 
0.03375619125366211]",tokens/s,30.12406145261316,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1153, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 918, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 918, in [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 691, in __init__ self.self_attn = QWEN2MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 294, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 634, in __init__ self.attention = GPT_NEOX_ATTENTION_CLASSES[config._attn_implementation](config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 102, in __init__ self.query_key_value = nn.Linear(config.hidden_size, 3 * config.hidden_size, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.669824,569.311232,0.0,174.063616,172.57984,s,1,7.17195458984375,7.17195458984375,0.0,7.17195458984375,7.17195458984375,7.17195458984375,7.17195458984375,[7.17195458984375],,kWh,4.49210194999902e-06,4.882781367663077e-07,1.981946030002457e-06,6.962326116767785e-06,,MB,1108.475904,640.6144,0.0,234.881024,215.589888,s,25,0.27508755302429205,0.011003502120971677,0.00015233908185133062,0.010978240013122559,0.011162656211853028,0.011172537803649903,0.011461617774963379,"[0.01155247974395752, 0.010974176406860351, 0.011006848335266113, 0.010918208122253418, 0.011093888282775879, 0.011083488464355468, 0.010961407661437989, 0.010906815528869629, 0.010854496002197265, 0.010844511985778809, 0.010817952156066894, 0.010858304023742675, 0.011097536087036133, 0.011025247573852539, 0.011046751976013184, 0.01100153636932373, 0.011167136192321778, 0.011155936241149902, 0.011173888206481934, 0.010980640411376953, 0.010856415748596191, 0.010905407905578613, 0.010951775550842285, 0.010978240013122559, 0.010874464035034179]",tokens/s,23265.320184933415,kWh,3.5411350660398743e-07,3.90525435950372e-08,2.336999577257331e-07,6.268660079247578e-07,tokens/kWh,408380733.3045366,MB,1135.460352,642.711552,0.0,236.978176,215.592448,s,25,9.833397613525392,0.39333590454101564,0.02841497875139694,0.38773126220703125,0.3964486999511719,0.3983803649902344,0.498144299316406,"[0.3969692077636719, 0.5295372924804688, 0.39566793823242186, 0.3912528991699219, 0.39300204467773436, 0.388822998046875, 0.38113458251953125, 0.37845510864257814, 0.3840166931152344, 0.37589962768554686, 0.3793076171875, 0.39152328491210936, 0.3906401062011719, 0.3927539978027344, 
0.39408251953125, 0.392140869140625, 0.398733154296875, 0.38739495849609373, 0.3850337829589844, 0.38190274047851563, 0.38393341064453124, 0.38520547485351564, 0.38467071533203123, 0.3835853271484375, 0.38773126220703125]",tokens/s,160.16844450931782,kWh,1.0956496214870327e-05,1.2083142877942698e-06,4.543194889428047e-06,1.670800539209264e-05,tokens/kWh,3770647.574115332,,s,1575,9.820896668434136,0.006235489948212153,0.0033837550131982762,0.0061016960144042965,0.006408895969390869,0.006480585527420044,0.006702362804412841,"[0.006441760063171387, 0.006477503776550293, 0.007278592109680176, 0.006242335796356201, 0.006197023868560791, 0.006123712062835693, 0.00619536018371582, 0.0062871999740600586, 0.0062791681289672855, 0.006250271797180176, 0.006262400150299072, 0.00640777587890625, 0.006451871871948242, 0.006435488224029541, 0.0064787201881408695, 0.006379903793334961, 0.006322336196899414, 0.006346879959106445, 0.006325600147247315, 0.006332896232604981, 0.006299583911895752, 0.0063175358772277835, 0.006257599830627442, 0.006154208183288574, 0.0061010241508483885, 0.006008416175842285, 0.00629750394821167, 0.006022655963897705, 0.006118144035339355, 0.006304160118103027, 0.006664127826690674, 0.006571743965148926, 0.006684864044189453, 0.00659660816192627, 0.006536287784576416, 0.006404416084289551, 0.006496863842010498, 0.006506591796875, 0.006361055850982666, 0.006279104232788086, 0.006217728137969971, 0.006187007904052734, 0.00610537576675415, 0.006141664028167725, 0.006139488220214844, 0.0062119998931884765, 0.006354015827178955, 0.006132031917572021, 0.006070847988128662, 0.006041632175445556, 0.006061952114105224, 0.006266335964202881, 0.0062523198127746584, 0.006249343872070313, 0.006047743797302246, 0.006020480155944824, 0.006121088027954102, 0.006068895816802979, 0.006124063968658448, 0.0061190400123596195, 0.006299839973449707, 0.006440864086151123, 0.006282815933227539, 0.006154208183288574, 0.006459455966949463, 0.006365056037902832, 0.006270336151123047, 0.006281504154205322, 0.0063179202079772945, 0.006151008129119873, 0.006214687824249268, 0.006289663791656494, 0.00641868782043457, 0.00624396800994873, 0.006178719997406006, 0.006220736026763916, 0.006245855808258057, 0.14023097229003906, 0.006438432216644287, 0.006478464126586914, 0.006367008209228516, 0.006177023887634277, 0.006741055965423584, 0.006666848182678223, 0.006194975852966309, 0.006236991882324219, 0.006126751899719238, 0.006145696163177491, 0.006063039779663086, 0.0061320638656616215, 0.006153183937072754, 0.0061016960144042965, 0.006318079948425293, 0.0062912960052490235, 0.006263199806213379, 0.006190336227416992, 0.006142079830169678, 0.006067615985870361, 0.0063088321685791015, 0.006425695896148682, 0.006314911842346191, 0.006272895812988281, 0.006056159973144531, 0.00615558385848999, 0.006147903919219971, 0.0063373122215271, 0.0063508481979370115, 0.006336512088775635, 0.006254591941833496, 0.00695091199874878, 0.006221824169158936, 0.006158239841461181, 0.006170720100402832, 0.006177087783813476, 0.006147520065307617, 0.006162687778472901, 0.006333727836608887, 0.006486815929412842, 0.006649504184722901, 0.006252831935882568, 0.006184415817260742, 0.006132256031036377, 0.0061831679344177244, 0.0062082881927490235, 0.006208127975463868, 0.006134111881256104, 0.005865471839904785, 0.006037856101989746, 0.006028960227966309, 0.006107135772705078, 0.006444416046142578, 0.00649894380569458, 0.006588704109191894, 0.006657760143280029, 0.006494016170501709, 0.006662591934204102, 0.006512447834014893, 
0.006442016124725342, 0.006470560073852539, 0.0063610877990722655, 0.006357279777526856, 0.006414048194885254, 0.006186272144317627, 0.006176896095275879, 0.006113887786865235, 0.006187104225158692, 0.00617193603515625, 0.006304192066192627, 0.0063203201293945315, 0.006230016231536865, 0.006182015895843506, 0.006160352230072021, 0.006350048065185547, 0.0063259520530700684, 0.006202720165252686, 0.006091423988342285, 0.006133312225341797, 0.0062689919471740725, 0.006090464115142823, 0.006234784126281739, 0.006102240085601807, 0.006074975967407226, 0.006121664047241211, 0.006028992176055908, 0.006142591953277588, 0.0065491838455200195, 0.006555808067321777, 0.006338240146636963, 0.006260863780975342, 0.006132832050323486, 0.006200287818908691, 0.006116479873657227, 0.006169439792633057, 0.006230016231536865, 0.006195199966430664, 0.0062259202003479, 0.006518784046173095, 0.0065413122177124024, 0.0064430079460144046, 0.006467584133148193, 0.006606847763061524, 0.006338560104370118, 0.006399328231811523, 0.00623308801651001, 0.006174143791198731, 0.006082784175872802, 0.006190815925598145, 0.0060104641914367676, 0.0060824317932128905, 0.0061421761512756344, 0.00657366418838501, 0.0065446081161499026, 0.006555808067321777, 0.006583104133605957, 0.006516064167022705, 0.006380127906799316, 0.0063571839332580565, 0.006253856182098389, 0.0062368960380554195, 0.006162240028381348, 0.0061420159339904785, 0.006054944038391114, 0.00614246416091919, 0.006173151969909668, 0.006033408164978027, 0.00603110408782959, 0.0060646400451660155, 0.0060698561668396, 0.006082719802856446, 0.006064127922058105, 0.006088223934173584, 0.006042079925537109, 0.006090752124786377, 0.006047743797302246, 0.006150144100189209, 0.006136127948760986, 0.0060638079643249515, 0.006078271865844726, 0.006041088104248047, 0.006166207790374756, 0.006115615844726563, 0.0062984638214111325, 0.006290847778320312, 0.0061831998825073245, 0.006312287807464599, 0.006327744007110596, 0.006338592052459717, 0.00621401596069336, 0.006223872184753418, 0.006199295997619629, 0.006153952121734619, 0.006160448074340821, 0.006117216110229492, 0.006191167831420898, 0.006327807903289795, 0.006329152107238769, 0.006367392063140869, 0.006283103942871094, 0.006315167903900147, 0.006409056186676025, 0.006300831794738769, 0.0062206401824951175, 0.006278848171234131, 0.006219295978546142, 0.006136608123779297, 0.006045087814331055, 0.006036064147949219, 0.006017024040222168, 0.006071424007415772, 0.006059135913848877, 0.006045440196990967, 0.006107135772705078, 0.006348832130432129, 0.006434815883636475, 0.006508607864379883, 0.006668320178985596, 0.0067721281051635746, 0.006377151966094971, 0.0064234561920166015, 0.0063610877990722655, 0.006407392024993896, 0.006359295845031738, 0.006272575855255127, 0.006247712135314941, 0.006297279834747314, 0.006217728137969971, 0.006209856033325196, 0.006119455814361572, 0.006096127986907959, 0.006143487930297851, 0.006663167953491211, 0.006080416202545166, 0.00609830379486084, 0.006070015907287597, 0.006091231822967529, 0.006020991802215576, 0.006253087997436523, 0.006494400024414063, 0.006410048007965088, 0.006289247989654541, 0.006141503810882568, 0.006091360092163086, 0.006208896160125732, 0.006910528182983398, 0.006057888031005859, 0.006135871887207031, 0.00620358419418335, 0.006245855808258057, 0.00627945613861084, 0.006191264152526856, 0.006190847873687744, 0.006051519870758057, 0.00606166410446167, 0.005978752136230469, 0.006058656215667725, 0.006114816188812256, 0.006309120178222656, 0.006243264198303223, 
0.00638972806930542, 0.006241663932800293, 0.006118048191070557, 0.006101088047027588, 0.0061131839752197265, 0.006191103935241699, 0.006060031890869141, 0.0061168642044067386, 0.006089119911193847, 0.006089824199676514, 0.006071296215057373, 0.006104415893554688, 0.006050464153289795, 0.006069952011108398, 0.006017343997955322, 0.006122591972351074, 0.00630617618560791, 0.006260799884796143, 0.0063446397781372075, 0.006395904064178467, 0.00641974401473999, 0.006273727893829345, 0.006219711780548096, 0.006221920013427734, 0.006196320056915283, 0.0062262721061706544, 0.00628495979309082, 0.006218656063079834, 0.0061131839752197265, 0.006133855819702149, 0.006064127922058105, 0.006159391880035401, 0.006024159908294678, 0.006062079906463623, 0.006039135932922364, 0.00635097599029541, 0.006383039951324463, 0.006588287830352783, 0.00664467191696167, 0.006448959827423096, 0.006289792060852051, 0.006215136051177978, 0.006246880054473877, 0.006176544189453125, 0.006204576015472412, 0.006123551845550537, 0.00604256010055542, 0.006075520038604737, 0.006012063980102539, 0.006055232048034668, 0.0060133438110351566, 0.006082464218139649, 0.006002560138702393, 0.006215968132019043, 0.006258624076843262, 0.006158336162567139, 0.0060860800743103025, 0.006007359981536865, 0.00601907205581665, 0.006003903865814209, 0.0060076799392700195, 0.005959616184234619, 0.006174111843109131, 0.005993055820465088, 0.0060416641235351565, 0.005990623950958252, 0.006008416175842285, 0.005994624137878418, 0.006017183780670166, 0.005955584049224853, 0.006011839866638184, 0.00609168004989624, 0.006328256130218506, 0.006506559848785401, 0.006327807903289795, 0.006261248111724854, 0.006199391841888427, 0.006086688041687011, 0.006033279895782471, 0.006017024040222168, 0.0057554559707641605, 0.005979135990142822, 0.006034463882446289, 0.0059576001167297365, 0.005983967781066895, 0.00596940803527832, 0.005995296001434326, 0.005969823837280274, 0.006023519992828369, 0.0059818878173828124, 0.006021183967590332, 0.005967264175415039, 0.005986911773681641, 0.005952576160430908, 0.006038688182830811, 0.005959455966949463, 0.006041376113891601, 0.006084832191467285, 0.007065279960632324, 0.006388031959533692, 0.006017024040222168, 0.006012928009033203, 0.00597811222076416, 0.006039552211761475, 0.0059935998916625976, 0.006026112079620362, 0.006000639915466309, 0.006014944076538086, 0.005980224132537842, 0.0059732160568237306, 0.005970751762390137, 0.0059697279930114745, 0.006072256088256836, 0.006136000156402588, 0.006228991985321045, 0.005983168125152588, 0.005959743976593017, 0.0061354880332946775, 0.005955327987670898, 0.006042175769805909, 0.005967455863952637, 0.0060208959579467775, 0.005972608089447022, 0.006039231777191162, 0.005978271961212158, 0.006039711952209472, 0.006003712177276611, 0.0061224961280822755, 0.005964191913604736, 0.006037087917327881, 0.0059592962265014645, 0.0060207037925720215, 0.006015103816986084, 0.006032032012939453, 0.006418496131896973, 0.006033567905426025, 0.005982175827026367, 0.0060347518920898435, 0.006017536163330078, 0.0059593281745910645, 0.005968224048614502, 0.006455264091491699, 0.0059987521171569825, 0.005822688102722168, 0.005946527957916259, 0.005976704120635987, 0.006002463817596435, 0.005988416194915772, 0.005937376022338867, 0.006008287906646728, 0.005966303825378418, 0.006005023956298828, 0.00594217586517334, 0.005980991840362549, 0.005929152011871338, 0.005975872039794922, 0.005917695999145508, 0.005995488166809082, 0.005975903987884522, 0.006008831977844238, 0.006013472080230713, 
0.006019968032836914, 0.005978911876678467, 0.00601907205581665, 0.005935200214385986, 0.005986112117767334, 0.005942495822906494, 0.006029727935791015, 0.005998112201690674, 0.006013023853302002, 0.006009056091308594, 0.006053567886352539, 0.006023359775543213, 0.006052127838134766, 0.005986783981323242, 0.006036863803863525, 0.00612175989151001, 0.006025568008422851, 0.006076416015625, 0.00601907205581665, 0.0059411201477050785, 0.006060192108154297, 0.0059411201477050785, 0.0059905281066894535, 0.005918687820434571, 0.0060104641914367676, 0.0059415998458862305, 0.005976352214813232, 0.0059489598274230955, 0.006432576179504395, 0.0059584641456604005, 0.006020927906036377, 0.005933023929595947, 0.006121088027954102, 0.005955743789672852, 0.006004511833190918, 0.005968160152435302, 0.006020415782928467, 0.006044415950775146, 0.006006912231445312, 0.006170400142669678, 0.006030496120452881, 0.005959968090057373, 0.005988959789276123, 0.005946400165557861, 0.005996607780456543, 0.005866015911102295, 0.006158720016479493, 0.005962912082672119, 0.007720128059387207, 0.008408767700195312, 0.00780617618560791, 0.007717919826507568, 0.0070100479125976565, 0.005971519947052002, 0.005986656188964844, 0.005963840007781982, 0.005984255790710449, 0.005922560214996338, 0.005966080188751221, 0.005912576198577881, 0.005987552165985108, 0.00588265609741211, 0.006002463817596435, 0.0059699521064758305, 0.005950784206390381, 0.005971136093139648, 0.005990079879760742, 0.005945087909698487, 0.005932384014129639, 0.005895071983337402, 0.005936927795410156, 0.005925087928771972, 0.0060026879310607914, 0.005938943862915039, 0.005990655899047852, 0.005951583862304688, 0.005928864002227783, 0.0059435200691223145, 0.005924479961395264, 0.0059671678543090825, 0.005953824043273926, 0.0059500160217285155, 0.0059269118309021, 0.005910528182983398, 0.005937151908874512, 0.005947391986846923, 0.005967872142791748, 0.005944736003875732, 0.005964384078979492, 0.005908480167388916, 0.0058951997756958, 0.0059647679328918455, 0.005904160022735596, 0.005949247837066651, 0.005894495964050293, 0.00592083215713501, 0.0059678077697753905, 0.005975488185882568, 0.005884543895721435, 0.00593452787399292, 0.005900864124298096, 0.0059658241271972655, 0.005898528099060059, 0.005941055774688721, 0.005913760185241699, 0.0059275197982788085, 0.005965983867645264, 0.00594870376586914, 0.005664127826690674, 0.005906688213348389, 0.005962111949920654, 0.005951231956481934, 0.0059558401107788084, 0.005912223815917969, 0.0059415998458862305, 0.005984255790710449, 0.006062079906463623, 0.0059205121994018554, 0.005947648048400879, 0.005887519836425781, 0.006045728206634521, 0.005915008068084717, 0.005945280075073243, 0.005939616203308106, 0.005927807807922363, 0.005907360076904297, 0.005953536033630371, 0.005914559841156006, 0.005943168163299561, 0.005936384201049805, 0.005938144207000733, 0.0061801280975341795, 0.005986944198608398, 0.005943168163299561, 0.005965727806091309, 0.005927231788635254, 0.005988255977630615, 0.005937280178070069, 0.00595136022567749, 0.005937119960784912, 0.005950975894927979, 0.0059227199554443355, 0.0059584641456604005, 0.005932864189147949, 0.005969088077545166, 0.006034175872802735, 0.0059515519142150876, 0.005980160236358643, 0.0059550080299377445, 0.005952064037322998, 0.005922815799713135, 0.005988351821899414, 0.005933055877685547, 0.005988639831542969, 0.005928063869476318, 0.00599510383605957, 0.005927167892456055, 0.005991968154907226, 0.005951712131500244, 0.0062979841232299804, 0.005985983848571777, 
0.005965760231018066, 0.006059455871582031, 0.005966400146484375, 0.0059269118309021, 0.005943552017211914, 0.0059246401786804195, 0.005973983764648438, 0.0059617919921875, 0.005983295917510986, 0.005919616222381591, 0.005689343929290771, 0.005947455883026123, 0.005965760231018066, 0.005934144020080566, 0.005941247940063477, 0.0059211840629577635, 0.006122079849243164, 0.007403456211090088, 0.006957056045532227, 0.0062156801223754886, 0.0059688959121704105, 0.005952511787414551, 0.005935232162475586, 0.005983520030975342, 0.005941855907440186, 0.005958752155303955, 0.005910431861877442, 0.005935679912567138, 0.00593887996673584, 0.005985023975372314, 0.005918879985809326, 0.005928639888763428, 0.005926559925079346, 0.006015679836273193, 0.006008607864379883, 0.005953567981719971, 0.005951039791107178, 0.005949535846710205, 0.0060635838508605955, 0.005949600219726563, 0.00594374418258667, 0.00595747184753418, 0.005957183837890625, 0.00599948787689209, 0.005953536033630371, 0.005937151908874512, 0.005975135803222656, 0.005939551830291748, 0.005976672172546387, 0.005942431926727295, 0.005974080085754395, 0.005902112007141113, 0.005958623886108398, 0.005910528182983398, 0.006109087944030762, 0.0060498881340026855, 0.00601043176651001, 0.005929408073425293, 0.005969503879547119, 0.005916063785552979, 0.005978879928588868, 0.005904928207397461, 0.005973728179931641, 0.0059324798583984375, 0.00602784013748169, 0.005980160236358643, 0.006082560062408447, 0.006088160037994385, 0.006142240047454834, 0.006040095806121826, 0.006086368083953858, 0.005998432159423828, 0.006023327827453614, 0.005827936172485351, 0.006211872100830078, 0.006291872024536133, 0.006391583919525146, 0.0063482561111450194, 0.006269248008728028, 0.006226431846618652, 0.006270976066589356, 0.006367072105407715, 0.006162496089935303, 0.006154304027557373, 0.00603872013092041, 0.006025728225708008, 0.005963967800140381, 0.006080671787261963, 0.006012224197387696, 0.006118080139160156, 0.006209536075592041, 0.00617087984085083, 0.006147071838378906, 0.006057888031005859, 0.006024032115936279, 0.0060026879310607914, 0.006038688182830811, 0.006154719829559326, 0.006179200172424316, 0.00610748815536499, 0.006158304214477539, 0.006168288230895996, 0.006375391960144043, 0.006330687999725342, 0.006268608093261719, 0.006367231845855713, 0.00638105583190918, 0.006407904148101807, 0.006279935836791992, 0.0063266558647155765, 0.006227583885192871, 0.0061645121574401855, 0.006158527851104737, 0.006137663841247558, 0.006098944187164307, 0.0060661759376525876, 0.006227456092834473, 0.006087007999420166, 0.0062846078872680665, 0.00647049617767334, 0.006552927970886231, 0.006453120231628418, 0.006269696235656738, 0.006259871959686279, 0.006187136173248291, 0.00618943977355957, 0.006101376056671143, 0.006158336162567139, 0.006090752124786377, 0.006023263931274414, 0.006039167881011963, 0.006266304016113281, 0.006155104160308838, 0.006387455940246582, 0.006463935852050781, 0.006512032032012939, 0.005967423915863037, 0.006191008090972901, 0.006179327964782715, 0.006275360107421875, 0.006350592136383056, 0.006295551776885986, 0.006327616214752197, 0.006430399894714355, 0.006392288208007813, 0.006359392166137695, 0.006389472007751465, 0.006377855777740479, 0.006456895828247071, 0.006438752174377441, 0.006408736228942871, 0.0064139838218688966, 0.00637337589263916, 0.006441472053527832, 0.0063192639350891115, 0.006246975898742676, 0.006361375808715821, 0.006244095802307129, 0.006202688217163086, 0.006146719932556152, 0.006285600185394287, 
0.006205440044403076, 0.006162367820739746, 0.006121535778045654, 0.006080512046813965, 0.006096896171569824, 0.006042943954467773, 0.006105279922485352, 0.006131648063659668, 0.006113247871398926, 0.006105984210968017, 0.006102303981781006, 0.006078911781311035, 0.006082464218139649, 0.005999743938446045, 0.006074592113494873, 0.006146111965179443, 0.006275775909423828, 0.006165887832641602, 0.006128255844116211, 0.006019040107727051, 0.006021152019500733, 0.006053887844085694, 0.006060128211975098, 0.006184864044189453, 0.006217728137969971, 0.006178815841674804, 0.006133247852325439, 0.006124032020568848, 0.0061214399337768555, 0.006066048145294189, 0.006052000045776367, 0.006092512130737305, 0.005996672153472901, 0.0061166400909423825, 0.006114528179168701, 0.006098176002502441, 0.006242112159729004, 0.006140768051147461, 0.00602726411819458, 0.006256671905517578, 0.006212992191314697, 0.006238719940185547, 0.006159743785858154, 0.006120255947113037, 0.0061420159339904785, 0.00606601619720459, 0.006109183788299561, 0.006094848155975342, 0.006072319984436035, 0.0060499200820922855, 0.0061626238822937015, 0.006196928024291992, 0.006330592155456543, 0.0063528637886047365, 0.006278560161590576, 0.006265247821807861, 0.006199295997619629, 0.006172671794891358, 0.006117087841033935, 0.00626470422744751, 0.006071936130523681, 0.006139808177947998, 0.006394112110137939, 0.006197855949401855, 0.006326047897338867, 0.0064924159049987796, 0.006612991809844971, 0.006840415954589844, 0.00640934419631958, 0.006416672229766845, 0.006408703804016113, 0.006343999862670899, 0.006365888118743897, 0.006369279861450195, 0.00623638391494751, 0.0060778241157531734, 0.00602563190460205, 0.005991680145263672, 0.006054656028747558, 0.006199295997619629, 0.00638156795501709, 0.006262784004211426, 0.006258975982666015, 0.006280928134918213, 0.006245888233184814, 0.006208000183105469, 0.006141024112701416, 0.0060834879875183104, 0.006049791812896729, 0.006100992202758789, 0.00603545618057251, 0.006067615985870361, 0.006199903964996338, 0.0064245758056640625, 0.00630998420715332, 0.006158143997192383, 0.006049312114715576, 0.00611084794998169, 0.006308703899383545, 0.00630406379699707, 0.006362912178039551, 0.005897439956665039, 0.00616534423828125, 0.006092735767364502, 0.00611737585067749, 0.006205440044403076, 0.006292736053466797, 0.00623308801651001, 0.006235263824462891, 0.006324160099029541, 0.006265247821807861, 0.006299935817718506, 0.0060677118301391605, 0.006086880207061767, 0.006038976192474365, 0.006107872009277344, 0.006119552135467529, 0.006217567920684814, 0.006217887878417969, 0.006168575763702393, 0.006154240131378174, 0.006098944187164307, 0.006134975910186768, 0.00616099214553833, 0.006314208030700683, 0.006379231929779052, 0.006312096118927002, 0.00620966386795044, 0.006117280006408691, 0.006115327835083008, 0.00610211181640625, 0.006093823909759521, 0.0063318080902099606, 0.006295904159545899, 0.006119103908538819, 0.006462368011474609, 0.006688767910003662, 0.006082208156585694, 0.0062847681045532225, 0.006187263965606689, 0.006215968132019043, 0.006256383895874023, 0.006211679935455322, 0.006125823974609375, 0.006379392147064209, 0.006526783943176269, 0.006590784072875977, 0.006518688201904297, 0.006479135990142822, 0.006424511909484863, 0.006437695980072021, 0.006342016220092774, 0.0063630399703979495, 0.0064617919921875, 0.006400352001190186, 0.006414048194885254, 0.006240543842315674, 0.006280735969543457, 0.006132192134857177, 0.006188159942626953, 0.006201759815216064, 
0.006191264152526856, 0.006251071929931641, 0.0061478400230407714, 0.00613478422164917, 0.00637440013885498, 0.006485568046569824, 0.006318111896514892, 0.006315487861633301, 0.0061874880790710445, 0.006168384075164795, 0.006094624042510986, 0.006034624099731445, 0.006061535835266113, 0.006031583786010742, 0.006145023822784424, 0.0062863359451293946, 0.006276768207550049, 0.0061586880683898925, 0.00636627197265625, 0.006429632186889648, 0.0064204797744750975, 0.006291679859161377, 0.006173791885375976, 0.006222527980804443, 0.006141952037811279, 0.006155488014221192, 0.006160639762878418, 0.006238080024719238, 0.006093183994293213, 0.006111519813537598, 0.006350783824920654, 0.0063303041458129886, 0.006332543849945069, 0.0062518720626831056, 0.006242976188659668, 0.0062791681289672855, 0.0061851201057434085, 0.006068064212799072, 0.006004735946655273, 0.006038911819458008, 0.006015615940093994, 0.006028831958770752, 0.006009088039398193, 0.00608892822265625, 0.006134047985076905, 0.006310688018798828, 0.0063639039993286135, 0.0063526082038879396, 0.006220384120941162, 0.006207071781158448, 0.006164768218994141, 0.006038943767547608, 0.006070528030395508, 0.006048416137695313, 0.0060514240264892575, 0.006055871963500977, 0.006183072090148926, 0.006347104072570801, 0.006454944133758545, 0.006505951881408691, 0.006426464080810547, 0.006367487907409668, 0.00624889612197876, 0.006256608009338379, 0.006230048179626465, 0.006424799919128418, 0.006057983875274659, 0.006498559951782227, 0.006460447788238525, 0.006382304191589356, 0.0064471039772033695, 0.006289408206939697, 0.006297344207763672, 0.006307871818542481, 0.0063879361152648926, 0.0064143362045288085, 0.0062873601913452145, 0.006250688076019287, 0.006131455898284912, 0.006221888065338135, 0.006342175960540771, 0.0063820481300354, 0.006338304042816162, 0.00623638391494751, 0.006258143901824951, 0.006285888195037842, 0.006350336074829102, 0.0065090560913085935, 0.006334464073181153, 0.006316031932830811, 0.00620688009262085, 0.006375487804412842, 0.006523104190826416, 0.006490143775939941, 0.006472224235534668, 0.006325215816497803, 0.006255392074584961, 0.006174975872039795, 0.006133503913879394, 0.006199295997619629, 0.006174719810485839, 0.006164480209350586, 0.006131711959838867, 0.006171807765960693, 0.006201663970947266, 0.0063508481979370115, 0.006567935943603515, 0.006392384052276611, 0.006473760128021241, 0.006891456127166748, 0.006373663902282715, 0.006431903839111328, 0.00642310380935669, 0.006405151844024658, 0.006299967765808105, 0.0061938238143920896, 0.00626204776763916, 0.006171360015869141, 0.006129407882690429, 0.006087135791778564, 0.006098720073699951, 0.0060661759376525876, 0.006061279773712158, 0.006371583938598633, 0.006484511852264404, 0.006563615798950195, 0.006442560195922852, 0.0063883838653564454, 0.006408192157745361, 0.006060031890869141, 0.0062269439697265625, 0.006217055797576905, 0.006143519878387451, 0.0060797438621521, 0.006110079765319824, 0.00604307222366333, 0.0060505599975585935, 0.005973440170288086, 0.005990816116333008, 0.005987552165985108, 0.005990431785583496, 0.005960415840148925, 0.006063519954681396, 0.006291200160980224, 0.006267744064331055, 0.006080512046813965, 0.006150144100189209, 0.005996543884277344, 0.006103040218353272, 0.006000639915466309, 0.00601635217666626, 0.005994175910949707, 0.006044640064239502, 0.006025184154510498, 0.00612559986114502, 0.006193376064300537, 0.006233888149261475, 0.006188416004180909, 0.006079103946685791, 0.006084928035736084, 0.006065855979919434, 
0.006127295970916748, 0.0060910720825195315, 0.006051839828491211, 0.006017024040222168, 0.006033184051513672, 0.006013440132141113, 0.006033152103424072, 0.006117216110229492, 0.006416512012481689, 0.006428671836853027, 0.006489952087402343, 0.0065414719581604, 0.0064839677810668945, 0.0064692158699035645, 0.006375840187072754, 0.006318143844604492, 0.006313920021057129, 0.00614739179611206, 0.006134367942810058, 0.006090559959411621, 0.006202976226806641, 0.006089312076568603, 0.006100480079650879, 0.006111839771270752, 0.006103040218353272, 0.006029439926147461, 0.006084479808807373, 0.006189184188842773, 0.006073311805725097, 0.006073247909545898, 0.006238207817077636, 0.006025728225708008, 0.006159872055053711, 0.00615664005279541, 0.0061132159233093265, 0.006126815795898437, 0.00610745620727539, 0.006160255908966065, 0.006083168029785156, 0.00608028793334961, 0.006062528133392334, 0.006122432231903076, 0.0060342721939086914, 0.006114880084991455, 0.006039999961853028, 0.006031360149383545, 0.006002848148345947, 0.006000607967376709, 0.005936351776123047, 0.005968544006347656, 0.005928256034851074, 0.006011360168457032, 0.005955264091491699, 0.005996928215026855, 0.005961343765258789, 0.006068287849426269, 0.006064223766326904, 0.0060993280410766605, 0.006006175994873047, 0.006093376159667969, 0.006051167964935302, 0.006216383934020996, 0.006110911846160889, 0.006062528133392334, 0.0060126399993896485, 0.005975488185882568, 0.006025951862335205, 0.005996511936187744, 0.006082496166229248, 0.005963456153869629, 0.005976480007171631, 0.006033408164978027, 0.006270495891571045, 0.006408671855926514, 0.006436992168426514, 0.00635481595993042, 0.006428864002227783, 0.006422336101531982, 0.006315743923187256, 0.006238495826721192, 0.006180863857269287, 0.0061996479034423825, 0.006098495960235596, 0.006084703922271729, 0.006024608135223389, 0.006019455909729004, 0.005974239826202392, 0.006000256061553955, 0.005927584171295166, 0.00599183988571167, 0.005986271858215332, 0.00624291181564331, 0.006526048183441162, 0.006341279983520508, 0.005863423824310303, 0.006123519897460937, 0.006124767780303955, 0.00605398416519165, 0.006046400070190429, 0.006004831790924072, 0.00598419189453125, 0.00603439998626709, 0.006181024074554444, 0.0060136961936950685, 0.00599283218383789, 0.005920447826385498, 0.005955967903137207, 0.005933856010437012, 0.005976128101348877, 0.005927264213562012, 0.005923295974731445, 0.005905695915222168, 0.005988863945007325, 0.005998176097869873, 0.006101632118225097, 0.006268032073974609, 0.006323071956634521, 0.0065491518974304195, 0.006379871845245361, 0.0061494078636169435, 0.006073376178741455, 0.0060044159889221195, 0.0059920320510864255, 0.005978528022766113, 0.005963776111602783, 0.005942912101745606, 0.0059498238563537595, 0.00591212797164917, 0.0059433279037475585, 0.005898848056793213, 0.005949567794799804, 0.005912255764007568, 0.005931295871734619, 0.005997568130493164, 0.006108928203582764, 0.006393983840942383, 0.006373888015747071, 0.006304096221923828, 0.006280288219451904, 0.006288127899169922, 0.00618668794631958, 0.006004672050476074, 0.005966368198394775, 0.005976096153259277, 0.005947360038757324, 0.006011072158813476, 0.005924352169036865, 0.006003007888793945, 0.005971968173980713, 0.00606822395324707, 0.005928959846496582, 0.005998623847961426, 0.005969823837280274, 0.0061502718925476076, 0.00624019193649292, 0.0061328959465026854, 0.006015071868896485, 0.0057849278450012205, 0.0059688959121704105, 0.0060002880096435545, 0.005978271961212158, 
0.005995744228363037, 0.005912320137023926, 0.0059827518463134765, 0.006021471977233886, 0.006016895771026611, 0.006072447776794433, 0.005980127811431884, 0.005994527816772461, 0.006014976024627685, 0.006030432224273682, 0.005996511936187744, 0.005995456218719482, 0.006004735946655273, 0.006012224197387696, 0.005935808181762695, 0.005937280178070069, 0.00604966402053833, 0.006230016231536865, 0.006453248023986816, 0.006352255821228028, 0.006274847984313965, 0.00631712007522583, 0.006347743988037109, 0.006470143795013428, 0.006451519966125488, 0.006684351921081543, 0.006367775917053222, 0.006267712116241455, 0.006167520046234131, 0.005980160236358643, 0.006047743797302246, 0.006021344184875489, 0.006053696155548096, 0.005959360122680664, 0.006027008056640625, 0.005945888042449951, 0.005984384059906006, 0.005894015789031983, 0.006018176078796387, 0.00593395185470581, 0.005955584049224853, 0.0060269122123718264, 0.006312287807464599, 0.006440095901489258, 0.006320608139038086, 0.006125823974609375, 0.006037888050079346, 0.0059983677864074705, 0.0060412797927856445, 0.006050079822540283, 0.005973311901092529, 0.005982975959777832, 0.006041728019714356, 0.006018847942352295, 0.006092832088470459, 0.0060702719688415525, 0.00601043176651001, 0.006035583972930908, 0.005996863842010498, 0.00571830415725708, 0.00597760009765625, 0.005939231872558594, 0.0059732160568237306, 0.005960512161254883, 0.005957632064819336, 0.005944767951965332, 0.005994688034057618, 0.006013472080230713, 0.006033184051513672, 0.005986559867858887, 0.006039775848388672, 0.0060433921813964845, 0.006076704025268555, 0.006024064064025879, 0.006093152046203613, 0.006001120090484619, 0.006060031890869141, 0.006020448207855225, 0.006078176021575928, 0.006093791961669922, 0.00602726411819458, 0.005966047763824463, 0.005991487979888916, 0.005982336044311524, 0.005960544109344482, 0.005969664096832275, 0.006103199958801269, 0.006364511966705322, 0.006425087928771973, 0.006424352169036866, 0.006443071842193604, 0.0065168957710266115, 0.006370975971221924, 0.006370687961578369, 0.006353663921356201, 0.0061561279296875, 0.006211904048919678, 0.0061010560989379885, 0.006090752124786377, 0.006010015964508056, 0.00604860782623291, 0.006236159801483154, 0.005990399837493897, 0.00601087999343872, 0.005996672153472901, 0.00602294397354126, 0.006016511917114258, 0.006050399780273438, 0.006316031932830811, 0.006278783798217774, 0.006130080223083496, 0.006166016101837158, 0.006275551795959473, 0.006389472007751465, 0.00618287992477417, 0.006101503849029541, 0.006090496063232422, 0.0061051521301269535, 0.006059967994689942, 0.006326432228088379, 0.006035359859466552, 0.006033472061157227, 0.005752831935882568, 0.0060356159210205075, 0.005936031818389893, 0.005987360000610352, 0.005955488204956055, 0.006002560138702393, 0.005959743976593017, 0.006010367870330811, 0.005978687763214112, 0.00606822395324707, 0.006017087936401367, 0.006042623996734619, 0.006076960086822509, 0.006073823928833008, 0.006103936195373535, 0.0060910720825195315, 0.006091839790344238, 0.006062975883483887, 0.006149312019348144, 0.006130303859710693, 0.006112576007843018, 0.006180736064910888, 0.006131840229034424, 0.0060217280387878415, 0.006031455993652343, 0.005939328193664551, 0.005975935935974121, 0.005959360122680664, 0.006052031993865967, 0.006234208106994629, 0.00655731201171875, 0.00655951976776123, 0.006455872058868408, 0.006457151889801026, 0.0064330239295959475, 0.006379007816314697, 0.006131392002105713, 0.006113696098327637, 0.006167263984680176, 
0.00608403205871582, 0.006092160224914551, 0.006030144214630127, 0.006004672050476074, 0.005966207981109619, 0.005994304180145264, 0.005958784103393555, 0.005989183902740478, 0.005955584049224853, 0.005969120025634766, 0.006038368225097656, 0.006219711780548096, 0.0061562881469726565, 0.006203104019165039, 0.006199584007263183, 0.006162432193756104, 0.006068287849426269, 0.0060638079643249515, 0.005968128204345703, 0.0060201921463012694, 0.005993728160858154, 0.00617468786239624, 0.006400864124298095, 0.006084671974182129, 0.005830495834350586, 0.00610313606262207, 0.006026559829711914, 0.006130176067352295, 0.00604150390625, 0.006032896041870117, 0.0060217280387878415, 0.005990784168243408, 0.005957503795623779, 0.005926400184631348, 0.005952000141143799, 0.0060208640098571775, 0.006024608135223389, 0.006037407875061035, 0.006036416053771972, 0.0060085439682006836, 0.0060152640342712406, 0.006039552211761475, 0.006014944076538086, 0.006035520076751709, 0.006027232170104981, 0.006017216205596924, 0.005966976165771485, 0.005962431907653809, 0.005986303806304932, 0.005989408016204834, 0.00602950382232666, 0.005972767829895019, 0.006166528224945069, 0.006420447826385498, 0.0064980158805847165, 0.00632863998413086, 0.006455296039581298, 0.006471712112426758, 0.006507743835449218, 0.006408959865570068, 0.006246335983276367, 0.006293568134307861, 0.006256319999694824, 0.006134079933166504, 0.006110496044158935, 0.006025951862335205, 0.006008831977844238, 0.006017024040222168, 0.005963007926940918, 0.006025375843048096, 0.005999199867248535, 0.005965151786804199, 0.005992159843444824, 0.0059539518356323245, 0.006113311767578125, 0.006276768207550049, 0.00624073600769043, 0.006053664207458496, 0.0060136961936950685, 0.005996511936187744, 0.006004096031188965, 0.005982592105865479, 0.005983967781066895, 0.006072735786437989, 0.005949440002441406, 0.005951488018035888, 0.00601859188079834, 0.0060785279273986815, 0.006256703853607178, 0.006608831882476807, 0.006103104114532471, 0.005994336128234863, 0.005976416110992431, 0.0060165758132934575, 0.006070816040039063, 0.0060415358543396, 0.006125408172607422, 0.005971583843231201, 0.006031551837921143, 0.0060126399993896485, 0.0060824317932128905, 0.005994016170501709, 0.006110079765319824, 0.006024511814117431, 0.0060750718116760255, 0.006047935962677002, 0.0060661759376525876, 0.006077983856201172, 0.006032800197601319, 0.006023263931274414, 0.0060152320861816405, 0.006079360008239746, 0.006043456077575684, 0.006074048042297363, 0.006452991962432862, 0.006256608009338379, 0.00640880012512207, 0.006524767875671387, 0.006377471923828125, 0.0063805441856384275, 0.006441952228546143, 0.006280352115631103, 0.0062657279968261715, 0.006302815914154053, 0.006370463848114014, 0.0062991042137146, 0.006215968132019043, 0.006140223979949951, 0.006065695762634277, 0.006123680114746094, 0.005989376068115234, 0.005999839782714843, 0.005955359935760498, 0.006092512130737305, 0.005951712131500244, 0.00603276777267456, 0.006220064163208008, 0.006422880172729492, 0.006346176147460938, 0.0061543679237365725, 0.006097536087036132, 0.006162303924560547, 0.006078464031219482, 0.006067999839782715, 0.006039775848388672, 0.006053088188171386, 0.006179615974426269, 0.006032735824584961, 0.006146687984466553, 0.006250527858734131]",tokens/s,160.37232171093808,, 
bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,738.89792,6315.507712,0.0,5920.260096,5695.433728,s,1,7.3876904296875,7.3876904296875,0.0,7.3876904296875,7.3876904296875,7.3876904296875,7.3876904296875,[7.3876904296875],,kWh,8.986639549997715e-06,9.709685918221336e-07,4.50611471600193e-06,1.4463722857821777e-05,,MB,1064.28416,6328.090624,0.0,5922.357248,5577.220096,s,10,5.196808197021484,0.5196808197021484,0.003407965845916442,0.5197416381835938,0.5229087463378906,0.5238181121826172,0.5245456048583984,"[0.5115663757324219, 0.520348388671875, 0.51844873046875, 0.5185545654296875, 0.5213436279296875, 0.5227066650390625, 0.5247274780273438, 0.5191348876953125, 0.5178452758789063, 0.5221322021484375]",tokens/s,492.6100604342579,kWh,1.5207874129583086e-05,1.6765977565631868e-06,1.0062230271999998e-05,2.6946702158146272e-05,tokens/kWh,9500234.889507936,MB,1090.33472,6328.090624,0.0,5922.357248,5663.963136,s,10,20.073852661132815,2.007385266113281,0.007085642304722359,2.0098646240234377,2.013493786621094,2.0144458068847655,2.015207423095703,"[2.0055675048828125, 1.990516845703125, 2.0153978271484374, 2.0097249755859377, 2.0074752197265626, 2.0100042724609377, 1.9990238037109376, 2.0132822265625, 2.0117939453125, 2.0110660400390623]",tokens/s,31.384109997968263,kWh,5.835305238583259e-05,6.436982631482033e-06,3.8695558734199933e-05,0.00010348559375151456,tokens/kWh,608780.3888072871,,s,630,20.070110181808488,0.03185731774890234,0.00032581729772583455,0.031819424629211425,0.032132120513916014,0.03226419200897217,0.03332832782745361,"[0.03364457702636719, 0.032511905670166014, 0.031821855545043944, 0.03167440032958985, 0.03160035133361817, 0.03163164710998535, 0.031692384719848636, 0.03164780807495117, 0.03161692810058594, 0.031658432006835935, 0.03155763244628906, 0.03162112045288086, 0.031893503189086916, 0.03163750457763672, 0.03157318305969238, 0.031683391571044925, 0.03163875198364258, 0.031537952423095705, 0.03178275108337402, 0.0316921272277832, 0.031609664916992186, 0.031647743225097655, 0.031681631088256834, 0.03170121574401855, 0.03179795265197754, 0.03204217529296875, 0.03178486442565918, 0.031685535430908206, 0.03170099258422852, 0.03175155258178711, 0.032068225860595705, 0.032020481109619144, 0.031970752716064456, 0.03193449592590332, 0.03193091201782226, 0.03188531112670898, 0.032010238647460935, 0.031859935760498045, 0.031705888748168945, 0.031645696640014646, 0.03176038360595703, 0.03180748748779297, 0.03198953628540039, 0.0317606086730957, 0.03176198387145996, 0.03175673675537109, 0.031821823120117186, 0.031926271438598636, 0.03189491271972656, 0.03185523223876953, 0.03183206367492676, 0.03185647964477539, 0.03193395233154297, 0.032012958526611325, 0.03196425628662109, 0.031870880126953126, 0.031716352462768556, 0.03179929542541504, 0.031757535934448244, 0.031742752075195314, 0.03178291130065918, 0.03192422485351563, 0.032128158569335936, 0.03323545455932617, 
0.03220479965209961, 0.03161907196044922, 0.03136531257629394, 0.031268672943115236, 0.03119094467163086, 0.03125257682800293, 0.03129343986511231, 0.03142860794067383, 0.0313055362701416, 0.03136531257629394, 0.03120742416381836, 0.031238143920898437, 0.031320064544677735, 0.03128639984130859, 0.0314081916809082, 0.03133523178100586, 0.031297407150268554, 0.03127900886535644, 0.03128956794738769, 0.03134806442260742, 0.03133872032165527, 0.0313450870513916, 0.03139776039123535, 0.03132019233703613, 0.0313956470489502, 0.031393983840942385, 0.03136511993408203, 0.0314748477935791, 0.031529823303222654, 0.03161622428894043, 0.0316563835144043, 0.03176883125305176, 0.03214960098266602, 0.031538240432739256, 0.03141932868957519, 0.03154431915283203, 0.03152899169921875, 0.03147651290893555, 0.03152607917785644, 0.03195337677001953, 0.03204150390625, 0.03173782348632812, 0.03154249572753906, 0.03160108757019043, 0.03157439994812012, 0.0314839038848877, 0.03145491218566895, 0.03176684761047363, 0.03164713668823242, 0.03174870491027832, 0.03176985549926758, 0.03173862457275391, 0.0319815673828125, 0.03183103942871094, 0.03170809555053711, 0.03170076751708984, 0.03169513511657715, 0.031747808456420896, 0.03189545631408691, 0.03225433731079102, 0.03203440093994141, 0.031951263427734376, 0.03333001708984375, 0.032143360137939454, 0.03180544090270996, 0.031597759246826174, 0.031690784454345707, 0.031598976135253906, 0.03160895919799805, 0.032005630493164065, 0.031691551208496094, 0.0318599681854248, 0.031759103775024417, 0.031716543197631834, 0.03188585662841797, 0.03184259223937988, 0.03176355171203613, 0.03165072059631348, 0.03174195289611816, 0.03171520042419434, 0.03182198333740234, 0.03174332809448242, 0.03172211265563965, 0.031752191543579104, 0.03174399948120117, 0.031649248123168945, 0.03164825630187988, 0.031704479217529294, 0.03171801567077637, 0.031808895111083986, 0.031928255081176755, 0.03219731140136719, 0.03217366409301758, 0.03227689743041992, 0.032069633483886716, 0.031936511993408204, 0.031903743743896484, 0.03194495964050293, 0.03185024070739746, 0.03177881622314453, 0.031825504302978515, 0.031951263427734376, 0.03189145660400391, 0.03189760017395019, 0.03199532890319824, 0.03189123153686523, 0.03187824058532715, 0.031848127365112305, 0.03422604751586914, 0.03305449676513672, 0.03204745483398438, 0.03199542427062988, 0.031930591583251955, 0.032069889068603516, 0.032094207763671875, 0.03196256065368652, 0.03200057601928711, 0.03209011077880859, 0.03209011077880859, 0.03214131164550781, 0.03209830474853516, 0.03224576187133789, 0.03235158538818359, 0.03224233627319336, 0.032405502319335935, 0.033595169067382816, 0.03245078277587891, 0.03198361587524414, 0.03163955116271973, 0.03162521553039551, 0.03153715133666992, 0.03158220863342285, 0.03165798377990723, 0.03172473526000977, 0.031646528244018556, 0.031657312393188475, 0.03177948760986328, 0.03174604797363281, 0.03163955116271973, 0.03172966384887695, 0.03166566467285156, 0.03164825630187988, 0.03169475173950195, 0.03178508758544922, 0.03177264022827148, 0.031850496292114255, 0.03180339241027832, 0.031774400711059574, 0.031932735443115236, 0.03185868835449219, 0.03182086372375488, 0.03178191947937012, 0.0317807674407959, 0.03178256034851074, 0.03189174461364746, 0.03197513580322266, 0.03192188835144043, 0.03188800048828125, 0.031936511993408204, 0.03198512077331543, 0.03185103988647461, 0.03173948860168457, 0.03179151916503906, 0.03180691146850586, 0.03181011199951172, 0.031971328735351565, 0.03179520034790039, 0.03176227188110352, 
0.03176041603088379, 0.03179088020324707, 0.03217343902587891, 0.03187401580810547, 0.032478782653808595, 0.03190940856933594, 0.031846847534179684, 0.031789535522460936, 0.0321223030090332, 0.032096832275390626, 0.03199702453613281, 0.03188591957092285, 0.03198582458496094, 0.032014400482177734, 0.032030208587646485, 0.031998559951782225, 0.03224921417236328, 0.0320579833984375, 0.0320184326171875, 0.03218841552734375, 0.033562625885009766, 0.03228876876831055, 0.03167334365844727, 0.03148886489868164, 0.0315098876953125, 0.03154780769348144, 0.031790496826171875, 0.03160323143005371, 0.031513023376464847, 0.031528959274291994, 0.031620992660522464, 0.03166425514221191, 0.031663455963134766, 0.03159721565246582, 0.03155558395385742, 0.031494144439697266, 0.03177676773071289, 0.031627264022827145, 0.03159404754638672, 0.03158060836791992, 0.0318047046661377, 0.031816415786743164, 0.03166630363464355, 0.03164665603637695, 0.03173472023010254, 0.03175014305114746, 0.031719423294067385, 0.0317706241607666, 0.031753759384155276, 0.032018207550048826, 0.03195964813232422, 0.03200214385986328, 0.032010238647460935, 0.0320145263671875, 0.03197420883178711, 0.03186345672607422, 0.031848703384399414, 0.03181167984008789, 0.03182083129882812, 0.03175699234008789, 0.03176380729675293, 0.031777727127075196, 0.0317640323638916, 0.031651487350463865, 0.03183603286743164, 0.03178793525695801, 0.03172944068908692, 0.031768672943115236, 0.03186700820922852, 0.031993600845336916, 0.03188966369628906, 0.03191398429870605, 0.03208806228637695, 0.03222083282470703, 0.03216524887084961, 0.03247817611694336, 0.03197542381286621, 0.03211075210571289, 0.03207900619506836, 0.032154304504394535, 0.03211468887329102, 0.03230892944335938, 0.03227225494384765, 0.03355865478515625, 0.032326431274414064, 0.031844127655029295, 0.03173548889160156, 0.03172831916809082, 0.03156771278381348, 0.03159040069580078, 0.03158835220336914, 0.031524864196777344, 0.031784959793090824, 0.03174959945678711, 0.031830560684204104, 0.0318317756652832, 0.03165417671203613, 0.0316331844329834, 0.03172988891601562, 0.03171123123168945, 0.03168460845947266, 0.031649791717529296, 0.03160883140563965, 0.03187302398681641, 0.031741056442260746, 0.031654399871826173, 0.03162252807617188, 0.03171612739562988, 0.0318047046661377, 0.0318591365814209, 0.03189311981201172, 0.03184934425354004, 0.03194009590148926, 0.0326426887512207, 0.031996608734130856, 0.031919551849365235, 0.03184934425354004, 0.03179471969604492, 0.0317604808807373, 0.031993215560913085, 0.032549663543701174, 0.031817344665527346, 0.03176259231567383, 0.0319550724029541, 0.03198176002502441, 0.03178000068664551, 0.03182691192626953, 0.03199324798583984, 0.03192265510559082, 0.03188531112670898, 0.03195020866394043, 0.03189769554138184, 0.031830560684204104, 0.03182796859741211, 0.032018241882324217, 0.03197766494750977, 0.032045280456542966, 0.03193014335632324, 0.03198111915588379, 0.03199750328063965, 0.03195788764953613, 0.03204095840454101, 0.032116161346435544, 0.032078399658203124, 0.032142654418945316, 0.03215020751953125, 0.03330598449707031, 0.03207619094848633, 0.03159791946411133, 0.03177353668212891, 0.031531007766723636, 0.03138559913635254, 0.0313753604888916, 0.03138553619384766, 0.03126217651367187, 0.03138179206848145, 0.031612543106079104, 0.03155612754821777, 0.03156368064880371, 0.0315346565246582, 0.0315516471862793, 0.0314619197845459, 0.03136307144165039, 0.03144499206542969, 0.0314768009185791, 0.03151558494567871, 0.03145840072631836, 0.03149663925170899, 
0.031496383666992187, 0.03162345504760742, 0.031512575149536134, 0.0315043830871582, 0.031536832809448245, 0.031586624145507815, 0.031514623641967776, 0.031733760833740236, 0.031784032821655275, 0.03170528030395508, 0.03171180725097656, 0.03203702545166016, 0.0318525447845459, 0.03173318481445313, 0.031734336853027345, 0.0316146240234375, 0.0316964168548584, 0.03167519950866699, 0.03157811164855957, 0.03157401657104492, 0.032292865753173826, 0.032005599975585934, 0.031765024185180665, 0.032126625061035155, 0.03200649642944336, 0.031784959793090824, 0.031874719619750976, 0.03187337684631348, 0.031786272048950195, 0.031731712341308595, 0.0318450870513916, 0.03188732719421387, 0.03175638389587403, 0.03185164833068848, 0.031802175521850586, 0.03169251251220703, 0.031991615295410156, 0.03175472068786621, 0.03193804740905762, 0.03233433532714844, 0.03222937774658203, 0.03330867385864258, 0.03218022537231445, 0.0318832950592041, 0.031659231185913086, 0.03170995140075684, 0.03162931251525879, 0.03157811164855957, 0.031676416397094724, 0.03172537612915039, 0.03169907188415527, 0.03177273559570312, 0.0319180793762207, 0.03174720001220703, 0.03172543907165527, 0.03184687995910645, 0.031814176559448244, 0.032099998474121094, 0.03182947158813477, 0.03184115219116211, 0.03181740760803223, 0.03184854316711426, 0.03185481643676758, 0.031838207244873046, 0.031866880416870115, 0.03179238319396973, 0.03185113525390625, 0.03177894401550293, 0.0318951358795166, 0.03191001510620117, 0.03193065643310547, 0.03213523101806641, 0.03200121688842773, 0.03210521697998047, 0.032010494232177736, 0.031923967361450194, 0.03198975944519043, 0.031919359207153324, 0.03187788772583008, 0.03193391990661621, 0.03202921676635742, 0.03187449645996094, 0.03184848022460938, 0.03202835083007813, 0.03201849746704102, 0.03246160125732422, 0.032097793579101565, 0.03180118370056152, 0.031868896484375, 0.03213177490234375, 0.03217407989501953, 0.03196928024291992, 0.03205254364013672, 0.03206828689575195, 0.03207721710205078, 0.03190025520324707, 0.03192793655395508, 0.03191231918334961, 0.03194175910949707, 0.03200294494628906, 0.03209625625610352, 0.03220012664794922, 0.032285247802734375, 0.03220275115966797, 0.0336558723449707, 0.03234255981445312, 0.03182022476196289, 0.03171327972412109, 0.031719423294067385, 0.031579967498779296, 0.03161734390258789, 0.031737344741821286, 0.031721216201782226, 0.03154598426818848, 0.03174617576599121, 0.031712352752685545, 0.03171817588806152, 0.031718816757202145, 0.03173391914367676, 0.03160518455505371, 0.03174399948120117, 0.031741439819335936, 0.03187148857116699, 0.031763999938964844, 0.03173606491088867, 0.03172710418701172, 0.03182665634155273, 0.03188060760498047, 0.031879776000976565, 0.03188051223754883, 0.03179999923706055, 0.03172336006164551, 0.031806751251220705, 0.03199884796142578, 0.032014209747314455, 0.032083393096923825, 0.03202732849121094, 0.032015872955322267, 0.032010753631591796, 0.03191193580627441, 0.03193391990661621, 0.03188995170593262, 0.031938304901123045, 0.031749504089355465, 0.03181862449645996, 0.032102081298828126, 0.0319768009185791, 0.03183244705200195, 0.03195881652832031, 0.03186054420471191, 0.03190003204345703, 0.03187366485595703, 0.032059391021728514, 0.031940031051635745, 0.032008094787597655, 0.03202320098876953, 0.03205238342285156, 0.03214422225952149, 0.03213625717163086, 0.03207632064819336, 0.03211920166015625, 0.032115745544433597, 0.03207215881347656, 0.032045726776123044, 0.032180065155029296, 0.03215359878540039, 0.032249855041503905, 
0.03332419204711914, 0.03208687973022461, 0.03204428863525391, 0.0317652473449707, 0.03178291130065918, 0.031858047485351565, 0.03163983917236328, 0.031678815841674805, 0.03183001518249512, 0.03170508766174317, 0.03165705680847168, 0.03168108749389648, 0.03164534378051758, 0.03162796783447266, 0.03176038360595703, 0.03183404731750488, 0.03171743965148926, 0.03163340759277344, 0.0317923526763916, 0.03177529525756836, 0.03180771255493164, 0.03171846389770508, 0.031802175521850586, 0.03169452857971192, 0.03179295921325684, 0.03189823913574219, 0.0319180793762207, 0.03292095947265625, 0.032139808654785156, 0.03182601547241211, 0.031938560485839845, 0.03201612854003906, 0.03217184066772461, 0.03257769775390625, 0.03200780868530274, 0.03189561653137207, 0.03185510444641113, 0.03192563247680664, 0.031800031661987305, 0.03177388763427735, 0.031884096145629884, 0.031677600860595706, 0.031734624862670896, 0.03173776054382324, 0.031717472076416016, 0.031735807418823245, 0.03191791915893555, 0.031799455642700196, 0.03181059265136719, 0.031910688400268554, 0.03188723182678223, 0.03197932815551758, 0.03212457656860351, 0.031923040390014645, 0.032046497344970705, 0.03191456031799316, 0.032087200164794924, 0.03199830436706543, 0.03201257705688477, 0.03200153732299805, 0.032107425689697267, 0.03210764694213867, 0.03224649429321289]",tokens/s,31.3899622021523,,
bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,809.92256,14639.104,0.0,14243.856384,14221.3376,s,1,7.50015673828125,7.50015673828125,0.0,7.50015673828125,7.50015673828125,7.50015673828125,7.50015673828125,[7.50015673828125],,kWh,1.5394994562500605e-05,1.6904588861759922e-06,7.184450191999921e-06,2.426990364067652e-05,,MB,1110.228992,14735.572992,0.0,14329.839616,14290.688,s,10,14.017546020507812,1.4017546020507814,0.005064467607014813,1.4028267822265625,1.4066118896484376,1.4069143432617188,1.4071563061523438,"[1.3934443359375, 1.3924166259765625, 1.399595947265625, 1.4004007568359376, 1.4030111083984376, 1.4026424560546875, 1.4064407958984375, 1.407216796875, 1.40583251953125, 1.406544677734375]",tokens/s,182.6282572038425,kWh,4.101319376291641e-05,4.523301419646922e-06,2.717318840519999e-05,7.270968358776332e-05,tokens/kWh,3520851.520293007,MB,1138.958336,14750.253056,0.0,14344.51968,14290.69056,s,10,39.382056884765625,3.9382056884765624,0.0028361852012258122,3.9377154541015624,3.9428391357421875,3.942949108886719,3.943037087402344,"[3.93264404296875, 3.937390380859375, 3.936491943359375, 3.9376533203125, 3.937684814453125, 3.93774609375, 3.938677978515625, 3.93789453125, 3.94305908203125, 3.942814697265625]",tokens/s,15.997132954315202,kWh,0.00011518598807333356,1.2704394243449077e-05,7.642061669200003e-05,0.00020431099900878268,tokens/kWh,308353.4430630033,,s,630,39.37825381469723,0.06250516478523374,0.00023357617037945695,0.06250548934936523,0.06278532638549804,0.06288896923065185,0.06312925102233886,"[0.06311766433715821, 
0.06226121520996094, 0.06225932693481445, 0.06189859390258789, 0.06193881607055664, 0.06222883224487305, 0.062042686462402345, 0.06216089630126953, 0.06200115203857422, 0.06224281692504883, 0.062414207458496095, 0.06231216049194336, 0.06223750305175781, 0.06258502578735352, 0.06216080093383789, 0.062195838928222655, 0.06219948959350586, 0.06223212814331055, 0.062155391693115236, 0.061967552185058596, 0.06203680038452149, 0.06238412857055664, 0.062453758239746096, 0.06251529693603515, 0.062299297332763674, 0.06239932632446289, 0.06256835174560547, 0.06234688186645508, 0.06235583877563477, 0.0625022087097168, 0.06236435317993164, 0.06235955047607422, 0.06239231872558594, 0.062486526489257815, 0.0626104965209961, 0.062387134552001955, 0.062183425903320315, 0.06274252700805664, 0.06236774444580078, 0.06257020950317382, 0.062295360565185545, 0.06242812728881836, 0.06235100936889648, 0.06262566375732422, 0.06256089782714844, 0.06289523315429688, 0.06241558456420898, 0.062365280151367185, 0.06243158340454102, 0.0625992317199707, 0.06236108779907226, 0.06244403076171875, 0.06293017578125, 0.0627119369506836, 0.06256089782714844, 0.062718017578125, 0.06256991958618165, 0.06255836868286133, 0.06274492645263671, 0.06263919830322266, 0.062446495056152344, 0.06305791854858399, 0.06260940933227539, 0.06337152099609375, 0.06236617660522461, 0.062189697265625, 0.0622059211730957, 0.06199219131469726, 0.06248444747924805, 0.0626480941772461, 0.0623089599609375, 0.062179550170898434, 0.06225324630737305, 0.06225100708007812, 0.06245321655273438, 0.06236959838867188, 0.062443359375, 0.06221836853027344, 0.06223244857788086, 0.06235635375976562, 0.06242083358764648, 0.06240480041503906, 0.0625781135559082, 0.06203776168823242, 0.06258774566650391, 0.06259241485595703, 0.062396064758300784, 0.06261407852172851, 0.06264172744750976, 0.06238899230957031, 0.06265856170654296, 0.06253936004638672, 0.06258870315551758, 0.062401153564453124, 0.06234316635131836, 0.06234223937988281, 0.06262614440917968, 0.06252953720092773, 0.06270214462280274, 0.06232451248168945, 0.06226908874511719, 0.062341697692871095, 0.06239846420288086, 0.06252105712890625, 0.06245404815673828, 0.062389633178710935, 0.06264281463623046, 0.06261920166015625, 0.0628813133239746, 0.06250588989257813, 0.062437374114990236, 0.0626480941772461, 0.06256371307373047, 0.06261398315429688, 0.06275305557250976, 0.06250102233886719, 0.06256019210815429, 0.062453407287597656, 0.0627305908203125, 0.0628223991394043, 0.06261491012573242, 0.06262643051147461, 0.06271155166625976, 0.06254111862182617, 0.06269382476806641, 0.0625805778503418, 0.06323279953002929, 0.06244480133056641, 0.06195462417602539, 0.06197174453735352, 0.06202057647705078, 0.06210355377197266, 0.062117889404296876, 0.062156097412109375, 0.062370494842529295, 0.06252544021606446, 0.062461952209472656, 0.06241888046264649, 0.06259267044067383, 0.06221993637084961, 0.06219209671020508, 0.06256262588500977, 0.06238361740112305, 0.06256662368774414, 0.06222463989257813, 0.062279678344726565, 0.06249241638183594, 0.0623372802734375, 0.062230270385742185, 0.06227788925170898, 0.06233472061157227, 0.0628364486694336, 0.06253366470336914, 0.062470657348632816, 0.06265001678466797, 0.06252579116821289, 0.06264012908935547, 0.06253948974609375, 0.062384449005126956, 0.06255379104614257, 0.062331199645996094, 0.06238934326171875, 0.062281856536865236, 0.06225382232666016, 0.06267903900146485, 0.0625541114807129, 0.06258895874023437, 0.06283039855957032, 0.06266073608398437, 0.06249065780639648, 
0.06242899322509766, 0.06250310516357421, 0.0625450553894043, 0.062402721405029296, 0.06244579315185547, 0.06283929443359375, 0.06248566436767578, 0.06252767944335938, 0.06247283172607422, 0.06247219085693359, 0.06278511810302734, 0.0625316162109375, 0.06254767990112305, 0.06276079940795898, 0.06269209671020508, 0.06261142349243164, 0.06286959838867187, 0.06285472106933594, 0.06267744064331054, 0.06311296081542969, 0.063023681640625, 0.06239980697631836, 0.062185665130615235, 0.062368255615234375, 0.06248646545410156, 0.062292030334472656, 0.06258009719848633, 0.06228649520874024, 0.06237334442138672, 0.06241888046264649, 0.062293952941894534, 0.06242367935180664, 0.06255001449584961, 0.06257664108276367, 0.06236569595336914, 0.062296062469482424, 0.062438560485839845, 0.06245379257202149, 0.06234195327758789, 0.06222438430786133, 0.062273536682128906, 0.06208512115478516, 0.06271721649169922, 0.06257056045532226, 0.06263792037963867, 0.06243411254882812, 0.06236502456665039, 0.06232950210571289, 0.062381790161132815, 0.06241923141479492, 0.062394367218017575, 0.062210079193115234, 0.06262688064575195, 0.06285609436035157, 0.06252953720092773, 0.06239641571044922, 0.06255001449584961, 0.06234255981445312, 0.06252934265136718, 0.06247504043579102, 0.06254182434082031, 0.06264236831665039, 0.06281609725952149, 0.06253065490722656, 0.06274288177490234, 0.06250960159301758, 0.06239401626586914, 0.06245830535888672, 0.0625516471862793, 0.062439743041992186, 0.06271139144897461, 0.06240431976318359, 0.06270022583007813, 0.0625041618347168, 0.06275913619995117, 0.06271446228027344, 0.06268937683105469, 0.06248819351196289, 0.06251264190673828, 0.06257535934448243, 0.0625334701538086, 0.06250486373901368, 0.06326476669311523, 0.06257171249389648, 0.062003265380859374, 0.06205094528198242, 0.061884449005126956, 0.062217662811279294, 0.062077598571777345, 0.06223052978515625, 0.06232252883911133, 0.06264233779907226, 0.06252463912963867, 0.06259519958496093, 0.062415519714355466, 0.06256006240844726, 0.0626157455444336, 0.06241487884521484, 0.06250508880615234, 0.06264815902709961, 0.06249881744384766, 0.0624824333190918, 0.06230019378662109, 0.0625561294555664, 0.06217113494873047, 0.06223801422119141, 0.062281566619873045, 0.06251708984375, 0.06240972900390625, 0.06280825424194336, 0.0623675537109375, 0.06285702514648438, 0.06253587341308593, 0.06262579345703125, 0.06268713760375977, 0.06254982376098633, 0.06233116912841797, 0.06246809768676758, 0.06227084732055664, 0.0624251823425293, 0.06218191909790039, 0.06240179061889648, 0.062400447845458985, 0.06284735870361328, 0.06265647888183594, 0.062519775390625, 0.06236569595336914, 0.06251043319702149, 0.06238684844970703, 0.06243139266967773, 0.062449504852294925, 0.06248857498168945, 0.06268915176391601, 0.0627256965637207, 0.06258723068237304, 0.06278720092773438, 0.06251100921630859, 0.062462337493896486, 0.06264438247680663, 0.06260559844970703, 0.06244486236572266, 0.06257030487060547, 0.0629502067565918, 0.06282969665527344, 0.0629349136352539, 0.06313398361206055, 0.062306304931640626, 0.061986175537109375, 0.06216870498657227, 0.06230876922607422, 0.062469982147216795, 0.062294784545898436, 0.06219980621337891, 0.06226947021484375, 0.06256022262573242, 0.06264131164550782, 0.06254806518554687, 0.062470943450927734, 0.06226953506469726, 0.06219555282592774, 0.06238332748413086, 0.06248860931396484, 0.06250783920288086, 0.062375358581542965, 0.062324703216552736, 0.06210390472412109, 0.06235891342163086, 0.062196575164794925, 
0.0625398063659668, 0.06229913711547851, 0.06261417770385742, 0.0629865608215332, 0.0625316162109375, 0.06247011184692383, 0.062484127044677734, 0.0624268798828125, 0.0625011215209961, 0.062443294525146485, 0.06257846450805664, 0.06254230499267578, 0.062403999328613284, 0.062364574432373046, 0.06262287902832031, 0.06239718246459961, 0.06246342468261719, 0.06266947174072265, 0.06256972885131835, 0.0625456657409668, 0.06253676986694336, 0.06256768035888671, 0.06252819061279297, 0.06265964889526367, 0.062491134643554686, 0.06285734558105469, 0.06281849670410156, 0.06255801773071289, 0.062488895416259765, 0.06267084884643555, 0.06258393478393555, 0.06242598342895508, 0.06245785522460937, 0.06271385574340821, 0.06294937515258789, 0.06274867248535156, 0.06264435195922852, 0.06258838272094727, 0.06258131027221679, 0.06251897430419921, 0.06338969421386718, 0.06230764770507812, 0.06221667098999024, 0.06216847991943359, 0.0619958381652832, 0.06206399917602539, 0.06206531143188477, 0.0622562255859375, 0.06222476959228516, 0.06220982360839844, 0.06274329757690429, 0.06271392059326172, 0.0625266227722168, 0.06247663879394531, 0.06232310485839844, 0.062362945556640625, 0.06251187133789063, 0.06235504150390625, 0.062306655883789065, 0.06257664108276367, 0.06219776153564453, 0.062321727752685546, 0.062446529388427735, 0.062281726837158206, 0.06234883117675781, 0.0623985595703125, 0.06262211227416992, 0.06258256149291992, 0.06240604782104492, 0.06253647994995117, 0.06243081665039062, 0.06266694259643554, 0.06258505630493164, 0.06255820846557616, 0.06245158386230469, 0.06241225433349609, 0.06237820816040039, 0.0623724479675293, 0.062370849609375, 0.06228255844116211, 0.062434879302978516, 0.06264214324951171, 0.06268473434448242, 0.06282332611083985, 0.06271356964111328, 0.06252691268920899, 0.06268012619018555, 0.06284265518188477, 0.06255372619628906, 0.06265894317626954, 0.062475936889648434, 0.06269987106323242, 0.06254169464111328, 0.06267712020874024, 0.06287155151367188, 0.06269337463378906, 0.06264124679565429, 0.06267919921875, 0.0626879997253418, 0.0628809928894043, 0.06277920150756836, 0.06287263870239258, 0.06273993682861329, 0.06307872009277343, 0.06253891372680664, 0.062354270935058596, 0.06221014404296875, 0.062033790588378906, 0.062272670745849606, 0.06225913619995117, 0.06230518341064453, 0.06236972808837891, 0.06242867279052734, 0.06242707061767578, 0.06253760147094727, 0.062314369201660155, 0.06243779373168945, 0.06252297592163086, 0.06257548904418946, 0.06270326232910156, 0.06265420913696289, 0.06231228637695312, 0.06224972915649414, 0.06216022491455078, 0.06218112182617187, 0.062192543029785156, 0.06210697555541992, 0.062169761657714845, 0.06227475357055664, 0.06244160079956055, 0.0626572151184082, 0.06275459289550782, 0.06264585494995117, 0.06250969696044922, 0.06244966506958008, 0.06251830291748046, 0.0626115837097168, 0.06230223846435547, 0.06219232177734375, 0.062425247192382814, 0.0626003189086914, 0.06239113616943359, 0.062461952209472656, 0.0623135986328125, 0.06242755126953125, 0.06236620712280273, 0.06269705581665039, 0.06277772903442383, 0.06264371109008789, 0.06244809722900391, 0.06266681671142578, 0.06259913635253907, 0.06306204986572266, 0.06258070373535156, 0.06246793746948242, 0.06249283218383789, 0.06253148651123047, 0.06256204986572265, 0.0623985595703125, 0.062445217132568356, 0.06278358459472656, 0.06269321441650391, 0.0630951042175293, 0.06310960006713867, 0.06295849609375, 0.06276559829711914, 0.06363750457763671, 0.062493854522705075, 0.062117855072021486, 
0.06217001724243164, 0.062152671813964847, 0.0625541114807129, 0.062363296508789065, 0.06243977737426758, 0.062228511810302735, 0.06235340881347656, 0.06246192169189453, 0.06279894256591798, 0.06251359939575195, 0.062454238891601566, 0.06237913513183594, 0.06274665451049805, 0.06229414367675781, 0.062333343505859375, 0.062303550720214845, 0.062235649108886716, 0.0620637435913086, 0.06254681777954102, 0.06274867248535156, 0.06261920166015625, 0.06242758560180664, 0.06263603210449219, 0.06255363082885743, 0.06261193466186524, 0.062441375732421874, 0.06254409790039063, 0.06260015869140625, 0.0626902084350586, 0.06256991958618165, 0.06255465698242188, 0.06277059173583985, 0.06254451370239258, 0.062304256439208984, 0.06237712097167969, 0.06239068984985351, 0.06246591949462891, 0.06242569732666016, 0.06299440002441406, 0.06295347213745117, 0.06295670318603516, 0.0626328010559082, 0.06287974548339843, 0.06260534286499024, 0.06251830291748046, 0.06257145690917969, 0.06277436828613281, 0.0627680320739746, 0.06278144073486328, 0.06265174484252929, 0.06286403274536133, 0.06257846450805664, 0.06270793533325195, 0.06261964797973633, 0.0626729278564453, 0.0627199363708496, 0.06285654449462891, 0.06297833633422852, 0.06302560043334961, 0.06266876983642578, 0.06329708862304688, 0.062414302825927734, 0.062134815216064454, 0.062142688751220705, 0.06235161590576172, 0.062339038848876954, 0.06250451278686524, 0.062354942321777344, 0.062443649291992184, 0.06258319854736329, 0.0622022705078125, 0.06255408096313476, 0.0625992317199707, 0.06256995010375976, 0.062335487365722655, 0.06255615997314454, 0.06250499343872071, 0.0625458869934082, 0.0623185920715332, 0.062217601776123045, 0.062173534393310546, 0.06231606292724609, 0.06236630249023437, 0.062457408905029294, 0.062487136840820315, 0.06252544021606446, 0.06254796981811524, 0.06251472091674805, 0.06262179183959961, 0.06260089492797852, 0.06244217681884766, 0.06255136108398437, 0.0625835189819336, 0.06261270523071288, 0.0625334701538086, 0.06253456115722657, 0.062408737182617184, 0.06248239898681641, 0.0625576629638672, 0.06263407897949219, 0.06251359939575195, 0.06258073425292969, 0.06271088027954101, 0.06274863815307617, 0.06261446380615235, 0.06283059310913086, 0.06292214584350586, 0.06275337600708007, 0.06265804672241211, 0.06294681549072266, 0.06256313705444336, 0.06280774307250976, 0.06289596939086914, 0.06260147094726562, 0.06283283233642578, 0.06293116760253906, 0.06290537643432617, 0.06275993728637695, 0.06263804626464843, 0.06274665451049805, 0.06286844635009765, 0.06285209655761718, 0.06282649612426758]",tokens/s,15.998677924231961,,
bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,739.258368,3450.79808,0.0,3055.550464,2937.680896,s,1,7.3439462890625,7.3439462890625,0.0,7.3439462890625,7.3439462890625,7.3439462890625,7.3439462890625,[7.3439462890625],,kWh,7.681751145829215e-06,8.40129576004978e-07,3.286391518003695e-06,1.1808272239837887e-05,,MB,1068.863488,3520.004096,0.0,3114.27072,2817.473024,s,10,2.6321105651855463,0.2632110565185547,0.002689435497496622,0.26245011901855464,0.26671708374023434,0.26688536682128905,0.2670199932861328,"[0.26238107299804686, 0.2625191650390625, 0.26112960815429687, 0.26030831909179686, 0.2659143371582031, 0.25960931396484377, 0.26705364990234376, 0.26563711547851565, 0.2666796875, 0.2608782958984375]",tokens/s,972.603519723168,kWh,7.710339589473723e-06,8.499682402763044e-07,5.08705085326314e-06,1.3647358683013168e-05,tokens/kWh,18758208.525627933,MB,1094.914048,3520.004096,0.0,3114.27072,2877.80864,s,10,11.568907958984372,1.1568907958984376,0.0022785457809861944,1.1568141479492189,1.1604347778320312,1.1604911926269532,1.1605363244628906,"[1.1604222412109375, 1.15336181640625, 1.15709375, 1.1544727783203126, 1.1566710205078126, 1.160547607421875, 1.156957275390625, 1.1546573486328124, 1.156302001953125, 1.158422119140625]",tokens/s,54.45630670012757,kWh,3.367687294385985e-05,3.7132154301009313e-06,2.2316998847736745e-05,5.970708722169752e-05,tokens/kWh,1055151.1207719047,,s,630,11.56644199371338,0.01835943173605298,0.0003209969920466255,0.018289616584777832,0.018546284675598143,0.0187432110786438,0.019744583721160894,"[0.019453887939453126, 0.018830528259277345, 0.01853696060180664, 0.018365087509155272, 0.018214624404907228, 0.01823315238952637, 0.01832512092590332, 0.01828883171081543, 0.018188735961914063, 0.018196352005004884, 0.018296735763549805, 0.018154783248901366, 0.018243871688842773, 0.01816636848449707, 0.018135232925415037, 0.018240959167480468, 0.018158111572265625, 0.018204383850097657, 0.018325632095336913, 0.018389152526855468, 0.018384735107421876, 0.01846067237854004, 0.0198656005859375, 0.01984102439880371, 0.018390335083007813, 0.018277088165283204, 0.018292800903320312, 0.018261920928955077, 0.018371904373168945, 0.01853004837036133, 0.018463680267333984, 0.01850102424621582, 0.018246240615844726, 0.01829020881652832, 0.018315807342529297, 0.01853593635559082, 0.0183055362701416, 0.018237567901611327, 0.018239295959472657, 0.01830297660827637, 0.018343936920166014, 0.01822275161743164, 0.01828803253173828, 0.018283456802368165, 0.01830873680114746, 0.018313600540161134, 0.018397184371948243, 0.01855414390563965, 0.018504512786865233, 0.018479007720947266, 0.018550783157348632, 0.018451839447021483, 0.018412160873413085, 0.018460159301757813, 0.01839699172973633, 0.018536256790161132, 0.01848566436767578, 0.018379232406616212, 0.018358272552490236, 0.018347936630249022, 0.018329376220703124, 0.018392608642578124, 0.018329727172851563, 0.019767904281616212, 0.01886630439758301, 0.01852332878112793, 0.01838368034362793, 0.018229248046875, 0.018263168334960937, 0.018137983322143555, 0.01817804718017578, 0.018249727249145507, 0.018104320526123048, 0.018143072128295898, 0.018239648818969726, 0.018147232055664063, 0.018089855194091797, 0.01811244773864746, 0.018112384796142578, 0.01812931251525879, 0.018153472900390624, 0.018151071548461913, 0.018116767883300782, 0.01916876792907715, 0.018240032196044923, 0.01825939178466797, 0.018201311111450194, 0.018190336227416993, 0.018190336227416993, 0.01817804718017578, 0.018300256729125976, 0.01826220893859863, 
0.018134880065917968, 0.018178688049316407, 0.018118656158447266, 0.018307104110717773, 0.018170015335083008, 0.01837808036804199, 0.018197151184082033, 0.01817990493774414, 0.018240991592407226, 0.01823798370361328, 0.01827235221862793, 0.01822096061706543, 0.018210271835327148, 0.018172447204589843, 0.01827599906921387, 0.018237791061401366, 0.01822710418701172, 0.01826348876953125, 0.018206687927246095, 0.018285247802734376, 0.018279455184936524, 0.018396127700805665, 0.01829478454589844, 0.018339168548583983, 0.018384639739990234, 0.018406303405761718, 0.018521631240844726, 0.018417823791503907, 0.01848556709289551, 0.018378751754760742, 0.018345407485961914, 0.018546239852905273, 0.018379776000976563, 0.01834592056274414, 0.019687488555908204, 0.018814207077026367, 0.018500192642211914, 0.01828659248352051, 0.01818435287475586, 0.01825584030151367, 0.0180861759185791, 0.01811404800415039, 0.018075103759765624, 0.01810518455505371, 0.018165760040283203, 0.01836025619506836, 0.01811414337158203, 0.018104736328125, 0.018167808532714845, 0.018744895935058594, 0.019987136840820312, 0.019451263427734376, 0.01813747215270996, 0.01819647979736328, 0.018137088775634767, 0.018198528289794923, 0.01819148826599121, 0.018149728775024413, 0.018317855834960938, 0.018155519485473632, 0.018275936126708983, 0.0182706241607666, 0.01825529670715332, 0.018264223098754882, 0.018223520278930663, 0.01828976058959961, 0.018393375396728515, 0.01833184051513672, 0.018155168533325196, 0.018279296875, 0.018202272415161133, 0.01826201629638672, 0.018218399047851563, 0.018207712173461912, 0.01820044708251953, 0.0181777286529541, 0.01820198440551758, 0.018498495101928712, 0.01827840042114258, 0.018233343124389647, 0.01822105598449707, 0.01830297660827637, 0.01827020835876465, 0.018379968643188478, 0.018319328308105468, 0.018445152282714844, 0.01840447998046875, 0.018498432159423827, 0.018579456329345705, 0.018525888442993164, 0.01848521614074707, 0.01847881507873535, 0.018461376190185546, 0.01853398323059082, 0.018358911514282227, 0.018388864517211913, 0.01829052734375, 0.01924390411376953, 0.018632095336914064, 0.018447071075439452, 0.018308191299438475, 0.018262624740600586, 0.018225151062011717, 0.0180731201171875, 0.01809401512145996, 0.01814790344238281, 0.018214912414550782, 0.018124544143676757, 0.018218816757202147, 0.018151872634887694, 0.018225151062011717, 0.01819443130493164, 0.018356224060058594, 0.018220895767211913, 0.018206880569458007, 0.018116607666015624, 0.018200159072875977, 0.018231296539306642, 0.01842598342895508, 0.018198816299438477, 0.018211904525756835, 0.01816419219970703, 0.01823967933654785, 0.01822559928894043, 0.01823289680480957, 0.018163904190063477, 0.018222623825073243, 0.01816428756713867, 0.018370559692382812, 0.018294015884399415, 0.018372480392456054, 0.0183604793548584, 0.01831599998474121, 0.018300832748413084, 0.01837065505981445, 0.01823539161682129, 0.018266111373901366, 0.018211904525756835, 0.018289600372314453, 0.018324960708618165, 0.018330144882202148, 0.018253055572509766, 0.01826278305053711, 0.018278079986572264, 0.018397504806518555, 0.018386943817138672, 0.01836851119995117, 0.018331647872924805, 0.018429439544677736, 0.01839923286437988, 0.018561119079589843, 0.018417888641357422, 0.018688192367553712, 0.018374656677246092, 0.018471200942993163, 0.01852582359313965, 0.018583263397216797, 0.018430335998535156, 0.018493440628051756, 0.018388256072998047, 0.019920671463012695, 0.019418975830078126, 0.018680192947387694, 0.018445472717285156, 0.01834480094909668, 
0.018225151062011717, 0.01832352066040039, 0.018296512603759765, 0.018256128311157225, 0.01824947166442871, 0.018143487930297852, 0.01812227249145508, 0.018078176498413086, 0.018151071548461913, 0.018390752792358397, 0.0181429443359375, 0.018141120910644532, 0.018113504409790038, 0.018273759841918945, 0.018268159866333008, 0.018188831329345703, 0.01816166305541992, 0.018621856689453126, 0.018188896179199218, 0.018192800521850586, 0.018207679748535155, 0.018196863174438478, 0.018162975311279295, 0.018192895889282225, 0.018260480880737305, 0.018288639068603514, 0.018251264572143554, 0.018283008575439453, 0.018249887466430664, 0.01823030471801758, 0.018250560760498045, 0.01829478454589844, 0.018269664764404298, 0.01827280044555664, 0.018268159866333008, 0.018311168670654295, 0.018414880752563475, 0.01833462333679199, 0.018227008819580077, 0.018296831130981444, 0.018272256851196288, 0.018333120346069334, 0.01839366340637207, 0.018423999786376953, 0.01839286422729492, 0.018361568450927734, 0.018349056243896485, 0.018536224365234374, 0.018741151809692384, 0.018443391799926757, 0.01846784019470215, 0.018459903717041017, 0.018608896255493165, 0.018515552520751953, 0.01834435272216797, 0.018333311080932616, 0.018378400802612306, 0.018428512573242187, 0.01960960006713867, 0.018894847869873048, 0.01864076805114746, 0.018396863937377928, 0.01821446418762207, 0.018225471496582032, 0.018145439147949218, 0.018182559967041014, 0.018228607177734377, 0.01814790344238281, 0.018217023849487306, 0.01817366409301758, 0.018188352584838866, 0.01827043151855469, 0.01821696090698242, 0.018155296325683593, 0.01822492790222168, 0.018319807052612304, 0.018386335372924806, 0.018415647506713866, 0.018256351470947264, 0.018255231857299804, 0.018138944625854494, 0.018260896682739256, 0.0183045768737793, 0.018299327850341798, 0.018298879623413086, 0.01827436828613281, 0.01820460891723633, 0.018488895416259764, 0.018288383483886717, 0.018250431060791016, 0.018181535720825197, 0.01825424003601074, 0.018218591690063478, 0.018301536560058593, 0.01820467185974121, 0.018259967803955078, 0.018282112121582032, 0.018393184661865233, 0.01848758316040039, 0.018542015075683593, 0.021207616806030272, 0.018765823364257812, 0.018479103088378905, 0.01841766357421875, 0.018520320892333984, 0.018386016845703124, 0.01827702331542969, 0.018343936920166014, 0.01840332794189453, 0.018583839416503906, 0.01858460807800293, 0.01841231918334961, 0.01863462448120117, 0.01839926338195801, 0.0184597110748291, 0.018508384704589844, 0.01838729667663574, 0.018396480560302735, 0.018436800003051756, 0.018476736068725585, 0.018506048202514648, 0.019605951309204103, 0.019136512756347656, 0.01873945617675781, 0.01846451187133789, 0.018296831130981444, 0.0182825927734375, 0.018261920928955077, 0.018325504302978517, 0.018173952102661133, 0.01813827133178711, 0.01819343948364258, 0.018146976470947266, 0.01817795181274414, 0.018106048583984374, 0.01817888069152832, 0.018148128509521484, 0.018239967346191405, 0.018333471298217774, 0.018268896102905274, 0.018224992752075196, 0.018268512725830077, 0.018243392944335936, 0.01820057678222656, 0.018336864471435548, 0.018174879074096678, 0.018181535720825197, 0.018180320739746094, 0.0182030086517334, 0.01832246398925781, 0.018213855743408204, 0.01823299217224121, 0.018257728576660158, 0.018354463577270507, 0.018555135726928712, 0.01847222328186035, 0.01841744041442871, 0.018292831420898437, 0.018260448455810548, 0.01829052734375, 0.018235200881958007, 0.018256032943725586, 0.018210752487182617, 0.0182806396484375, 
0.018275871276855468, 0.01835238456726074, 0.01826883125305176, 0.018274303436279296, 0.018284543991088868, 0.018354175567626953, 0.01838489532470703, 0.018422975540161132, 0.018328096389770506, 0.018657087326049804, 0.01851430320739746, 0.018394880294799805, 0.01840995216369629, 0.018582592010498045, 0.01836934471130371, 0.018296192169189453, 0.01834566307067871, 0.018328447341918946, 0.018535999298095702, 0.019448320388793947, 0.019371519088745116, 0.018810911178588866, 0.01856787109375, 0.018382303237915038, 0.018267711639404296, 0.018181951522827148, 0.018045024871826174, 0.018043296813964844, 0.01807200050354004, 0.018036800384521483, 0.01808332824707031, 0.018125247955322266, 0.01801420783996582, 0.01801625633239746, 0.01823744010925293, 0.018078720092773438, 0.01812761688232422, 0.018141311645507814, 0.018135168075561522, 0.018102272033691406, 0.018104320526123048, 0.018193471908569337, 0.018127552032470705, 0.018102527618408203, 0.018108415603637695, 0.018481151580810547, 0.01951900863647461, 0.018546688079833985, 0.018257919311523436, 0.018321887969970703, 0.018427616119384767, 0.018204959869384765, 0.018239168167114257, 0.01843132781982422, 0.018236703872680664, 0.01811404800415039, 0.018187488555908203, 0.01828963279724121, 0.01835212707519531, 0.018501312255859374, 0.018237760543823242, 0.01828220748901367, 0.01820086479187012, 0.018354175567626953, 0.018368543624877928, 0.018208703994750976, 0.018222143173217773, 0.018323551177978514, 0.018485536575317384, 0.01850569534301758, 0.01850022315979004, 0.018366464614868162, 0.0183767032623291, 0.018483327865600585, 0.018435007095336915, 0.01855526351928711, 0.018348415374755858, 0.018546880722045897, 0.018314720153808594, 0.018475391387939452, 0.018448671340942382, 0.018421472549438475, 0.01836828804016113, 0.01953596878051758, 0.01903545570373535, 0.018678112030029295, 0.01847270393371582, 0.018279008865356446, 0.018192384719848635, 0.018163711547851562, 0.018322656631469727, 0.01822115135192871, 0.018344640731811523, 0.018388351440429686, 0.01833228874206543, 0.018481151580810547, 0.01835212707519531, 0.018696191787719727, 0.01840153694152832, 0.01828428840637207, 0.018237056732177733, 0.018213375091552735, 0.018179616928100585, 0.01824188804626465, 0.018106367111206053, 0.01816991996765137, 0.01833568000793457, 0.018694143295288086, 0.018351551055908202, 0.01835475158691406, 0.01827164840698242, 0.018219743728637695, 0.018211936950683592, 0.018237344741821288, 0.018387840270996093, 0.018210079193115233, 0.01819107246398926, 0.01822892761230469, 0.018198495864868165, 0.018346208572387696, 0.018391168594360352, 0.018366464614868162, 0.018323455810546875, 0.0182108154296875, 0.01823766326904297, 0.018157407760620116, 0.018175935745239256, 0.01824563217163086, 0.01819647979736328, 0.018183839797973632, 0.018283039093017577, 0.01824924850463867, 0.0182989444732666, 0.01838307189941406, 0.018414976119995118, 0.01850227165222168, 0.01844428825378418, 0.018307104110717773, 0.018318431854248047, 0.018342720031738282, 0.01848860740661621, 0.018481952667236328, 0.018446079254150392, 0.01841177558898926, 0.018291711807250977, 0.018338048934936523, 0.019400863647460936, 0.018782207489013672, 0.018579391479492186, 0.018427967071533203, 0.018279808044433594, 0.018155616760253908, 0.01806800079345703, 0.018061216354370118, 0.01883145523071289, 0.018276351928710938, 0.01805721664428711, 0.018070592880249023, 0.018035648345947265, 0.018136287689208986, 0.018141759872436523, 0.01809667205810547, 0.01801593589782715, 0.018129215240478516, 
0.018242496490478516, 0.018379520416259766, 0.018542400360107424, 0.018299072265625, 0.018155519485473632, 0.01817215919494629, 0.018267135620117187, 0.01833839988708496, 0.01826972770690918, 0.018170560836791992, 0.018120800018310547, 0.018116640090942382, 0.018321056365966797, 0.018155839920043944, 0.01816582489013672, 0.01814313507080078, 0.018141056060791017, 0.018317312240600587, 0.018141183853149414, 0.018183231353759766, 0.018112991333007814, 0.018133216857910157, 0.021786880493164063, 0.01957683181762695, 0.018312736511230467, 0.01821129608154297, 0.01817190361022949, 0.018241535186767577, 0.018714399337768556, 0.018602207183837892, 0.018382848739624022, 0.01840460777282715, 0.018228992462158203, 0.018355199813842774, 0.018331680297851562, 0.018513343811035157, 0.01847555160522461, 0.01839030456542969, 0.01856988716125488, 0.018596960067749024, 0.018422271728515623, 0.018411455154418947, 0.018291072845458986, 0.018446495056152343, 0.018296831130981444]",tokens/s,54.46791678395302,,
bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,837.947392,9637.39648,0.0,9242.148864,8603.568128,s,1,7.6634521484375,7.6634521484375,0.0,7.6634521484375,7.6634521484375,7.6634521484375,7.6634521484375,[7.6634521484375],,kWh,1.2463342704139297e-05,1.3674601049854078e-06,5.724449023997158e-06,1.9555251833121862e-05,,MB,1140.477952,9886.957568,0.0,9481.224192,8972.090368,s,10,7.041542114257813,0.7041542114257812,0.00480887993823181,0.7044410705566406,0.7089775390624999,0.7092788391113282,0.7095198791503906,"[0.6915390625, 0.7037074584960937, 0.7027904663085938, 0.7025294799804688, 0.707328369140625, 0.7038873291015625, 0.7049948120117188, 0.7062744140625, 0.7095801391601563, 0.7089105834960937]",tokens/s,363.5567264188446,kWh,2.0551944174999337e-05,2.266518104224068e-06,1.3657510926000834e-05,3.647597320522424e-05,tokens/kWh,7018318.5671200855,MB,1162.006528,9891.151872,0.0,9485.418496,8972.092928,s,10,25.263025390625,2.5263025390625002,0.014150736211522822,2.5282164306640627,2.541820458984375,2.5455982177734375,2.5486204248046875,"[2.50933154296875, 2.5493759765625, 2.54098095703125, 2.530982421875, 2.53126611328125, 2.530928466796875, 2.52550439453125, 2.523835205078125, 2.524316650390625, 2.496503662109375]",tokens/s,24.937630796737047,kWh,7.337199067916725e-05,8.093241882545913e-06,4.856909441079924e-05,0.0001300343269725124,tokens/kWh,484487.45394219947,,s,630,25.25983639907835,0.0400949784112355,0.0007732978727018159,0.03999001693725586,0.04052755584716797,0.041276592826843254,0.04312175277709963,"[0.04249353790283203, 0.043307422637939456, 0.04041104125976563, 0.04082649612426758, 0.039516448974609375, 0.04048089599609375, 0.03943430328369141, 0.03960422515869141, 0.03983273696899414, 0.03978736114501953, 0.040048641204833986, 0.03961222457885742, 0.039841983795166014, 0.039616512298583983, 0.0398315200805664, 0.03960160064697266, 0.039370529174804686, 0.039526657104492186, 
0.039404094696044924, 0.039636993408203126, 0.0396492805480957, 0.0395489273071289, 0.03973104095458985, 0.03973699188232422, 0.03969279861450195, 0.03982950210571289, 0.0396124153137207, 0.039546463012695314, 0.039659393310546874, 0.039483936309814456, 0.03946700668334961, 0.03945676803588867, 0.039907329559326174, 0.039407646179199216, 0.03943753433227539, 0.03988966369628906, 0.03963401412963867, 0.0395846061706543, 0.039653438568115235, 0.03948249435424805, 0.039720897674560544, 0.03951440048217773, 0.03964144134521484, 0.03988883209228516, 0.039820735931396484, 0.03991151809692383, 0.039537567138671875, 0.03972438430786133, 0.04002799987792969, 0.0399183349609375, 0.0400445442199707, 0.039894943237304685, 0.040130657196044923, 0.03968819046020508, 0.039223297119140625, 0.03971072006225586, 0.03956041717529297, 0.03962140655517578, 0.0394728012084961, 0.039738945007324215, 0.03996652984619141, 0.03994704055786133, 0.0397468147277832, 0.04230822372436523, 0.04061753463745117, 0.04019039916992188, 0.04031488037109375, 0.040381568908691406, 0.04056358337402344, 0.04083017730712891, 0.04029232025146484, 0.04026860809326172, 0.04024115371704102, 0.04027801513671875, 0.040235008239746094, 0.040065025329589846, 0.04008972930908203, 0.04031475067138672, 0.03991551971435547, 0.040182785034179686, 0.04011872100830078, 0.04032979202270508, 0.040613887786865234, 0.04000521469116211, 0.04030636978149414, 0.04048355102539063, 0.04027391815185547, 0.04000342559814453, 0.04035190582275391, 0.04004249572753906, 0.04007843017578125, 0.04011529541015625, 0.04025939178466797, 0.04039475250244141, 0.04056883239746094, 0.04006092834472656, 0.040030208587646485, 0.04024729537963867, 0.03990323257446289, 0.04020822525024414, 0.040249504089355466, 0.040359935760498046, 0.04177094268798828, 0.05037062454223633, 0.040304641723632816, 0.03995606231689453, 0.04008182525634765, 0.040570911407470704, 0.04038803100585937, 0.04024076843261719, 0.040403968811035154, 0.040560256958007815, 0.03999116897583008, 0.04015913772583008, 0.040331775665283204, 0.04028211212158203, 0.04038614273071289, 0.03992556762695312, 0.039981056213378906, 0.04011193466186523, 0.04023580932617187, 0.040226814270019534, 0.04014284896850586, 0.04012236785888672, 0.04028742218017578, 0.040119102478027344, 0.042237953186035154, 0.040529918670654294, 0.040189056396484374, 0.040264575958251954, 0.03995600128173828, 0.04003839874267578, 0.03977059173583984, 0.03991689682006836, 0.04018652725219726, 0.040130561828613284, 0.03990528106689453, 0.04024115371704102, 0.03989299011230469, 0.04024140930175781, 0.04001475143432617, 0.04149129486083984, 0.04015513610839844, 0.0400148811340332, 0.04007974243164063, 0.04015478515625, 0.04041366577148438, 0.0402619514465332, 0.04006108856201172, 0.04007491302490234, 0.04033980941772461, 0.040793441772460935, 0.043618976593017576, 0.04017356872558594, 0.03986022567749024, 0.040005630493164065, 0.040182880401611325, 0.040197025299072264, 0.040136703491210936, 0.04324726486206055, 0.040528160095214844, 0.04041494369506836, 0.040080928802490236, 0.04008348846435547, 0.04017641448974609, 0.03976380920410156, 0.04017942428588867, 0.04032761764526367, 0.04001587295532227, 0.040271198272705075, 0.04011804962158203, 0.039943038940429686, 0.039852001190185546, 0.03999337768554687, 0.04029849624633789, 0.0404029426574707, 0.04031401443481445, 0.04038896179199219, 0.04026371383666992, 0.041116416931152346, 0.04016505432128906, 0.040022048950195316, 0.040136703491210936, 0.039890113830566405, 0.040360767364501955, 
0.040118270874023435, 0.04021657562255859, 0.03993804931640625, 0.04049903869628906, 0.04219910430908203, 0.040132190704345705, 0.040548126220703126, 0.040122112274169924, 0.0401212158203125, 0.040339710235595704, 0.03994598388671875, 0.040343551635742186, 0.04240588760375977, 0.040908798217773434, 0.04048889541625977, 0.04038246536254883, 0.04024639892578125, 0.04031155014038086, 0.040116416931152345, 0.03985168075561524, 0.03987900924682617, 0.03994009780883789, 0.039725055694580076, 0.039882080078125, 0.03977072143554688, 0.04063852691650391, 0.040030208587646485, 0.04000358581542969, 0.04009369659423828, 0.039929855346679685, 0.03970364761352539, 0.03980527877807617, 0.03950249481201172, 0.03989807891845703, 0.04387321472167969, 0.04003190231323242, 0.04065299224853516, 0.03973068618774414, 0.0397823371887207, 0.04006943893432617, 0.040132545471191404, 0.03981727981567383, 0.03960211181640625, 0.03975743865966797, 0.039737407684326174, 0.03945568084716797, 0.040290145874023436, 0.04001792144775391, 0.04005401611328125, 0.03995929718017578, 0.03975167846679688, 0.039782398223876955, 0.039728416442871096, 0.0397790412902832, 0.03991686248779297, 0.040065216064453124, 0.04014883041381836, 0.03991619110107422, 0.03998886489868164, 0.04026816177368164, 0.04030025482177734, 0.04028995132446289, 0.039895103454589846, 0.03997753524780273, 0.04017935943603516, 0.03992995071411133, 0.040478046417236326, 0.04252345657348633, 0.04176688003540039, 0.03992374420166016, 0.040097793579101565, 0.03978035354614258, 0.04008038330078125, 0.03974403381347656, 0.03961459350585937, 0.0397633285522461, 0.03987760162353516, 0.039792640686035156, 0.04057510375976563, 0.04004441452026367, 0.0400261116027832, 0.04024428939819336, 0.039893184661865234, 0.04004735946655273, 0.03968000030517578, 0.03990323257446289, 0.040164958953857424, 0.040137054443359375, 0.04082284927368164, 0.0402655029296875, 0.04041046524047852, 0.04049359893798828, 0.04001177597045898, 0.041587039947509764, 0.03987875366210938, 0.040081024169921875, 0.039936286926269535, 0.039667713165283204, 0.04018918228149414, 0.03981593704223633, 0.03968729782104492, 0.04035820770263672, 0.04001811218261719, 0.04026124954223633, 0.03986412811279297, 0.039830463409423825, 0.0398636474609375, 0.03971343994140625, 0.039937343597412106, 0.039529151916503906, 0.039663646697998045, 0.0399318733215332, 0.04036198425292969, 0.041538719177246095, 0.04276924896240234, 0.041893470764160154, 0.039936416625976565, 0.03991926574707031, 0.04089478302001953, 0.03980233764648437, 0.039567039489746096, 0.03998348617553711, 0.039784286499023436, 0.04051545715332031, 0.039713569641113285, 0.04026313781738281, 0.03988124847412109, 0.0395489273071289, 0.03957756805419922, 0.03954691314697266, 0.042626911163330075, 0.03976003265380859, 0.03952409744262695, 0.03968022537231446, 0.03951004791259766, 0.03968819046020508, 0.039609760284423826, 0.03963875198364258, 0.039723743438720704, 0.03981123352050781, 0.03977948760986328, 0.0396317138671875, 0.03970457458496094, 0.039626750946044925, 0.03955507278442383, 0.039599777221679684, 0.03958204650878906, 0.039752960205078125, 0.03978931045532227, 0.04245913696289062, 0.03993804931640625, 0.04020412826538086, 0.04121567916870117, 0.0398135986328125, 0.039796382904052734, 0.039817184448242185, 0.03974959945678711, 0.0399117431640625, 0.039822654724121095, 0.03967055892944336, 0.03969023895263672, 0.0395994873046875, 0.04105484771728515, 0.04093337631225586, 0.039919296264648435, 0.04002848052978516, 0.04005462265014648, 
0.04026339340209961, 0.040126625061035155, 0.04013699340820313, 0.040226814270019534, 0.04046847915649414, 0.040269824981689455, 0.040271873474121096, 0.040267200469970704, 0.04038304138183594, 0.0405852165222168, 0.04038643264770508, 0.03994432067871094, 0.04101116943359375, 0.04034694290161133, 0.040089920043945314, 0.040272289276123044, 0.040458240509033204, 0.04013449478149414, 0.040185791015625, 0.04039680099487305, 0.04019836807250977, 0.04041475296020508, 0.04062019348144531, 0.04253523254394531, 0.040132766723632814, 0.040224609375, 0.0425750732421875, 0.04048934555053711, 0.04036662292480469, 0.04028598403930664, 0.04020230484008789, 0.040374271392822264, 0.040390655517578124, 0.04035500717163086, 0.04003923034667969, 0.04014617538452148, 0.04024396896362305, 0.0400992317199707, 0.040127071380615234, 0.04062822341918945, 0.040447681427001954, 0.04046675109863281, 0.041326431274414065, 0.04020598220825195, 0.04014745712280274, 0.04004191970825195, 0.04042195129394531, 0.040180862426757814, 0.04027891159057617, 0.04054425430297852, 0.04032067108154297, 0.04011452865600586, 0.04089785766601563, 0.03979945755004883, 0.03999542236328125, 0.039649185180664064, 0.0397694091796875, 0.03962073516845703, 0.04077017593383789, 0.04016035079956055, 0.03974854278564453, 0.039739166259765625, 0.039728862762451175, 0.04039120101928711, 0.03972911834716797, 0.03956307220458984, 0.03968819046020508, 0.03958784103393555, 0.04004188919067383, 0.040230846405029295, 0.03981939315795899, 0.039656158447265624, 0.0395546875, 0.039639423370361325, 0.03953823852539062, 0.039483329772949216, 0.039559680938720705, 0.039943294525146486, 0.04048166275024414, 0.039798782348632815, 0.03970172882080078, 0.03984054565429687, 0.039683391571044925, 0.03960022354125976, 0.03969836807250977, 0.03976668930053711, 0.03972476959228516, 0.039532257080078126, 0.04019878387451172, 0.042686431884765626, 0.040072574615478515, 0.04028684616088867, 0.040050048828125, 0.0437254409790039, 0.03998454284667969, 0.04017139053344727, 0.039885025024414066, 0.039629310607910154, 0.04011008071899414, 0.039631935119628904, 0.039328704833984374, 0.040013343811035156, 0.04003644943237305, 0.03962099075317383, 0.03975481414794922, 0.0398570556640625, 0.03965254211425781, 0.039871326446533205, 0.03969177627563476, 0.0398770866394043, 0.03965760040283203, 0.04024899291992187, 0.039974143981933594, 0.03990771102905273, 0.03967552185058594, 0.03973392105102539, 0.03979504013061524, 0.039462913513183595, 0.03963651275634766, 0.03969887924194336, 0.03991145706176758, 0.04006707382202149, 0.040174976348876956, 0.03981990432739258, 0.039886081695556644, 0.039639774322509765, 0.03974313735961914, 0.040053119659423826, 0.03975987243652344, 0.03970230484008789, 0.039491134643554686, 0.03988528060913086, 0.04009328079223633, 0.03975228881835938, 0.03974553680419922, 0.04007052612304687, 0.0397523193359375, 0.03972844696044922, 0.03989369583129883, 0.03988479995727539, 0.03985123062133789, 0.0399529914855957, 0.04143328094482422, 0.03999334335327148, 0.039800830841064457, 0.04371839904785156, 0.039946495056152345, 0.03993190383911133, 0.040308734893798825, 0.0398131217956543, 0.039817054748535155, 0.040161441802978516, 0.04222566223144531, 0.03999692916870117, 0.03972876739501953, 0.04028710556030273, 0.039657024383544924, 0.039728641510009766, 0.039791038513183594, 0.03967356872558594, 0.03973308944702148, 0.03974854278564453, 0.03970790481567383, 0.03975040054321289, 0.03960627365112305, 0.03949747085571289, 0.040175167083740235, 0.03974828720092773, 
0.03984384155273438, 0.0396124153137207, 0.039929855346679685, 0.03963900756835938, 0.04009539031982422, 0.03987494277954102, 0.03967583847045898, 0.03975600051879883, 0.039875839233398436, 0.040221473693847654, 0.03997257614135742, 0.039954784393310544, 0.040189697265625, 0.04098787307739258, 0.039983905792236325, 0.03960780715942383, 0.03994265747070312, 0.039782398223876955, 0.03944412612915039, 0.04019235229492187, 0.03976396942138672, 0.03990323257446289, 0.040030208587646485, 0.03968511962890625, 0.0428144645690918, 0.040089599609375, 0.04002816009521484, 0.03951747131347656, 0.03972784042358399, 0.040898399353027345, 0.040099647521972655, 0.04009603118896484, 0.04015929412841797, 0.040086849212646485, 0.04044796752929687, 0.04009664154052734, 0.04006076812744141, 0.04142710494995117, 0.04020198440551758, 0.040527488708496096, 0.0401638069152832, 0.04038860702514648, 0.04010598373413086, 0.039847934722900394, 0.04001315307617188, 0.040106655120849606, 0.04006467056274414, 0.042194847106933595, 0.03951193618774414, 0.03925651168823242, 0.03937737655639648, 0.039175552368164064, 0.03977484893798828, 0.039327743530273435, 0.03949977493286133, 0.03945798492431641, 0.03958249664306641, 0.03936259078979492, 0.03938825607299805, 0.03962563323974609, 0.039908607482910155, 0.03928128051757813, 0.0394013442993164, 0.03953241729736328, 0.039489601135253904, 0.039338302612304685, 0.03938508987426758, 0.03924972915649414, 0.03937094497680664, 0.04028982543945313, 0.04028464126586914, 0.04197785568237305, 0.039395328521728515, 0.03915724945068359, 0.03924579238891602, 0.03899756622314453, 0.03931235122680664, 0.039117855072021486, 0.03911164855957031, 0.039075328826904294, 0.03918502426147461, 0.03954390335083008, 0.039448673248291016, 0.03922118377685547, 0.039185150146484375, 0.03907516860961914, 0.03897139358520508, 0.03913897705078125, 0.03926323318481445, 0.03935980987548828, 0.03906351852416992, 0.039336673736572264, 0.03940966415405273, 0.039340000152587894, 0.039489566802978514, 0.03951520156860352, 0.039642047882080075, 0.03990937423706055, 0.040341503143310545, 0.040232158660888674, 0.040098686218261716, 0.04002374267578125, 0.04030691146850586, 0.03999884796142578, 0.040100479125976564, 0.04018175888061523, 0.04001094436645508, 0.04002899169921875, 0.0400711669921875, 0.040223873138427735]",tokens/s,24.940779110627425,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.403008,1326.383104,0.0,931.135488,917.648384,s,1,7.63307861328125,7.63307861328125,0.0,7.63307861328125,7.63307861328125,7.63307861328125,7.63307861328125,[7.63307861328125],,kWh,1.0338465666666252e-05,1.13285999688864e-06,3.105280261997412e-06,1.4576605925552304e-05,,MB,1135.603712,1456.406528,0.0,1050.673152,1018.330112,s,10,0.7034482269287109,0.0703448226928711,0.0008076623861901718,0.07017057418823242,0.0707837059020996,0.0716950527191162,0.07242413017272949,"[0.0726063995361328, 
0.07038086700439453, 0.06963465881347657, 0.07012895965576171, 0.06993052673339843, 0.07021218872070313, 0.07058118438720704, 0.06974800109863281, 0.07038400268554687, 0.06984143829345703]",tokens/s,3639.21593942611,kWh,2.329240217592651e-06,2.56763201417403e-07,1.5366215996666443e-06,4.122625018676698e-06,tokens/kWh,62096358.228130154,MB,1162.858496,1473.183744,0.0,1067.450368,1032.767488,s,10,13.302551025390624,1.3302551025390623,0.024940005579764144,1.3159708251953126,1.3628881225585936,1.3655794982910157,1.367732598876953,"[1.3622900390625, 1.3682708740234375, 1.3511224365234376, 1.3586226806640624, 1.3164345703125, 1.3095928955078124, 1.307552978515625, 1.302308349609375, 1.315507080078125, 1.31084912109375]",tokens/s,47.3593372276879,kWh,3.8109842997824186e-05,4.20321723531936e-06,1.767769932733434e-05,5.999075956047789e-05,tokens/kWh,1050161.7325996421,,s,630,13.296805097579947,0.0211060398374285,0.0005385663482820034,0.020997008323669436,0.02172949161529541,0.021857902526855467,0.02261669342041016,"[0.022597631454467772, 0.021526527404785157, 0.02134342384338379, 0.0214182071685791, 0.021647872924804686, 0.02165353584289551, 0.02230284881591797, 0.021929983139038087, 0.02179680061340332, 0.02147532844543457, 0.02156470489501953, 0.021756288528442382, 0.02178492736816406, 0.02179836845397949, 0.021703359603881835, 0.02156732749938965, 0.021453855514526367, 0.021554143905639648, 0.021456480026245117, 0.021507711410522462, 0.021325952529907228, 0.02159174346923828, 0.021931072235107422, 0.02183065605163574, 0.021742431640625, 0.021707103729248046, 0.02154863929748535, 0.021587648391723634, 0.021436864852905274, 0.021700927734375, 0.0222096004486084, 0.02158451271057129, 0.02149177551269531, 0.02155232048034668, 0.021584127426147463, 0.021605056762695314, 0.021738880157470702, 0.0214136962890625, 0.021658239364624025, 0.021723455429077148, 0.02163596725463867, 0.021720191955566407, 0.021691583633422853, 0.02162259292602539, 0.021740224838256834, 0.02165555191040039, 0.02157583999633789, 0.021528032302856444, 0.021591615676879884, 0.021457056045532226, 0.021041183471679686, 0.02081046485900879, 0.02141584014892578, 0.021598207473754884, 0.021302688598632814, 0.021457504272460938, 0.021532447814941406, 0.021798208236694337, 0.021652383804321287, 0.021726272583007813, 0.021573631286621094, 0.02139814376831055, 0.021291328430175782, 0.021510143280029297, 0.02159119987487793, 0.02144470405578613, 0.021570304870605468, 0.02223308753967285, 0.02168012809753418, 0.021338111877441408, 0.02104876708984375, 0.021000288009643556, 0.02151641654968262, 0.021782655715942383, 0.022624479293823243, 0.02234339141845703, 0.021600831985473634, 0.02215228843688965, 0.02167788887023926, 0.02157401657104492, 0.021508895874023437, 0.021968544006347655, 0.021540319442749024, 0.02161292839050293, 0.02156764793395996, 0.021411584854125976, 0.021298944473266603, 0.021227136611938476, 0.02191244888305664, 0.021729280471801758, 0.021763744354248046, 0.02145110321044922, 0.021521856307983398, 0.021525056838989257, 0.021354496002197267, 0.021484735488891602, 0.021832319259643556, 0.021975200653076173, 0.022322656631469727, 0.021938751220703125, 0.022347776412963868, 0.022716415405273437, 0.021846048355102538, 0.021401567459106444, 0.021297407150268555, 0.021462783813476563, 0.021796863555908205, 0.021540864944458008, 0.02166988754272461, 0.021497856140136717, 0.021683328628540038, 0.02194428825378418, 0.02155788803100586, 0.02177872085571289, 0.021589311599731445, 0.021657440185546876, 0.021940288543701173, 
0.02181427192687988, 0.021816511154174805, 0.022708831787109376, 0.021864511489868163, 0.021682111740112305, 0.0216964168548584, 0.02168025588989258, 0.021612512588500978, 0.021380096435546874, 0.021208703994750975, 0.021369375228881837, 0.02177039909362793, 0.021630655288696288, 0.021644607543945312, 0.022151872634887694, 0.021448703765869142, 0.021169919967651368, 0.02114358329772949, 0.021694143295288085, 0.021748191833496095, 0.021297536849975585, 0.021289888381958007, 0.021183263778686522, 0.021206111907958985, 0.02091859245300293, 0.021012351989746093, 0.0214268798828125, 0.021451967239379883, 0.021248863220214843, 0.0211267204284668, 0.0213222713470459, 0.02145849609375, 0.0216210880279541, 0.02145280075073242, 0.021587039947509764, 0.02152579116821289, 0.021505279541015623, 0.02154319953918457, 0.021606496810913086, 0.021522432327270507, 0.021383167266845703, 0.02147532844543457, 0.021537919998168946, 0.021441408157348633, 0.02217513656616211, 0.021218048095703126, 0.021415359497070314, 0.021092800140380858, 0.021483488082885742, 0.021340383529663085, 0.021753631591796874, 0.02155264091491699, 0.021606016159057616, 0.021639711380004884, 0.0217545280456543, 0.021277952194213866, 0.020963903427124023, 0.020832128524780273, 0.021217279434204102, 0.02180499267578125, 0.02173139190673828, 0.021690271377563478, 0.021427871704101563, 0.02144095993041992, 0.02148953628540039, 0.021319807052612303, 0.02125619125366211, 0.02129305648803711, 0.021514240264892577, 0.021602304458618164, 0.02127667236328125, 0.021186559677124024, 0.02140812873840332, 0.02176576042175293, 0.021624191284179688, 0.021646656036376954, 0.021587648391723634, 0.021403648376464843, 0.021587968826293946, 0.02150918388366699, 0.021399648666381835, 0.02099292755126953, 0.02099510383605957, 0.021535648345947265, 0.02160867118835449, 0.02149558448791504, 0.021563135147094726, 0.021566848754882812, 0.021711936950683595, 0.022163040161132814, 0.021571807861328125, 0.021524511337280273, 0.021698816299438477, 0.021481184005737303, 0.02207043266296387, 0.024928895950317383, 0.02184982490539551, 0.021383167266845703, 0.02158233642578125, 0.021397504806518555, 0.021315616607666017, 0.02128700828552246, 0.0214649600982666, 0.02128291130065918, 0.02143577575683594, 0.020889888763427733, 0.021646751403808593, 0.02177680015563965, 0.02154745674133301, 0.02135264015197754, 0.02127177619934082, 0.0215347843170166, 0.021653215408325197, 0.021590656280517578, 0.021583999633789062, 0.021612607955932617, 0.021534719467163087, 0.02271027183532715, 0.021413888931274414, 0.020996095657348633, 0.021243743896484375, 0.02168764877319336, 0.021784959793090822, 0.0214881591796875, 0.0211691837310791, 0.021033504486083984, 0.021164384841918946, 0.021284799575805664, 0.02158006477355957, 0.021384992599487306, 0.021288415908813477, 0.021391904830932618, 0.021606399536132814, 0.021489664077758788, 0.02142617607116699, 0.02091663932800293, 0.021643264770507813, 0.021413888931274414, 0.021665023803710937, 0.022551551818847656, 0.021619808197021483, 0.0214800968170166, 0.02152889633178711, 0.021487295150756838, 0.0211494083404541, 0.02095337677001953, 0.020762624740600585, 0.020727807998657227, 0.020707359313964845, 0.020774879455566407, 0.02126665687561035, 0.021325056076049804, 0.021209983825683593, 0.021097152709960938, 0.020837503433227537, 0.020583904266357422, 0.020625791549682616, 0.020488000869750975, 0.02047609519958496, 0.02048723220825195, 0.020454336166381835, 0.020557695388793946, 0.020559999465942384, 0.020563968658447264, 
0.020561759948730468, 0.020437152862548828, 0.02056972885131836, 0.020474239349365233, 0.020502527236938475, 0.020598688125610352, 0.020615455627441406, 0.020645727157592775, 0.02071548843383789, 0.020731903076171874, 0.02085068893432617, 0.020609024047851563, 0.020658176422119142, 0.020764671325683593, 0.020694623947143553, 0.020728031158447267, 0.021235071182250976, 0.02084947204589844, 0.02065328025817871, 0.020948991775512696, 0.02064259147644043, 0.020862464904785157, 0.02090991973876953, 0.02174131202697754, 0.020972000122070313, 0.020951007843017577, 0.020821855545043944, 0.02077743911743164, 0.020721824645996093, 0.020710655212402344, 0.020760799407958986, 0.020799488067626954, 0.020748832702636718, 0.020717567443847656, 0.02096758460998535, 0.021202623367309572, 0.020926624298095702, 0.020901023864746095, 0.020617696762084962, 0.020652416229248047, 0.020700639724731445, 0.020554271697998047, 0.020690944671630858, 0.020692991256713866, 0.020738048553466795, 0.020572160720825194, 0.020590591430664062, 0.020768768310546876, 0.02099612808227539, 0.021086463928222655, 0.02112483215332031, 0.021112831115722656, 0.020860576629638673, 0.020709888458251953, 0.023066272735595705, 0.020792959213256836, 0.020821920394897463, 0.02054742431640625, 0.020437183380126952, 0.02080620765686035, 0.020719680786132812, 0.021372127532958984, 0.020670944213867188, 0.020645343780517578, 0.0210565128326416, 0.020676448822021486, 0.020586496353149415, 0.020554784774780274, 0.02155766487121582, 0.0205963191986084, 0.02075894355773926, 0.020826688766479494, 0.020780799865722656, 0.02061747169494629, 0.02046976089477539, 0.020463615417480468, 0.020597856521606447, 0.02041529655456543, 0.020491424560546874, 0.020487104415893555, 0.020418560028076172, 0.020600128173828124, 0.020888256072998046, 0.020961280822753905, 0.021006080627441408, 0.020869312286376954, 0.020778751373291014, 0.02073017692565918, 0.020817920684814452, 0.02062745666503906, 0.020590591430664062, 0.020485248565673828, 0.020646976470947265, 0.02061497688293457, 0.020662559509277343, 0.02057366371154785, 0.02053116798400879, 0.020390527725219727, 0.020537343978881836, 0.020815872192382814, 0.020912128448486327, 0.021594112396240234, 0.020979007720947265, 0.02063350486755371, 0.02062214469909668, 0.020557792663574218, 0.020448959350585938, 0.020511039733886717, 0.020639999389648438, 0.0205185604095459, 0.020424800872802733, 0.020496383666992187, 0.02066201591491699, 0.02184217643737793, 0.021061632156372072, 0.020510879516601563, 0.020465696334838867, 0.02052012825012207, 0.020386432647705077, 0.02046063995361328, 0.020568992614746092, 0.020609024047851563, 0.02061484718322754, 0.02051718330383301, 0.02067036819458008, 0.0205164794921875, 0.02058243179321289, 0.020588991165161132, 0.020508607864379882, 0.02056608009338379, 0.023451648712158202, 0.020735712051391603, 0.02062735939025879, 0.020516544342041015, 0.02051862335205078, 0.020495328903198242, 0.02046281623840332, 0.02062345504760742, 0.020520832061767576, 0.02062745666503906, 0.02058336067199707, 0.020694143295288087, 0.02105187225341797, 0.020827423095703124, 0.020581344604492187, 0.020508703231811524, 0.021874624252319334, 0.02118662452697754, 0.020649984359741212, 0.020619264602661135, 0.021360095977783204, 0.020636192321777345, 0.020743711471557617, 0.020643455505371094, 0.020519775390625, 0.02053513526916504, 0.02164486312866211, 0.02097417640686035, 0.02055561637878418, 0.020544864654541015, 0.020424671173095703, 0.020645151138305663, 0.020695808410644532, 0.020649984359741212, 
0.020510719299316405, 0.02052412796020508, 0.02068355178833008, 0.020743871688842775, 0.020494783401489258, 0.020450912475585937, 0.020611488342285156, 0.020564096450805664, 0.020563840866088867, 0.020756832122802736, 0.020581695556640626, 0.020576608657836913, 0.02047177505493164, 0.02052092742919922, 0.02062339210510254, 0.020449312210083007, 0.020463615417480468, 0.020404096603393554, 0.020804895401000976, 0.020849760055541993, 0.020788991928100586, 0.020620479583740234, 0.020656095504760743, 0.020458240509033204, 0.020463712692260744, 0.020602880477905275, 0.020561920166015626, 0.020619264602661135, 0.020553407669067384, 0.02051513671875, 0.020653663635253908, 0.020644256591796875, 0.020707328796386718, 0.02069196891784668, 0.0206812801361084, 0.020532800674438478, 0.02058464050292969, 0.020918975830078124, 0.020563968658447264, 0.02050214385986328, 0.020504255294799805, 0.02041516876220703, 0.022337535858154296, 0.020785280227661133, 0.02076201629638672, 0.020655616760253907, 0.020655071258544922, 0.020642879486083985, 0.020847551345825194, 0.021036096572875976, 0.021221855163574218, 0.02116399955749512, 0.020859392166137695, 0.02081558418273926, 0.02051100730895996, 0.020512767791748047, 0.02049843215942383, 0.02058639907836914, 0.020586591720581054, 0.02040652847290039, 0.02061235237121582, 0.020551551818847658, 0.020430912017822267, 0.020560447692871093, 0.020539392471313478, 0.021753984451293944, 0.020602176666259766, 0.020643903732299806, 0.02060873603820801, 0.020570560455322264, 0.020472095489501952, 0.020466943740844727, 0.0204083194732666, 0.020564800262451173, 0.020482303619384766, 0.02060406494140625, 0.020533855438232423, 0.02086092758178711, 0.021056575775146483, 0.020751615524291993, 0.020930143356323243, 0.02113545608520508, 0.021727231979370116, 0.021716543197631836, 0.021051296234130858, 0.021044767379760743, 0.020877504348754884, 0.02225663948059082, 0.021180416107177736, 0.020877119064331054, 0.020776960372924806, 0.020705087661743164, 0.020738239288330077, 0.02072812843322754, 0.02070319938659668, 0.020997888565063478, 0.02068191909790039, 0.020789087295532225, 0.020587455749511718, 0.020760576248168947, 0.02087500762939453, 0.020519168853759765, 0.02087731170654297, 0.02064588737487793, 0.02083839988708496, 0.021583520889282226, 0.021025119781494142, 0.02166374397277832, 0.02077401542663574, 0.02108095932006836, 0.020578304290771485, 0.020857887268066408, 0.02072675132751465, 0.02066431999206543, 0.02059644889831543, 0.02176950454711914, 0.021230527877807617, 0.021084224700927735, 0.02123366355895996, 0.021002464294433594, 0.020777055740356445, 0.0208155517578125, 0.02128892707824707, 0.02117635154724121, 0.021127168655395507, 0.021153087615966796, 0.021492576599121092, 0.021489503860473633, 0.021152095794677736, 0.020925151824951173, 0.02084566307067871, 0.021243999481201172, 0.021345983505249022, 0.021276191711425783, 0.02126902389526367, 0.02118396759033203, 0.02096623992919922, 0.020848320007324218, 0.020796800613403322, 0.020834943771362305, 0.020602880477905275, 0.020587776184082033, 0.02049420738220215, 0.020490816116333008, 0.020631296157836914, 0.02058425521850586, 0.02077568054199219, 0.02062131118774414, 0.020572160720825194, 0.020752384185791017, 0.020981760025024415, 0.02136412811279297, 0.021004095077514648, 0.020795200347900392, 0.020568735122680665, 0.020582304000854493, 0.02071139144897461, 0.02068115234375, 0.020676383972167967, 0.020512256622314453, 0.0205599365234375, 0.020578975677490233, 0.02048204803466797, 0.020570112228393556, 
0.020520959854125977, 0.02051024055480957, 0.02057881546020508, 0.020772512435913087, 0.020777280807495118, 0.020813215255737306, 0.020719648361206055, 0.02123404884338379, 0.020660415649414062, 0.02060492706298828, 0.020510656356811523, 0.020465728759765624, 0.020989952087402345, 0.02068604850769043, 0.020591392517089843, 0.020531200408935548, 0.020551136016845703, 0.020476415634155275, 0.020449312210083007, 0.020600223541259767, 0.020673023223876954]",tokens/s,47.37980254479786,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,813.715456,3354.329088,0.0,2959.081472,2942.567424,s,1,7.60247119140625,7.60247119140625,0.0,7.60247119140625,7.60247119140625,7.60247119140625,7.60247119140625,[7.60247119140625],,kWh,1.027498409583245e-05,1.124794598929495e-06,4.711670436002846e-06,1.611144913076479e-05,,MB,1118.08512,3545.16992,0.0,3139.436544,3105.830912,s,10,2.5615629425048834,0.2561562942504883,0.0025241374356524703,0.25539284515380856,0.26039235534667965,0.26045658416748046,0.26050796722412106,"[0.2603780822753906, 0.2574383544921875, 0.2528868408203125, 0.2541678009033203, 0.25575640869140626, 0.2550292816162109, 0.2546710662841797, 0.2536778869628906, 0.2605208129882812, 0.2570364074707031]",tokens/s,999.3898480966645,kWh,7.679832204056866e-06,8.469384555319108e-07,5.085201436578925e-06,1.3611972096167704e-05,tokens/kWh,18806973.610537585,MB,1144.561664,3587.11296,0.0,3181.379584,3162.0096,s,10,13.417264282226563,1.3417264282226564,0.013599975796685194,1.3432182006835938,1.3592529663085937,1.3610109436035156,1.3624173254394532,"[1.3265428466796876, 1.3588623046875, 1.3477608642578125, 1.3466318359375, 1.34207275390625, 1.3443636474609375, 1.3353577880859375, 1.31404443359375, 1.3627689208984375, 1.33885888671875]",tokens/s,46.954430258524575,kWh,3.888306745719286e-05,4.2885395841120465e-06,2.506946157602105e-05,6.824106861732596e-05,tokens/kWh,923197.7352711724,,s,630,13.414208038330063,0.021292393711635042,0.0005596877692547637,0.021259455680847167,0.021615834045410155,0.022005284976959225,0.02318074527740479,"[0.02122380828857422, 0.020987552642822267, 0.02083875274658203, 0.020719200134277343, 0.02167843246459961, 0.0205980167388916, 0.02066633605957031, 0.02083718490600586, 0.020845888137817382, 0.02263644790649414, 0.021711904525756835, 0.020838176727294922, 0.020822015762329102, 0.02083430480957031, 0.02065555191040039, 0.020869695663452148, 0.02089574432373047, 0.0209749755859375, 0.020836544036865235, 0.02113580894470215, 0.021360479354858398, 0.020811935424804688, 0.020969472885131835, 0.02066227149963379, 0.02062131118774414, 0.020551071166992188, 0.02053590393066406, 0.02062950325012207, 0.020549631118774413, 0.021269535064697264, 0.020786144256591796, 0.020676607131958007, 0.020793216705322266, 0.02068809509277344, 0.020581279754638672, 0.020642976760864257, 0.020616031646728514, 0.02067865562438965, 0.020774944305419922, 0.020943904876708986, 
0.020870271682739257, 0.020727615356445312, 0.020727807998657227, 0.020917728424072267, 0.021040735244750978, 0.021232160568237304, 0.021495296478271485, 0.021324703216552734, 0.02145052719116211, 0.021397727966308594, 0.02136195182800293, 0.021284927368164064, 0.022167680740356445, 0.021407808303833008, 0.02129484748840332, 0.02139561653137207, 0.021470048904418945, 0.02145155143737793, 0.02151910400390625, 0.021424320220947264, 0.021409887313842774, 0.021597728729248047, 0.021488000869750976, 0.021881311416625977, 0.021443904876708983, 0.02135465621948242, 0.021456735610961914, 0.02142473602294922, 0.021325759887695313, 0.021406976699829102, 0.02133475112915039, 0.02135481643676758, 0.021377952575683593, 0.021386016845703126, 0.022263168334960937, 0.021505760192871093, 0.021564064025878907, 0.021763423919677734, 0.02161552047729492, 0.02145894432067871, 0.02149580764770508, 0.021546016693115233, 0.021470176696777345, 0.021213279724121094, 0.02145471954345703, 0.021467168807983397, 0.022299840927124025, 0.021452831268310546, 0.021343008041381836, 0.021336063385009766, 0.021480640411376952, 0.021420864105224608, 0.02146633529663086, 0.02124880027770996, 0.02123075294494629, 0.0213656005859375, 0.021475040435791015, 0.021487648010253907, 0.02156972885131836, 0.023054399490356446, 0.022042623519897463, 0.02148761558532715, 0.021634592056274413, 0.021317375183105468, 0.02159814453125, 0.021299680709838866, 0.02141747283935547, 0.0214304313659668, 0.021364704132080078, 0.021347007751464843, 0.021420032501220702, 0.021348352432250976, 0.021336063385009766, 0.021317983627319338, 0.021400768280029295, 0.021154272079467774, 0.02126643180847168, 0.02125823974609375, 0.02151628875732422, 0.021640928268432617, 0.02147545623779297, 0.02143657684326172, 0.021393760681152344, 0.021406784057617187, 0.023015232086181642, 0.024418752670288087, 0.02168012809753418, 0.021169279098510744, 0.021162879943847655, 0.021434463500976563, 0.02137868881225586, 0.021133600234985353, 0.021174272537231444, 0.021131263732910157, 0.021227519989013673, 0.021198848724365234, 0.02211862373352051, 0.021614368438720704, 0.021710847854614256, 0.02145075225830078, 0.021223424911499023, 0.02128108787536621, 0.02122659111022949, 0.0211746883392334, 0.021403839111328125, 0.0215118408203125, 0.021544832229614258, 0.02135641670227051, 0.021267040252685547, 0.02140390396118164, 0.021192447662353515, 0.021213184356689452, 0.02110588836669922, 0.02128156852722168, 0.021301248550415038, 0.021151744842529296, 0.021102592468261717, 0.02126665687561035, 0.020952287673950194, 0.024524831771850587, 0.02122707176208496, 0.020874399185180664, 0.021102399826049806, 0.020743488311767578, 0.02100704002380371, 0.022416608810424805, 0.021646112442016602, 0.021581823348999024, 0.022279199600219728, 0.021314527511596678, 0.021263744354248045, 0.02117043113708496, 0.021361024856567382, 0.021446304321289064, 0.02125971221923828, 0.021273151397705078, 0.021256479263305664, 0.021198911666870116, 0.021267967224121095, 0.021387487411499023, 0.021221824645996094, 0.02156732749938965, 0.02130668830871582, 0.021289663314819338, 0.02168422317504883, 0.021261407852172853, 0.021424543380737304, 0.02136252784729004, 0.021185184478759767, 0.02188047981262207, 0.02115190315246582, 0.021336544036865236, 0.021114944458007812, 0.02106572723388672, 0.02136809539794922, 0.021592159271240235, 0.02159881591796875, 0.021263904571533203, 0.0210150089263916, 0.021147680282592774, 0.021376096725463867, 0.021313631057739257, 0.021316608428955077, 0.02214860725402832, 
0.021268287658691407, 0.02139187240600586, 0.021180416107177736, 0.021311487197875977, 0.021194944381713866, 0.021243711471557618, 0.021204416275024413, 0.021113407135009764, 0.02130473518371582, 0.021080160140991212, 0.021375423431396486, 0.021497919082641603, 0.021477344512939454, 0.021302623748779295, 0.0215631046295166, 0.021289440155029298, 0.02198358345031738, 0.02294528007507324, 0.02134310340881348, 0.021241216659545897, 0.02128486442565918, 0.021287391662597657, 0.021213119506835937, 0.021202943801879884, 0.02134364891052246, 0.02120355224609375, 0.022134559631347656, 0.021079263687133788, 0.02146611213684082, 0.021729280471801758, 0.021307392120361326, 0.02122547149658203, 0.021153791427612305, 0.02115692710876465, 0.02112403106689453, 0.02127872085571289, 0.02143846321105957, 0.021238912582397462, 0.02117932891845703, 0.021249984741210936, 0.0212541446685791, 0.021048479080200196, 0.02147769546508789, 0.02115135955810547, 0.02103593635559082, 0.021352447509765626, 0.022439071655273438, 0.021257055282592772, 0.021649152755737304, 0.021547359466552736, 0.0215097599029541, 0.02141744041442871, 0.021077024459838868, 0.021036991119384764, 0.02137654495239258, 0.02134009552001953, 0.021148191452026368, 0.021053440093994142, 0.020995264053344728, 0.021138240814208984, 0.021253952026367186, 0.02129859161376953, 0.021215744018554687, 0.02117238426208496, 0.02136457633972168, 0.021090591430664062, 0.021120351791381838, 0.021191328048706055, 0.021397504806518555, 0.02139952087402344, 0.021348384857177733, 0.021336063385009766, 0.021157888412475585, 0.021211135864257814, 0.021332000732421873, 0.021198816299438476, 0.021151744842529296, 0.020967424392700194, 0.02103091239929199, 0.02128281593322754, 0.021331199645996092, 0.022072063446044923, 0.021421760559082032, 0.021137311935424806, 0.021170591354370116, 0.021110784530639647, 0.02126028823852539, 0.021172224044799806, 0.021338111877441408, 0.021437471389770507, 0.021540895462036132, 0.021273536682128905, 0.021254207611083983, 0.021075904846191405, 0.021280128479003905, 0.021259199142456053, 0.021472543716430665, 0.021387680053710938, 0.021560672760009766, 0.021324447631835938, 0.02127769660949707, 0.02110361671447754, 0.021839872360229492, 0.021310848236083986, 0.02146771240234375, 0.02093881607055664, 0.02141788864135742, 0.021553247451782227, 0.02166783905029297, 0.021310623168945313, 0.021184576034545897, 0.02168822479248047, 0.02104470443725586, 0.021189599990844726, 0.021583871841430666, 0.02163408088684082, 0.022155807495117186, 0.021166528701782227, 0.02120412826538086, 0.021271392822265624, 0.02122137641906738, 0.021011680603027345, 0.021085311889648437, 0.021350048065185548, 0.024834367752075197, 0.021546592712402345, 0.02120841598510742, 0.021218048095703126, 0.021567487716674806, 0.02112512016296387, 0.021510143280029297, 0.021736703872680663, 0.021349119186401366, 0.021362176895141603, 0.02142255973815918, 0.021403743743896485, 0.021088191986083984, 0.0212807674407959, 0.021024480819702148, 0.021094688415527342, 0.021202943801879884, 0.021204736709594725, 0.021047552108764647, 0.021127168655395507, 0.02106572723388672, 0.021073919296264648, 0.02088960075378418, 0.02111692810058594, 0.021034112930297853, 0.02130828857421875, 0.020786687850952147, 0.021209152221679687, 0.0214716796875, 0.02141209602355957, 0.021454559326171876, 0.02116217613220215, 0.02119868850708008, 0.021134815216064452, 0.02127324867248535, 0.02127017593383789, 0.021012704849243165, 0.02123776054382324, 0.021180767059326172, 0.02208118438720703, 
0.021354143142700194, 0.021305248260498046, 0.020998592376708983, 0.02143436813354492, 0.021390527725219727, 0.021293888092041014, 0.02107151985168457, 0.02122172737121582, 0.02129088020324707, 0.02134614372253418, 0.02181340789794922, 0.0211680965423584, 0.0211343994140625, 0.021046207427978515, 0.02126438331604004, 0.021235712051391603, 0.021006336212158205, 0.021342208862304687, 0.02118377685546875, 0.021199199676513673, 0.021342592239379884, 0.02116761589050293, 0.021182464599609374, 0.02093516731262207, 0.021243072509765624, 0.021455007553100584, 0.021393312454223632, 0.021426048278808594, 0.021234560012817382, 0.021153791427612305, 0.02108940887451172, 0.021180479049682618, 0.02119353675842285, 0.020998144149780275, 0.022128639221191407, 0.021429279327392577, 0.021238527297973632, 0.021395103454589844, 0.022294431686401366, 0.021851903915405275, 0.021373023986816408, 0.021379295349121093, 0.021215839385986326, 0.02130067253112793, 0.021154367446899414, 0.02102681541442871, 0.020940576553344727, 0.020832223892211912, 0.02086265563964844, 0.021137344360351563, 0.020875936508178712, 0.021136959075927733, 0.02103113555908203, 0.021098688125610353, 0.020962560653686523, 0.020986623764038086, 0.021190464019775392, 0.021381183624267577, 0.021098047256469726, 0.020918176651000975, 0.02086524772644043, 0.02102112007141113, 0.021308544158935547, 0.021170751571655273, 0.021203487396240235, 0.021114656448364258, 0.02099836730957031, 0.020975391387939454, 0.02105548858642578, 0.020813695907592772, 0.020861055374145506, 0.020968479156494142, 0.021102783203125, 0.021472000122070314, 0.021866592407226562, 0.02105958366394043, 0.02079689598083496, 0.02075641632080078, 0.020970079421997072, 0.021650560379028322, 0.021046207427978515, 0.02075436782836914, 0.020714719772338866, 0.020769567489624025, 0.021236991882324217, 0.02075315284729004, 0.020750335693359375, 0.020612192153930665, 0.020847488403320312, 0.020883487701416015, 0.02086297607421875, 0.02074985694885254, 0.020619743347167967, 0.020775968551635743, 0.02055062484741211, 0.02057401657104492, 0.02053548812866211, 0.020682687759399413, 0.020643903732299806, 0.020594688415527345, 0.020606752395629882, 0.020705440521240234, 0.020650047302246094, 0.022099967956542968, 0.020821216583251954, 0.020620063781738283, 0.02063155174255371, 0.020683839797973634, 0.020650400161743163, 0.020617599487304687, 0.020750495910644533, 0.020766687393188477, 0.020670719146728515, 0.020709152221679687, 0.02067865562438965, 0.020672416687011717, 0.020721151351928712, 0.020613311767578125, 0.02174118423461914, 0.020697887420654298, 0.02087881660461426, 0.02068662452697754, 0.020760448455810546, 0.020687040328979493, 0.020649856567382812, 0.020766944885253907, 0.020908639907836913, 0.021407743453979493, 0.021163328170776367, 0.020883712768554687, 0.020911840438842772, 0.020646400451660156, 0.020817535400390625, 0.021161951065063477, 0.021058111190795897, 0.020738079071044923, 0.02147737693786621, 0.021331968307495116, 0.021101696014404297, 0.02119708824157715, 0.0213090877532959, 0.02144879913330078, 0.022098783493041993, 0.021712896347045898, 0.023810176849365233, 0.02156844711303711, 0.02154537582397461, 0.02191209602355957, 0.02149580764770508, 0.021452096939086913, 0.021663583755493165, 0.021344287872314453, 0.021490495681762697, 0.02209324836730957, 0.02146566390991211, 0.02143436813354492, 0.021311487197875977, 0.021370880126953123, 0.021312543869018555, 0.021388256072998046, 0.021352447509765626, 0.021579776763916016, 0.021448703765869142, 0.02128691291809082, 
0.021403776168823243, 0.021410943984985352, 0.022600160598754884, 0.021603872299194336, 0.021618656158447266, 0.021604415893554687, 0.02153750419616699, 0.021741567611694337, 0.02143027114868164, 0.021327871322631836, 0.021484703063964845, 0.021439327239990234, 0.021381343841552734, 0.02147052764892578, 0.021444608688354492, 0.0214102725982666, 0.02130352020263672, 0.021169952392578125, 0.021264192581176757, 0.021239999771118165, 0.021514240264892577, 0.02162073516845703, 0.02123075294494629, 0.021269344329833986, 0.02109235191345215, 0.021319135665893555, 0.023232351303100585, 0.02972947120666504, 0.021136640548706054, 0.02114761543273926, 0.02138528060913086, 0.020902624130249025, 0.020970592498779295, 0.02103388786315918, 0.02100662422180176, 0.021309440612792968, 0.021105663299560547, 0.02165225601196289, 0.021205087661743165, 0.02127020835876465, 0.021146047592163087, 0.0212807674407959, 0.021237119674682618, 0.021151968002319336, 0.02114761543273926, 0.021168447494506835, 0.020958879470825195, 0.021121503829956055, 0.02102252769470215, 0.021002431869506837, 0.021102304458618163, 0.021176607131958007, 0.02147532844543457, 0.02127462387084961, 0.02127257537841797, 0.021559295654296876, 0.021276416778564452, 0.021177696228027343, 0.021169055938720704, 0.02126233673095703, 0.021067520141601563, 0.020983680725097657, 0.021227615356445313, 0.02118275260925293, 0.021153791427612305, 0.021102592468261717, 0.021391008377075197, 0.0214531192779541, 0.02138319969177246, 0.021313440322875975, 0.022146528244018554, 0.021250688552856445, 0.021180448532104493, 0.021274911880493165, 0.021176223754882813, 0.021135135650634764, 0.021061632156372072, 0.020923839569091798, 0.021052032470703124, 0.021574752807617188, 0.021417823791503907, 0.021141952514648437, 0.021518911361694336, 0.021535999298095704, 0.021588735580444336, 0.021178367614746094, 0.023399456024169922, 0.022023040771484374, 0.021243999481201172, 0.021317472457885744, 0.021016288757324218, 0.021033311843872072, 0.02082195281982422, 0.02060310363769531, 0.0211778564453125, 0.02075276756286621, 0.02058559989929199, 0.020624319076538087]",tokens/s,46.96512818347703,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = 
scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 955, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 707, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 707, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 504, in __init__ self.self_attn = MISTRAL_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 196, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 
557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 162, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not 
found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 163, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", 
line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 164, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) 
File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,812.417024,12523.077632,0.0,12127.830016,12122.08896,s,1,7.2994873046875,7.2994873046875,0.0,7.2994873046875,7.2994873046875,7.2994873046875,7.2994873046875,[7.2994873046875],,kWh,1.1944894587497856e-05,1.2906058269482203e-06,6.215838306000091e-06,1.945133872044617e-05,,MB,1129.816064,12695.044096,0.0,12289.31072,12248.586752,s,10,11.432853271484374,1.1432853271484373,0.003098576912891139,1.1437304077148438,1.1467366577148437,1.146837567138672,1.1469182946777343,"[1.13678466796875, 1.139625, 1.1412708740234374, 1.1431728515625, 1.1437130126953126, 1.143747802734375, 1.144585205078125, 1.1467142333984375, 1.1469384765625, 1.1463011474609375]",tokens/s,223.9161073102467,kWh,3.350108923875007e-05,3.694341063623324e-06,2.2123045476200084e-05,5.9318475778573484e-05,tokens/kWh,4315687.4252063995,MB,1173.42208,12703.432704,0.0,12297.699328,12248.589312,s,10,33.81919946289062,3.381919946289062,0.002572435008086271,3.382077392578125,3.3849273437500003,3.3851793701171875,3.385380991210938,"[3.37895458984375, 3.37922119140625, 3.3785986328125, 3.38262890625, 3.383670166015625, 3.38152587890625, 3.379484130859375, 3.384813232421875, 3.384871337890625, 3.385431396484375]",tokens/s,18.628471696714495,kWh,9.873379739124999e-05,1.089143092125667e-05,6.570288589559988e-05,0.00017532811420810655,tokens/kWh,359326.285373844,,s,630,33.81635486221316,0.053676753749544656,0.00028610013942388234,0.05367704010009766,0.05391408386230469,0.05399905891418457,0.055077218322753904,"[0.05495600128173828, 0.053784576416015625, 0.05345059204101563, 0.05316624069213867, 0.05299609756469727, 0.052991519927978514, 0.053130943298339846, 0.05316793441772461, 0.05340208053588867, 0.053469345092773436, 0.05364944076538086, 0.053452129364013674, 0.05346198272705078, 0.05349929428100586, 0.053352161407470705, 0.053179264068603516, 0.05315584182739258, 
0.05346303939819336, 0.053599361419677735, 0.05381177520751953, 0.05366815948486328, 0.05349990463256836, 0.05333606338500976, 0.05348556900024414, 0.05334000015258789, 0.05345862579345703, 0.053488094329833986, 0.05365119934082031, 0.05359231948852539, 0.05361423873901367, 0.053510494232177734, 0.05368832015991211, 0.053528575897216796, 0.05336403274536133, 0.053572288513183595, 0.053645313262939455, 0.05386608123779297, 0.05390582275390625, 0.053749088287353516, 0.05393436813354492, 0.05368051147460937, 0.05366579055786133, 0.053526527404785154, 0.05374959945678711, 0.05369241714477539, 0.05401206588745117, 0.05393936157226562, 0.053598751068115236, 0.05371907043457031, 0.054013729095458984, 0.05370230484008789, 0.053764289855957034, 0.05374736022949219, 0.053768417358398435, 0.053803455352783205, 0.05402159881591797, 0.053932350158691404, 0.05403910446166992, 0.053907455444335936, 0.05377433776855469, 0.053744895935058594, 0.05396950531005859, 0.05383184051513672, 0.054832416534423827, 0.05351532745361328, 0.05334614562988281, 0.05319641494750976, 0.05313766479492187, 0.05346460723876953, 0.05319097518920898, 0.053262401580810546, 0.05395814514160156, 0.05352483367919922, 0.05355295944213867, 0.053631553649902346, 0.05339910507202148, 0.053645278930664064, 0.05346966552734375, 0.053448318481445316, 0.05346342468261719, 0.0537245101928711, 0.053607070922851566, 0.053628929138183595, 0.05343148803710938, 0.05343110275268555, 0.05339340972900391, 0.05331148910522461, 0.05343139266967773, 0.05341686248779297, 0.05333340835571289, 0.053524192810058595, 0.053711776733398435, 0.05368761444091797, 0.0536459846496582, 0.05368627166748047, 0.053573696136474606, 0.0536165771484375, 0.05363097763061524, 0.05365760040283203, 0.053761184692382814, 0.053742431640625, 0.053937248229980465, 0.05370767974853516, 0.05350112152099609, 0.053456897735595706, 0.05333689498901367, 0.05341593551635742, 0.05351177597045898, 0.0536371841430664, 0.05365385437011719, 0.053651454925537106, 0.053754047393798826, 0.05389907073974609, 0.053835777282714846, 0.05384396743774414, 0.05366988754272461, 0.053929088592529296, 0.05394931030273437, 0.053892383575439455, 0.05420864105224609, 0.05401676940917969, 0.053827423095703125, 0.053940223693847655, 0.05376598358154297, 0.053733631134033205, 0.05384182357788086, 0.05520844650268555, 0.053714942932128903, 0.05346713638305664, 0.053260128021240236, 0.05318415832519531, 0.05330492782592773, 0.053156097412109374, 0.053682430267333985, 0.05339494323730469, 0.05366012954711914, 0.0536693115234375, 0.053482494354248046, 0.053348350524902347, 0.05353062438964844, 0.05345014572143555, 0.053559902191162106, 0.05342972946166992, 0.0535700798034668, 0.0537242546081543, 0.05374863815307617, 0.05347087860107422, 0.05345724868774414, 0.05344460678100586, 0.0533831672668457, 0.0534835205078125, 0.05330739212036133, 0.0535327033996582, 0.053762016296386717, 0.053597312927246094, 0.05369331359863281, 0.053583774566650394, 0.05375705718994141, 0.053465312957763675, 0.05362969589233398, 0.05357065582275391, 0.05365852737426758, 0.053819393157958986, 0.05367766571044922, 0.05378688049316406, 0.05364303970336914, 0.05351420974731445, 0.05360886383056641, 0.05351001739501953, 0.053602432250976564, 0.05360639953613281, 0.053663745880126956, 0.0538603515625, 0.05380441665649414, 0.05374835205078125, 0.05356492614746094, 0.053580097198486325, 0.05349596786499024, 0.05348969650268555, 0.05377024078369141, 0.053684223175048826, 0.05391299057006836, 0.053830047607421876, 0.05377657699584961, 
0.05384313583374024, 0.053811710357666014, 0.05388320159912109, 0.053766143798828124, 0.05374771118164062, 0.05544809722900391, 0.05384396743774414, 0.05328486251831055, 0.05330739212036133, 0.05304912185668945, 0.05308745574951172, 0.05308729553222656, 0.05318761444091797, 0.05328947067260742, 0.0533570556640625, 0.05341116714477539, 0.05359430313110351, 0.05348390579223633, 0.053422080993652345, 0.05330944061279297, 0.053561344146728515, 0.05324758529663086, 0.0536539192199707, 0.053722942352294925, 0.05370006561279297, 0.053510879516601564, 0.05355868911743164, 0.05354713439941406, 0.053628639221191404, 0.05357849502563477, 0.0541080322265625, 0.05369187164306641, 0.05366550445556641, 0.05366265487670899, 0.05375590515136719, 0.053698558807373044, 0.05364432144165039, 0.05362502288818359, 0.053531425476074215, 0.05360639953613281, 0.05382758331298828, 0.053678081512451174, 0.05389311981201172, 0.05396480178833008, 0.05393203353881836, 0.05373747253417969, 0.05389430236816406, 0.05364822387695312, 0.05374566268920898, 0.05373132705688476, 0.05375942230224609, 0.05382815933227539, 0.05382758331298828, 0.05373132705688476, 0.053838977813720705, 0.05380777740478516, 0.05376800155639649, 0.05373583984375, 0.05400502395629883, 0.053840351104736325, 0.05401558303833008, 0.054010528564453125, 0.053902942657470705, 0.053889278411865235, 0.05393423843383789, 0.053794654846191406, 0.05386387252807617, 0.0538746223449707, 0.0554106559753418, 0.05382368087768555, 0.053427040100097654, 0.05347225570678711, 0.05326182556152344, 0.053436321258544923, 0.0534218864440918, 0.05341059112548828, 0.05333401489257812, 0.05345836639404297, 0.05381792068481445, 0.05348716735839844, 0.05361081695556641, 0.053583518981933594, 0.053561824798583985, 0.05373491287231445, 0.0534984016418457, 0.05378249740600586, 0.05387468719482422, 0.053852161407470706, 0.053704414367675785, 0.05369785690307617, 0.053596351623535154, 0.053617408752441406, 0.0536781120300293, 0.05357158279418945, 0.053645313262939455, 0.053556385040283205, 0.0536126708984375, 0.05380124664306641, 0.053623233795166016, 0.05365923309326172, 0.053676414489746097, 0.053685886383056644, 0.05365187072753906, 0.05381497573852539, 0.05378889465332031, 0.05379679870605469, 0.05389459228515625, 0.053725921630859375, 0.05366534423828125, 0.05367801666259766, 0.05370300674438477, 0.05370665740966797, 0.05364879989624023, 0.053658462524414065, 0.053626880645751954, 0.053642559051513675, 0.05359891128540039, 0.05358720016479492, 0.05376076889038086, 0.05372911834716797, 0.05383731079101563, 0.05401257705688477, 0.05399552154541016, 0.053907455444335936, 0.05390553665161133, 0.05377830505371094, 0.0537968635559082, 0.053816574096679684, 0.0536890869140625, 0.053704158782958984, 0.05390595245361328, 0.055509056091308594, 0.05405596923828125, 0.05329919815063477, 0.053298145294189456, 0.053141502380371096, 0.05313324737548828, 0.05328287887573242, 0.05331763076782227, 0.0533414077758789, 0.05339215850830078, 0.05334220886230469, 0.05344460678100586, 0.053403297424316404, 0.053373279571533205, 0.053489376068115234, 0.05355548858642578, 0.053378944396972654, 0.05360652923583984, 0.05364303970336914, 0.05360038375854492, 0.05348486328125, 0.053504798889160154, 0.053387264251708984, 0.053456897735595706, 0.05327667236328125, 0.05347942352294922, 0.05409366226196289, 0.053575199127197264, 0.053492351531982424, 0.0535200309753418, 0.053727584838867186, 0.05376409530639648, 0.05356339263916016, 0.053460990905761716, 0.053491168975830075, 0.05372774505615235, 
0.05361872100830078, 0.05366483306884766, 0.05392236709594726, 0.05383206558227539, 0.053628929138183595, 0.05372108840942383, 0.05350153732299805, 0.05357814407348633, 0.054063102722167966, 0.05382144165039063, 0.053857311248779294, 0.05386134338378906, 0.053768192291259766, 0.05376768112182617, 0.054008289337158205, 0.053823520660400394, 0.053889022827148435, 0.05380505752563477, 0.053816864013671875, 0.05398780822753906, 0.05404467010498047, 0.053933345794677734, 0.05397331237792969, 0.05397135925292969, 0.05389625549316406, 0.05391996765136719, 0.05391843032836914, 0.05531846237182617, 0.05389315032958984, 0.0534615364074707, 0.05349728012084961, 0.05315430450439453, 0.05328028869628906, 0.05352054214477539, 0.05336307144165039, 0.05332588958740234, 0.05345868682861328, 0.053402816772460934, 0.05339433670043945, 0.05358796691894531, 0.05351023864746094, 0.053429534912109375, 0.05358803176879883, 0.053354400634765625, 0.05359283065795899, 0.05366742324829102, 0.05374403381347656, 0.05368569564819336, 0.0536929931640625, 0.05359001541137695, 0.053441665649414063, 0.053296001434326175, 0.05349785614013672, 0.05359548950195313, 0.05357372665405274, 0.05351795196533203, 0.05366259384155273, 0.0535428466796875, 0.05342425537109375, 0.0535470085144043, 0.0535838737487793, 0.053626304626464845, 0.053762622833251957, 0.05368012619018555, 0.05389644622802734, 0.0537628173828125, 0.05374294281005859, 0.053596832275390624, 0.05370675277709961, 0.053610496520996094, 0.053575199127197264, 0.053635551452636716, 0.05380662536621094, 0.05375862503051758, 0.05370582580566406, 0.053602336883544925, 0.0537504653930664, 0.05367561721801758, 0.05362729644775391, 0.05371020889282226, 0.05378713607788086, 0.05379072189331055, 0.05386159896850586, 0.053738399505615236, 0.0537784309387207, 0.0537861442565918, 0.053807582855224606, 0.05370675277709961, 0.05374566268920898, 0.053782527923583984, 0.05525897598266601, 0.0540054702758789, 0.053416385650634765, 0.05335244750976562, 0.053255840301513674, 0.053373279571533205, 0.053364734649658206, 0.05324342346191406, 0.05346963119506836, 0.05339344024658203, 0.05336064147949219, 0.0533831672668457, 0.05348543930053711, 0.05351414489746094, 0.053298465728759764, 0.053545921325683594, 0.05346281433105469, 0.053604705810546875, 0.05385801696777344, 0.05376416015625, 0.0536003532409668, 0.05374771118164062, 0.05359791946411133, 0.05368592071533203, 0.05364595031738281, 0.05363091278076172, 0.053657665252685546, 0.05381324768066406, 0.05364096069335938, 0.0536352653503418, 0.0537426872253418, 0.05385468673706055, 0.053723648071289064, 0.053694465637207034, 0.05368822479248047, 0.05380684661865234, 0.05364284896850586, 0.05376233673095703, 0.053752288818359376, 0.053743614196777346, 0.053743488311767576, 0.05385023880004883, 0.05374771118164062, 0.053823520660400394, 0.05392995071411133, 0.053691841125488284, 0.05390095901489258, 0.05385852813720703, 0.05373974227905273, 0.05390383911132812, 0.053806175231933595, 0.053894046783447266, 0.05378047943115234, 0.05391360092163086, 0.053800960540771485, 0.05385420989990235, 0.05386441421508789, 0.053878814697265624, 0.05392947387695313, 0.054104576110839846, 0.053850112915039064, 0.054106113433837894, 0.054091007232666015, 0.0550563850402832, 0.05373721694946289, 0.053444862365722656, 0.05348726272583008, 0.0535002555847168, 0.05351619338989258, 0.05347452926635742, 0.0535316162109375, 0.05334230422973633, 0.05328774261474609, 0.05359254455566406, 0.053590560913085936, 0.05337702560424805, 0.05365756988525391, 
0.05352860641479492, 0.053528575897216796, 0.053423263549804687, 0.05366460800170898, 0.05386240005493164, 0.05399052810668945, 0.05366806411743164, 0.05364553451538086, 0.053688159942626955, 0.05376800155639649, 0.053596126556396485, 0.05370553588867188, 0.05354086303710937, 0.05362076950073242, 0.0536486701965332, 0.05386710357666016, 0.05376214218139649, 0.05376755142211914, 0.05368691253662109, 0.05370675277709961, 0.05368832015991211, 0.053790401458740235, 0.05386214447021485, 0.05390959930419922, 0.053823966979980466, 0.05384806442260742, 0.053823486328125, 0.053768192291259766, 0.05367603302001953, 0.05383990478515625, 0.05361043167114258, 0.05362662506103515, 0.053655742645263675, 0.05367958450317383, 0.053795265197753905, 0.0536451187133789, 0.05393446350097656, 0.05389923095703125, 0.05376121520996094, 0.053895103454589845, 0.054001953125, 0.05392652893066406, 0.05375356674194336, 0.053823486328125, 0.05378047943115234, 0.053982494354248046, 0.05390361785888672, 0.05382944107055664, 0.05381216049194336, 0.05508572769165039, 0.05351628875732422, 0.053292991638183594, 0.053448768615722654, 0.0531599349975586, 0.053352001190185544, 0.05328326416015625, 0.05325209426879883, 0.05356505584716797, 0.05363750457763672, 0.05351116943359375, 0.0538263053894043, 0.0534653434753418, 0.05360351943969727, 0.05360108947753906, 0.05351628875732422, 0.0534854736328125, 0.05369251251220703, 0.05362185668945312, 0.05373023986816406, 0.05360022354125977, 0.05363916778564453, 0.053548160552978515, 0.05368681716918945, 0.05357603073120117, 0.05383782577514649, 0.05398483276367187, 0.053844417572021484, 0.053714942932128903, 0.053984512329101564, 0.05382406234741211, 0.05377862548828125, 0.05389926528930664, 0.05377766418457031, 0.0537259521484375, 0.05377795028686523, 0.05363091278076172, 0.05368681716918945, 0.053907455444335936, 0.053866497039794924, 0.05379020690917969, 0.053799297332763674, 0.05369401550292969, 0.05387286376953125, 0.0538353271484375, 0.05376470565795898, 0.05388054275512695, 0.053758430480957034, 0.05378416061401367, 0.05396284866333008, 0.05388307189941406, 0.05383785629272461, 0.053768287658691405, 0.05382067108154297, 0.05376486587524414, 0.053788608551025394, 0.05379283142089844, 0.0539791374206543, 0.05387257766723633, 0.053682239532470706, 0.053768062591552736, 0.0541267204284668, 0.05395574569702148]",tokens/s,18.630038706625083,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback 
(most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 557, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 162, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 982, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 980, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 760, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 555, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 219, in __init__ self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 
98, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 78, in __torch_function__ return func(*args, **kwargs) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.907392,806.289408,0.0,411.041792,391.374848,s,1,7.3405439453125,7.3405439453125,0.0,7.3405439453125,7.3405439453125,7.3405439453125,7.3405439453125,[7.3405439453125],,kWh,4.876233570833695e-06,5.308025702608459e-07,1.983334919991586e-06,7.3903710610861275e-06,,MB,1112.3712,879.689728,0.0,473.956352,454.832128,s,14,0.35395107078552246,0.025282219341823033,0.0006675251077587096,0.02511552047729492,0.025283087921142578,0.02612307538986206,0.027355866603851316,"[0.027664064407348633, 0.025009151458740234, 0.0250250244140625, 0.025128000259399413, 0.02510304069519043, 0.02513587188720703, 0.024959232330322264, 0.02525923156738281, 0.024972927093505858, 0.025055456161499023, 0.0250283203125, 0.02516223907470703, 0.02515519905090332, 0.025293312072753905]",tokens/s,10125.69333960776,kWh,8.802714563880312e-07,9.703644657400391e-08,5.748766312530154e-07,1.5521845342150508e-06,tokens/kWh,164928843.41838953,MB,1139.023872,906.952704,0.0,501.219328,454.834688,s,14,10.088037719726563,0.7205741228376116,0.007503769931621452,0.7197563781738281,0.7292069396972656,0.7304784545898437,0.7312623901367188,"[0.7006243286132813, 0.7314583740234375, 0.7256400146484375, 0.7224078979492188, 0.7274712524414062, 0.72591748046875, 0.714115234375, 0.7185491943359374, 0.71629638671875, 0.7185131225585938, 0.7195335083007812, 0.7299508056640625, 0.719979248046875, 0.7175808715820312]",tokens/s,87.43028371863649,kWh,2.0470741543611325e-05,2.2576304109255806e-06,8.849634876604323e-06,3.157800683114122e-05,tokens/kWh,1995059.4202124062,,s,882,10.07979100131987,0.011428334468616649,0.00034070346924705217,0.011425951957702636,0.01164567985534668,0.011714222478866578,0.012337450504302967,"[0.011187392234802246, 0.01128428840637207, 0.011154335975646973, 0.011010080337524415, 0.010950079917907715, 0.011192223548889161, 0.01102511978149414, 0.01102400016784668, 0.010981760025024413, 0.011669504165649413, 0.011978719711303711, 0.013942432403564454, 0.011230751991271973, 0.011288736343383789, 0.011254464149475098, 0.011176959991455078, 0.01110102367401123, 0.011054783821105958, 0.012296671867370606, 0.011003168106079101, 0.010989760398864746, 0.010976960182189941, 0.010957216262817383, 0.010914239883422851, 0.011362303733825683, 0.011315199851989746, 0.010932288169860839, 0.010882623672485351, 0.010912287712097167, 0.010935232162475586, 0.011019167900085449, 0.010912960052490234, 0.010942815780639648, 0.01100870418548584, 0.011087648391723633, 
0.010894559860229492, 0.010933247566223145, 0.010874912261962891, 0.010816800117492675, 0.010888895988464355, 0.010926464080810546, 0.010907648086547851, 0.010867232322692872, 0.010972928047180176, 0.010893535614013673, 0.010917792320251465, 0.010840031623840332, 0.010933823585510254, 0.010846688270568847, 0.01088652801513672, 0.010914048194885255, 0.010934623718261718, 0.01090294361114502, 0.010934911727905273, 0.010950655937194824, 0.011067647933959961, 0.011034367561340332, 0.010991616249084473, 0.010966015815734862, 0.011037152290344238, 0.011231743812561035, 0.0112326078414917, 0.011455136299133301, 0.011290047645568848, 0.01157363224029541, 0.011560928344726563, 0.011636768341064453, 0.011632479667663575, 0.011688096046447754, 0.011600192070007325, 0.011626175880432129, 0.011542752265930175, 0.011669376373291015, 0.011519904136657716, 0.011464768409729004, 0.011421631813049317, 0.01149289608001709, 0.01174176025390625, 0.011482560157775878, 0.01153395175933838, 0.011540863990783691, 0.011558752059936523, 0.011541119575500488, 0.01148470401763916, 0.011463135719299316, 0.011498496055603028, 0.01141427230834961, 0.01158176040649414, 0.011614144325256348, 0.011487232208251954, 0.011552767753601074, 0.012955615997314453, 0.012981951713562012, 0.011619903564453126, 0.01168012809753418, 0.011649439811706543, 0.012210176467895508, 0.01161404800415039, 0.011614368438720704, 0.011426048278808594, 0.01187228775024414, 0.01154428768157959, 0.011496512413024903, 0.011527104377746582, 0.011610367774963378, 0.011675392150878906, 0.011970815658569335, 0.01173299217224121, 0.011515647888183593, 0.011440128326416015, 0.011456512451171874, 0.011669568061828613, 0.011499456405639649, 0.011447808265686036, 0.01148134422302246, 0.011630240440368653, 0.011630623817443848, 0.01154310417175293, 0.011495455741882325, 0.011386207580566406, 0.011491680145263672, 0.011349632263183593, 0.011459551811218261, 0.011456319808959961, 0.01133513641357422, 0.011241888046264649, 0.011032575607299805, 0.011427840232849122, 0.0115447998046875, 0.011566975593566894, 0.011640735626220703, 0.011513728141784668, 0.011403039932250976, 0.011448672294616699, 0.011531840324401856, 0.011629216194152832, 0.011405088424682618, 0.011272192001342773, 0.011266048431396485, 0.01133516788482666, 0.011606687545776368, 0.011623776435852051, 0.011602432250976562, 0.011648320198059081, 0.011510463714599609, 0.011593728065490723, 0.011746527671813965, 0.011553728103637695, 0.011612000465393067, 0.011527199745178223, 0.011545568466186524, 0.011470720291137696, 0.011489567756652832, 0.011444064140319824, 0.0113536958694458, 0.011292703628540039, 0.011383071899414062, 0.011434080123901368, 0.011660544395446778, 0.011631360054016113, 0.011698176383972168, 0.011501567840576172, 0.011489279747009277, 0.011423680305480957, 0.01154047966003418, 0.011556927680969237, 0.011454463958740235, 0.011410783767700196, 0.01142585563659668, 0.011599679946899415, 0.011473600387573241, 0.011390368461608886, 0.011399168014526367, 0.011524736404418945, 0.01147116756439209, 0.0114749755859375, 0.011458271980285645, 0.01151910400390625, 0.011521023750305176, 0.011621536254882813, 0.011596608161926269, 0.011699616432189941, 0.011405599594116212, 0.011509119987487793, 0.011631520271301269, 0.011700160026550293, 0.011671551704406738, 0.011577343940734864, 0.011525664329528808, 0.011155008316040039, 0.011452863693237305, 0.011413503646850585, 0.011390975952148438, 0.0117391357421875, 0.011433792114257813, 0.011395584106445313, 0.011449983596801758, 0.011585599899291992, 
0.011503616333007812, 0.011351200103759765, 0.011457375526428223, 0.011540831565856934, 0.011529855728149414, 0.01152012825012207, 0.01148470401763916, 0.011538016319274902, 0.011522720336914063, 0.011432064056396485, 0.011393024444580077, 0.011362303733825683, 0.011294719696044921, 0.011253824234008789, 0.011340831756591797, 0.011393535614013671, 0.011483551979064942, 0.011493375778198242, 0.01132953643798828, 0.011302016258239745, 0.011290656089782716, 0.011190688133239746, 0.011202048301696778, 0.011407936096191406, 0.01164352035522461, 0.011464127540588379, 0.011445664405822753, 0.01151478385925293, 0.011559200286865235, 0.011677408218383789, 0.01147052764892578, 0.011540127754211425, 0.011597503662109375, 0.011625503540039063, 0.011628576278686523, 0.011564959526062011, 0.011460288047790528, 0.011513567924499512, 0.011544768333435058, 0.011425503730773926, 0.01152070426940918, 0.01142745590209961, 0.011405856132507323, 0.011462400436401367, 0.011374143600463868, 0.011401663780212403, 0.011468480110168457, 0.011552255630493164, 0.011510944366455078, 0.0115217924118042, 0.011342016220092774, 0.011411264419555664, 0.01157475185394287, 0.011516448020935058, 0.011628543853759766, 0.013717184066772461, 0.011911487579345703, 0.011515904426574707, 0.011476415634155274, 0.011471103668212891, 0.01145248031616211, 0.01158351993560791, 0.011587712287902832, 0.01161843204498291, 0.011538399696350097, 0.011611840248107911, 0.011534111976623535, 0.011663935661315917, 0.01159119987487793, 0.011714240074157715, 0.01161292839050293, 0.011497792243957519, 0.011520031929016114, 0.01140003204345703, 0.011350879669189453, 0.0116627836227417, 0.011468607902526856, 0.01151369571685791, 0.011412320137023925, 0.011347711563110352, 0.011539775848388672, 0.01151478385925293, 0.01142307186126709, 0.011346688270568847, 0.011347935676574708, 0.011470656394958496, 0.01146236801147461, 0.01150819206237793, 0.011436032295227052, 0.01145241641998291, 0.011460448265075683, 0.011327360153198242, 0.011418047904968262, 0.011382368087768555, 0.01136076831817627, 0.011364095687866211, 0.011499808311462403, 0.011531871795654297, 0.01148969554901123, 0.011503328323364257, 0.011501855850219726, 0.0114518404006958, 0.011479328155517578, 0.011526111602783204, 0.011524127960205079, 0.011575296401977539, 0.011652768135070801, 0.011631135940551757, 0.011695327758789063, 0.01159228801727295, 0.011429887771606445, 0.011500896453857422, 0.01155958366394043, 0.011505663871765137, 0.01140940761566162, 0.011296319961547851, 0.011322943687438965, 0.011124128341674805, 0.011658207893371582, 0.011484512329101563, 0.011401568412780762, 0.011434144020080566, 0.011333791732788086, 0.011294719696044921, 0.012070624351501465, 0.011448512077331543, 0.011585280418395997, 0.011502079963684082, 0.011352160453796386, 0.0113570556640625, 0.01141823959350586, 0.011584063529968262, 0.011595775604248047, 0.011562463760375976, 0.011581727981567383, 0.011623616218566895, 0.01154047966003418, 0.011493120193481445, 0.01152444839477539, 0.01154105567932129, 0.011789983749389648, 0.01166585636138916, 0.011690079689025879, 0.011476479530334472, 0.011508128166198731, 0.011454591751098633, 0.011439935684204101, 0.011477055549621583, 0.011583488464355468, 0.011573023796081543, 0.011528127670288086, 0.011341152191162109, 0.011378879547119141, 0.011806528091430664, 0.011367487907409667, 0.011396703720092773, 0.011426079750061035, 0.011646976470947265, 0.012785216331481934, 0.011399295806884766, 0.011305024147033691, 0.011204863548278808, 0.011456640243530273, 
0.011435903549194337, 0.01124687957763672, 0.01163747215270996, 0.011558912277221679, 0.011531904220581054, 0.011328895568847657, 0.011306303977966309, 0.011480768203735352, 0.011487104415893555, 0.011569503784179688, 0.011489055633544921, 0.01159926414489746, 0.0117990083694458, 0.011484992027282715, 0.011437824249267578, 0.011461183547973633, 0.011306271553039551, 0.011020383834838866, 0.011834495544433594, 0.011561599731445313, 0.011286751747131347, 0.011161536216735839, 0.011355487823486329, 0.011458239555358886, 0.01140220832824707, 0.01137052822113037, 0.01127830410003662, 0.01122713565826416, 0.011318400382995605, 0.011293567657470704, 0.011399168014526367, 0.011270496368408203, 0.011131775856018067, 0.011176735877990722, 0.01117734432220459, 0.011146271705627441, 0.011998815536499024, 0.011028608322143555, 0.011007648468017578, 0.011438303947448731, 0.011671551704406738, 0.01168716812133789, 0.01162668800354004, 0.011517631530761718, 0.01148630428314209, 0.011489055633544921, 0.011382399559020996, 0.011325535774230957, 0.01123750400543213, 0.011352224349975586, 0.011182368278503418, 0.01112179183959961, 0.011201120376586915, 0.011259455680847168, 0.011270112037658691, 0.011230815887451171, 0.01116044807434082, 0.011175423622131348, 0.01147481632232666, 0.011647616386413574, 0.011395071983337402, 0.011228192329406738, 0.011287520408630371, 0.011319231986999512, 0.011201919555664063, 0.011356863975524902, 0.011401439666748047, 0.011351840019226074, 0.011560959815979004, 0.01133743953704834, 0.011536928176879882, 0.01147980785369873, 0.011171039581298827, 0.01118505573272705, 0.0111494722366333, 0.011057439804077149, 0.010963104248046874, 0.010989151954650878, 0.011173695564270019, 0.011473119735717774, 0.01198265552520752, 0.011597791671752929, 0.011545023918151855, 0.011427231788635254, 0.011345727920532227, 0.011336480140686035, 0.011364480018615723, 0.011267104148864747, 0.011203071594238282, 0.011192192077636718, 0.011129311561584473, 0.011318559646606446, 0.011594207763671874, 0.011469056129455566, 0.011294719696044921, 0.01147935962677002, 0.011491007804870605, 0.011404831886291504, 0.011237824440002441, 0.011218015670776366, 0.011250335693359375, 0.011219584465026855, 0.011129695892333985, 0.011235967636108398, 0.011397184371948242, 0.011354240417480468, 0.011288576126098633, 0.011122271537780762, 0.01111900806427002, 0.011082847595214844, 0.011055968284606933, 0.010969152450561523, 0.01092403221130371, 0.011327263832092285, 0.011657024383544922, 0.011564736366271972, 0.01153536033630371, 0.011541664123535156, 0.011529024124145509, 0.011448224067687989, 0.011937952041625977, 0.01209926414489746, 0.011427840232849122, 0.012511296272277832, 0.011321120262145996, 0.011315360069274902, 0.011306912422180175, 0.011337408065795898, 0.011501983642578125, 0.011503968238830566, 0.011515551567077637, 0.011311103820800781, 0.011324704170227051, 0.011393759727478027, 0.011472895622253418, 0.01140940761566162, 0.0112576322555542, 0.011265536308288575, 0.011412192344665527, 0.011472415924072266, 0.011475104331970215, 0.011409728050231934, 0.011332991600036622, 0.011019136428833008, 0.011167743682861327, 0.011103263854980469, 0.01126307201385498, 0.011672960281372071, 0.011581695556640625, 0.011669055938720703, 0.011662015914916992, 0.011577343940734864, 0.011488415718078614, 0.011420512199401855, 0.01122713565826416, 0.011093376159667968, 0.011343903541564941, 0.011179807662963868, 0.01117471981048584, 0.011050335884094238, 0.011138943672180176, 0.01135696029663086, 0.01152409553527832, 
0.011718463897705079, 0.011607904434204102, 0.01125158405303955, 0.011282848358154298, 0.01134598445892334, 0.01123145580291748, 0.011202527999877929, 0.011310912132263184, 0.011481087684631347, 0.011390975952148438, 0.011356160163879395, 0.011567104339599609, 0.011396575927734375, 0.011172479629516601, 0.011018176078796387, 0.010997376441955566, 0.011055711746215821, 0.011443679809570312, 0.011701567649841309, 0.011623456001281738, 0.011539999961853026, 0.01150614356994629, 0.011373855590820313, 0.011313983917236328, 0.011251744270324706, 0.011206463813781738, 0.01123737621307373, 0.011249024391174317, 0.011266752243041992, 0.01152627182006836, 0.01147475242614746, 0.011463744163513184, 0.011296799659729004, 0.011213600158691406, 0.01144547176361084, 0.011470815658569337, 0.011340543746948243, 0.011379167556762696, 0.01149510383605957, 0.011437824249267578, 0.011421759605407715, 0.011489503860473633, 0.011449503898620605, 0.011141119956970215, 0.011389984130859374, 0.011142047882080078, 0.011192383766174317, 0.011583200454711914, 0.011540639877319335, 0.011683903694152831, 0.01154054355621338, 0.011491328239440919, 0.011576800346374512, 0.01147548770904541, 0.01132755184173584, 0.011380672454833984, 0.011302911758422851, 0.011222463607788086, 0.011253567695617675, 0.011407487869262696, 0.011457216262817383, 0.011417792320251465, 0.011248415946960449, 0.01128060817718506, 0.011538271903991698, 0.011560959815979004, 0.011395456314086914, 0.011333312034606934, 0.011373408317565918, 0.011511584281921387, 0.011472352027893066, 0.01138764762878418, 0.011552448272705079, 0.011434304237365722, 0.01139065647125244, 0.011394880294799804, 0.011239935874938965, 0.011222528457641602, 0.011260416030883789, 0.011309056282043458, 0.011084863662719727, 0.011158432006835937, 0.011400320053100587, 0.011684767723083496, 0.011705856323242187, 0.011612223625183106, 0.011616928100585937, 0.01155782413482666, 0.011719264030456544, 0.011303168296813965, 0.01109228801727295, 0.01127619171142578, 0.011396096229553223, 0.011303071975708007, 0.011543007850646972, 0.011401375770568848, 0.011226719856262207, 0.011202336311340332, 0.01122374439239502, 0.011194304466247558, 0.011431936264038087, 0.011388928413391113, 0.011466303825378417, 0.011571935653686524, 0.011447456359863281, 0.01147049617767334, 0.011185471534729003, 0.01138268756866455, 0.011340576171875, 0.011339776039123535, 0.011591520309448242, 0.011524255752563477, 0.01160752010345459, 0.011643424034118652, 0.011480640411376954, 0.011534784317016602, 0.011390432357788086, 0.01132307243347168, 0.011221792221069336, 0.010997823715209961, 0.010950655937194824, 0.011142304420471191, 0.011305824279785156, 0.01115884780883789, 0.01132806396484375, 0.01159603214263916, 0.011435327529907226, 0.011380672454833984, 0.011412096023559571, 0.011396639823913575, 0.011386816024780273, 0.011530783653259278, 0.011334976196289063, 0.011316255569458008, 0.011413215637207031, 0.011395008087158202, 0.011255071640014649, 0.01150211238861084, 0.01143558406829834, 0.01192204761505127, 0.011956512451171875, 0.011598912239074707, 0.011410079956054687, 0.011385919570922852, 0.011469759941101075, 0.0114617919921875, 0.011524959564208985, 0.011692031860351563, 0.011333632469177245, 0.011304960250854493, 0.011593728065490723, 0.01165721607208252, 0.011419872283935546, 0.011534111976623535, 0.011802463531494141, 0.01147100830078125, 0.011757823944091797, 0.011482463836669923, 0.01131497573852539, 0.011324159622192383, 0.011302783966064453, 0.011247008323669434, 0.011336288452148437, 
0.011482272148132324, 0.011504480361938477, 0.011187552452087402, 0.011014335632324219, 0.011006431579589844, 0.011196352005004883, 0.011448543548583984, 0.011786016464233398, 0.011518143653869628, 0.0113438081741333, 0.011450336456298829, 0.011713888168334961, 0.014022527694702148, 0.011655872344970702, 0.011632384300231934, 0.011481120109558105, 0.011462880134582519, 0.011303263664245606, 0.011177632331848144, 0.011169695854187011, 0.01145251178741455, 0.01146675205230713, 0.011394271850585938, 0.011402303695678712, 0.011368224143981934, 0.011376799583435059, 0.011758624076843261, 0.011551103591918944, 0.011362688064575195, 0.011249407768249512, 0.011210623741149903, 0.011183679580688477, 0.011477791786193847, 0.011648927688598633, 0.011614368438720704, 0.011456480026245117, 0.011481216430664062, 0.011396991729736327, 0.01120687961578369, 0.01105465602874756, 0.011036800384521485, 0.011175423622131348, 0.011260512351989747, 0.011165696144104004, 0.011392831802368164, 0.01124953556060791, 0.011456831932067871, 0.011280256271362305, 0.01153651237487793, 0.011376768112182618, 0.011472767829895019, 0.01147606372833252, 0.011362815856933594, 0.011204352378845215, 0.011276960372924805, 0.011318431854248048, 0.011479328155517578, 0.011385439872741699, 0.011321311950683593, 0.01127785587310791, 0.011346559524536132, 0.012291872024536132, 0.016046464920043944, 0.015373920440673828, 0.011554911613464355, 0.011342111587524415, 0.011253600120544433, 0.011236703872680665, 0.011177760124206543, 0.011076031684875488, 0.011390496253967285, 0.011491904258728028, 0.011560256004333497, 0.011459168434143066, 0.011396703720092773, 0.011481504440307617, 0.011438079833984375, 0.011304960250854493, 0.01139913558959961, 0.01140944004058838, 0.011347135543823243, 0.011286656379699707, 0.011404064178466797, 0.01150496006011963, 0.011397407531738282, 0.011417887687683105, 0.01113491153717041, 0.011211903572082519, 0.011645919799804687, 0.011658368110656738, 0.011613280296325683, 0.011558367729187011, 0.011491647720336914, 0.011978752136230468, 0.011326560020446777, 0.011172767639160155, 0.011209919929504394, 0.01115772819519043, 0.011117183685302734, 0.011470911979675293, 0.01134768009185791, 0.011332799911499023, 0.011502271652221679, 0.01136575984954834, 0.011318207740783692, 0.011222208023071289, 0.011400159835815429, 0.011456543922424317, 0.011430047988891601, 0.01132630443572998, 0.011294591903686523, 0.011506624221801758, 0.011339039802551269, 0.011465439796447754, 0.011308320045471191, 0.011220000267028808, 0.011183744430541992, 0.011493087768554688, 0.011569503784179688, 0.011720447540283204, 0.011773951530456543, 0.011538687705993653, 0.011544544219970703, 0.011444576263427734, 0.011851648330688476, 0.011355072021484376, 0.0114901762008667, 0.011317248344421387, 0.011271552085876465, 0.011473631858825684, 0.011474495887756347, 0.011527839660644532, 0.011315808296203614, 0.011546688079833984, 0.011382783889770508, 0.01132953643798828, 0.01128809642791748, 0.011333503723144532, 0.011448415756225586, 0.011491840362548827, 0.011517824172973633, 0.011509311676025391, 0.011655679702758789, 0.011647295951843262, 0.011771648406982423, 0.011629887580871582, 0.011601823806762696, 0.011498496055603028, 0.011361536026000976, 0.011356703758239746, 0.011286175727844239, 0.011306976318359375, 0.011382207870483399, 0.01155571174621582, 0.011362367630004883, 0.011471936225891113, 0.011461376190185547, 0.011335743904113769, 0.011432064056396485, 0.011531904220581054, 0.011321727752685546, 0.011353983879089355, 
0.011331520080566405, 0.01130515193939209, 0.011405311584472656, 0.011334815979003906, 0.01132153606414795, 0.011439007759094238, 0.011288607597351074, 0.011207776069641113, 0.011099072456359863, 0.011328831672668457, 0.01164735984802246, 0.011677696228027343, 0.012042207717895508, 0.011521599769592284, 0.01209596824645996, 0.011251711845397949, 0.01110540771484375, 0.01113491153717041, 0.011086400032043458, 0.011023776054382324, 0.011246560096740722, 0.011314944267272949, 0.011133184432983398, 0.011300864219665528, 0.011206751823425292, 0.011374879837036132, 0.011299679756164551, 0.01111734390258789, 0.011167743682861327, 0.011170880317687988, 0.011125823974609376, 0.011093215942382812, 0.011279168128967286]",tokens/s,87.50181426227068,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.140864,3354.329088,0.0,2959.081472,2942.567424,s,1,7.48135009765625,7.48135009765625,0.0,7.48135009765625,7.48135009765625,7.48135009765625,7.48135009765625,[7.48135009765625],,kWh,1.0317070429154758e-05,1.1307956339052472e-06,3.3486137899982338e-06,1.4796479853058239e-05,,MB,1108.537344,3545.16992,0.0,3139.436544,3105.830912,s,10,2.5526253204345704,0.25526253204345706,0.0032615790050734743,0.254351676940918,0.2591282501220703,0.26135854339599607,0.26314277801513675,"[0.25863262939453124, 0.2520792694091797, 0.2542895355224609, 0.2635888366699219, 0.25307455444335936, 0.2544411163330078, 0.252842041015625, 0.254413818359375, 0.255574462890625, 0.25368905639648437]",tokens/s,1002.8890568100194,kWh,7.693318503070439e-06,8.48431306636885e-07,5.088183895105415e-06,1.3629933704812739e-05,tokens/kWh,18782189.667555477,MB,1134.329856,3587.11296,0.0,3181.379584,3162.0096,s,10,13.320837524414063,1.3320837524414064,0.010960308835285407,1.3341849365234375,1.3424161987304688,1.3445538635253906,1.346263995361328,"[1.3348258056640625, 1.329916259765625, 1.3039884033203124, 1.341941162109375, 1.3466915283203125, 1.3388785400390626, 1.3343978271484376, 1.3339720458984374, 1.3292396240234374, 1.326986328125]",tokens/s,47.29432356226501,kWh,3.850215672317936e-05,4.246395864887071e-06,2.49971735064944e-05,6.774572609456083e-05,tokens/kWh,929947.9632421881,,s,630,13.317967199325548,0.021139630475119936,0.0004475622198116647,0.021108351707458495,0.021405736923217775,0.021646073341369625,0.023140482158660904,"[0.021833984375, 0.021306880950927733, 0.021195552825927735, 0.02118454360961914, 0.021015647888183595, 0.021033056259155275, 0.020974239349365233, 0.02094095993041992, 0.021659648895263672, 0.02223865509033203, 0.021121599197387694, 0.021217248916625978, 0.021082304000854493, 0.020973407745361328, 0.02104934310913086, 0.021352127075195314, 0.021195072174072266, 0.021053440093994142, 0.02337785530090332, 0.02159212875366211, 0.02106777572631836, 0.021054975509643553, 0.0210068473815918, 0.021184511184692383, 0.02147737693786621, 0.021200895309448242, 0.021020671844482423, 0.020979711532592774, 
0.020916223526000977, 0.021108863830566406, 0.02112499237060547, 0.0210882568359375, 0.02117849540710449, 0.02107187271118164, 0.021050432205200195, 0.02100511932373047, 0.02101641654968262, 0.021037216186523437, 0.02063564872741699, 0.02084864044189453, 0.02136911964416504, 0.02136444854736328, 0.021158912658691405, 0.021185535430908203, 0.021143552780151367, 0.021073919296264648, 0.02111052894592285, 0.02115814399719238, 0.02109644889831543, 0.020907424926757814, 0.02106540870666504, 0.02101340866088867, 0.021019712448120117, 0.020785184860229493, 0.020589471817016602, 0.0209215030670166, 0.022244192123413085, 0.021233375549316407, 0.021301536560058593, 0.02109971237182617, 0.021086271286010743, 0.021033504486083984, 0.021063455581665037, 0.021380319595336914, 0.020991104125976563, 0.021048704147338868, 0.020976415634155275, 0.02112892723083496, 0.02088083267211914, 0.020973600387573243, 0.020910335540771485, 0.020911455154418945, 0.020947807312011717, 0.020795263290405274, 0.020844736099243165, 0.021308639526367187, 0.021358816146850586, 0.021047903060913087, 0.020997535705566405, 0.020933216094970702, 0.020958335876464843, 0.021136159896850585, 0.021221664428710936, 0.021040800094604493, 0.021117088317871093, 0.021107839584350585, 0.02110323143005371, 0.021239423751831056, 0.021079872131347658, 0.020884288787841796, 0.0211494083404541, 0.021518815994262697, 0.022140384674072266, 0.021156192779541016, 0.02117251205444336, 0.02138412857055664, 0.021217472076416017, 0.020951648712158204, 0.021546527862548827, 0.021117408752441405, 0.02133363151550293, 0.021086591720581055, 0.021157087326049803, 0.020970079421997072, 0.020967008590698243, 0.021233247756958007, 0.021285888671875, 0.02114460754394531, 0.021117408752441405, 0.021198911666870116, 0.021263008117675782, 0.020953216552734376, 0.02087286376953125, 0.020772159576416014, 0.020996799468994142, 0.02124595260620117, 0.02109235191345215, 0.02108201599121094, 0.021047199249267578, 0.021129440307617188, 0.021151519775390624, 0.021106624603271486, 0.021313056945800782, 0.02098454475402832, 0.02085500717163086, 0.020627231597900392, 0.025465856552124022, 0.021867679595947265, 0.020703296661376953, 0.02128771209716797, 0.020574207305908202, 0.020930656433105467, 0.020615072250366212, 0.020625024795532226, 0.020816255569458007, 0.020501855850219727, 0.020576927185058595, 0.020494335174560546, 0.020558847427368163, 0.020536319732666015, 0.020864831924438478, 0.020586687088012694, 0.02050009536743164, 0.020443519592285155, 0.0204202880859375, 0.02048646354675293, 0.020508096694946288, 0.020478111267089844, 0.02050089645385742, 0.020532928466796874, 0.020633535385131838, 0.020799871444702148, 0.020707328796386718, 0.02069708824157715, 0.02052230453491211, 0.02055027198791504, 0.020664384841918945, 0.02058448028564453, 0.020551647186279297, 0.020512767791748047, 0.020531200408935548, 0.020485664367675783, 0.02046614456176758, 0.020410367965698242, 0.02046771240234375, 0.020534271240234374, 0.02051584053039551, 0.020463615417480468, 0.020531200408935548, 0.020572160720825194, 0.020596736907958983, 0.020590591430664062, 0.02063564872741699, 0.020537343978881836, 0.020507776260375976, 0.020572736740112306, 0.021655231475830077, 0.020709856033325195, 0.020465824127197267, 0.020414464950561522, 0.020525056838989256, 0.020492000579833983, 0.020572160720825194, 0.02050281524658203, 0.02046335983276367, 0.02056550407409668, 0.020781824111938477, 0.020545536041259766, 0.020535232543945313, 0.02125619125366211, 0.021133279800415038, 0.021141536712646486, 
0.02124799919128418, 0.021296127319335938, 0.021138431549072266, 0.02128428840637207, 0.021332735061645507, 0.021277568817138673, 0.021272863388061523, 0.0212957763671875, 0.021181535720825196, 0.021203872680664062, 0.02129052734375, 0.021238239288330078, 0.02134534454345703, 0.021300224304199217, 0.022749120712280274, 0.02287513542175293, 0.02124083137512207, 0.021333471298217773, 0.02170217514038086, 0.022010879516601564, 0.02146214485168457, 0.021266815185546874, 0.02122217559814453, 0.021153184890747072, 0.021186880111694336, 0.021448703765869142, 0.02124799919128418, 0.021207040786743164, 0.021253599166870116, 0.02104368019104004, 0.02105708885192871, 0.02120083236694336, 0.021190271377563477, 0.021177280426025392, 0.021171871185302733, 0.021121120452880858, 0.021266687393188478, 0.021147327423095705, 0.02116640090942383, 0.021195808410644532, 0.021153087615966796, 0.02103875160217285, 0.02104265594482422, 0.021388992309570313, 0.021577856063842774, 0.021409984588623046, 0.021238304138183593, 0.021200992584228515, 0.02117827224731445, 0.021106719970703125, 0.021207008361816406, 0.021103872299194335, 0.021134336471557616, 0.021362272262573243, 0.021135200500488283, 0.021229888916015623, 0.021364543914794924, 0.021190847396850586, 0.021127168655395507, 0.0210882568359375, 0.021634880065917968, 0.021311103820800783, 0.02136025619506836, 0.021177248001098634, 0.02107187271118164, 0.02127052879333496, 0.021209087371826172, 0.021190271377563477, 0.021202592849731444, 0.02115247917175293, 0.021182207107543944, 0.021278751373291015, 0.02121340751647949, 0.021122432708740233, 0.021219167709350586, 0.021351200103759765, 0.022511135101318358, 0.02142665672302246, 0.02135856056213379, 0.021226655960083007, 0.021185407638549803, 0.021075679779052735, 0.021219615936279298, 0.02122528076171875, 0.021108991622924806, 0.02101478385925293, 0.021200576782226564, 0.021214879989624024, 0.0214052791595459, 0.021123199462890624, 0.021260927200317383, 0.021169408798217774, 0.021069631576538086, 0.023248863220214844, 0.0233604793548584, 0.021357952117919923, 0.02140985679626465, 0.02125062370300293, 0.021296735763549804, 0.0215513916015625, 0.02125632095336914, 0.02267087936401367, 0.021575935363769533, 0.021428287506103514, 0.021313247680664064, 0.02131603240966797, 0.021181568145751953, 0.021787519454956054, 0.02124492835998535, 0.021115135192871094, 0.02109913635253906, 0.021289087295532225, 0.02145484733581543, 0.0213309440612793, 0.02144358444213867, 0.021247711181640625, 0.021168415069580077, 0.0211167049407959, 0.021125343322753905, 0.021087520599365233, 0.02134204864501953, 0.021276895523071288, 0.021254816055297852, 0.021783199310302735, 0.021296543121337892, 0.02130601692199707, 0.02127257537841797, 0.021301248550415038, 0.021168127059936523, 0.02126857566833496, 0.021217344284057617, 0.021465951919555665, 0.021345279693603517, 0.02130534362792969, 0.021268672943115234, 0.02139731216430664, 0.022196224212646484, 0.02129305648803711, 0.021118688583374023, 0.021364511489868163, 0.021380992889404298, 0.021574272155761718, 0.02125823974609375, 0.021190656661987304, 0.021127168655395507, 0.02108415985107422, 0.021103776931762696, 0.021154655456542968, 0.021229087829589845, 0.02126019287109375, 0.021135295867919922, 0.021205631256103516, 0.021215232849121093, 0.02131865692138672, 0.021363712310791014, 0.021189823150634765, 0.02106822395324707, 0.02113692855834961, 0.0211812801361084, 0.0212457275390625, 0.02119830322265625, 0.02114841651916504, 0.020973567962646485, 0.021064735412597655, 
0.021056480407714843, 0.021204992294311522, 0.021129215240478515, 0.021364160537719726, 0.02106835174560547, 0.021114879608154297, 0.02123161506652832, 0.0211615047454834, 0.021156320571899413, 0.021202943801879884, 0.02113865661621094, 0.021184736251831055, 0.02124473571777344, 0.021122400283813476, 0.021099199295043947, 0.02109561538696289, 0.021012832641601562, 0.021196992874145507, 0.02123161506652832, 0.022130687713623046, 0.021125247955322266, 0.021128480911254882, 0.02150399971008301, 0.02106915283203125, 0.021060447692871093, 0.020957183837890626, 0.02116783905029297, 0.021106624603271486, 0.02112246322631836, 0.021028959274291992, 0.02072812843322754, 0.021128992080688476, 0.021239839553833007, 0.021026912689208983, 0.021003103256225585, 0.020920095443725587, 0.020948223114013672, 0.020926752090454102, 0.02120137596130371, 0.02097727966308594, 0.020945056915283203, 0.020927871704101562, 0.021322080612182617, 0.020916351318359373, 0.02105209541320801, 0.020932287216186524, 0.020932607650756836, 0.021143327713012694, 0.021059200286865233, 0.021389919281005858, 0.021180383682250975, 0.021092256546020507, 0.021167552947998047, 0.021328575134277345, 0.02150547218322754, 0.02205936050415039, 0.021088064193725584, 0.020908447265625, 0.020983808517456053, 0.021020320892333983, 0.021470815658569335, 0.021037824630737306, 0.024286624908447265, 0.023694976806640625, 0.0210994873046875, 0.021122880935668945, 0.021075328826904296, 0.02095552062988281, 0.02068115234375, 0.02089369583129883, 0.021451839447021483, 0.021174495697021484, 0.021015264511108397, 0.021089279174804687, 0.020906816482543944, 0.021061824798583983, 0.021012479782104493, 0.02121478462219238, 0.021096160888671875, 0.021062368392944335, 0.02105958366394043, 0.020977664947509765, 0.020946271896362306, 0.020852544784545898, 0.020812639236450194, 0.021603967666625975, 0.02116441535949707, 0.021301248550415038, 0.021168127059936523, 0.021608448028564452, 0.02220412826538086, 0.021254432678222655, 0.02103500747680664, 0.021045055389404297, 0.021072191238403322, 0.020982656478881836, 0.021497888565063475, 0.020992223739624023, 0.02112588882446289, 0.02106982421875, 0.02084883117675781, 0.021087648391723633, 0.021252511978149414, 0.021145599365234375, 0.0210882568359375, 0.02107404708862305, 0.021006208419799804, 0.021013664245605468, 0.020988767623901367, 0.021276735305786134, 0.02109791946411133, 0.021077728271484374, 0.021059680938720703, 0.02102751922607422, 0.021155712127685546, 0.020973983764648436, 0.021064640045166016, 0.0209846076965332, 0.02143436813354492, 0.02102176094055176, 0.02098681640625, 0.02106777572631836, 0.021339231491088868, 0.02154572868347168, 0.021133472442626953, 0.021022048950195313, 0.02105411148071289, 0.020971519470214844, 0.02125209617614746, 0.021032447814941405, 0.02096998405456543, 0.020973567962646485, 0.021147647857666017, 0.021069055557250978, 0.02108470344543457, 0.021240032196044922, 0.021690624237060547, 0.021812992095947267, 0.021111040115356444, 0.021012224197387696, 0.020943199157714844, 0.021057184219360352, 0.02153385543823242, 0.02139632034301758, 0.021168127059936523, 0.021112831115722656, 0.02104115104675293, 0.021116064071655272, 0.021465087890625, 0.02104934310913086, 0.021317632675170898, 0.021401344299316408, 0.021164287567138673, 0.020992000579833983, 0.021013919830322265, 0.020989568710327148, 0.02105839920043945, 0.020936511993408204, 0.02097555160522461, 0.02079372787475586, 0.02094198417663574, 0.021275487899780274, 0.021153791427612305, 0.02108563232421875, 
0.02097823905944824, 0.020971616744995116, 0.02099955177307129, 0.02100399971008301, 0.021025600433349608, 0.02082761573791504, 0.020922208786010744, 0.02202899169921875, 0.021259967803955077, 0.02228665542602539, 0.0210513916015625, 0.0209998722076416, 0.02102617645263672, 0.02117433547973633, 0.021346336364746095, 0.021043935775756837, 0.020899999618530275, 0.021032608032226563, 0.0213240966796875, 0.02104115104675293, 0.02110233688354492, 0.021078239440917967, 0.020967456817626955, 0.02117532730102539, 0.020890592575073242, 0.02086092758178711, 0.021466400146484373, 0.021222047805786133, 0.021151391983032227, 0.020962751388549805, 0.0210599365234375, 0.02111756706237793, 0.020979360580444337, 0.02103273582458496, 0.02095756721496582, 0.020985727310180665, 0.020980031967163085, 0.020983808517456053, 0.020975616455078124, 0.02097260856628418, 0.020999103546142577, 0.02111692810058594, 0.020922176361083983, 0.021043392181396486, 0.021008384704589843, 0.021149696350097655, 0.020944320678710937, 0.02152409553527832, 0.020810144424438477, 0.021338144302368165, 0.0209072322845459, 0.020828224182128905, 0.02098044776916504, 0.020930559158325195, 0.02107792091369629, 0.020764768600463866, 0.020851999282836913, 0.02126483154296875, 0.02117475128173828, 0.021118783950805665, 0.021018527984619142, 0.020957279205322265, 0.021032960891723632, 0.021018016815185548, 0.020867679595947267, 0.02102841567993164, 0.02101043128967285, 0.021172672271728515, 0.021028959274291992, 0.020977567672729493, 0.021052576065063475, 0.02134899139404297, 0.020979328155517576, 0.021035392761230468, 0.021062143325805666, 0.02100387191772461, 0.020889055252075194, 0.020812448501586915, 0.021323263168334963, 0.021208768844604493, 0.021117727279663087, 0.020932031631469727, 0.020933216094970702, 0.021006303787231444, 0.02101203155517578, 0.02131180763244629, 0.024764575958251954, 0.02135785675048828, 0.021234399795532228, 0.021270463943481446, 0.021692224502563476, 0.021104896545410156, 0.022007295608520508, 0.021066240310668945, 0.021188928604125978, 0.021292736053466797, 0.021004703521728514, 0.021165664672851563, 0.02072985649108887, 0.02064793586730957, 0.020494335174560546, 0.02056172752380371, 0.020389503479003906, 0.020437568664550782, 0.020658176422119142, 0.02050048065185547, 0.020569183349609374, 0.020695968627929686, 0.02067865562438965, 0.020507776260375976]",tokens/s,47.304516565553946,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,836.673536,4675.534848,0.0,4280.287232,4115.121152,s,1,7.6016826171875,7.6016826171875,0.0,7.6016826171875,7.6016826171875,7.6016826171875,7.6016826171875,[7.6016826171875],,kWh,1.0649561300040963e-05,1.1669500490514966e-06,4.535559183990734e-06,1.6352070533083194e-05,,MB,1141.620736,4981.71904,0.0,4575.985664,4408.408064,s,10,3.0703495788574213,0.3070349578857422,0.0031113508746365526,0.30735542297363283,0.30992298278808594,0.3105379837036133,0.3110299844360352,"[0.30563629150390625, 0.3078035583496094, 0.30978631591796874, 0.305809814453125, 0.31115298461914065, 0.3053193359375, 0.3092148132324219, 0.30690728759765623, 0.29955322265625, 0.30916595458984375]",tokens/s,833.7812793788322,kWh,9.109229113762307e-06,1.0042019392834388e-06,6.0174627264245135e-06,1.6130893779470258e-05,tokens/kWh,15870168.355197433,MB,1163.149312,4981.71904,0.0,4575.985664,4408.410624,s,10,15.703744506835939,1.570374450683594,0.010764770164829358,1.5709315795898438,1.5774057006835938,1.5861736145019532,1.5931879455566407,"[1.5635303955078126, 1.5949415283203126, 1.575457275390625, 1.567982177734375, 1.57343115234375, 1.5576832275390624, 1.5684320068359374, 1.5746732177734375, 1.5737548828125, 1.553858642578125]",tokens/s,40.117820289661296,kWh,4.525882735623948e-05,4.99226833244792e-06,2.9779704800174935e-05,8.003080048886235e-05,tokens/kWh,787196.9243737294,,s,630,15.701173805236804,0.024922498103550503,0.0004272402564130739,0.024874271392822264,0.02528845100402832,0.025565788555145262,0.026496753330230712,"[0.026102687835693358, 0.025196224212646483, 0.024997535705566405, 0.024463615417480468, 0.025049472808837892, 0.02510438346862793, 0.025030656814575194, 0.02476032066345215, 0.024825344085693358, 0.02540390396118164, 0.02489958381652832, 0.024928255081176756, 0.024862016677856445, 0.024836799621582032, 0.024797183990478516, 0.025020639419555665, 0.026484415054321288, 0.024778112411499024, 0.024988384246826173, 0.024788991928100586, 0.024749536514282227, 0.024975904464721678, 0.02467840003967285, 0.024720415115356446, 0.024675296783447265, 0.02478451156616211, 0.0245863037109375, 0.024650047302246094, 0.02476851272583008, 0.024532991409301756, 0.024600576400756836, 0.024489311218261717, 0.024676416397094728, 0.024684383392333985, 0.024685312271118164, 0.02476851272583008, 0.024755680084228515, 0.02483987236022949, 0.024685407638549806, 0.024723455429077147, 0.024803327560424804, 0.024874431610107422, 0.02490220832824707, 0.024626592636108398, 0.024721088409423827, 0.02501696014404297, 0.024639232635498047, 0.024637983322143554, 0.02453708839416504, 0.024753503799438477, 0.02453708839416504, 0.02462905693054199, 0.02465878486633301, 0.02449407958984375, 0.024461311340332033, 0.02452889633178711, 0.024820831298828124, 0.024779680252075196, 0.024526687622070314, 0.024524192810058593, 0.0245533447265625, 0.024827775955200197, 0.025082176208496093, 0.025601760864257812, 0.025077600479125977, 0.025168256759643556, 0.025092096328735353, 0.025173759460449217, 0.025379072189331053, 0.02611199951171875, 0.02509823989868164, 0.024986976623535155, 0.026501792907714844, 0.025174016952514647, 0.02531283187866211, 0.025022911071777343, 0.02510643196105957, 0.025440256118774415, 0.025280511856079102, 0.025208831787109375, 0.02533990478515625, 0.025112031936645508, 0.025060928344726563, 0.02517705535888672, 0.025235679626464842, 0.026005279541015624, 0.025058752059936525, 0.02528108787536621, 0.02527846336364746, 0.025186239242553712, 0.025094207763671876, 
0.025026559829711914, 0.025834720611572267, 0.028717248916625977, 0.025162080764770507, 0.025092063903808595, 0.025484895706176756, 0.02531808090209961, 0.025070720672607422, 0.025049983978271486, 0.02565555191040039, 0.024929376602172853, 0.025104320526123047, 0.02495382308959961, 0.025081600189208984, 0.02486800003051758, 0.02741744041442871, 0.025100160598754882, 0.025208255767822266, 0.025230016708374024, 0.02483404731750488, 0.02494259262084961, 0.02655561637878418, 0.025391584396362306, 0.025010496139526366, 0.024936447143554686, 0.025272319793701172, 0.025309183120727538, 0.0249487361907959, 0.024983903884887696, 0.025122079849243164, 0.02492185592651367, 0.02502038383483887, 0.024807231903076172, 0.024941408157348632, 0.024813568115234375, 0.02556159973144531, 0.024815807342529295, 0.025108480453491212, 0.02476201629638672, 0.024852832794189452, 0.025132352828979493, 0.025187007904052733, 0.02479859161376953, 0.02477324867248535, 0.024879104614257814, 0.02485043144226074, 0.024979455947875977, 0.0246778564453125, 0.024840736389160158, 0.024905920028686523, 0.02506528091430664, 0.024967231750488282, 0.02499772834777832, 0.025595935821533203, 0.02516543960571289, 0.025006528854370116, 0.024720800399780272, 0.024848352432250975, 0.02477120018005371, 0.024868864059448242, 0.02488115119934082, 0.024781984329223634, 0.02485536003112793, 0.024868896484375, 0.024856447219848633, 0.024764543533325196, 0.025228511810302733, 0.02533452796936035, 0.024896896362304688, 0.02492483139038086, 0.02471731185913086, 0.024895488739013674, 0.02493235206604004, 0.024669792175292967, 0.02498192024230957, 0.0248353271484375, 0.024867456436157228, 0.024909952163696288, 0.024764415740966796, 0.02496512031555176, 0.02529280090332031, 0.025018367767333984, 0.024927520751953126, 0.024864896774291993, 0.025127519607543947, 0.024861696243286133, 0.024695808410644532, 0.024676000595092774, 0.024912191390991212, 0.024989984512329103, 0.02495257568359375, 0.026462207794189452, 0.02704310417175293, 0.025580255508422852, 0.025200096130371094, 0.024760160446166992, 0.02485318374633789, 0.02488934326171875, 0.025630720138549806, 0.025101696014404297, 0.02508198356628418, 0.02480179214477539, 0.02466160011291504, 0.024838144302368165, 0.02480684852600098, 0.024687583923339845, 0.024453119277954103, 0.02474777603149414, 0.02509971237182617, 0.024995935440063476, 0.02474985694885254, 0.02482681655883789, 0.024737951278686523, 0.025045984268188475, 0.024750495910644533, 0.024869344711303712, 0.024811807632446288, 0.025155296325683595, 0.024747711181640625, 0.02469875144958496, 0.024842687606811523, 0.02551398468017578, 0.025205888748168946, 0.02493529510498047, 0.024768192291259764, 0.024743776321411132, 0.024828384399414063, 0.024922111511230468, 0.02475212860107422, 0.0249051513671875, 0.0246625919342041, 0.024785919189453123, 0.024638463973999023, 0.024786815643310547, 0.025782400131225586, 0.025222591400146484, 0.024950944900512695, 0.02475200080871582, 0.02473628807067871, 0.025057279586791992, 0.024915136337280274, 0.024777536392211915, 0.02476995277404785, 0.024771167755126954, 0.02488528060913086, 0.024692703247070312, 0.02451251220703125, 0.025055007934570314, 0.025018592834472657, 0.02471651268005371, 0.024728160858154297, 0.024671903610229494, 0.024715808868408202, 0.02510643196105957, 0.024880607604980468, 0.025005983352661132, 0.02481558418273926, 0.02496169662475586, 0.024729600906372072, 0.024724863052368165, 0.025129600524902342, 0.025726688385009765, 0.025105791091918947, 0.02500495910644531, 
0.024823808670043947, 0.024723295211791993, 0.024918176651000976, 0.025014272689819338, 0.02489753532409668, 0.024782848358154298, 0.02474809646606445, 0.024735679626464845, 0.024827903747558593, 0.02471900749206543, 0.024858879089355468, 0.02515567970275879, 0.024952831268310546, 0.024961023330688475, 0.02480931282043457, 0.02479283142089844, 0.025078176498413086, 0.024954879760742187, 0.024763904571533202, 0.02482431983947754, 0.024799232482910157, 0.025038175582885742, 0.024758495330810548, 0.02511644744873047, 0.026067615509033203, 0.025126911163330077, 0.025062976837158205, 0.024733951568603516, 0.025063583374023438, 0.02494063949584961, 0.02482975959777832, 0.024748159408569337, 0.02488096046447754, 0.024774848937988283, 0.024870336532592772, 0.024797760009765624, 0.024991743087768553, 0.025287967681884765, 0.02530748748779297, 0.024847808837890624, 0.02485958480834961, 0.02489952087402344, 0.02494441604614258, 0.024877344131469727, 0.02489753532409668, 0.0254748477935791, 0.024923648834228516, 0.024926464080810548, 0.02488368034362793, 0.025202592849731444, 0.025151584625244142, 0.024704191207885744, 0.025014591217041016, 0.02481203269958496, 0.025081439971923827, 0.025279232025146484, 0.025085248947143556, 0.024858976364135744, 0.02487411117553711, 0.025213823318481446, 0.025489887237548827, 0.02481564712524414, 0.024809663772583007, 0.02466377639770508, 0.02482614326477051, 0.024738815307617186, 0.024490400314331053, 0.024510879516601563, 0.024731840133666992, 0.02461033630371094, 0.02467238426208496, 0.024725727081298828, 0.024763967514038084, 0.02464614486694336, 0.02443199920654297, 0.024465375900268555, 0.024623327255249024, 0.024692960739135742, 0.024436511993408204, 0.024893951416015626, 0.02728246307373047, 0.02475926399230957, 0.02447100830078125, 0.024568447113037108, 0.02450979232788086, 0.024838720321655273, 0.024409311294555664, 0.02447849655151367, 0.024905439376831054, 0.025569215774536132, 0.024965471267700195, 0.024682079315185547, 0.02456220817565918, 0.024739072799682616, 0.024527488708496095, 0.024380672454833986, 0.025199359893798828, 0.024696832656860353, 0.02468659210205078, 0.02437443161010742, 0.024912736892700196, 0.02471673583984375, 0.024672672271728514, 0.024545024871826172, 0.024434431076049805, 0.024951007843017577, 0.02498147201538086, 0.024844768524169922, 0.02477414321899414, 0.02460723114013672, 0.024713279724121094, 0.024472864151000976, 0.024849056243896484, 0.02474630355834961, 0.02487468719482422, 0.024475616455078127, 0.0243917121887207, 0.024475648880004884, 0.024614431381225585, 0.02432044792175293, 0.024348352432250978, 0.024633695602416992, 0.024465055465698243, 0.02547884750366211, 0.024681856155395508, 0.02468118476867676, 0.024413759231567383, 0.024253215789794922, 0.025208671569824218, 0.024379392623901368, 0.02430735969543457, 0.02432035255432129, 0.02427449607849121, 0.024344255447387695, 0.02431667137145996, 0.02409267234802246, 0.02410851287841797, 0.024443424224853516, 0.02451878356933594, 0.02455779266357422, 0.02455311965942383, 0.024497600555419923, 0.02435744094848633, 0.024377344131469726, 0.0245166072845459, 0.02454732894897461, 0.024450624465942383, 0.024590784072875977, 0.02469478416442871, 0.024866559982299804, 0.024920320510864256, 0.025042943954467774, 0.02532307243347168, 0.025065919876098634, 0.02529859161376953, 0.025080160140991212, 0.025182207107543944, 0.02574131202697754, 0.025229312896728515, 0.024907039642333983, 0.025221855163574218, 0.02509414482116699, 0.025332927703857422, 0.02503763198852539, 
0.0252969913482666, 0.025105663299560547, 0.025259807586669923, 0.025178720474243164, 0.02539507293701172, 0.025026208877563478, 0.02499580764770508, 0.025058080673217773, 0.02500806427001953, 0.025158815383911133, 0.024957056045532226, 0.024912448883056642, 0.025775711059570314, 0.02559449577331543, 0.02534604835510254, 0.025188352584838865, 0.02511257553100586, 0.024952192306518555, 0.02512249565124512, 0.024984384536743166, 0.02547110366821289, 0.02495078468322754, 0.02573311996459961, 0.02510438346862793, 0.024982816696166994, 0.025018304824829102, 0.0251297607421875, 0.024991743087768553, 0.024993791580200195, 0.02484809684753418, 0.024926496505737306, 0.02512009620666504, 0.024891040802001954, 0.024787967681884765, 0.02516377639770508, 0.025157312393188476, 0.02498796844482422, 0.025059328079223633, 0.02479497528076172, 0.025075872421264647, 0.024937952041625976, 0.025044607162475585, 0.02483228874206543, 0.02491827201843262, 0.024852319717407225, 0.024936479568481447, 0.024752639770507814, 0.02457747268676758, 0.02515385627746582, 0.025128351211547852, 0.024994848251342773, 0.02500966453552246, 0.024864288330078126, 0.025039264678955078, 0.02497747230529785, 0.02494211196899414, 0.024867231369018555, 0.024985984802246095, 0.024811103820800783, 0.02484003257751465, 0.024891807556152345, 0.02502467155456543, 0.025081823348999024, 0.025304895401000976, 0.024956863403320314, 0.024762655258178713, 0.02482713508605957, 0.024888032913208007, 0.024864927291870117, 0.02474380874633789, 0.02486262321472168, 0.025010271072387694, 0.02507161521911621, 0.024821760177612305, 0.024944032669067383, 0.02484694480895996, 0.02502182388305664, 0.025830015182495118, 0.02495692825317383, 0.025053152084350584, 0.025466911315917967, 0.02503987121582031, 0.024826400756835936, 0.02504038429260254, 0.025039392471313475, 0.025901952743530274, 0.025040319442749023, 0.02484486389160156, 0.0249467830657959, 0.024981407165527342, 0.02522915267944336, 0.027841856002807617, 0.025258848190307617, 0.025022464752197264, 0.025313247680664064, 0.024809503555297853, 0.02530860710144043, 0.024846912384033203, 0.024901216506958007, 0.024909568786621095, 0.024836767196655275, 0.0247127685546875, 0.024854976654052733, 0.0248090877532959, 0.024789375305175783, 0.02532099151611328, 0.025024991989135742, 0.02510438346862793, 0.02495692825317383, 0.02495078468322754, 0.024860448837280273, 0.024867040634155273, 0.02503887939453125, 0.024852447509765625, 0.02490096092224121, 0.02482441520690918, 0.024846399307250976, 0.024912927627563478, 0.024914016723632814, 0.025157983779907227, 0.025118976593017577, 0.024872928619384765, 0.024856895446777345, 0.025040447235107424, 0.024844736099243165, 0.02526518440246582, 0.025076416015625, 0.024860416412353516, 0.024975168228149415, 0.02530352020263672, 0.024951040267944338, 0.024993791580200195, 0.02516713523864746, 0.024832735061645506, 0.024532991409301756, 0.024606016159057616, 0.024582048416137696, 0.024684576034545897, 0.024572479248046876, 0.024503936767578127, 0.024566335678100584, 0.025038848876953124, 0.024696352005004883, 0.02490825653076172, 0.024750080108642578, 0.024944639205932616, 0.024710559844970705, 0.0245827522277832, 0.02560406494140625, 0.0246343994140625, 0.024334592819213866, 0.024294111251831056, 0.024363008499145508, 0.024772287368774414, 0.02504025650024414, 0.02467715263366699, 0.025089311599731445, 0.024744831085205077, 0.024397823333740236, 0.024467552185058594, 0.024422592163085937, 0.024391359329223632, 0.024465248107910155, 0.02428767967224121, 
0.02412928009033203, 0.024260608673095704, 0.024224767684936522, 0.02447257614135742, 0.024227840423583984, 0.024276992797851563, 0.024223743438720705, 0.02434662437438965, 0.024319616317749024, 0.024489471435546875, 0.024296319961547852, 0.024321792602539062, 0.024270368576049806, 0.024369407653808593, 0.024230367660522462, 0.024231071472167968, 0.024239999771118164, 0.024346879959106445, 0.024429279327392577, 0.024449024200439453, 0.02481155204772949, 0.024452896118164064, 0.02439151954650879, 0.024455360412597656, 0.02438159942626953, 0.025869983673095703, 0.02511907196044922, 0.024579904556274415, 0.024358495712280274, 0.024795743942260744, 0.024793088912963866, 0.024440832138061523, 0.024411455154418945, 0.02466217613220215, 0.02484217643737793, 0.024576608657836913, 0.024827327728271485, 0.02606867218017578, 0.026467199325561522, 0.02553152084350586, 0.025037120819091797, 0.024994144439697264, 0.025038656234741212, 0.025051136016845704, 0.025047391891479493, 0.025286720275878905, 0.02515558433532715]",tokens/s,40.12438864856562,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,837.378048,14689.435648,0.0,14294.188032,14284.158464,s,1,7.67521337890625,7.67521337890625,0.0,7.67521337890625,7.67521337890625,7.67521337890625,7.67521337890625,[7.67521337890625],,kWh,1.4913201391709664e-05,1.6374225206308957e-06,7.240561347987562e-06,2.379118526032812e-05,,MB,1108.60288,14993.522688,0.0,14587.789312,14512.892416,s,10,13.831203735351563,1.3831203735351563,0.005871257155461055,1.3818245849609374,1.391693359375,1.3923574829101562,1.3928887817382811,"[1.3721275634765624, 1.3819266357421875, 1.3801014404296874, 1.3817225341796875, 1.37926953125, 1.3808548583984375, 1.3878416748046876, 1.3827921142578126, 1.3915457763671875, 1.3930216064453125]",tokens/s,185.08873479007644,kWh,4.046693066208491e-05,4.4630393216569606e-06,2.6789104764601834e-05,7.17190747483437e-05,tokens/kWh,3569482.747766655,MB,1123.889152,15098.380288,0.0,14692.646912,14646.153216,s,10,43.86718408203125,4.386718408203125,0.0031396886923479163,4.385417236328125,4.38997939453125,4.391894970703125,4.3934274316406245,"[4.3893564453125, 4.383873046875, 4.38734375, 4.38417919921875, 4.38458154296875, 4.38401171875, 4.38422119140625, 4.3862529296875, 4.3895537109375, 4.393810546875]",tokens/s,14.36153273075166,kWh,0.000128290075229163,1.4151362859655802e-05,8.529765157139767e-05,0.00022773908966021648,tokens/kWh,276632.3519339395,,s,630,43.86357635498046,0.06962472437298486,0.0003836243782883348,0.06962299346923828,0.06999763946533202,0.0701092010498047,0.07131899414062501,"[0.07148953247070312, 0.06933856201171874, 0.06915724945068359, 0.06906285095214844, 0.06904985809326172, 0.06947686767578125, 0.06943856048583984, 0.06923971557617188, 0.06963814544677735, 0.06922444915771485, 0.06935346984863282, 0.06929730987548828, 0.06950313568115235, 0.06970639801025391, 0.06972013092041016, 0.06969136047363281, 
0.06957833862304688, 0.06917491149902344, 0.069153564453125, 0.06951094055175781, 0.06960326385498047, 0.06940262603759766, 0.06939826965332031, 0.06927823638916016, 0.06958080291748046, 0.06958284759521484, 0.06937728118896484, 0.06945049285888671, 0.0696258544921875, 0.0697630386352539, 0.06975081634521485, 0.06960521697998047, 0.06968335723876953, 0.0696975326538086, 0.06953369903564453, 0.07010099029541016, 0.069984130859375, 0.06956633758544922, 0.069604736328125, 0.06987436676025391, 0.06936790466308594, 0.0693853759765625, 0.06999468994140624, 0.06988992309570312, 0.06990275573730469, 0.07011484527587891, 0.07140643310546875, 0.06964166259765625, 0.06981903839111328, 0.06985507202148437, 0.06988339233398437, 0.06975350189208984, 0.06977913665771485, 0.0696671371459961, 0.0697891845703125, 0.06981683349609374, 0.06974435424804687, 0.06986959838867188, 0.06980226898193359, 0.06995555114746094, 0.06989209747314454, 0.06975667572021485, 0.06962217712402344, 0.07135222625732422, 0.06936370849609375, 0.06899712371826172, 0.06900297546386719, 0.06910348510742187, 0.06912380981445312, 0.06887699127197265, 0.06901145935058593, 0.069146240234375, 0.06937580871582032, 0.06944620513916015, 0.06926335906982421, 0.06919071960449219, 0.06935561370849609, 0.06974345397949219, 0.06968038177490235, 0.06923545837402344, 0.06927696228027344, 0.06910435485839844, 0.06937190246582031, 0.06935247802734375, 0.06953398132324219, 0.06913504028320312, 0.06951692962646484, 0.06967539215087891, 0.06966585540771485, 0.06957766723632812, 0.06952140808105468, 0.06944153594970703, 0.06963552093505859, 0.06984665679931641, 0.06970841979980469, 0.06940499114990234, 0.06934912109375, 0.06944576263427735, 0.06943142700195312, 0.06958451080322266, 0.06940300750732421, 0.06932275390625, 0.069570556640625, 0.06950819396972656, 0.06963292694091797, 0.06971596527099609, 0.06979366302490235, 0.06987789154052734, 0.069914306640625, 0.06977158355712891, 0.06962947082519531, 0.06972259521484375, 0.06952345275878906, 0.06967295837402344, 0.06967478179931641, 0.06976051330566406, 0.06990643310546875, 0.07005027008056641, 0.06974899291992187, 0.06984422302246093, 0.07026262664794922, 0.0700560302734375, 0.07003014373779297, 0.07040204620361327, 0.06994944000244141, 0.06991667175292969, 0.07123763275146484, 0.06943475341796874, 0.06907564544677734, 0.06919366455078126, 0.06899251556396484, 0.06901401519775391, 0.0690847396850586, 0.06907129669189453, 0.06923209381103515, 0.06926595306396484, 0.06939647674560546, 0.06921398162841796, 0.06941907501220704, 0.06978166198730469, 0.07022502136230468, 0.06957965087890625, 0.06956031799316406, 0.06957875061035156, 0.0694824981689453, 0.06914662170410156, 0.06923878479003906, 0.0692462387084961, 0.06963228607177735, 0.06935750579833984, 0.07023056030273438, 0.06960739135742187, 0.06967622375488282, 0.06983763122558594, 0.06990348815917968, 0.06985552215576171, 0.0697534408569336, 0.06964019012451172, 0.06963404846191407, 0.069316162109375, 0.06940624237060547, 0.06959401702880859, 0.06942924499511718, 0.06940672302246094, 0.06938540649414063, 0.06959801483154297, 0.0694988784790039, 0.06971548461914062, 0.06986799621582031, 0.0699669418334961, 0.06987257385253906, 0.069846435546875, 0.06980172729492187, 0.06991545867919922, 0.06973849487304687, 0.0696627197265625, 0.06974771118164062, 0.0695367660522461, 0.06954179382324219, 0.0698345947265625, 0.06991487884521484, 0.0698490219116211, 0.07006623840332031, 0.0699901123046875, 0.07022783660888672, 0.06992694091796875, 0.07000511932373046, 
0.06994287872314453, 0.06979145812988281, 0.07158169555664062, 0.0693616943359375, 0.06920598602294922, 0.06920396423339843, 0.06907904052734375, 0.06913433837890624, 0.069123779296875, 0.0690643539428711, 0.06907766723632812, 0.06913200378417969, 0.06921449279785157, 0.06922022247314454, 0.06925865936279296, 0.06963385772705079, 0.06965862274169922, 0.06957711791992187, 0.06964889526367188, 0.06946406555175781, 0.06913228607177735, 0.06930786895751953, 0.06925965118408203, 0.06913606262207031, 0.06927613067626953, 0.0692674560546875, 0.06921401977539063, 0.06956050872802734, 0.06967910766601562, 0.06985731506347656, 0.06983881378173828, 0.06957846069335938, 0.07005983734130859, 0.06970825958251953, 0.06968112182617188, 0.06942912292480469, 0.0693814697265625, 0.06960006713867188, 0.06935266876220703, 0.06939523315429688, 0.07006963348388671, 0.06970406341552735, 0.06964864349365234, 0.06967091369628907, 0.06994310760498047, 0.06971206665039062, 0.0698502426147461, 0.06966675567626954, 0.06977632141113281, 0.06981427001953125, 0.06956963348388671, 0.06976092529296875, 0.06970265960693359, 0.06981807708740234, 0.06962579345703125, 0.06952339172363281, 0.06967132568359374, 0.06972402954101563, 0.0699024658203125, 0.06981836700439453, 0.06987558746337891, 0.0698694076538086, 0.07016067504882813, 0.06986547088623046, 0.0697092514038086, 0.07101286315917969, 0.06932505798339844, 0.06958454132080077, 0.06906095886230469, 0.06921193695068359, 0.0694151382446289, 0.06912409973144532, 0.06899468994140626, 0.06902742767333984, 0.06910975646972656, 0.06946431732177734, 0.06959926605224609, 0.06925772857666015, 0.069615234375, 0.06980032348632813, 0.069644287109375, 0.06918527984619141, 0.06913459014892578, 0.06914457702636718, 0.06926239776611329, 0.06924143981933593, 0.06921660614013672, 0.06917027282714844, 0.06918646240234375, 0.06943949127197266, 0.0693446044921875, 0.06938848114013672, 0.06991209411621094, 0.06993196868896484, 0.06977651214599609, 0.06958684539794922, 0.06940499114990234, 0.06962246704101563, 0.06975389099121093, 0.06944242858886719, 0.06974470520019531, 0.06949478149414062, 0.0693511962890625, 0.06948681640625, 0.06940643310546875, 0.06951350402832031, 0.06969139099121094, 0.07001910400390625, 0.06987158203125, 0.06981171417236329, 0.06973900604248047, 0.06990227508544922, 0.0697242202758789, 0.06982450866699219, 0.06977065277099609, 0.06951382446289063, 0.06979379272460938, 0.0696844482421875, 0.06960822296142578, 0.0696627197265625, 0.06997401428222656, 0.07002835083007812, 0.07021231842041016, 0.06982383728027344, 0.06993106842041015, 0.07039046478271484, 0.0699920654296875, 0.0699148178100586, 0.07136966705322266, 0.06942924499511718, 0.06906060791015625, 0.06910361480712891, 0.06933229064941407, 0.06928864288330078, 0.06911385345458984, 0.06926950073242187, 0.0690268783569336, 0.06917113494873046, 0.06912716674804688, 0.06924214172363281, 0.06921062469482422, 0.06965443420410156, 0.06963641357421875, 0.06942896270751953, 0.06951760101318359, 0.06914662170410156, 0.06940057373046875, 0.06948770904541016, 0.06956256103515625, 0.06950166320800781, 0.069168701171875, 0.0692040023803711, 0.06946173095703125, 0.06963629150390625, 0.06932077026367188, 0.06954000091552734, 0.06968144226074219, 0.06971139526367187, 0.06969391632080078, 0.06949874877929688, 0.06948876953125, 0.06937737274169922, 0.06966521453857422, 0.06973190307617187, 0.06942991638183593, 0.06932473754882812, 0.06936991882324219, 0.06970687866210938, 0.06935142517089844, 0.06942604827880859, 0.0697343978881836, 
0.06979763031005859, 0.06985343933105469, 0.06974259185791015, 0.06971600341796876, 0.06965245056152344, 0.06945331573486328, 0.06983238220214844, 0.06994412994384766, 0.06974022674560547, 0.06960364532470703, 0.07042864227294922, 0.0698120346069336, 0.06976448059082031, 0.0698499526977539, 0.0700211181640625, 0.06990233612060547, 0.06990636444091797, 0.06999660491943359, 0.070076416015625, 0.06994944000244141, 0.07112908935546874, 0.06944153594970703, 0.06910771179199218, 0.06920396423339843, 0.06904994964599609, 0.06911138916015624, 0.06896636962890625, 0.06906124877929687, 0.06908902740478516, 0.06913686370849609, 0.06932479858398438, 0.06919891357421876, 0.06932572937011719, 0.06940879821777343, 0.06940467071533203, 0.06931635284423829, 0.06919602966308594, 0.06920191955566406, 0.06977519989013672, 0.06944579315185546, 0.06919782257080079, 0.06916268920898437, 0.069281982421875, 0.06962351989746093, 0.06941903686523437, 0.0693905258178711, 0.06978169250488281, 0.06978678131103516, 0.06952582550048828, 0.06979843139648438, 0.06958258819580078, 0.06958220672607422, 0.06942400360107422, 0.06928793334960938, 0.06970982360839843, 0.06955964660644531, 0.06970774078369141, 0.0696951675415039, 0.07006060791015625, 0.06964268493652344, 0.06992870330810547, 0.06993536376953124, 0.06962588500976563, 0.06974848175048828, 0.06974281311035156, 0.069930908203125, 0.06965257263183594, 0.06965042877197265, 0.06936083221435548, 0.06958573150634766, 0.06957164764404297, 0.06999750518798828, 0.06988777923583984, 0.06984111785888672, 0.06988582611083985, 0.0699208984375, 0.06986281585693359, 0.06980258941650391, 0.06999244689941406, 0.0698936996459961, 0.06990892791748046, 0.07008016204833985, 0.06992316436767577, 0.07142601776123046, 0.06943334197998047, 0.06899472045898437, 0.06901996612548827, 0.06916909027099609, 0.06907810974121094, 0.06906963348388671, 0.06919522857666016, 0.06920054626464844, 0.06921382141113282, 0.06952540588378907, 0.06939907073974609, 0.06955741119384766, 0.06974140930175782, 0.0699513931274414, 0.06965567779541015, 0.069333984375, 0.06922835540771484, 0.06917129516601563, 0.06928115081787109, 0.06926972961425781, 0.06931097412109374, 0.06932246398925782, 0.06922882843017578, 0.06941081237792969, 0.06967446136474609, 0.06946604919433594, 0.069552734375, 0.06974854278564453, 0.06982470703125, 0.06980515289306641, 0.07046377563476562, 0.06951705932617187, 0.06967724609375, 0.06934188842773438, 0.06961357116699218, 0.06940876770019531, 0.06930355072021484, 0.06956639862060547, 0.06951200103759765, 0.06944563293457032, 0.06945996856689453, 0.0700967025756836, 0.07010230255126954, 0.0700384292602539, 0.06978150177001953, 0.06976102447509766, 0.0696556167602539, 0.06966368103027344, 0.0696094741821289, 0.06994329833984375, 0.06966067504882813, 0.06963404846191407, 0.06972608184814454, 0.06978892517089844, 0.06986953735351563, 0.06990873718261718, 0.07002713775634765, 0.06994818878173828, 0.07004774475097657, 0.07004364776611328, 0.06999654388427734, 0.07003472137451172, 0.07095539093017578, 0.06944541168212891, 0.06913184356689453, 0.06909008026123047, 0.06914252471923828, 0.06910361480712891, 0.06931660461425782, 0.06918553924560547, 0.06922147369384765, 0.06920694732666016, 0.06938371276855469, 0.06932720184326172, 0.06940275573730469, 0.06963404846191407, 0.06965846252441406, 0.0699024658203125, 0.06981145477294921, 0.06921218872070313, 0.06909625244140626, 0.0693656997680664, 0.06919782257080079, 0.06940009307861328, 0.06948912048339843, 0.0693780517578125, 0.06926131439208984, 
0.06955830383300782, 0.06982176208496094, 0.06976934051513672, 0.06990617370605469, 0.06990723419189453, 0.06957465362548829, 0.06962995147705078, 0.06959913635253906, 0.06938419342041016, 0.06941295623779296, 0.0693759994506836, 0.06954300689697265, 0.06951209259033203, 0.06935955047607421, 0.06955219268798828, 0.0697548828125, 0.07003103637695313, 0.06999884796142578, 0.06997772979736328, 0.0697041244506836, 0.06974463653564453, 0.06983270263671874, 0.06979923248291016, 0.0697946548461914, 0.06991241455078125, 0.07024832153320312, 0.06991462707519532, 0.06990563201904297, 0.06985932922363282, 0.07044528198242188, 0.07016313934326172, 0.07026467132568359, 0.0701822738647461, 0.07001372528076172, 0.07036431884765625, 0.0700681915283203, 0.06992371368408203, 0.07003529357910156, 0.07164012908935546, 0.06991149139404297, 0.06931251525878906, 0.06922374725341797, 0.06925142669677735, 0.06909372711181641, 0.06908665466308593, 0.06913286590576172, 0.06979174041748047, 0.06960694122314454, 0.06955260467529296, 0.0693759994506836, 0.06929129791259765, 0.06982886505126953, 0.06971849822998047, 0.06948863983154296, 0.06930809783935547, 0.06935078430175781, 0.06947936248779298, 0.06957997131347657, 0.07076537322998047, 0.06935529327392578, 0.06926771545410157, 0.06966614532470704, 0.06944217681884765, 0.0693207015991211, 0.06960511779785156, 0.06945613098144532, 0.06966681671142579, 0.07002480316162109, 0.07021609497070312, 0.06984633636474609, 0.06961017608642578, 0.06972621154785157, 0.06979993438720702, 0.07004080200195313, 0.06964304351806641, 0.069607421875, 0.06955548858642578, 0.06996355438232423, 0.06996399688720703, 0.06979452514648438, 0.06999244689941406, 0.06994124603271484, 0.06991462707519532, 0.0700145263671875, 0.06962630462646484, 0.06962179565429688, 0.07027247619628907, 0.06970829010009766, 0.06970982360839843, 0.06992272186279297, 0.06955836486816407, 0.06988390350341797, 0.06996355438232423, 0.06986774444580078, 0.06993852996826172, 0.06986953735351563, 0.070008544921875, 0.07031084442138671, 0.0700203857421875, 0.0699411849975586, 0.06998713684082031]",tokens/s,14.362713949759069,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,737.456128,804.192256,0.0,408.94464,387.119104,s,1,7.182080078125,7.182080078125,0.0,7.182080078125,7.182080078125,7.182080078125,7.182080078125,[7.182080078125],,kWh,6.235722179174748e-06,6.807830903083486e-07,2.0150016119997727e-06,8.931506881482869e-06,,MB,1039.253504,827.260928,0.0,421.527552,354.083328,s,17,0.44126082992553717,0.025956519407384537,0.0007704486004763689,0.025722015380859376,0.026036025619506836,0.02668463325500488,0.028530885696411133,"[0.028992448806762695, 0.025850559234619142, 0.02569664001464844, 0.02565315246582031, 0.025737695693969727, 0.025810623168945314, 0.025988256454467774, 0.02610767936706543, 0.025722015380859376, 0.02579302406311035, 0.02569443130493164, 0.025657024383544922, 0.0255994873046875, 0.02593222427368164, 0.02568422317504883, 0.025718751907348632, 0.02562259292602539]",tokens/s,9862.647452152962,kWh,8.819642737411628e-07,9.724077051836299e-08,5.812534022296101e-07,1.5604584464891359e-06,tokens/kWh,164054352.473129,MB,1065.844736,841.940992,0.0,436.207616,354.085888,s,17,10.024740356445314,0.5896906092026655,0.0030526179067493597,0.5890370483398437,0.593540087890625,0.5946925415039063,0.5948475415039063,"[0.5899246215820313, 0.5886675415039062, 0.5948862915039063, 0.5928040771484375, 0.588941650390625, 0.58674462890625, 0.5922803344726563, 0.5868428955078125, 0.5838924560546875, 0.5850997924804687, 0.5876492919921875, 0.5888758544921875, 0.5890370483398437, 0.59021142578125, 0.5946441040039062, 0.592195556640625, 
0.5920427856445313]",tokens/s,106.83568470792468,kWh,1.686057835910163e-05,1.8594751716190853e-06,7.630632252240906e-06,2.6350685782961627e-05,tokens/kWh,2390829.617069619,,s,1071,10.016516191482534,0.009352489441160173,0.00018683751353915182,0.00931488037109375,0.00946723175048828,0.00958407974243164,0.010190162849426268,"[0.009582559585571289, 0.009558079719543457, 0.009326144218444824, 0.00931488037109375, 0.009281375885009765, 0.00934665584564209, 0.009284159660339356, 0.009285344123840331, 0.00930777645111084, 0.009285280227661133, 0.009210240364074708, 0.009363807678222657, 0.009299391746520997, 0.009382816314697265, 0.009649696350097657, 0.00953593635559082, 0.009332480430603028, 0.009400511741638183, 0.009327520370483398, 0.009412575721740722, 0.009391103744506836, 0.009379615783691406, 0.009423423767089843, 0.009365280151367188, 0.009382111549377441, 0.009430463790893556, 0.009339103698730469, 0.009338175773620606, 0.009308863639831542, 0.009281760215759277, 0.009324607849121093, 0.009309247970581054, 0.009366368293762207, 0.009301823616027832, 0.009455615997314454, 0.009381695747375489, 0.009307904243469238, 0.009355711936950683, 0.009283424377441406, 0.009275424003601073, 0.009264800071716308, 0.009260607719421386, 0.00930457592010498, 0.00935974407196045, 0.009297951698303223, 0.00934227180480957, 0.009354240417480468, 0.009420479774475098, 0.009416383743286133, 0.009494912147521973, 0.009358752250671386, 0.009275039672851563, 0.00933568000793457, 0.009369152069091797, 0.009308544158935547, 0.009293888092041016, 0.009302016258239745, 0.009267200469970703, 0.009359071731567383, 0.009388319969177246, 0.009350751876831055, 0.009293503761291504, 0.009546784400939942, 0.009229887962341308, 0.009464256286621094, 0.009391807556152343, 0.009322815895080567, 0.0092871675491333, 0.009281951904296875, 0.009401760101318359, 0.009308863639831542, 0.009303263664245606, 0.009335007667541504, 0.009238176345825196, 0.00947702407836914, 0.009273216247558594, 0.009261183738708496, 0.009357312202453612, 0.00928508758544922, 0.009359487533569336, 0.00931388759613037, 0.009306943893432618, 0.009261055946350098, 0.009347423553466797, 0.009307680130004882, 0.009312479972839356, 0.009352640151977539, 0.009262784004211426, 0.009347871780395508, 0.009293824195861817, 0.0093023681640625, 0.009313952445983887, 0.009267200469970703, 0.009287712097167968, 0.009363007545471192, 0.00931436824798584, 0.009352864265441895, 0.009267040252685547, 0.009341152191162109, 0.009306752204895019, 0.009350527763366699, 0.009271871566772462, 0.009363743782043457, 0.009260543823242188, 0.00932271957397461, 0.009631903648376464, 0.009445088386535645, 0.009341119766235351, 0.00930406379699707, 0.009351167678833008, 0.009528575897216797, 0.009360128402709961, 0.009375743865966797, 0.009318400382995605, 0.009344799995422363, 0.00946723175048828, 0.009370431900024415, 0.009443391799926757, 0.00932044792175293, 0.009281760215759277, 0.009328224182128907, 0.009242815971374512, 0.009369279861450195, 0.0093221435546875, 0.009335455894470214, 0.00932863998413086, 0.009151904106140137, 0.009402976036071778, 0.009377887725830078, 0.009296031951904296, 0.00934217643737793, 0.009245408058166505, 0.009254719734191895, 0.009275744438171387, 0.009334591865539551, 0.00929315185546875, 0.009347583770751953, 0.00936355209350586, 0.009344927787780762, 0.009375712394714356, 0.009305248260498047, 0.009325440406799317, 0.009369407653808594, 0.009300160408020019, 0.009312512397766114, 0.009222240447998046, 0.00925046443939209, 0.009249919891357422, 
0.00932953643798828, 0.00932044792175293, 0.00925068759918213, 0.009352767944335937, 0.009396224021911622, 0.009417183876037597, 0.00983568000793457, 0.01030185604095459, 0.012427807807922363, 0.009649888038635254, 0.009404704093933106, 0.009422975540161133, 0.009332320213317872, 0.00932688045501709, 0.009476351737976074, 0.009602687835693359, 0.009342623710632323, 0.009554400444030761, 0.00932249641418457, 0.009361568450927735, 0.00965129566192627, 0.009378560066223144, 0.009418432235717773, 0.009400639533996582, 0.00941055965423584, 0.009401984214782715, 0.009322272300720216, 0.009316960334777831, 0.009431039810180664, 0.00940067195892334, 0.009299615859985351, 0.009313504219055175, 0.009947839736938477, 0.009334912300109863, 0.009376959800720215, 0.009346879959106446, 0.009292767524719239, 0.009319583892822265, 0.009216095924377441, 0.009306879997253418, 0.009346943855285644, 0.009099776268005372, 0.009375871658325196, 0.009298048019409179, 0.009340928077697755, 0.009361536026000976, 0.00932646369934082, 0.009373696327209472, 0.009654272079467773, 0.009332544326782226, 0.009395392417907714, 0.009556991577148437, 0.00935910415649414, 0.009406720161437988, 0.00928115177154541, 0.009389792442321777, 0.009453951835632324, 0.009525728225708009, 0.009358271598815918, 0.010694656372070312, 0.00983232021331787, 0.0095098237991333, 0.009506464004516602, 0.009488800048828124, 0.009463808059692384, 0.00937334442138672, 0.009331295967102051, 0.009312031745910644, 0.009614879608154298, 0.009484736442565918, 0.009418208122253418, 0.009451168060302734, 0.009443615913391113, 0.009316224098205567, 0.00940009593963623, 0.009788543701171874, 0.009307519912719726, 0.009364928245544434, 0.009255935668945312, 0.009406368255615234, 0.009400416374206542, 0.00928767967224121, 0.009222368240356445, 0.009295519828796386, 0.009244799613952637, 0.009234623908996582, 0.00927945613861084, 0.00929980754852295, 0.009388031959533692, 0.0093306884765625, 0.009385984420776367, 0.00949465560913086, 0.00952678394317627, 0.009320128440856933, 0.009306976318359375, 0.00930799961090088, 0.009289728164672852, 0.009250495910644531, 0.0092675199508667, 0.00928694438934326, 0.009321184158325195, 0.009313535690307618, 0.009277888298034668, 0.009314784049987793, 0.00906982421875, 0.009359968185424805, 0.009304415702819825, 0.009256768226623534, 0.009293824195861817, 0.009252927780151366, 0.009289088249206543, 0.009329216003417969, 0.009387071609497071, 0.009224960327148438, 0.00925715160369873, 0.00931430435180664, 0.009562272071838379, 0.009327615737915039, 0.009347935676574708, 0.009308416366577149, 0.009322015762329102, 0.009470175743103028, 0.00957875156402588, 0.009448800086975098, 0.009357536315917969, 0.009420096397399902, 0.009341823577880859, 0.009324543952941895, 0.009308159828186035, 0.00937168025970459, 0.00924668788909912, 0.009451519966125489, 0.009265248298645019, 0.009242527961730957, 0.009265151977539063, 0.00926959991455078, 0.0092642240524292, 0.009244319915771484, 0.009247648239135741, 0.009342687606811523, 0.009256383895874024, 0.009243552207946776, 0.009234399795532226, 0.009244480133056641, 0.009204928398132323, 0.009315648078918458, 0.009270943641662598, 0.009279071807861328, 0.009279328346252442, 0.009320480346679687, 0.00926159954071045, 0.00941055965423584, 0.010364831924438477, 0.010149024009704589, 0.009276448249816894, 0.009248671531677246, 0.009282848358154298, 0.00920854377746582, 0.009211551666259766, 0.009228639602661133, 0.009354911804199218, 0.009247072219848633, 0.009311296463012696, 
0.009252127647399902, 0.009608896255493164, 0.009420160293579102, 0.009310815811157227, 0.00909334373474121, 0.009426048278808594, 0.009326815605163574, 0.009212672233581543, 0.00938588809967041, 0.00931948757171631, 0.010384639739990235, 0.00937337589263916, 0.009319968223571777, 0.00918883228302002, 0.009166879653930664, 0.009378175735473632, 0.009242688179016113, 0.009269791603088379, 0.00920969581604004, 0.009250752449035645, 0.00924614429473877, 0.00924454402923584, 0.009275615692138672, 0.009290399551391602, 0.009238080024719238, 0.009220576286315918, 0.009341119766235351, 0.009290911674499512, 0.009302687644958497, 0.009265119552612305, 0.009315839767456055, 0.009231167793273926, 0.009231167793273926, 0.009227168083190919, 0.009326592445373535, 0.009236479759216308, 0.009264351844787598, 0.009355839729309081, 0.009318623542785644, 0.009252863883972168, 0.00919961643218994, 0.009207807540893554, 0.00919155216217041, 0.00931606388092041, 0.009405599594116212, 0.009313280105590821, 0.009308159828186035, 0.009265151977539063, 0.009363455772399902, 0.009298239707946777, 0.009211071968078613, 0.009244959831237793, 0.009258399963378907, 0.009238431930541992, 0.009318400382995605, 0.0092741117477417, 0.009277600288391114, 0.009654272079467773, 0.009286975860595703, 0.009239232063293457, 0.009236607551574707, 0.009185152053833008, 0.00931663990020752, 0.009518719673156739, 0.009375295639038085, 0.009372063636779785, 0.009367072105407714, 0.009246848106384277, 0.00948857593536377, 0.009408255577087403, 0.009277407646179198, 0.009625568389892578, 0.009420607566833496, 0.009289919853210449, 0.009356831550598145, 0.009369888305664063, 0.009299424171447754, 0.009392864227294922, 0.009289759635925293, 0.009330656051635742, 0.009374752044677734, 0.009351936340332032, 0.009333215713500976, 0.009350336074829102, 0.009298496246337891, 0.009273344039916993, 0.009283519744873047, 0.009346688270568847, 0.00929366397857666, 0.009285247802734375, 0.009324895858764648, 0.00926534366607666, 0.009318816184997558, 0.009306143760681153, 0.00933852767944336, 0.009578847885131836, 0.009422080039978027, 0.009304896354675293, 0.009281824111938477, 0.009369248390197754, 0.009293824195861817, 0.009319711685180664, 0.009269984245300292, 0.009285632133483887, 0.009611328125, 0.009494463920593261, 0.0093306884765625, 0.009375583648681641, 0.009339039802551269, 0.00928767967224121, 0.009339103698730469, 0.009454367637634277, 0.00925817584991455, 0.00930726432800293, 0.009274016380310059, 0.009363136291503905, 0.009355615615844727, 0.009283103942871095, 0.009296383857727051, 0.009316320419311524, 0.009545568466186524, 0.009543935775756835, 0.010135104179382324, 0.009476448059082031, 0.010182751655578613, 0.009299872398376464, 0.00939136028289795, 0.009285408020019531, 0.009450464248657227, 0.010119423866271973, 0.009173184394836427, 0.009430848121643066, 0.009290047645568848, 0.00930735969543457, 0.00931062412261963, 0.0093635196685791, 0.009310208320617675, 0.009398271560668945, 0.009271295547485351, 0.00937382411956787, 0.009297792434692382, 0.009215840339660645, 0.009279871940612793, 0.009381664276123048, 0.009347071647644043, 0.009403903961181641, 0.009288127899169923, 0.00922214412689209, 0.00928767967224121, 0.009275168418884277, 0.009224512100219727, 0.009352704048156739, 0.009284064292907715, 0.009377792358398437, 0.00932863998413086, 0.009361408233642577, 0.009314240455627442, 0.009421088218688966, 0.009314080238342286, 0.009451519966125489, 0.009287872314453126, 0.009256768226623534, 0.009238752365112304, 
0.009267104148864747, 0.00965824031829834, 0.009270400047302245, 0.009736703872680665, 0.009259391784667968, 0.009185279846191406, 0.009274656295776368, 0.009287520408630371, 0.009208703994750976, 0.009278464317321777, 0.009452383995056152, 0.009211711883544922, 0.00941500759124756, 0.009242624282836913, 0.009279423713684083, 0.009306464195251465, 0.009230048179626464, 0.009218048095703125, 0.009267200469970703, 0.009207424163818359, 0.009251199722290038, 0.009260160446166992, 0.009249152183532715, 0.009248479843139649, 0.009273119926452637, 0.009309184074401856, 0.00923033618927002, 0.009227583885192871, 0.009291999816894532, 0.009308735847473144, 0.00910540771484375, 0.009277440071105958, 0.009303839683532714, 0.009309503555297851, 0.009239456176757813, 0.009203295707702636, 0.009234880447387695, 0.009244768142700196, 0.009210975646972656, 0.009274144172668457, 0.009235679626464844, 0.00916915225982666, 0.009206303596496582, 0.009383968353271484, 0.009508831977844238, 0.009307840347290038, 0.009314144134521484, 0.009212160110473632, 0.009339327812194825, 0.009394047737121582, 0.00922374439239502, 0.009267552375793457, 0.009302271842956542, 0.009228032112121582, 0.009305088043212891, 0.00923750400543213, 0.009281824111938477, 0.009211615562438965, 0.009268544197082519, 0.009233023643493652, 0.009265215873718261, 0.009225600242614746, 0.009302304267883301, 0.009249119758605958, 0.00927948760986328, 0.009299615859985351, 0.009204352378845215, 0.009264384269714356, 0.009226847648620605, 0.009230208396911622, 0.009228287696838379, 0.009211168289184571, 0.009224672317504883, 0.009439776420593262, 0.009491904258728028, 0.009291999816894532, 0.00929798412322998, 0.009214240074157715, 0.009209600448608399, 0.009218015670776368, 0.009261216163635254, 0.009230175971984863, 0.009261055946350098, 0.009211935997009278, 0.009202783584594726, 0.009260992050170898, 0.009212863922119141, 0.009252448081970215, 0.009194016456604004, 0.009207679748535156, 0.009240511894226075, 0.009217663764953614, 0.009230655670166016, 0.00898204803466797, 0.00936188793182373, 0.009355327606201173, 0.009562047958374023, 0.009285728454589843, 0.009355263710021973, 0.009256863594055175, 0.009267200469970703, 0.009303680419921875, 0.009226271629333496, 0.009550368309020996, 0.009272640228271484, 0.009310144424438477, 0.009318943977355956, 0.009279520034790038, 0.009268223762512207, 0.009288703918457031, 0.009285056114196777, 0.009388383865356445, 0.00924079990386963, 0.009244671821594238, 0.009370688438415528, 0.009347552299499512, 0.00933683204650879, 0.009302207946777344, 0.0092511043548584, 0.009271455764770507, 0.00923737621307373, 0.009248064041137696, 0.009347968101501465, 0.009206560134887696, 0.009258591651916503, 0.009243040084838868, 0.009221343994140626, 0.00930076789855957, 0.00923356819152832, 0.009190560340881348, 0.009210944175720215, 0.009256832122802735, 0.00921670436859131, 0.009274623870849609, 0.009196352005004883, 0.009223648071289062, 0.009234175682067872, 0.009193440437316895, 0.009231167793273926, 0.009285632133483887, 0.009223872184753418, 0.009269696235656738, 0.00923635196685791, 0.009233407974243164, 0.009345215797424316, 0.009255743980407715, 0.009161727905273438, 0.009211999893188477, 0.009285599708557128, 0.009230976104736327, 0.009527968406677247, 0.009436832427978516, 0.009246720314025878, 0.009275584220886231, 0.00927519989013672, 0.009316351890563965, 0.008990336418151855, 0.00931388759613037, 0.009284480094909668, 0.009310208320617675, 0.009287520408630371, 0.009326016426086426, 
0.009269984245300292, 0.009320351600646972, 0.009328960418701172, 0.009236255645751952, 0.009233983993530273, 0.00934342384338379, 0.009218048095703125, 0.009275391578674316, 0.009269248008728028, 0.00929753589630127, 0.009259391784667968, 0.009266464233398438, 0.0093088960647583, 0.00930735969543457, 0.009229248046875, 0.009288607597351074, 0.009585599899291992, 0.009276896476745606, 0.009320992469787598, 0.009342464447021484, 0.009284095764160156, 0.009261055946350098, 0.009336095809936523, 0.009263232231140136, 0.009298879623413086, 0.009286815643310548, 0.009265664100646973, 0.00931388759613037, 0.009264863967895508, 0.009244671821594238, 0.009248479843139649, 0.009271679878234863, 0.00942563247680664, 0.00931827163696289, 0.009319744110107421, 0.009265055656433105, 0.009269951820373535, 0.00934102439880371, 0.009291775703430176, 0.009302016258239745, 0.009224191665649414, 0.009610560417175293, 0.009357312202453612, 0.00929043197631836, 0.009299967765808105, 0.009461440086364747, 0.009367487907409667, 0.009306495666503906, 0.009332736015319825, 0.009302047729492188, 0.009275263786315917, 0.00942908763885498, 0.00926534366607666, 0.009265215873718261, 0.009313823699951172, 0.0102074556350708, 0.009504768371582031, 0.009276800155639649, 0.0096428804397583, 0.010266624450683593, 0.010410176277160645, 0.009338687896728516, 0.009452896118164062, 0.009317312240600586, 0.009315775871276856, 0.009291999816894532, 0.009273407936096191, 0.009287839889526367, 0.009293824195861817, 0.009262432098388671, 0.009273920059204101, 0.009293408393859863, 0.009293343544006348, 0.009330880165100098, 0.009287903785705567, 0.00926966381072998, 0.00930947208404541, 0.009453311920166015, 0.009278047561645507, 0.009201472282409667, 0.009331263542175294, 0.009273344039916993, 0.009267200469970703, 0.009284735679626465, 0.009252896308898926, 0.009236448287963868, 0.009229184150695801, 0.009250816345214843, 0.009284832000732423, 0.009222944259643555, 0.009518943786621093, 0.009407936096191406, 0.009324607849121093, 0.00930457592010498, 0.009425056457519532, 0.009283583641052246, 0.009467904090881347, 0.009242752075195312, 0.009289631843566895, 0.00928547191619873, 0.009242752075195312, 0.009254912376403808, 0.009334336280822754, 0.009660832405090332, 0.009266464233398438, 0.009409279823303222, 0.00922163200378418, 0.009245183944702149, 0.009266912460327148, 0.00924505615234375, 0.009300928115844726, 0.00925385570526123, 0.009256735801696777, 0.00934502410888672, 0.009263039588928223, 0.009220704078674317, 0.009312288284301758, 0.009251935958862305, 0.009263680458068848, 0.009187328338623046, 0.00903433609008789, 0.009310144424438477, 0.009361184120178223, 0.009350879669189453, 0.009290207862854004, 0.009477439880371094, 0.009307200431823731, 0.009276479721069335, 0.00930406379699707, 0.009276000022888184, 0.009291328430175782, 0.009377535820007324, 0.009314944267272949, 0.009339136123657226, 0.009301983833312988, 0.009321632385253907, 0.00929043197631836, 0.009332736015319825, 0.009293600082397461, 0.009255328178405763, 0.009305919647216797, 0.009291232109069824, 0.009271200180053712, 0.009571167945861816, 0.009295647621154784, 0.00933471965789795, 0.009310272216796875, 0.009709792137145996, 0.009309696197509766, 0.009388319969177246, 0.0094551362991333, 0.00937007999420166, 0.009434144020080566, 0.009367903709411622, 0.009301664352416993, 0.009374176025390625, 0.009345631599426269, 0.009271200180053712, 0.00930406379699707, 0.009631744384765625, 0.00935929584503174, 0.009351327896118164, 0.00927507209777832, 
0.00927996826171875, 0.009363200187683106, 0.009275135993957519, 0.009322943687438965, 0.009436991691589356, 0.009347071647644043, 0.009305855751037598, 0.009371904373168946, 0.009276415824890137, 0.009339903831481934, 0.009406463623046875, 0.009357248306274414, 0.009307552337646484, 0.009370400428771972, 0.009398143768310547, 0.009359423637390136, 0.009347007751464844, 0.009267200469970703, 0.00931827163696289, 0.009377087593078613, 0.009158368110656739, 0.009416031837463378, 0.009450464248657227, 0.009379584312438965, 0.009367456436157226, 0.009423487663269043, 0.009392895698547364, 0.009302975654602051, 0.00932044792175293, 0.00932863998413086, 0.00974403190612793, 0.009664608001708984, 0.009421055793762206, 0.009391231536865234, 0.009415552139282227, 0.00939840030670166, 0.009391072273254395, 0.009423935890197754, 0.009293984413146973, 0.009279040336608887, 0.009312383651733399, 0.009363327980041504, 0.009557184219360351, 0.00951801586151123, 0.009336511611938476, 0.009390399932861329, 0.009302016258239745, 0.009353311538696289, 0.009319999694824218, 0.009361760139465332, 0.009403871536254884, 0.00933465576171875, 0.009347935676574708, 0.00935910415649414, 0.009283647537231446, 0.009309951782226562, 0.009257216453552247, 0.009302176475524903, 0.009273183822631837, 0.009367584228515625, 0.00937775993347168, 0.00936963176727295, 0.009315391540527344, 0.009363776206970214, 0.009361536026000976, 0.009418304443359376, 0.009331199645996094, 0.009363936424255372, 0.009273280143737793, 0.009315936088562012, 0.009310912132263184, 0.009324095726013183, 0.009287039756774902, 0.009337632179260254, 0.009395584106445313, 0.00935321617126465, 0.009310175895690918, 0.009345279693603516, 0.009323136329650879, 0.009289119720458984, 0.009332608222961426, 0.00931062412261963, 0.009304160118103028, 0.009390015602111817, 0.01005571174621582, 0.00972812843322754, 0.00993017578125, 0.00998243236541748, 0.00983580780029297, 0.009865471839904785, 0.009544159889221191, 0.00945257568359375, 0.009515392303466796, 0.009323103904724121, 0.009315808296203614, 0.009628191947937011, 0.009510687828063965, 0.00943286418914795, 0.009342752456665038, 0.009320927619934081, 0.009361056327819825, 0.009409055709838868, 0.009338879585266113, 0.009335840225219726, 0.009258272171020508, 0.009227423667907715, 0.009277440071105958, 0.009250559806823731, 0.009253664016723633, 0.009301664352416993, 0.009342623710632323, 0.009272000312805177, 0.009388031959533692, 0.009406496047973632, 0.00943286418914795, 0.009498815536499023, 0.009447456359863281, 0.009408479690551757, 0.009414239883422852, 0.00937820816040039, 0.009417823791503906, 0.009405344009399415, 0.009406047821044922, 0.009339296340942382, 0.009326687812805176, 0.00931011199951172, 0.00932863998413086, 0.009326592445373535, 0.009296159744262695, 0.009278176307678222, 0.009294848442077636, 0.009273344039916993, 0.009346240043640137, 0.00934342384338379, 0.009381312370300293, 0.009421759605407715, 0.009392127990722657, 0.00944870376586914, 0.009519488334655761, 0.009467583656311035, 0.009406559944152832, 0.009483967781066895, 0.009472224235534668, 0.009401023864746094, 0.009396032333374023, 0.00947219181060791, 0.009058303833007812, 0.009429280281066894, 0.009375455856323243, 0.009388031959533692, 0.009302016258239745, 0.009302080154418945, 0.009297599792480469, 0.009392416000366211, 0.009243680000305175, 0.009372608184814453, 0.009371871948242188, 0.009387455940246581, 0.009383359909057616, 0.009420831680297852, 0.00939743995666504, 0.009375616073608399, 0.009361151695251466, 
0.009373760223388672, 0.009269023895263672, 0.009298144340515136, 0.009275391578674316, 0.009240575790405273, 0.009262656211853027, 0.009314751625061036, 0.009300000190734863, 0.009223872184753418, 0.009277728080749512, 0.009318431854248048, 0.009277503967285156, 0.00926694393157959, 0.00937929630279541, 0.00927830410003662, 0.009463647842407226, 0.01049942398071289, 0.010584095954895019, 0.009760383605957032, 0.009409536361694336, 0.009306303977966309, 0.00923423957824707, 0.009381407737731934, 0.009287775993347168, 0.009288415908813477, 0.009655967712402343, 0.009798720359802246, 0.009536352157592774, 0.009316448211669923, 0.009263168334960938, 0.009664704322814941, 0.009436927795410156, 0.009302016258239745, 0.009363455772399902, 0.009362848281860351, 0.00927945613861084, 0.009253567695617675, 0.009689023971557617, 0.009287263870239258, 0.009290111541748048, 0.009318240165710449, 0.00927359962463379, 0.009348223686218262, 0.009276224136352538, 0.009318464279174805, 0.00923027229309082, 0.009006752014160156, 0.009349472045898437, 0.009334783554077148, 0.009344160079956055, 0.009336992263793946, 0.009339103698730469, 0.009207903861999512, 0.009306655883789063, 0.009336671829223632, 0.009260767936706544, 0.009298208236694337, 0.00929587173461914, 0.009564448356628417, 0.010582752227783203, 0.010077183723449706, 0.009422623634338378, 0.009394335746765136, 0.009386048316955567, 0.009317728042602539, 0.009282208442687989, 0.009294112205505372, 0.009288736343383789, 0.009263263702392579, 0.009316608428955079, 0.00932483196258545, 0.00933683204650879, 0.009328543663024902, 0.009317567825317383, 0.00934988784790039, 0.009373855590820313, 0.009349120140075684, 0.009439231872558594, 0.009383456230163574, 0.009354880332946777, 0.009323360443115235, 0.009363327980041504, 0.009371487617492676, 0.00935580825805664, 0.009596159934997558, 0.009355487823486329, 0.009414527893066406, 0.00935977554321289, 0.009340031623840332, 0.00941759967803955, 0.009453248023986817, 0.009453344345092773, 0.00946668815612793, 0.00952905559539795, 0.009493599891662598, 0.009460639953613282, 0.009455615997314454, 0.009390048027038574, 0.009468992233276368, 0.00931270408630371, 0.009323040008544921, 0.009329728126525879, 0.009382847785949708, 0.009388031959533692, 0.009437184333801269, 0.009324128150939942, 0.009252544403076171, 0.009305088043212891, 0.009282367706298828]",tokens/s,106.9234032597796,,