config.name,config.backend.name,config.backend.version,config.backend._target_,config.backend.task,config.backend.library,config.backend.model_type,config.backend.model,config.backend.processor,config.backend.device,config.backend.device_ids,config.backend.seed,config.backend.inter_op_num_threads,config.backend.intra_op_num_threads,config.backend.model_kwargs.trust_remote_code,config.backend.no_weights,config.backend.device_map,config.backend.torch_dtype,config.backend.eval_mode,config.backend.to_bettertransformer,config.backend.low_cpu_mem_usage,config.backend.attn_implementation,config.backend.cache_implementation,config.backend.autocast_enabled,config.backend.autocast_dtype,config.backend.torch_compile,config.backend.torch_compile_target,config.backend.quantization_scheme,config.backend.quantization_config.bits,config.backend.quantization_config.use_exllama,config.backend.quantization_config.version,config.backend.quantization_config.model_seqlen,config.backend.deepspeed_inference,config.backend.peft_type,config.scenario.name,config.scenario._target_,config.scenario.iterations,config.scenario.duration,config.scenario.warmup_runs,config.scenario.input_shapes.batch_size,config.scenario.input_shapes.num_choices,config.scenario.input_shapes.sequence_length,config.scenario.new_tokens,config.scenario.memory,config.scenario.latency,config.scenario.energy,config.scenario.generate_kwargs.max_new_tokens,config.scenario.generate_kwargs.min_new_tokens,config.launcher.name,config.launcher._target_,config.launcher.device_isolation,config.launcher.device_isolation_action,config.launcher.numactl,config.launcher.start_method,config.environment.cpu,config.environment.cpu_count,config.environment.cpu_ram_mb,config.environment.system,config.environment.machine,config.environment.platform,config.environment.processor,config.environment.python_version,config.environment.gpu,config.environment.gpu_count,config.environment.gpu_vram_mb,config.environment.optimum_benchmark_version,config.environment.optimum_benchmark_commit,config.environment.transformers_version,config.environment.transformers_commit,config.environment.accelerate_version,config.environment.accelerate_commit,config.environment.diffusers_version,config.environment.diffusers_commit,config.environment.optimum_version,config.environment.optimum_commit,config.environment.timm_version,config.environment.timm_commit,config.environment.peft_version,config.environment.peft_commit,report.traceback,report.load.memory.unit,report.load.memory.max_ram,report.load.memory.max_global_vram,report.load.memory.max_process_vram,report.load.memory.max_reserved,report.load.memory.max_allocated,report.load.latency.unit,report.load.latency.count,report.load.latency.total,report.load.latency.mean,report.load.latency.stdev,report.load.latency.p50,report.load.latency.p90,report.load.latency.p95,report.load.latency.p99,report.load.latency.values,report.load.throughput,report.load.energy.unit,report.load.energy.cpu,report.load.energy.ram,report.load.energy.gpu,report.load.energy.total,report.load.efficiency,report.prefill.memory.unit,report.prefill.memory.max_ram,report.prefill.memory.max_global_vram,report.prefill.memory.max_process_vram,report.prefill.memory.max_reserved,report.prefill.memory.max_allocated,report.prefill.latency.unit,report.prefill.latency.count,report.prefill.latency.total,report.prefill.latency.mean,report.prefill.latency.stdev,report.prefill.latency.p50,report.prefill.latency.p90,report.prefill.latency.p95,report.prefill.latency.p99,report.prefill.laten
cy.values,report.prefill.throughput.unit,report.prefill.throughput.value,report.prefill.energy.unit,report.prefill.energy.cpu,report.prefill.energy.ram,report.prefill.energy.gpu,report.prefill.energy.total,report.prefill.efficiency.unit,report.prefill.efficiency.value,report.decode.memory.unit,report.decode.memory.max_ram,report.decode.memory.max_global_vram,report.decode.memory.max_process_vram,report.decode.memory.max_reserved,report.decode.memory.max_allocated,report.decode.latency.unit,report.decode.latency.count,report.decode.latency.total,report.decode.latency.mean,report.decode.latency.stdev,report.decode.latency.p50,report.decode.latency.p90,report.decode.latency.p95,report.decode.latency.p99,report.decode.latency.values,report.decode.throughput.unit,report.decode.throughput.value,report.decode.energy.unit,report.decode.energy.cpu,report.decode.energy.ram,report.decode.energy.gpu,report.decode.energy.total,report.decode.efficiency.unit,report.decode.efficiency.value,report.per_token.memory,report.per_token.latency.unit,report.per_token.latency.count,report.per_token.latency.total,report.per_token.latency.mean,report.per_token.latency.stdev,report.per_token.latency.p50,report.per_token.latency.p90,report.per_token.latency.p95,report.per_token.latency.p99,report.per_token.latency.values,report.per_token.throughput.unit,report.per_token.throughput.value,report.per_token.energy,report.per_token.efficiency 4bit-gptq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2871, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert return t.to( torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,11136.868352,12227.3792,0.0,11848.9088,11814.752256,s,1,16.2327216796875,16.2327216796875,0.0,16.2327216796875,16.2327216796875,16.2327216796875,16.2327216796875,[16.2327216796875],,kWh,0.00026652513839583964,2.939236722192133e-05,8.68925695139966e-05,0.00038281007513175756,,MB,2067.243008,14033.027072,0.0,13625.196544,13297.870848,s,10,22.9517958984375,2.29517958984375,0.0008820631523208208,2.2952384033203126,2.296043969726562,2.2965340698242187,2.296926149902344,"[2.294716552734375, 2.293971923828125, 2.294431884765625, 2.29522900390625, 2.295479248046875, 2.29407421875, 2.295247802734375, 2.29568603515625, 2.29593505859375, 2.297024169921875]",tokens/s,111.53811280511947,kWh,6.69229592179181e-05,7.379358171590439e-06,4.43492299238013e-05,0.00011865154731330984,tokens/kWh,2157578.2684401874,MB,2071.445504,14335.01696,0.0,13927.186432,13689.870848,s,10,1357.6977968750002,135.76977968750003,0.03905213328064623,135.753359375,135.8288109375,135.83175703125,135.83411390625,"[135.726765625, 135.7214375, 135.73778125, 135.75059375, 135.75296875, 135.75375, 135.787078125, 135.8045625, 135.82815625, 135.834703125]",tokens/s,0.4640207868423038,kWh,0.003959727415000008,0.0004367888125291182,0.0026341422462009987,0.007030658473730125,tokens/kWh,8960.753851918405,,s,630,1357.6917192382805,2.155066221013145,0.0009203916848711222,2.154985595703125,2.1563291015625,2.156659765625,2.1572001464843753,"[2.15377978515625, 2.15362353515625, 2.154102783203125, 2.152888427734375, 2.153879638671875, 2.153713623046875, 2.1530703125, 2.153484619140625, 2.15326904296875, 2.15319775390625, 2.154849609375, 2.15480126953125, 2.154350830078125, 2.153640380859375, 2.15429931640625, 2.153489501953125, 2.155323974609375, 2.1542197265625, 2.154045654296875, 2.15464697265625, 2.154449462890625, 2.15431298828125, 2.154271484375, 2.15437890625, 2.1539443359375, 2.15463818359375, 2.1542236328125, 2.154859619140625, 2.154393798828125, 2.15407470703125, 2.154638671875, 2.154382080078125, 2.154458251953125, 2.1549345703125, 2.15419140625, 2.154756103515625, 2.15472314453125, 2.154986572265625, 2.153968017578125, 2.154627685546875, 2.154684326171875, 2.154903564453125, 2.154151611328125, 2.154827880859375, 2.15438232421875, 2.153913330078125, 2.155095458984375, 2.154193603515625, 2.15475, 2.155017822265625, 2.1539208984375, 2.154256103515625, 2.1548037109375, 2.154446044921875, 2.155332275390625, 2.15518212890625, 2.15475537109375, 2.154677001953125, 2.15482568359375, 2.15515771484375, 2.154954833984375, 2.154704833984375, 2.1553427734375, 2.15356201171875, 2.153287109375, 2.15346142578125, 2.15304443359375, 2.15363134765625, 2.15324658203125, 
2.153400390625, 2.1534482421875, 2.15419482421875, 2.153303955078125, 2.1533408203125, 2.154169921875, 2.15413916015625, 2.153862060546875, 2.153770751953125, 2.154093994140625, 2.1539375, 2.153967529296875, 2.154238037109375, 2.15394921875, 2.15406396484375, 2.154262451171875, 2.154094482421875, 2.15416015625, 2.153773193359375, 2.154221435546875, 2.154911865234375, 2.154006591796875, 2.154006591796875, 2.15403466796875, 2.15473388671875, 2.153947265625, 2.154082275390625, 2.1540576171875, 2.1540595703125, 2.153810302734375, 2.15459765625, 2.154572021484375, 2.1547216796875, 2.15456787109375, 2.154612548828125, 2.154059814453125, 2.154459228515625, 2.15434033203125, 2.154974853515625, 2.1545126953125, 2.15436083984375, 2.15488427734375, 2.15504296875, 2.154566162109375, 2.1549423828125, 2.15516357421875, 2.15512060546875, 2.154883056640625, 2.155005859375, 2.155716064453125, 2.154611328125, 2.155560302734375, 2.154902099609375, 2.155333251953125, 2.154781005859375, 2.15556298828125, 2.154715087890625, 2.154047607421875, 2.15368701171875, 2.15332666015625, 2.153343017578125, 2.154083740234375, 2.15368115234375, 2.15378759765625, 2.15322802734375, 2.15338232421875, 2.15393408203125, 2.1533740234375, 2.15382666015625, 2.1550546875, 2.15336181640625, 2.15402294921875, 2.153701416015625, 2.15465087890625, 2.1540625, 2.154532958984375, 2.154282958984375, 2.154406005859375, 2.154145751953125, 2.153818115234375, 2.15392041015625, 2.15444287109375, 2.153777099609375, 2.1542412109375, 2.154507080078125, 2.15466943359375, 2.154838134765625, 2.154627685546875, 2.15440966796875, 2.154647216796875, 2.15475244140625, 2.154928466796875, 2.154638427734375, 2.153850830078125, 2.15507666015625, 2.15435986328125, 2.155682861328125, 2.154911865234375, 2.155358154296875, 2.15447509765625, 2.15534765625, 2.154611083984375, 2.155585205078125, 2.1549453125, 2.15463720703125, 2.155443359375, 2.155148193359375, 2.155347412109375, 2.154822021484375, 2.154760009765625, 2.15597216796875, 2.155488037109375, 2.155116455078125, 2.1555029296875, 2.155637451171875, 2.155179931640625, 2.155326904296875, 2.15504736328125, 2.156221923828125, 2.155205322265625, 2.15432421875, 2.153321044921875, 2.15322216796875, 2.15401416015625, 2.154091064453125, 2.15307666015625, 2.154629150390625, 2.15393896484375, 2.15340771484375, 2.153814697265625, 2.153936279296875, 2.154150390625, 2.153486572265625, 2.154694091796875, 2.15483154296875, 2.154114013671875, 2.153946044921875, 2.1546708984375, 2.15391015625, 2.1548994140625, 2.154569091796875, 2.154937255859375, 2.154496826171875, 2.154492919921875, 2.154501708984375, 2.15464599609375, 2.154235107421875, 2.154486572265625, 2.15464697265625, 2.154468017578125, 2.155227294921875, 2.154882568359375, 2.154663330078125, 2.153976806640625, 2.15427001953125, 2.1545458984375, 2.154760009765625, 2.155342041015625, 2.1553828125, 2.154933837890625, 2.154500732421875, 2.15493212890625, 2.15534375, 2.1550771484375, 2.155051513671875, 2.155622314453125, 2.155462646484375, 2.155150390625, 2.15542578125, 2.15520458984375, 2.15525, 2.15550732421875, 2.15552490234375, 2.1555712890625, 2.1557412109375, 2.155888671875, 2.15494384765625, 2.155469482421875, 2.1559990234375, 2.156082763671875, 2.1556552734375, 2.156632568359375, 2.156023681640625, 2.15436474609375, 2.153816650390625, 2.154111083984375, 2.15410400390625, 2.154985595703125, 2.15435107421875, 2.153399658203125, 2.154334228515625, 2.153712158203125, 2.1545986328125, 2.15471875, 2.1541240234375, 2.154567626953125, 2.1542216796875, 2.15448486328125, 
2.1534501953125, 2.1555380859375, 2.154695068359375, 2.1541083984375, 2.154921875, 2.154289306640625, 2.154621337890625, 2.15501416015625, 2.1543505859375, 2.154255859375, 2.15504052734375, 2.154267578125, 2.154643310546875, 2.154623291015625, 2.1547373046875, 2.1549189453125, 2.15486279296875, 2.154537109375, 2.15472119140625, 2.15438818359375, 2.154477783203125, 2.155399169921875, 2.15554248046875, 2.155152587890625, 2.155150146484375, 2.155085693359375, 2.154963134765625, 2.154284423828125, 2.1551826171875, 2.15504296875, 2.1550439453125, 2.15484326171875, 2.15489306640625, 2.155017822265625, 2.155342529296875, 2.15538623046875, 2.15503662109375, 2.154445068359375, 2.15603759765625, 2.15510107421875, 2.156349365234375, 2.15521826171875, 2.155336181640625, 2.15549951171875, 2.1556796875, 2.155181396484375, 2.156218505859375, 2.155602783203125, 2.154482177734375, 2.153758544921875, 2.15450244140625, 2.15416357421875, 2.153860107421875, 2.153955078125, 2.154624267578125, 2.15406884765625, 2.1538037109375, 2.15387744140625, 2.154406005859375, 2.154027099609375, 2.154689697265625, 2.15476708984375, 2.15424169921875, 2.15487939453125, 2.154475341796875, 2.1543466796875, 2.15468994140625, 2.154701416015625, 2.15486669921875, 2.1554482421875, 2.15520458984375, 2.15374853515625, 2.153969482421875, 2.154931640625, 2.154985595703125, 2.15489794921875, 2.154056884765625, 2.15442724609375, 2.15461181640625, 2.15497216796875, 2.15499462890625, 2.154851318359375, 2.154536376953125, 2.154569580078125, 2.1551787109375, 2.15495703125, 2.154661376953125, 2.154598876953125, 2.1551669921875, 2.154668701171875, 2.155399169921875, 2.15532958984375, 2.15520263671875, 2.15521484375, 2.15503466796875, 2.1550322265625, 2.154961181640625, 2.1554052734375, 2.154875, 2.1551328125, 2.1555322265625, 2.15577392578125, 2.1547763671875, 2.155614013671875, 2.1548134765625, 2.15576953125, 2.1561640625, 2.155378662109375, 2.155300048828125, 2.15560888671875, 2.156122314453125, 2.15542138671875, 2.15410693359375, 2.15370751953125, 2.15404345703125, 2.15413134765625, 2.15398388671875, 2.154730712890625, 2.154179443359375, 2.1545166015625, 2.15442236328125, 2.153889404296875, 2.1549775390625, 2.154921875, 2.15444189453125, 2.15519091796875, 2.154968505859375, 2.155385498046875, 2.154739013671875, 2.1551982421875, 2.15461572265625, 2.154502197265625, 2.155098388671875, 2.15466259765625, 2.1547666015625, 2.15472607421875, 2.1551494140625, 2.154901611328125, 2.15481689453125, 2.155205322265625, 2.15503466796875, 2.156209228515625, 2.155299560546875, 2.1552314453125, 2.155640869140625, 2.155310791015625, 2.1551865234375, 2.155093994140625, 2.155556884765625, 2.155173828125, 2.1561201171875, 2.1563681640625, 2.15586328125, 2.15506396484375, 2.155530029296875, 2.155470703125, 2.15595166015625, 2.156271484375, 2.156151123046875, 2.155916748046875, 2.155580322265625, 2.15549951171875, 2.156786865234375, 2.156016357421875, 2.15598291015625, 2.1558330078125, 2.15690283203125, 2.156203125, 2.1568740234375, 2.156804443359375, 2.156328857421875, 2.15601171875, 2.15711328125, 2.156693603515625, 2.155583251953125, 2.154637451171875, 2.154427734375, 2.15435888671875, 2.15477880859375, 2.154145263671875, 2.15453759765625, 2.1538388671875, 2.154702880859375, 2.154565185546875, 2.154799560546875, 2.155702392578125, 2.155001708984375, 2.15463134765625, 2.155059326171875, 2.15557080078125, 2.155644287109375, 2.155435791015625, 2.155279296875, 2.155478271484375, 2.155039306640625, 2.1556474609375, 2.155759521484375, 2.1556796875, 2.155052978515625, 
2.155063232421875, 2.156125244140625, 2.155266845703125, 2.15598583984375, 2.15598583984375, 2.155769775390625, 2.155423828125, 2.156295166015625, 2.155389892578125, 2.154956787109375, 2.1549384765625, 2.155702392578125, 2.156359130859375, 2.15553466796875, 2.156149169921875, 2.15556884765625, 2.15610107421875, 2.156392822265625, 2.15614208984375, 2.155922119140625, 2.155926513671875, 2.15628515625, 2.1563779296875, 2.156031982421875, 2.156570556640625, 2.155749267578125, 2.156455810546875, 2.156539306640625, 2.156162109375, 2.156419921875, 2.156111083984375, 2.1556201171875, 2.157068115234375, 2.15618359375, 2.15617529296875, 2.156353515625, 2.156662841796875, 2.156812255859375, 2.15483154296875, 2.154446533203125, 2.1550927734375, 2.155013427734375, 2.155321044921875, 2.153851806640625, 2.15472314453125, 2.154208740234375, 2.155258544921875, 2.15474609375, 2.155627685546875, 2.1557392578125, 2.1559658203125, 2.155125244140625, 2.15587890625, 2.15556103515625, 2.155802734375, 2.156337158203125, 2.155599853515625, 2.155122802734375, 2.154838134765625, 2.15573291015625, 2.155853271484375, 2.15604833984375, 2.155375244140625, 2.155175048828125, 2.156044677734375, 2.155989501953125, 2.156078857421875, 2.156032470703125, 2.156312255859375, 2.1562412109375, 2.155767333984375, 2.15622900390625, 2.155845703125, 2.156160888671875, 2.155933349609375, 2.156331298828125, 2.156830810546875, 2.155802734375, 2.156553466796875, 2.15624560546875, 2.156656005859375, 2.156861083984375, 2.155530029296875, 2.155895751953125, 2.156756591796875, 2.15710546875, 2.156838623046875, 2.15633251953125, 2.156612060546875, 2.156882080078125, 2.156545166015625, 2.15689306640625, 2.1569375, 2.156795654296875, 2.156451904296875, 2.157452392578125, 2.15695458984375, 2.15725048828125, 2.15652978515625, 2.15766015625, 2.1569189453125, 2.15613720703125, 2.154751953125, 2.15535205078125, 2.155133056640625, 2.155610107421875, 2.15499560546875, 2.155558837890625, 2.155183837890625, 2.15576611328125, 2.155431884765625, 2.15549072265625, 2.155479736328125, 2.15575341796875, 2.15545654296875, 2.156015380859375, 2.15538916015625, 2.156316650390625, 2.156632080078125, 2.156115966796875, 2.15632275390625, 2.1554462890625, 2.156004638671875, 2.156026611328125, 2.156030029296875, 2.156322998046875, 2.155793212890625, 2.15531201171875, 2.15627783203125, 2.1556279296875, 2.15658349609375, 2.156557373046875, 2.15524658203125, 2.156424560546875, 2.156294921875, 2.155815185546875, 2.15559130859375, 2.15583447265625, 2.156342041015625, 2.156015625, 2.156478271484375, 2.156675048828125, 2.1556572265625, 2.156151123046875, 2.156275634765625, 2.156451904296875, 2.1573857421875, 2.156472412109375, 2.155664794921875, 2.156966552734375, 2.156527587890625, 2.156632080078125, 2.156265625, 2.1570087890625, 2.15602685546875, 2.156317138671875, 2.157116943359375, 2.1564873046875, 2.15671337890625, 2.156304931640625, 2.157250732421875, 2.15592333984375, 2.157657470703125, 2.157234130859375]",tokens/s,0.4640228640073425,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3563.528192,4495.179776,0.0,4116.709376,3980.386816,s,1,10.4573623046875,10.4573623046875,0.0,10.4573623046875,10.4573623046875,10.4573623046875,10.4573623046875,[10.4573623046875],,kWh,9.463678974167351e-05,1.0428446453185679e-05,2.954335696799676e-05,0.00013460859316285595,,MB,3465.621504,4826.529792,0.0,4418.699264,4245.764608,s,10,6.514956481933594,0.6514956481933594,0.0008382393185001823,0.6516083984375001,0.6524576538085937,0.6526914428710937,0.6528784741210938,"[0.6507066650390625, 0.6516373901367187, 0.6515794067382813, 0.6514669799804688, 0.6501195068359376, 0.6517879638671875, 0.6524057006835937, 0.6503561401367187, 0.6529252319335938, 0.6519714965820312]",tokens/s,392.94199540688413,kWh,1.9035242236718612e-05,2.0992695936843544e-06,1.2672371248999888e-05,3.380688307940286e-05,tokens/kWh,7572422.438316127,MB,3469.758464,4837.015552,0.0,4429.185024,4245.767168,s,10,384.47273437499996,38.447273437499994,0.025700547755004608,38.459927734375,38.465746484375,38.4658712890625,38.4659711328125,"[38.38426171875, 38.41635546875, 38.44120703125, 38.4502265625, 38.4564609375, 38.46393359375, 38.46571875, 38.46339453125, 38.46599609375, 38.4651796875]",tokens/s,1.638607744250395,kWh,0.0011221179002866139,0.00012377796577120082,0.0007458437355633994,0.001991739601621214,tokens/kWh,31630.640847187035,,s,630,384.46834374999986,0.6102672123015873,0.0005672002210965215,0.6103543701171874,0.6108875366210937,0.6110109985351563,0.6112549932861329,"[0.6090960083007813, 0.6084502563476563, 0.608760498046875, 0.6085468139648438, 0.6086820068359375, 0.608964599609375, 0.6090198364257813, 0.6083390502929688, 0.60896337890625, 0.6084527587890625, 0.6092669677734375, 0.6092006225585938, 0.6084193115234375, 0.6095037231445313, 0.6087141723632813, 0.6089382934570312, 0.6088442993164063, 0.6087393188476562, 0.6091402587890625, 0.6090695190429688, 0.608712646484375, 0.6101893310546875, 0.6080819091796875, 0.6089801025390625, 0.6094119873046875, 0.608943603515625, 0.610287109375, 0.6082645874023438, 0.6091060180664063, 0.6097963256835938, 0.6085675659179688, 0.6099081420898438, 0.6093010864257813, 0.608392578125, 0.6096492309570313, 0.6088048706054687, 0.610361328125, 0.6084198608398438, 0.6096354370117187, 0.6103089599609375, 0.6092575073242188, 0.6096132202148438, 0.6098903198242187, 0.6089487915039062, 0.6100578002929687, 0.60937255859375, 0.6100594482421875, 0.6092112426757812, 0.6090424194335937, 0.6103367919921875, 0.6088253173828125, 0.6102559814453125, 0.6092584228515625, 0.609912841796875, 0.6095703735351562, 0.6097042846679688, 0.6093067626953125, 0.60988330078125, 0.6095225219726562, 0.6101954345703124, 0.6098411865234376, 0.6093475341796875, 0.6101852416992187, 0.6092809448242188, 0.6101094360351562, 0.6082826538085937, 0.6092533569335937, 0.6097572021484375, 0.6091590576171875, 0.6098058471679687, 0.609083740234375, 0.610011474609375, 0.609449462890625, 0.609554931640625, 0.6091915283203125, 0.6089580688476562, 0.6100978393554688, 0.60969384765625, 0.60918115234375, 0.6101787109375, 0.6088201293945312, 0.6105702514648438, 0.6092473754882812, 0.6092838134765625, 0.6100452270507812, 0.6097061767578125, 0.6102218627929688, 0.6087933959960937, 0.6097158813476562, 0.6104061889648438, 0.6090422973632813, 0.6100711059570313, 0.6098965454101563, 0.6100003051757813, 0.6095570678710938, 0.6095827026367188, 0.610037353515625, 
0.60992333984375, 0.6097310791015625, 0.61005419921875, 0.6099200439453125, 0.6091724243164063, 0.6103319091796875, 0.6097229614257812, 0.609804443359375, 0.6102564697265624, 0.609752685546875, 0.6098746948242187, 0.6103369750976563, 0.6098450927734375, 0.6097754516601562, 0.6103055419921875, 0.6095953979492188, 0.6103026123046875, 0.6103894653320312, 0.6097640991210938, 0.6101337890625, 0.6101913452148438, 0.6103736572265624, 0.6095872192382813, 0.6099100952148437, 0.6101613159179687, 0.610305908203125, 0.6104202270507812, 0.6102186279296875, 0.6095318603515625, 0.6104039916992188, 0.6099992065429688, 0.6098433227539063, 0.6099674682617188, 0.6096532592773437, 0.6100801391601562, 0.6097854614257813, 0.6095120239257813, 0.6103056640625, 0.6099771728515625, 0.609578369140625, 0.6103779907226563, 0.6103719482421875, 0.6095155029296875, 0.6104514770507813, 0.6100208740234375, 0.6097679443359375, 0.6097694702148437, 0.6106480712890625, 0.6100399169921875, 0.6097529296875, 0.610355224609375, 0.609977783203125, 0.6100938720703125, 0.610328369140625, 0.6099795532226563, 0.6102411499023438, 0.609832763671875, 0.6103208618164062, 0.610081787109375, 0.6100912475585938, 0.6101895751953125, 0.610353515625, 0.6101071166992188, 0.6107816772460938, 0.6100471801757813, 0.6102208862304688, 0.6105856323242187, 0.6097660522460937, 0.6105784301757813, 0.6104160766601563, 0.6101141967773438, 0.6105151977539063, 0.60994970703125, 0.6100787353515625, 0.6105149536132812, 0.61008056640625, 0.6100582275390625, 0.6106473999023437, 0.6098645629882813, 0.6100541381835938, 0.6104146118164062, 0.6102568969726563, 0.610663818359375, 0.610531982421875, 0.6101392822265626, 0.6105437622070312, 0.6101736450195312, 0.6103326416015625, 0.61028759765625, 0.610763916015625, 0.6102324829101563, 0.6103866577148438, 0.6103729248046875, 0.6099683227539062, 0.6100995483398437, 0.610337890625, 0.6098373413085938, 0.6101339111328125, 0.61000927734375, 0.6096589965820313, 0.6109815063476562, 0.6097310180664063, 0.61061767578125, 0.610209716796875, 0.6096117553710938, 0.6103079223632812, 0.6105637817382813, 0.610521484375, 0.6099417114257812, 0.6102159423828125, 0.610492431640625, 0.6102261962890625, 0.610174072265625, 0.6103909301757813, 0.6102794189453125, 0.609967529296875, 0.6100049438476562, 0.6103330688476563, 0.6102509765625, 0.6103941040039063, 0.6103749389648437, 0.60979638671875, 0.6105010375976563, 0.610197509765625, 0.6100459594726563, 0.6109686889648438, 0.6098619384765624, 0.6103475952148437, 0.6103693237304687, 0.6103880615234375, 0.6099354858398438, 0.6102138671875, 0.6106185913085938, 0.6098746948242187, 0.6102774047851562, 0.6105184326171875, 0.6099586181640625, 0.6109733276367187, 0.6100340576171875, 0.6104267578125, 0.61057421875, 0.6102254638671875, 0.6105209350585937, 0.6104053955078125, 0.61082421875, 0.610802734375, 0.6102413940429687, 0.6108724975585937, 0.6108927001953125, 0.6102405395507813, 0.6106473999023437, 0.6105316772460937, 0.6103616943359375, 0.6100801391601562, 0.6112569580078125, 0.6104248046875, 0.6101829833984375, 0.6103634643554687, 0.6095133666992187, 0.6109862060546875, 0.610718994140625, 0.6098419189453125, 0.6104116821289063, 0.6095, 0.6103265380859375, 0.6106603393554687, 0.6099819946289062, 0.6097412719726563, 0.6111395874023438, 0.6095827026367188, 0.6108819580078125, 0.6099450073242187, 0.6100506591796875, 0.6106132202148438, 0.6102151489257812, 0.6099361572265625, 0.6103777465820313, 0.6101986083984375, 0.6105732421875, 0.61054931640625, 0.610621826171875, 0.6099002075195312, 
0.61038818359375, 0.6103051147460937, 0.6107286376953125, 0.610433349609375, 0.6107156372070313, 0.6105252075195312, 0.6105189819335938, 0.61015576171875, 0.6110543823242187, 0.610620849609375, 0.6102533569335937, 0.6102498168945313, 0.6105712280273438, 0.6102937622070312, 0.6101273193359374, 0.610498291015625, 0.6101920166015625, 0.6104590454101563, 0.610794189453125, 0.6101744384765625, 0.6108883056640625, 0.610123779296875, 0.6103654174804688, 0.6105947875976563, 0.6106603393554687, 0.6108671264648438, 0.6105887451171875, 0.6101355590820312, 0.610887451171875, 0.610244873046875, 0.6103985595703125, 0.6108070068359375, 0.6105148315429687, 0.6110955200195313, 0.6108405151367188, 0.6107074584960938, 0.6110088500976563, 0.6105499267578125, 0.6099988403320312, 0.6107484130859375, 0.61009716796875, 0.6108262329101563, 0.6096029052734375, 0.610570556640625, 0.610397705078125, 0.609651611328125, 0.6106480712890625, 0.61031005859375, 0.6104925537109375, 0.6104390869140625, 0.6105394897460937, 0.6101621704101563, 0.6107075805664063, 0.6103699340820312, 0.6103582763671875, 0.6105354614257813, 0.610065185546875, 0.610385986328125, 0.6104017333984375, 0.6105086669921875, 0.6109661865234375, 0.610418701171875, 0.6102097778320312, 0.6107197265625, 0.6104658203125, 0.6108712768554687, 0.610318359375, 0.6100889892578125, 0.6107152709960938, 0.6109371948242187, 0.6102835083007813, 0.6110003051757813, 0.6104874267578125, 0.6102968139648437, 0.61108154296875, 0.6101611938476562, 0.6107177124023437, 0.6106275634765626, 0.6112010498046875, 0.6103121337890625, 0.6109384765625, 0.6103938598632812, 0.6107551879882812, 0.610673828125, 0.61046630859375, 0.610532958984375, 0.6106406860351562, 0.6106869506835938, 0.6105858154296875, 0.6110704956054688, 0.610211669921875, 0.610906005859375, 0.6102861328125, 0.6105409545898437, 0.6107675170898438, 0.61068603515625, 0.6106018676757813, 0.6112501831054687, 0.6102958374023437, 0.6106951904296875, 0.6100008544921875, 0.6104107666015625, 0.610318115234375, 0.6100132446289063, 0.6111651611328125, 0.6099130249023438, 0.610789794921875, 0.6106354370117187, 0.61014697265625, 0.6103236694335937, 0.610946044921875, 0.6106171264648438, 0.610324462890625, 0.6105066528320312, 0.6101213989257812, 0.6107918701171875, 0.6104920043945312, 0.61086083984375, 0.6102144775390625, 0.6106419067382812, 0.6106988525390625, 0.610336181640625, 0.6100687255859375, 0.6110331420898437, 0.6101918334960937, 0.6103512573242188, 0.6108096923828125, 0.6102547607421875, 0.6108285522460938, 0.6110157470703125, 0.6107221069335937, 0.6106929931640624, 0.6105497436523437, 0.610037841796875, 0.61115380859375, 0.6100897216796874, 0.6103983154296875, 0.610609130859375, 0.6102425537109375, 0.6104266967773437, 0.6110127563476563, 0.6101115112304687, 0.6108079833984374, 0.6104503784179688, 0.6108599243164062, 0.610407958984375, 0.6111585083007812, 0.6106439819335937, 0.6110303955078125, 0.610271240234375, 0.6106846313476563, 0.6108927001953125, 0.6101299438476563, 0.6107689208984375, 0.6109710693359375, 0.6105823364257813, 0.61077783203125, 0.6106378173828125, 0.6107095336914062, 0.610710693359375, 0.6108168334960937, 0.6104063720703125, 0.6110336303710937, 0.610076171875, 0.6098011474609375, 0.6101842651367188, 0.6108245239257812, 0.6096748657226563, 0.610658935546875, 0.6100380859375, 0.6109224853515625, 0.6098063354492187, 0.61053076171875, 0.610688720703125, 0.6106028442382813, 0.610356201171875, 0.6102913818359375, 0.6101486206054687, 0.6109524536132812, 0.610075439453125, 0.610819580078125, 
0.6102649536132813, 0.6107093505859374, 0.6104130859375, 0.6104392700195312, 0.61022021484375, 0.6107393798828125, 0.6105497436523437, 0.6100529174804687, 0.6108324584960938, 0.61034228515625, 0.61023291015625, 0.6107208251953125, 0.6103804931640625, 0.6105541381835937, 0.61051904296875, 0.6109224853515625, 0.6100930786132812, 0.610405517578125, 0.6113425903320312, 0.61042724609375, 0.6109452514648438, 0.6101565551757813, 0.6107545776367187, 0.6108323974609375, 0.610150390625, 0.6103222045898438, 0.6109002075195312, 0.610995849609375, 0.6104231567382813, 0.6113660278320312, 0.6103663330078125, 0.6107053833007813, 0.6102097778320312, 0.610680419921875, 0.6104946899414062, 0.6103143920898437, 0.6110802001953125, 0.6103775024414062, 0.6107463989257812, 0.6105205078125, 0.6111751708984375, 0.6102006225585938, 0.61135498046875, 0.6102411499023438, 0.6107234497070313, 0.6103710327148437, 0.61014111328125, 0.6103770141601562, 0.6103634033203125, 0.61069384765625, 0.6104965209960938, 0.6104432373046875, 0.61039208984375, 0.61032373046875, 0.6102998657226563, 0.6108250122070312, 0.610334716796875, 0.6104146118164062, 0.6105001831054687, 0.6102429809570312, 0.6105613403320312, 0.6103067626953125, 0.6107095336914062, 0.6102866821289062, 0.6101697387695313, 0.6107027587890625, 0.61000927734375, 0.6103187866210937, 0.610703369140625, 0.610620849609375, 0.6109596557617187, 0.6103853149414062, 0.6109967651367187, 0.6100850830078125, 0.6110531616210938, 0.6100833740234375, 0.6104219360351563, 0.6108773803710937, 0.6101287231445313, 0.6106746826171875, 0.6106624145507813, 0.610909912109375, 0.6104019775390624, 0.61080029296875, 0.6104452514648437, 0.6104496459960937, 0.6108167724609375, 0.6103964233398438, 0.6106337890625, 0.6113442993164062, 0.6102157592773437, 0.610970458984375, 0.6105879516601562, 0.6110172729492187, 0.6101951293945312, 0.6109410400390625, 0.6104273681640625, 0.6108549194335937, 0.6107973022460937, 0.610593017578125, 0.6105426025390625, 0.6108866577148437, 0.6106618041992188, 0.610755126953125, 0.6107421875, 0.6102978515625, 0.6112400512695313, 0.6103338623046874, 0.6110248413085938, 0.6092952880859375, 0.6105497436523437, 0.6100065307617187, 0.6108591918945312, 0.6095916137695313, 0.6105489501953125, 0.6105137329101562, 0.6101810913085938, 0.6112948608398437, 0.60995849609375, 0.6097098388671875, 0.61123583984375, 0.6098510131835938, 0.6106725463867188, 0.6104515991210937, 0.6106319580078124, 0.6102650756835938, 0.61079736328125, 0.6099724731445313, 0.6109937744140626, 0.6100443115234375, 0.6105042724609375, 0.61080810546875, 0.6102652587890625, 0.6105252075195312, 0.61050830078125, 0.6104171142578125, 0.610428955078125, 0.610639892578125, 0.6108561401367187, 0.6106427612304688, 0.6103059692382813, 0.6109490966796876, 0.6110658569335937, 0.6107730102539063, 0.6106644287109375, 0.6104304809570312, 0.610922607421875, 0.6101253662109375, 0.6109519653320312, 0.6105042114257813, 0.610482666015625, 0.6106760864257812, 0.6104493408203125, 0.6108674926757812, 0.6101047973632813, 0.6105118408203125, 0.610850830078125, 0.6100556030273437, 0.6108903198242187, 0.6104965209960938, 0.6106492309570313, 0.61102685546875, 0.6107894287109376, 0.6108814697265625, 0.6110543823242187, 0.6104352416992187, 0.6114283447265625, 0.61051904296875, 0.6107484130859375, 0.6108098754882813]",tokens/s,1.6386264571359783,, 
4bit-gptq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2871, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply module._apply(fn) [Previous line repeated 2 more 
times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert return t.to( torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3728.146432,4375.642112,0.0,3997.171712,3878.257152,s,1,10.0629853515625,10.0629853515625,0.0,10.0629853515625,10.0629853515625,10.0629853515625,10.0629853515625,[10.0629853515625],,kWh,9.067295107500967e-05,9.994629891378651e-06,2.7987522389999464e-05,0.00012865510335638777,,MB,1964.519424,4862.181376,0.0,4454.350848,4371.844096,s,10,6.671113952636719,0.6671113952636719,0.0009954266792010686,0.6670467224121094,0.6679490539550781,0.6684646697998047,0.6688771624755859,"[0.66518603515625, 0.6659949951171875, 0.66783447265625, 0.6668709716796875, 0.6672155151367187, 0.6689802856445313, 0.6677422485351563, 0.6668779296875, 0.666758056640625, 0.6676534423828125]",tokens/s,383.7440070991704,kWh,1.9423147584895633e-05,2.1412516791791457e-06,1.2930131871874938e-05,3.4494531135949714e-05,tokens/kWh,7421466.2895707665,MB,1959.440384,5063.507968,0.0,4655.67744,4530.328576,s,10,392.1630078125,39.21630078125,0.020344922816255443,39.211791015624996,39.23774375,39.25023125,39.26022125,"[39.19016015625, 39.1965625, 39.20012890625, 39.20765234375, 39.26271875, 39.23496875, 39.22264453125, 39.2159296875, 39.2257734375, 39.20646875]",tokens/s,1.606474826664972,kWh,0.0011421220202313548,0.00012598509379877328,0.0007595156805287254,0.0020276227945588534,tokens/kWh,31070.86789962174,,s,630,392.15904455566374,0.6224746738978795,0.0008655115987336269,0.6223991088867188,0.6236659790039062,0.6239581451416015,0.6245922937011719,"[0.6200103759765625, 0.6197701416015625, 0.6201177368164063, 0.6205819091796875, 0.6210646362304687, 0.6203760986328125, 0.6204375610351562, 0.6206837158203125, 0.6204436645507813, 0.6202265014648437, 0.6222623901367188, 0.621433837890625, 0.6215280151367187, 0.62188134765625, 0.62137060546875, 0.6221484985351563, 0.62211083984375, 0.621301513671875, 0.621649658203125, 0.6213512573242187, 0.62057861328125, 0.6210986328125, 0.6208408813476562, 0.6229445190429688, 0.6221722412109375, 0.6218251953125, 0.6227607421875, 0.623111572265625, 0.6230137939453125, 0.6239930419921875, 0.6230023193359375, 0.6225838012695313, 0.622202880859375, 0.6215720825195312, 0.6214387817382813, 0.6213839111328125, 0.6214364013671875, 0.6218626708984375, 0.622228271484375, 0.62367333984375, 0.6235811767578125, 0.6236504516601562, 0.6236121215820313, 0.6237943115234375, 0.6235645141601562, 0.623310791015625, 0.6226824340820313, 0.6235648193359375, 0.6227107543945313, 0.6225997924804687, 0.6235303955078125, 
0.622992919921875, 0.622903564453125, 0.6229484252929688, 0.6223258056640625, 0.623763427734375, 0.6220636596679687, 0.6224998168945313, 0.6232493896484375, 0.6211295776367187, 0.6223375244140625, 0.621364990234375, 0.6211470336914062, 0.6223689575195313, 0.622508056640625, 0.6228316040039062, 0.6213529663085937, 0.6221004638671875, 0.6214983520507813, 0.6214381713867188, 0.6226903686523437, 0.6209523315429688, 0.6207808227539062, 0.6214124145507812, 0.620971923828125, 0.6218842163085937, 0.6220738525390626, 0.621828125, 0.621791259765625, 0.6213467407226563, 0.6219163818359374, 0.6220556030273438, 0.6224869384765624, 0.6239113159179688, 0.6213734130859375, 0.6217617797851562, 0.6224534912109375, 0.6221947021484375, 0.6215208740234375, 0.6229728393554688, 0.622939697265625, 0.6218276977539062, 0.6214949340820313, 0.6219102783203125, 0.6232936401367187, 0.6222221069335937, 0.6208645629882813, 0.6215435791015625, 0.6210117797851562, 0.6210989379882812, 0.6213345336914062, 0.6213837890625, 0.6225961303710937, 0.62291552734375, 0.6242017211914063, 0.6238248901367187, 0.6230337524414062, 0.6229817504882813, 0.6226165771484375, 0.62198291015625, 0.6223634643554687, 0.6218589477539063, 0.6218522338867187, 0.621098876953125, 0.622270263671875, 0.6222772216796875, 0.6219161376953125, 0.621995849609375, 0.621876953125, 0.6222908935546875, 0.6215070190429688, 0.623447265625, 0.6230023803710938, 0.6239747314453125, 0.6237037353515625, 0.6231859130859375, 0.6228176879882813, 0.6215496826171875, 0.6214759521484375, 0.6214410400390625, 0.6216576538085937, 0.6214496459960938, 0.6215905151367187, 0.6229995727539063, 0.6222479248046875, 0.621897705078125, 0.6223482666015625, 0.62142578125, 0.6226810302734375, 0.6223095092773437, 0.6218217163085937, 0.6223750610351563, 0.6216907958984375, 0.62226220703125, 0.6223396606445313, 0.6222217407226562, 0.6226906127929688, 0.6216229858398438, 0.6219797973632812, 0.622447998046875, 0.6220122680664063, 0.6219999389648437, 0.6224759521484375, 0.6215211181640625, 0.6214738159179688, 0.6216309204101562, 0.6225701293945313, 0.6222386474609375, 0.6228568115234375, 0.6218812866210938, 0.6217998657226562, 0.6232882080078125, 0.6228809204101563, 0.6232711181640626, 0.6232659301757812, 0.6227479248046875, 0.6222993774414063, 0.6229536743164062, 0.622607666015625, 0.6231890869140625, 0.623139404296875, 0.6220667114257813, 0.6217567749023437, 0.6218615112304687, 0.6213767700195313, 0.6219066162109375, 0.621654052734375, 0.6217686767578126, 0.6228472900390625, 0.62200439453125, 0.6217765502929687, 0.6235244750976563, 0.622743896484375, 0.621717529296875, 0.62225, 0.6226636962890625, 0.6220015258789062, 0.6222670288085937, 0.62209228515625, 0.62174755859375, 0.621290283203125, 0.6210610961914063, 0.6216672973632813, 0.6211151123046875, 0.62069580078125, 0.6213017578125, 0.6216365966796875, 0.6210303955078125, 0.6210828247070312, 0.6210349731445313, 0.6217708129882813, 0.6215928955078125, 0.62160693359375, 0.6215065307617188, 0.621294921875, 0.6211319580078125, 0.6208429565429687, 0.6212675170898437, 0.62142626953125, 0.6217486572265625, 0.6214430541992187, 0.6210413818359375, 0.6219266967773438, 0.622487548828125, 0.6214102783203125, 0.6214717407226562, 0.622182373046875, 0.6214000854492188, 0.6219857788085937, 0.6214390258789062, 0.6237880249023438, 0.6231531372070312, 0.6232815551757812, 0.6228436279296875, 0.62311083984375, 0.6232412719726562, 0.6218281860351562, 0.62165576171875, 0.62274609375, 0.6230274658203125, 0.6233749389648438, 0.6227387084960937, 0.62357373046875, 
0.6229483642578125, 0.6236651611328125, 0.6231387939453125, 0.6232576293945312, 0.6238064575195312, 0.6240092163085937, 0.623517822265625, 0.6245886840820313, 0.6245232543945313, 0.6244086303710937, 0.6249613647460938, 0.623146484375, 0.6227442626953125, 0.6224465942382813, 0.6225262451171875, 0.6226920776367187, 0.6234403686523438, 0.6223196411132812, 0.62211279296875, 0.621302978515625, 0.6227747802734375, 0.6231701049804688, 0.6235216064453125, 0.6242981567382813, 0.6236590576171875, 0.6236607055664063, 0.624884033203125, 0.6239848022460938, 0.62408642578125, 0.6232537841796875, 0.6225061645507812, 0.6219195556640625, 0.6220023803710938, 0.622193115234375, 0.6218832397460937, 0.622002197265625, 0.6220484619140625, 0.621796630859375, 0.6219608154296875, 0.6227579345703125, 0.6230382690429688, 0.6230971069335938, 0.6237848510742188, 0.6233681640625, 0.6225735473632813, 0.6229154052734375, 0.6248265380859375, 0.6229381103515625, 0.6229155883789063, 0.622581787109375, 0.6224097290039062, 0.6226590576171875, 0.6229775390625, 0.6237630004882813, 0.6239482421875, 0.6238617553710938, 0.6235765991210938, 0.6235120849609375, 0.623795654296875, 0.6234913330078125, 0.622713134765625, 0.6221495971679688, 0.6220308227539062, 0.6224120483398438, 0.6234224853515625, 0.6228380126953125, 0.6244541625976563, 0.6237429809570313, 0.6242688598632813, 0.6238717651367187, 0.623831787109375, 0.6240173950195312, 0.6237614135742188, 0.6245928955078125, 0.6245908203125, 0.62287255859375, 0.6223831176757812, 0.6224219970703125, 0.6238592529296875, 0.623743408203125, 0.624645263671875, 0.6239662475585938, 0.622827392578125, 0.6229811401367188, 0.6220188598632812, 0.6238758544921875, 0.6241565551757813, 0.6236318969726562, 0.6231119995117187, 0.6219984741210938, 0.622020751953125, 0.6231390991210938, 0.6225299682617188, 0.6230388793945313, 0.6222376708984375, 0.6226309204101562, 0.6222622680664063, 0.6220226440429687, 0.6226022338867188, 0.6219960327148437, 0.6217298583984375, 0.6219807739257812, 0.6221729736328125, 0.6236580200195313, 0.6226025390625, 0.6238562622070313, 0.6229373779296875, 0.6232030639648437, 0.6226739501953125, 0.6230437622070313, 0.6224904174804687, 0.621959228515625, 0.6215430908203124, 0.6218031005859375, 0.62206005859375, 0.6217870483398438, 0.6218916015625, 0.6230121459960938, 0.6222722778320312, 0.6226165161132813, 0.6225267944335937, 0.6229401245117188, 0.6235381469726563, 0.623805908203125, 0.6235285034179687, 0.6229237060546875, 0.6232515258789062, 0.623276123046875, 0.6229988403320312, 0.6240543212890625, 0.6237988891601562, 0.6229699096679687, 0.622371826171875, 0.62280908203125, 0.6232158203125, 0.6228017578125, 0.6229827270507813, 0.62249560546875, 0.6228301391601563, 0.6217666625976562, 0.6224403076171875, 0.6227353515625, 0.6230809326171876, 0.6235040893554687, 0.623515625, 0.623261962890625, 0.6221266479492188, 0.6223284912109375, 0.6214202880859375, 0.6217576904296875, 0.6218575439453125, 0.6223890991210937, 0.6238516235351562, 0.623969482421875, 0.6214234008789062, 0.6214876098632812, 0.621227783203125, 0.6215134887695313, 0.6225276489257813, 0.6228345336914063, 0.6229075317382813, 0.6235851440429687, 0.6217216186523438, 0.6213507080078124, 0.6221498413085937, 0.6238248901367187, 0.623427734375, 0.6234868774414063, 0.6225546264648437, 0.6222705688476563, 0.6223138427734375, 0.6221107177734375, 0.623091796875, 0.62264111328125, 0.6221414184570313, 0.6228643798828125, 0.6230693969726563, 0.6235441284179688, 0.622635009765625, 0.6219710083007812, 0.62380078125, 0.62267626953125, 
0.6227508544921875, 0.6230059814453125, 0.621990234375, 0.624089111328125, 0.6222274780273438, 0.6224034423828125, 0.6229784545898438, 0.6222505493164062, 0.622751953125, 0.62209228515625, 0.6220062866210937, 0.6222725219726563, 0.6221803588867187, 0.6223031005859375, 0.6222578125, 0.6225883178710937, 0.6229033813476562, 0.6239375610351563, 0.6232567138671875, 0.6232687377929688, 0.6230304565429687, 0.62258154296875, 0.6221129760742188, 0.6221259765625, 0.6225110473632812, 0.6222695922851562, 0.6221068115234375, 0.6221582641601563, 0.6215231323242187, 0.6219796752929687, 0.62226025390625, 0.62123828125, 0.6218137817382813, 0.6220238037109375, 0.622521240234375, 0.623515625, 0.6222418212890625, 0.6231427001953125, 0.622386474609375, 0.6224884643554688, 0.6225634765625, 0.6221185913085937, 0.6217791137695312, 0.621676513671875, 0.6228825073242188, 0.6224960327148438, 0.6222042846679687, 0.6219224853515625, 0.622136962890625, 0.6224452514648438, 0.6224302368164063, 0.622475341796875, 0.6222970581054688, 0.6220874633789063, 0.6237088623046875, 0.6230947875976562, 0.62299462890625, 0.622970703125, 0.6222347412109375, 0.6225335083007812, 0.62200830078125, 0.6229933471679687, 0.6223155517578125, 0.6216693725585938, 0.6226721801757813, 0.6223756713867188, 0.622839599609375, 0.6227332763671874, 0.6231636962890625, 0.623339111328125, 0.6221480102539062, 0.6229912719726562, 0.6224384765625, 0.6230159301757813, 0.622149658203125, 0.6223973999023438, 0.6233170776367187, 0.6224008178710938, 0.6225232543945313, 0.623578857421875, 0.6218077392578125, 0.6247218627929687, 0.6235191650390625, 0.6220989990234375, 0.6221145629882813, 0.6218303833007812, 0.6222553100585938, 0.622135498046875, 0.6215460815429688, 0.6213087158203126, 0.622620849609375, 0.6242772827148437, 0.6232037963867187, 0.6247526245117188, 0.6224877319335937, 0.6222564697265625, 0.6226760864257812, 0.6223031616210938, 0.6220322265625, 0.6223919067382813, 0.6222521362304687, 0.6217685546875, 0.6215347900390625, 0.6217870483398438, 0.6225313720703125, 0.621974853515625, 0.6225928344726562, 0.6223268432617187, 0.6222604370117187, 0.6231044921875, 0.6236356201171875, 0.6232913208007812, 0.6224424438476562, 0.622376953125, 0.6224302368164063, 0.6221639404296875, 0.622761962890625, 0.6221732788085937, 0.6236161499023437, 0.6223853759765625, 0.6218573608398438, 0.62199169921875, 0.6220431518554688, 0.621332763671875, 0.622243896484375, 0.62230322265625, 0.6227742919921875, 0.6223658447265625, 0.6220337524414062, 0.6244816284179687, 0.6226843872070312, 0.6229847412109375, 0.6227628784179687, 0.62247900390625, 0.6219923095703125, 0.6221475830078125, 0.6218950805664063, 0.622811279296875, 0.6216702880859375, 0.6225107421875, 0.6226431274414063, 0.6224752807617188, 0.6241111450195312, 0.6234976806640625, 0.6229844970703124, 0.6231921997070312, 0.6222235107421875, 0.6225997924804687, 0.623913818359375, 0.62384912109375, 0.6238252563476563, 0.6229421997070312, 0.621756591796875, 0.6219512939453125, 0.6219854736328125, 0.6218219604492188, 0.622545166015625, 0.621743896484375, 0.6217643432617187, 0.6226516723632812, 0.622095947265625, 0.622987060546875, 0.6225020141601563, 0.622263916015625, 0.6228427734375, 0.6223524169921875, 0.6226841430664063, 0.623431884765625, 0.6231898193359375, 0.622972900390625, 0.62239111328125, 0.621619384765625, 0.6229232788085938, 0.6225082397460937, 0.6230407104492187, 0.6244061889648438, 0.622408447265625, 0.6222244873046875, 0.6240897216796875, 0.6229627075195312, 0.6230916748046875, 0.6225797119140625, 0.6231428833007813, 
0.6217769165039062, 0.6236446533203125, 0.6240706787109375, 0.6222821044921875, 0.6225885009765625, 0.6225654296875, 0.6230643920898438, 0.62136767578125, 0.62153369140625, 0.6229584350585937, 0.6218322143554688, 0.6215249633789063, 0.6218395385742187, 0.6217871704101563, 0.6217344970703125, 0.6213716430664062, 0.621669677734375, 0.6217459716796875, 0.621306640625, 0.6215516357421875, 0.6213387451171875, 0.6225891723632813, 0.6224488525390625, 0.6220723876953125, 0.6219570922851563, 0.6220472412109375, 0.6218035278320313, 0.6217536010742187, 0.6216425170898437, 0.6220791625976563, 0.6216027221679687, 0.6216016845703125]",tokens/s,1.6064910620991075,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,2174.373888,2194.604032,0.0,1816.133632,1727.29344,s,1,8.94433203125,8.94433203125,0.0,8.94433203125,8.94433203125,8.94433203125,8.94433203125,[8.94433203125],,kWh,5.511606295416034e-05,6.0725403847365365e-06,1.7106958129980976e-05,7.829556146887786e-05,,MB,2227.195904,2406.416384,0.0,1998.585856,1980.448768,s,10,3.2092273559570312,0.3209227355957031,0.0008386638202555853,0.3211023712158203,0.32185628967285157,0.32205032196044925,0.32220554779052735,"[0.3199826354980469, 0.32017123413085935, 0.31957742309570314, 0.32088507080078127, 0.32027764892578126, 0.32133026123046876, 0.3218131713867188, 0.3213196716308594, 0.32224435424804687, 0.32162588500976563]",tokens/s,797.6997937675178,kWh,9.385472596354085e-06,1.0347539388508268e-06,6.223433798187121e-06,1.6643660333392034e-05,tokens/kWh,15381231.944897924,MB,2235.76064,2597.257216,0.0,2189.426688,2078.022144,s,10,177.43118554687499,17.7431185546875,0.033526370132177866,17.758843749999997,17.7714302734375,17.77307646484375,17.77439341796875,"[17.675099609375, 17.694265625, 17.717541015625, 17.745, 17.7532890625, 17.7643984375, 17.769435546875, 17.766369140625, 17.771064453125, 17.77472265625]",tokens/s,3.5506723243618428,kWh,0.0005187076917844781,5.721722755934871e-05,0.0003448937915398119,0.0009208187108836387,tokens/kWh,68417.37603218744,,s,630,177.42690451049805,0.2816300071595207,0.0006598392608408472,0.2817473449707031,0.28242669372558593,0.28255672454833985,0.2828116159057617,"[0.2800423889160156, 0.2798489685058594, 0.27987677001953126, 0.28069677734375, 0.2809182739257812, 
0.2799151611328125, 0.2804708557128906, 0.2804883117675781, 0.280727294921875, 0.28029803466796877, 0.2804562683105469, 0.2802956237792969, 0.28037606811523436, 0.2803502502441406, 0.2799678955078125, 0.28061727905273437, 0.27993701171875, 0.28083200073242187, 0.28064358520507815, 0.27996954345703123, 0.2806561279296875, 0.28070816040039065, 0.28012542724609374, 0.2807080383300781, 0.2807132263183594, 0.28060049438476564, 0.28059661865234375, 0.2804834289550781, 0.2809849548339844, 0.28008346557617186, 0.28019439697265625, 0.28069955444335937, 0.28046044921875, 0.2801589660644531, 0.28077825927734373, 0.2805921020507812, 0.28040835571289063, 0.28039385986328125, 0.28098391723632815, 0.280465087890625, 0.28005825805664064, 0.28069232177734377, 0.2807298278808594, 0.2803466796875, 0.280330322265625, 0.2809014892578125, 0.2810062255859375, 0.2803957824707031, 0.2808667602539062, 0.28111810302734375, 0.2807504272460937, 0.28062753295898435, 0.28100579833984374, 0.28070892333984376, 0.2809819946289063, 0.28046047973632815, 0.2810714111328125, 0.2802329711914063, 0.2805138854980469, 0.28088592529296874, 0.28113006591796874, 0.2816296081542969, 0.28068658447265626, 0.281005859375, 0.28015133666992187, 0.2804008178710938, 0.28073983764648436, 0.28049612426757814, 0.28073321533203127, 0.28063790893554685, 0.28020941162109375, 0.2803833618164063, 0.2812335510253906, 0.28066201782226563, 0.2800002136230469, 0.28072726440429685, 0.2811516418457031, 0.2803818664550781, 0.2807459716796875, 0.28107571411132815, 0.2807296142578125, 0.280090087890625, 0.28065847778320313, 0.280848388671875, 0.2805125122070313, 0.2809405517578125, 0.2810262451171875, 0.28134841918945314, 0.28045413208007813, 0.28098153686523436, 0.2812651062011719, 0.28072549438476563, 0.28061270141601563, 0.2810942993164062, 0.28120046997070314, 0.2811516418457031, 0.28120883178710937, 0.28136788940429686, 0.28082965087890627, 0.2804236755371094, 0.28094329833984377, 0.2808046264648438, 0.2805627746582031, 0.28110296630859377, 0.28122930908203125, 0.2805247802734375, 0.2808934631347656, 0.28068658447265626, 0.2811058349609375, 0.28089788818359374, 0.2809177551269531, 0.2811558837890625, 0.2807339172363281, 0.28060467529296873, 0.28165310668945315, 0.2810533142089844, 0.280748046875, 0.2808401794433594, 0.28158770751953127, 0.28121279907226565, 0.2812069091796875, 0.2809117431640625, 0.28113461303710935, 0.2811109008789062, 0.2807441711425781, 0.2812673645019531, 0.2806640625, 0.2807945556640625, 0.28124627685546877, 0.28088262939453124, 0.281145751953125, 0.28130291748046876, 0.2816937255859375, 0.2803898620605469, 0.28093280029296874, 0.2809283447265625, 0.28078048706054687, 0.280913330078125, 0.2810458984375, 0.28148532104492185, 0.280922119140625, 0.2811125793457031, 0.28112240600585936, 0.2808047790527344, 0.2807490539550781, 0.28160614013671875, 0.281249755859375, 0.28072171020507813, 0.2812303771972656, 0.2810395812988281, 0.28151602172851564, 0.2808606872558594, 0.28136856079101563, 0.2807767028808594, 0.2813706359863281, 0.28103884887695313, 0.2810142822265625, 0.28121701049804687, 0.28157131958007814, 0.28107366943359374, 0.28137210083007813, 0.2811848449707031, 0.2810491027832031, 0.281143310546875, 0.28145895385742187, 0.28184756469726563, 0.28124978637695314, 0.2812653503417969, 0.28144723510742187, 0.2813807678222656, 0.2816123962402344, 0.28132147216796877, 0.2814075012207031, 0.28182464599609375, 0.28112857055664064, 0.2807162780761719, 0.2815958251953125, 0.2814337158203125, 0.28157586669921875, 0.2812951049804687, 
0.28182913208007815, 0.2811023254394531, 0.2814849853515625, 0.2818131103515625, 0.2814261169433594, 0.2813286437988281, 0.28186517333984373, 0.2815753173828125, 0.2808505554199219, 0.2822403564453125, 0.2807236938476563, 0.2805497741699219, 0.2815038146972656, 0.28116583251953126, 0.28076220703125, 0.2811209411621094, 0.2815037536621094, 0.28119210815429685, 0.28136688232421875, 0.28179364013671876, 0.28147393798828124, 0.2810163269042969, 0.2815672302246094, 0.281280517578125, 0.28149090576171876, 0.28075677490234374, 0.28159796142578125, 0.28178399658203124, 0.2808909912109375, 0.2817575073242187, 0.2825286865234375, 0.28195806884765623, 0.28188088989257815, 0.28218746948242185, 0.28228436279296876, 0.28108184814453124, 0.28176177978515626, 0.282838134765625, 0.2814873352050781, 0.2818338012695312, 0.2822354736328125, 0.28211404418945313, 0.28158770751953127, 0.2818495788574219, 0.28214300537109377, 0.2822717590332031, 0.28159591674804685, 0.2825904541015625, 0.2814175720214844, 0.2816391296386719, 0.2814712219238281, 0.2818414611816406, 0.2815576171875, 0.28091595458984375, 0.28136032104492187, 0.2823326416015625, 0.28136099243164064, 0.28139706420898436, 0.28150320434570314, 0.28204721069335936, 0.28181292724609375, 0.28150790405273435, 0.28198297119140625, 0.2825850830078125, 0.2817843322753906, 0.28202392578125, 0.28227609252929686, 0.28162753295898435, 0.28117041015625, 0.2816596374511719, 0.2821185607910156, 0.28147891235351563, 0.28207513427734376, 0.28206472778320313, 0.2810799560546875, 0.2822287292480469, 0.281380859375, 0.28096307373046875, 0.28201495361328127, 0.28191543579101563, 0.2808220825195312, 0.2812441101074219, 0.2820792236328125, 0.28129074096679685, 0.2807245483398437, 0.2816378173828125, 0.28133172607421875, 0.2814259338378906, 0.2809405517578125, 0.2820807189941406, 0.2818770446777344, 0.2812231750488281, 0.28140945434570314, 0.2818765869140625, 0.281585205078125, 0.2813772277832031, 0.28210791015625, 0.2815528869628906, 0.28134954833984377, 0.281118408203125, 0.28153668212890626, 0.2811440124511719, 0.281315185546875, 0.28166574096679686, 0.28222409057617187, 0.2818480529785156, 0.28228182983398437, 0.2820284729003906, 0.28187362670898436, 0.2818568115234375, 0.28186767578125, 0.28210015869140626, 0.2811656494140625, 0.2817580261230469, 0.2824253540039062, 0.2816795654296875, 0.28214822387695315, 0.282633056640625, 0.28246231079101564, 0.28224920654296876, 0.28219390869140626, 0.28176177978515626, 0.28234295654296876, 0.281440673828125, 0.28233282470703125, 0.28235589599609373, 0.28176202392578126, 0.28263742065429687, 0.2827757263183594, 0.28194482421875, 0.2814791564941406, 0.28246823120117187, 0.28238861083984373, 0.28188467407226564, 0.2819947509765625, 0.2818296813964844, 0.28161227416992185, 0.28164312744140624, 0.2824798583984375, 0.28089599609375, 0.2816155395507812, 0.282393310546875, 0.28163201904296875, 0.28096578979492187, 0.2822208251953125, 0.2825495910644531, 0.28141635131835935, 0.2813686218261719, 0.28233929443359373, 0.28234951782226564, 0.2814049377441406, 0.2820491027832031, 0.2822655944824219, 0.28181671142578124, 0.28181951904296876, 0.282474365234375, 0.2828248291015625, 0.28128375244140624, 0.2819653930664062, 0.28253378295898435, 0.28245318603515623, 0.28302224731445313, 0.2817261962890625, 0.2820779418945312, 0.28135592651367186, 0.28173553466796875, 0.2820218811035156, 0.2824163818359375, 0.281559814453125, 0.2818511962890625, 0.281985595703125, 0.28193911743164063, 0.2824345397949219, 0.2821079406738281, 0.28175128173828123, 
0.28165960693359376, 0.28184490966796877, 0.2818196411132812, 0.2816590576171875, 0.28129541015625, 0.2822452087402344, 0.282148681640625, 0.28179647827148435, 0.2818431701660156, 0.281916259765625, 0.282071044921875, 0.28207308959960936, 0.28224102783203125, 0.28240692138671875, 0.28170367431640625, 0.28196322631835935, 0.2822586975097656, 0.2821107482910156, 0.2816573486328125, 0.28200115966796874, 0.28276107788085936, 0.28166796875, 0.28264389038085935, 0.28165985107421876, 0.2816119079589844, 0.2818493041992188, 0.28241342163085936, 0.28160269165039065, 0.2821709289550781, 0.28233157348632815, 0.281638916015625, 0.2820690002441406, 0.2820444030761719, 0.2819522705078125, 0.2817003479003906, 0.28188671875, 0.2821754760742187, 0.28198440551757814, 0.28190985107421873, 0.28162771606445314, 0.28219027709960937, 0.2819363708496094, 0.28173867797851565, 0.2817145080566406, 0.2827763061523437, 0.2823818664550781, 0.282061279296875, 0.2822955322265625, 0.28185458374023437, 0.2820845947265625, 0.28183645629882814, 0.2820997009277344, 0.28219384765625, 0.2820178527832031, 0.28301669311523436, 0.2816857604980469, 0.2826753845214844, 0.28165997314453123, 0.2818326110839844, 0.28254217529296877, 0.28154898071289064, 0.2824791564941406, 0.28230215454101565, 0.282519287109375, 0.282200439453125, 0.2817508544921875, 0.28248358154296876, 0.2819420166015625, 0.2820316467285156, 0.2820674133300781, 0.2824532775878906, 0.2819747619628906, 0.281317626953125, 0.2821370849609375, 0.28193997192382814, 0.28199856567382814, 0.282194580078125, 0.28177011108398436, 0.2821119995117187, 0.2824540100097656, 0.2815442199707031, 0.28249545288085937, 0.2822952880859375, 0.281781005859375, 0.28232345581054685, 0.2816731872558594, 0.2817425842285156, 0.2815733032226562, 0.282046875, 0.28263876342773436, 0.2819246826171875, 0.280864990234375, 0.2823441467285156, 0.2817774047851562, 0.28158438110351564, 0.281775390625, 0.2825755920410156, 0.281280517578125, 0.28094049072265626, 0.28242132568359374, 0.2824335327148437, 0.2814668884277344, 0.28139007568359375, 0.28251776123046873, 0.28208615112304686, 0.2814786376953125, 0.28204290771484375, 0.28198822021484377, 0.28178521728515626, 0.28226739501953124, 0.2823375244140625, 0.2817307739257813, 0.28193624877929685, 0.28203347778320315, 0.2822744140625, 0.2821096496582031, 0.2826099548339844, 0.2821754760742187, 0.28191094970703123, 0.2817907104492188, 0.2819154968261719, 0.282218505859375, 0.28213824462890624, 0.2821349182128906, 0.282040283203125, 0.28201541137695313, 0.2820816345214844, 0.28174261474609374, 0.2815003967285156, 0.2821997375488281, 0.28205908203125, 0.2816632385253906, 0.282668701171875, 0.28182794189453125, 0.2821672973632813, 0.2818334655761719, 0.28256256103515626, 0.28206491088867186, 0.2816860046386719, 0.2823475341796875, 0.2824228515625, 0.28201397705078124, 0.2820582275390625, 0.2820000305175781, 0.2821888427734375, 0.28197781372070313, 0.28243557739257813, 0.28195431518554687, 0.28218572998046876, 0.28233468627929686, 0.28162115478515626, 0.2822348937988281, 0.2820526123046875, 0.2816813659667969, 0.28182583618164064, 0.2823434143066406, 0.28173226928710937, 0.2815312194824219, 0.2821590270996094, 0.28229574584960937, 0.2820245666503906, 0.28214068603515624, 0.282492919921875, 0.28185305786132814, 0.28190194702148436, 0.2820603332519531, 0.28267156982421876, 0.28136856079101563, 0.282112060546875, 0.2824962463378906, 0.2821064453125, 0.2815693359375, 0.28216445922851563, 0.28250607299804686, 0.281864013671875, 0.28174533081054687, 0.2819934692382812, 
0.281740478515625, 0.28147732543945314, 0.2822243957519531, 0.28174221801757815, 0.2820280456542969, 0.28201895141601563, 0.2824937744140625, 0.2817577514648438, 0.28169625854492186, 0.28246426391601565, 0.282492919921875, 0.2821160888671875, 0.282071044921875, 0.2819317626953125, 0.281697998046875, 0.281837890625, 0.2820362243652344, 0.2827120666503906, 0.28151602172851564, 0.28276840209960935, 0.28217239379882814, 0.2814392395019531, 0.2822522888183594, 0.2821119995117187, 0.2827202453613281, 0.28199502563476564, 0.28225689697265627, 0.2823175354003906, 0.2820803833007812, 0.28190399169921876, 0.28221826171875, 0.2828515625, 0.2820765380859375, 0.2819029235839844, 0.2825469970703125, 0.281478759765625, 0.28158453369140624, 0.2819154052734375, 0.2817404479980469, 0.2814493103027344, 0.2822504272460937, 0.2821331787109375, 0.2818151550292969, 0.281635986328125, 0.2826691589355469, 0.28135467529296876, 0.28190548706054686, 0.28174935913085936, 0.2820091247558594, 0.2818586730957031, 0.28164913940429687, 0.2826322021484375, 0.2821663818359375, 0.2818118591308594, 0.2821775207519531, 0.28203826904296875, 0.2821048278808594, 0.2819526672363281, 0.28202249145507813, 0.28223480224609376, 0.2818842163085937, 0.28232101440429686, 0.28194351196289064, 0.28260369873046876, 0.2824259338378906, 0.28248236083984374, 0.28222836303710935, 0.2819715576171875, 0.2824027404785156, 0.2820341796875, 0.281385009765625, 0.28212225341796876, 0.2824920349121094, 0.28204327392578127, 0.28189816284179686, 0.282149658203125, 0.28204217529296877, 0.28256689453125, 0.2821114807128906, 0.2821842041015625, 0.2820157470703125, 0.2827670288085937, 0.28207037353515624, 0.28220645141601564, 0.28214492797851565, 0.28206887817382814, 0.28248272705078126, 0.28248028564453126, 0.2826987609863281, 0.2822668762207031, 0.282213134765625, 0.28295150756835935, 0.282881591796875, 0.2827792663574219, 0.2821653747558594, 0.2824528503417969, 0.2820559387207031, 0.281964599609375]",tokens/s,3.550757996585146,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,4372.512768,4566.482944,0.0,4188.012544,4187.049984,s,1,10.332669921875,10.332669921875,0.0,10.332669921875,10.332669921875,10.332669921875,10.332669921875,[10.332669921875],,kWh,0.00010267034029583305,1.1317863835509072e-05,3.084280245199992e-05,0.00014483100658334205,,MB,4080.443392,4962.844672,0.0,4555.014144,4514.269184,s,10,7.865827026367188,0.7865827026367188,0.002886280729514126,0.7850776062011718,0.7900420410156249,0.7905072570800781,0.7908794299316406,"[0.782828369140625, 0.7894422607421875, 0.7844773559570313, 0.7850126953125, 0.7840709228515625, 0.7897021484375, 0.7842396240234375, 0.7899386596679687, 0.7851425170898437, 
0.7909724731445312]",tokens/s,325.4584662767914,kWh,2.2918281938461133e-05,2.5274666805281998e-06,1.5180375392153848e-05,4.062612401114318e-05,tokens/kWh,6301364.115606568,MB,4093.394944,4979.621888,0.0,4571.79136,4514.271744,s,10,467.798828125,46.7798828125,0.03317416613981883,46.7930546875,46.8095484375,46.81011796875,46.81057359375,"[46.7034296875, 46.74363671875, 46.7610078125, 46.77698046875, 46.78384375, 46.80287109375, 46.802265625, 46.809421875, 46.80468359375, 46.8106875]",tokens/s,1.3467327451954592,kWh,0.0013654926656507058,0.000150622730472546,0.0009080636687580463,0.002424179064881298,tokens/kWh,25988.179220203292,,s,630,467.7878660888669,0.7425204223632813,0.0006844989959605742,0.7426236572265625,0.7431574462890624,0.7433347930908203,0.7435878228759765,"[0.7409111938476562, 0.740742431640625, 0.7414298706054687, 0.74062060546875, 0.7404442138671875, 0.7408536987304688, 0.7409601440429687, 0.7411483764648438, 0.74078662109375, 0.7409418334960938, 0.7408561401367187, 0.7414207153320312, 0.7407407836914063, 0.7410364379882812, 0.7412715454101563, 0.7415802612304687, 0.7406494750976562, 0.7412059936523437, 0.7412817993164063, 0.741195068359375, 0.7406961669921875, 0.7416878662109375, 0.740632080078125, 0.7417431030273437, 0.7411015625, 0.741113525390625, 0.740952392578125, 0.7412178955078125, 0.7411934204101562, 0.7409158935546875, 0.741591064453125, 0.7417518920898437, 0.740760498046875, 0.7420047607421875, 0.7417855834960938, 0.741658203125, 0.7413723754882813, 0.7413206787109375, 0.7414326171875, 0.74105517578125, 0.7418880004882813, 0.7414899291992187, 0.7414234619140625, 0.7416295166015625, 0.7411597290039063, 0.7413309326171875, 0.741670166015625, 0.7417576904296875, 0.7414681396484375, 0.7412572021484375, 0.7414517822265625, 0.7421171264648437, 0.7413455200195312, 0.741306396484375, 0.74177490234375, 0.7413662109375, 0.7415738525390625, 0.7421973266601563, 0.741670654296875, 0.7415838623046875, 0.742012939453125, 0.7419064331054688, 0.7414863891601563, 0.7411098022460938, 0.7416702880859375, 0.7419783935546875, 0.7411612548828125, 0.7413350219726562, 0.7418200073242187, 0.741941650390625, 0.7415685424804688, 0.7422476196289063, 0.7416410522460938, 0.7412612915039063, 0.7419465942382812, 0.7418907470703126, 0.7419454345703125, 0.7413104858398437, 0.7418941650390625, 0.741507080078125, 0.7417914428710938, 0.7418411865234374, 0.74210302734375, 0.7418203735351563, 0.7427645874023437, 0.7417232055664063, 0.7411324462890625, 0.7418697509765625, 0.7416817016601562, 0.742033203125, 0.7417816772460938, 0.7421992797851562, 0.7420682373046875, 0.7422382202148438, 0.7418982543945313, 0.742023193359375, 0.74232763671875, 0.7421692504882812, 0.7418585205078125, 0.7418699951171875, 0.74219482421875, 0.7417301025390625, 0.741923583984375, 0.7422916259765625, 0.74202734375, 0.7420128784179687, 0.7420436401367188, 0.74224853515625, 0.7418878173828125, 0.7422157592773437, 0.7422811889648437, 0.7418238525390625, 0.741875732421875, 0.7418477172851563, 0.7424307250976563, 0.7423569946289063, 0.7418634033203125, 0.7422015991210937, 0.7421802978515625, 0.741806396484375, 0.742371337890625, 0.742463623046875, 0.7422913208007812, 0.742451171875, 0.7423262329101562, 0.7423321533203125, 0.7417332763671876, 0.7420638427734375, 0.7420910034179687, 0.7422750854492187, 0.7414292602539062, 0.7421375732421875, 0.742060302734375, 0.7421511840820313, 0.7419559936523438, 0.7420770263671875, 0.7415540771484375, 0.741965087890625, 0.7424330444335937, 0.7415834350585937, 0.74231396484375, 0.741759033203125, 
0.7421541748046875, 0.742, 0.7415894165039062, 0.7419058227539063, 0.7423639526367187, 0.7421171875, 0.7419309692382813, 0.7422015380859375, 0.742751953125, 0.7422569580078126, 0.7420476684570313, 0.7418409423828125, 0.742561767578125, 0.7424368896484375, 0.742372802734375, 0.7426729736328125, 0.7423775024414062, 0.7418040161132813, 0.742319580078125, 0.7422195434570312, 0.7423568725585937, 0.7422083740234375, 0.7421634521484375, 0.742289794921875, 0.7424149169921875, 0.7425884399414062, 0.7427276611328125, 0.74241845703125, 0.7423897705078125, 0.7428956298828125, 0.7419310302734375, 0.742454345703125, 0.7419319458007813, 0.7426245727539063, 0.7416572875976563, 0.742635498046875, 0.7424710693359375, 0.7423289184570312, 0.742068115234375, 0.7431066284179687, 0.7425264892578125, 0.7424691162109375, 0.7425054931640624, 0.7422279663085938, 0.7425269775390625, 0.7424140625, 0.7426787719726563, 0.7426966552734375, 0.7415352172851563, 0.7422392578125, 0.7424102172851562, 0.742135009765625, 0.7422636108398437, 0.7420353393554687, 0.74172021484375, 0.74231103515625, 0.7420568237304688, 0.7420620727539062, 0.7426764526367188, 0.742227783203125, 0.7420909423828125, 0.7422996215820312, 0.742192626953125, 0.7425576171875, 0.7425132446289062, 0.7418695678710937, 0.7424423217773437, 0.7422299194335937, 0.7424633178710938, 0.7417919311523438, 0.742465576171875, 0.7422327270507812, 0.7426171264648438, 0.742260009765625, 0.7421241455078125, 0.7425425415039062, 0.7420343017578125, 0.742509765625, 0.7425892944335938, 0.7424696044921875, 0.7419692993164062, 0.7424620971679687, 0.742371337890625, 0.7421785888671875, 0.7423941040039063, 0.7422993774414063, 0.7428602905273437, 0.7424945068359375, 0.7421895141601562, 0.7425863647460937, 0.7418142700195313, 0.7424737548828125, 0.7430901489257813, 0.7421828002929688, 0.7430094604492188, 0.7428085327148437, 0.7429366455078125, 0.7423128662109375, 0.7424125366210937, 0.7427161254882813, 0.7429427490234375, 0.7427215576171875, 0.7429300537109375, 0.7427828369140625, 0.7426687622070313, 0.7426682739257813, 0.7428546752929688, 0.7472858276367188, 0.7425925903320313, 0.7426705932617188, 0.7421051025390625, 0.7418914184570312, 0.7425516967773438, 0.7425357055664062, 0.7420498046875, 0.7424694213867188, 0.7420654907226563, 0.7424951171875, 0.7422190551757812, 0.7421734008789063, 0.7426744384765624, 0.7417849731445313, 0.7421753540039062, 0.7425023803710937, 0.7420511474609375, 0.7422849731445312, 0.7427368774414063, 0.7424122924804688, 0.7424389038085938, 0.7426638793945313, 0.7428917846679688, 0.742703125, 0.7427176513671875, 0.7426803588867188, 0.7424163818359375, 0.742846435546875, 0.7427088623046875, 0.7428919067382812, 0.7428956298828125, 0.7427333984375, 0.7425970458984374, 0.7429732666015625, 0.7428092041015625, 0.7422715454101563, 0.7426232299804687, 0.7429365234375, 0.7422946166992187, 0.7425728759765625, 0.7430309448242187, 0.7424307250976563, 0.7419351196289062, 0.7428231201171875, 0.7425114135742188, 0.74288330078125, 0.742803466796875, 0.7427543334960938, 0.7422457885742187, 0.7433970947265625, 0.7424357299804687, 0.7428424072265625, 0.7427125244140625, 0.74261572265625, 0.7431414184570313, 0.7426329345703125, 0.7430205078125, 0.7426730346679687, 0.7428487548828125, 0.7423252563476562, 0.7427470703125, 0.7431675415039063, 0.742809814453125, 0.7430718383789062, 0.742619140625, 0.7426431274414063, 0.7423594970703125, 0.7428670654296875, 0.7424796752929688, 0.7430535278320313, 0.7420407104492187, 0.7421405639648437, 0.7426275634765624, 0.7426573486328125, 
0.7423167114257813, 0.7425001831054687, 0.7434381713867187, 0.7422440185546875, 0.7427672119140625, 0.74290087890625, 0.742628173828125, 0.742381591796875, 0.742920166015625, 0.7431004028320313, 0.742475341796875, 0.7425416870117187, 0.7425712890625, 0.7420874633789063, 0.743067626953125, 0.7426334838867188, 0.7430707397460937, 0.7431751708984375, 0.7429895629882812, 0.7426439819335937, 0.74296728515625, 0.7430427856445313, 0.7428402099609375, 0.74272802734375, 0.7427555541992188, 0.7429782104492187, 0.7431640014648437, 0.7428445434570312, 0.7430631713867187, 0.7427684326171875, 0.7425909423828125, 0.742961181640625, 0.7423259887695313, 0.74312060546875, 0.7428482055664063, 0.7428569946289062, 0.7432578125, 0.7426240844726563, 0.743041015625, 0.742920166015625, 0.7431248779296875, 0.7429837646484375, 0.7431264038085937, 0.7429137573242187, 0.743111572265625, 0.7432763061523437, 0.7424658203125, 0.7430245971679688, 0.7433277587890625, 0.7429918823242188, 0.7425571899414063, 0.7433282470703125, 0.7428646850585937, 0.743037109375, 0.7425703125, 0.7429478759765625, 0.7422984619140625, 0.742576171875, 0.7425693359375, 0.7424683227539063, 0.7430922241210938, 0.742842529296875, 0.7428029174804688, 0.7423410034179687, 0.7428628540039063, 0.7429490356445313, 0.742640625, 0.7428678588867188, 0.7427801513671874, 0.7429978637695313, 0.7429356689453125, 0.7426107788085937, 0.7426436767578125, 0.742823974609375, 0.7427727661132812, 0.7442432250976563, 0.7429447631835937, 0.7430430908203125, 0.7428211059570312, 0.7427387084960938, 0.7429916381835937, 0.742849853515625, 0.7425873413085937, 0.742582275390625, 0.7429816284179688, 0.743125, 0.742664306640625, 0.7427764892578125, 0.7429058837890625, 0.743031005859375, 0.7429959716796875, 0.742522705078125, 0.7427412719726563, 0.7431339111328125, 0.7428846435546875, 0.7428405151367188, 0.7426342163085937, 0.7431577758789063, 0.742472900390625, 0.7430396728515625, 0.742861083984375, 0.7432180786132813, 0.7428720703125, 0.7430491943359375, 0.7427246704101562, 0.743373779296875, 0.7430021362304687, 0.7431200561523438, 0.7430538330078125, 0.7428633422851563, 0.7426433715820312, 0.7431026611328125, 0.742940673828125, 0.7435852661132812, 0.7429833984375, 0.7430778198242187, 0.7431577758789063, 0.7431532592773438, 0.7425741577148437, 0.7428815307617187, 0.7427513427734375, 0.7430576171875, 0.7426541137695313, 0.7425327758789062, 0.7432507934570313, 0.7426170654296875, 0.7427522583007813, 0.7428915405273437, 0.7424327392578125, 0.7430390014648437, 0.7426314086914062, 0.7429222412109375, 0.7431489868164063, 0.7427958374023438, 0.7436157836914062, 0.7425665893554687, 0.7428231201171875, 0.742515380859375, 0.7431558227539062, 0.7427317504882812, 0.7427861938476562, 0.7427708740234376, 0.7428307495117188, 0.7424716796875, 0.7424383544921875, 0.7432681274414062, 0.7425994262695312, 0.7430263671875, 0.7423818359375, 0.7434302368164063, 0.7426638793945313, 0.7433649291992187, 0.7426434326171875, 0.7430082397460938, 0.7428876953125, 0.7429345092773437, 0.7427215576171875, 0.7425567016601563, 0.7435888671875, 0.742903564453125, 0.7430648193359375, 0.7429068603515625, 0.7430791015625, 0.7429815673828125, 0.7432242431640625, 0.743103759765625, 0.7426015014648437, 0.7428159790039063, 0.7433822631835938, 0.7434183959960937, 0.7428648681640625, 0.7430839233398437, 0.7431104736328125, 0.7426787719726563, 0.7426023559570313, 0.7431417846679688, 0.743210693359375, 0.7427914428710938, 0.7485798950195313, 0.7432222290039062, 0.7423714599609375, 0.7427328491210937, 0.7425728759765625, 
0.742863037109375, 0.742371337890625, 0.742516845703125, 0.7428583984375, 0.7426624145507813, 0.7424384155273438, 0.7427845458984375, 0.7433097534179688, 0.74272412109375, 0.7423936157226563, 0.7428265380859375, 0.7431574096679687, 0.7425597534179688, 0.7424810180664062, 0.7430985107421875, 0.7432814331054688, 0.7429212646484376, 0.742916259765625, 0.7430966796875, 0.7423143310546875, 0.7430452270507812, 0.7427455444335938, 0.7430186157226563, 0.742625732421875, 0.7432007446289063, 0.743462890625, 0.742307861328125, 0.742781005859375, 0.7429815063476563, 0.7430794067382812, 0.7428836669921876, 0.74315185546875, 0.7430922241210938, 0.7428587646484375, 0.7430697021484375, 0.7426969604492187, 0.7434301147460938, 0.74296630859375, 0.743320556640625, 0.7429058837890625, 0.7432684326171874, 0.7429072265625, 0.7427815551757813, 0.7426837768554687, 0.7428739624023437, 0.7429467163085938, 0.7428485717773438, 0.7434170532226563, 0.7433401489257813, 0.7428226928710937, 0.7428097534179687, 0.7429998168945312, 0.743096435546875, 0.7431119384765625, 0.7429306640625, 0.7432291259765625, 0.7430718383789062, 0.7435374145507813, 0.7433564453125, 0.7432493896484375, 0.7426903686523437, 0.7428428344726562, 0.7428362426757813, 0.7431109008789063, 0.7427963256835938, 0.7429578247070312, 0.7426099243164063, 0.7430643920898438, 0.7425025634765625, 0.742628662109375, 0.7431317138671875, 0.74294287109375, 0.7432335205078126, 0.742649658203125, 0.7429142456054687, 0.7428546752929688, 0.7432407836914062, 0.7422105102539063, 0.7429706420898438, 0.7433489990234375, 0.7431597900390625, 0.742063720703125, 0.7429671020507812, 0.7425288696289063, 0.7424314575195312, 0.7430123291015625, 0.7431261596679688, 0.743260009765625, 0.743404541015625, 0.7429522705078125, 0.7430451049804687, 0.7432089233398438, 0.7428690795898437, 0.7429454956054687, 0.74342822265625, 0.74301123046875, 0.7425867919921875, 0.7429883422851562, 0.7435775756835937, 0.742518798828125, 0.7427189331054688, 0.7433569946289063, 0.7431270141601563, 0.7429991455078125, 0.7428924560546875, 0.7433113403320313, 0.743530517578125, 0.7433584594726562, 0.7430082397460938, 0.743664794921875, 0.7432050170898438, 0.7432547607421875, 0.7431248779296875, 0.7426888427734375, 0.7428217163085937, 0.7434033203125, 0.7434325561523437, 0.7432396850585937, 0.743583740234375, 0.743044921875, 0.7427442626953125, 0.743669189453125, 0.7433466796875]",tokens/s,1.3467643042291242,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1474.31424,1326.383104,0.0,947.912704,945.250304,s,1,8.1235908203125,8.1235908203125,0.0,8.1235908203125,8.1235908203125,8.1235908203125,8.1235908203125,[8.1235908203125],,kWh,3.681505969166968e-05,4.053782199671776e-06,1.08444531200097e-05,5.1713295011351154e-05,,MB,1517.285376,1519.321088,0.0,1111.49056,1098.82368,s,10,1.6054995269775392,0.1605499526977539,0.000562217233122397,0.16068774414062498,0.16113861389160156,0.1612920928955078,0.1614148760986328,"[0.15935037231445312, 0.16074298095703124, 0.16031491088867186, 0.16026255798339845, 0.16086659240722656, 0.16110450744628907, 0.16076307678222657, 0.16063250732421874, 0.16001644897460937, 0.16144557189941405]",tokens/s,1594.5193112696659,kWh,4.704023410647971e-06,5.185785808379412e-07,3.1308005646033637e-06,8.353402556089277e-06,tokens/kWh,30646194.563362308,MB,1527.250944,1653.538816,0.0,1245.708288,1164.242432,s,10,88.924287109375,8.8924287109375,0.01463182725578284,8.894529296875,8.90892509765625,8.910288720703125,8.911379619140625,"[8.863833984375, 8.874208984375, 8.8817548828125, 8.88940234375, 8.8925712890625, 8.8964873046875, 8.901982421875, 8.903771484375, 8.9086220703125, 8.91165234375]",tokens/s,7.084678668552195,kWh,0.00026004556043518647,2.8684570792508123e-05,0.00017283075687119642,0.00046156088809889105,tokens/kWh,136493.36766702388,,s,630,88.92091477966304,0.1411443091740684,0.0003321701887789103,0.14115912628173827,0.14155792083740235,0.1416606658935547,0.14184947067260742,"[0.14091314697265625, 0.1400463409423828, 0.14019378662109375, 0.1403412780761719, 0.14071157836914064, 0.14059756469726561, 0.140287841796875, 0.1404704284667969, 0.14042112731933593, 0.14061305236816407, 0.14044627380371094, 0.14091673278808595, 0.14092445373535156, 0.14058134460449218, 0.14066204833984375, 0.140548828125, 0.14014402770996093, 0.14071049499511717, 0.14068080139160155, 0.14068368530273437, 0.14069728088378905, 0.14054237365722655, 0.14061270141601562, 0.14057350158691406, 0.14058090209960938, 0.1405625, 0.1407724151611328, 0.1408070068359375, 0.14080995178222655, 0.14059344482421876, 0.14050918579101562, 0.1408125762939453, 0.14064405822753906, 0.14094857788085938, 0.1414276123046875, 0.14073036193847657, 0.14074406433105469, 0.14078016662597656, 0.14097613525390626, 0.14056646728515626, 0.14067132568359375, 0.14057647705078125, 0.140930908203125, 0.1405853729248047, 0.14074649047851562, 0.14074266052246093, 0.1403574981689453, 0.14065061950683594, 0.14066259765625, 0.1412671356201172, 0.14080819702148437, 0.1402882537841797, 0.14103030395507812, 0.1404550018310547, 0.1405806121826172, 0.1409269714355469, 0.14149609375, 0.14093927001953124, 0.14108905029296875, 0.14070346069335937, 0.14065397644042968, 0.14053817749023437, 0.14121200561523437, 0.14041920471191408, 0.14058505249023437, 0.14048573303222656, 0.14045890808105468, 0.1407631378173828, 0.14065078735351563, 0.14117654418945313, 0.1406402587890625, 0.1406543731689453, 0.14084451293945313, 0.14039251708984374, 0.14085804748535155, 0.14065565490722656, 0.1406164093017578, 0.14096954345703125, 0.14056108093261718, 0.14060304260253906, 0.14066943359375, 0.14078140258789062, 0.14096771240234374, 0.14086781311035157, 0.14094950866699218, 0.14097613525390626, 0.14097613525390626, 0.14083065795898436, 0.14094090270996093, 0.14127740478515624, 0.14087564086914062, 0.14126527404785155, 0.14073849487304688, 0.1407611541748047, 0.1407836151123047, 0.14070950317382813, 0.14091065979003906, 0.1409438018798828, 
0.14117808532714843, 0.14086611938476562, 0.14110723876953124, 0.14052120971679688, 0.14061395263671875, 0.14123526000976563, 0.14069036865234374, 0.14108262634277344, 0.1409551696777344, 0.14087590026855468, 0.140957275390625, 0.14098713684082032, 0.14085673522949219, 0.140778076171875, 0.14141439819335938, 0.14121369934082031, 0.14089631652832033, 0.14098220825195312, 0.1410248565673828, 0.1407983703613281, 0.14067916870117186, 0.141046875, 0.14133529663085936, 0.14077967834472657, 0.1410885772705078, 0.14058709716796874, 0.1410274200439453, 0.14074879455566405, 0.1414819793701172, 0.1406320343017578, 0.14080975341796875, 0.1405788116455078, 0.14077548217773436, 0.140767578125, 0.14120150756835936, 0.14111439514160157, 0.14093606567382813, 0.14092707824707032, 0.14038761901855468, 0.14095750427246093, 0.1405572204589844, 0.14129151916503907, 0.14103321838378907, 0.14111318969726563, 0.1406754913330078, 0.14069325256347656, 0.1408350067138672, 0.1406519317626953, 0.14105410766601562, 0.14085699462890625, 0.14122685241699218, 0.14077337646484375, 0.14075820922851562, 0.14098915100097656, 0.1408527069091797, 0.14077719116210938, 0.14111036682128905, 0.14086038208007812, 0.14077772521972656, 0.14123190307617187, 0.1409031982421875, 0.1410068817138672, 0.14092643737792968, 0.14090847778320312, 0.14137834167480468, 0.14119949340820312, 0.14095872497558593, 0.14095021057128906, 0.1410662384033203, 0.14060748291015626, 0.14102857971191407, 0.14121244812011718, 0.14095578002929687, 0.14115213012695313, 0.14081622314453124, 0.14129078674316406, 0.1409135284423828, 0.14096298217773437, 0.1407189483642578, 0.14160211181640625, 0.1412403564453125, 0.14106655883789063, 0.14107887268066407, 0.14084095764160157, 0.14129766845703126, 0.14104098510742188, 0.1412626953125, 0.14095616149902343, 0.14118739318847656, 0.14098588562011719, 0.1411047668457031, 0.14146517944335937, 0.140505126953125, 0.14051954650878906, 0.1408916473388672, 0.14044364929199218, 0.14086749267578125, 0.14098722839355468, 0.14116181945800782, 0.14096646118164063, 0.14073251342773438, 0.1410846710205078, 0.14103453063964844, 0.14096015930175781, 0.14124691772460937, 0.14110508728027343, 0.14089360046386717, 0.1405447998046875, 0.1410396728515625, 0.14141229248046874, 0.14115225219726563, 0.141127685546875, 0.14089010620117187, 0.1409697570800781, 0.14090229797363282, 0.14100691223144532, 0.14112384033203124, 0.140943359375, 0.14119081115722656, 0.14099020385742186, 0.1411037139892578, 0.14079994201660156, 0.14107449340820313, 0.14137962341308594, 0.14098835754394531, 0.14149020385742186, 0.14116455078125, 0.14111485290527342, 0.1410089569091797, 0.141339111328125, 0.14120755004882812, 0.14123622131347657, 0.14131132507324218, 0.141191650390625, 0.14100679016113282, 0.14122575378417968, 0.1411846466064453, 0.14110092163085938, 0.1412843780517578, 0.14137338256835938, 0.14112127685546874, 0.14088943481445312, 0.14117100524902343, 0.14134538269042968, 0.14094744873046874, 0.14173388671875, 0.1417090606689453, 0.1409927978515625, 0.14111946105957032, 0.14147584533691407, 0.14122950744628907, 0.1409974060058594, 0.14135621643066407, 0.14124911499023438, 0.14080812072753907, 0.14106434631347656, 0.14090316772460937, 0.14094137573242188, 0.14078764343261718, 0.14124447631835937, 0.14094540405273437, 0.14107192993164064, 0.14081272888183594, 0.14098629760742187, 0.14104786682128906, 0.14087948608398437, 0.1411075897216797, 0.14107752990722655, 0.14116117858886718, 0.14137779235839842, 0.14069480895996095, 0.14110794067382812, 
0.1410867156982422, 0.14105746459960938, 0.1413347473144531, 0.14127040100097657, 0.1412352294921875, 0.14103955078125, 0.14105142211914062, 0.14108694458007812, 0.14104156494140624, 0.1414041290283203, 0.14131033325195314, 0.14109660339355468, 0.14108026123046874, 0.14129379272460937, 0.14114370727539063, 0.14088800048828126, 0.14112380981445313, 0.1410167999267578, 0.14114044189453126, 0.14090614318847655, 0.14132540893554688, 0.14110432434082032, 0.1412960968017578, 0.14134803771972657, 0.14135177612304686, 0.14112477111816407, 0.14128215026855467, 0.14144102478027343, 0.1413396759033203, 0.1413314208984375, 0.14117231750488282, 0.1411895294189453, 0.1410109405517578, 0.1414615020751953, 0.1413253479003906, 0.14125106811523438, 0.1414619903564453, 0.1411438446044922, 0.14117683410644533, 0.14145968627929686, 0.14107606506347656, 0.14098602294921875, 0.14136595153808593, 0.14145738220214843, 0.141125732421875, 0.14125836181640625, 0.14098045349121094, 0.1406304931640625, 0.141155517578125, 0.14097430419921875, 0.1408042297363281, 0.1411931915283203, 0.14117324829101563, 0.14086326599121093, 0.140996826171875, 0.1409129638671875, 0.14099130249023437, 0.14105186462402344, 0.1415013427734375, 0.14101539611816405, 0.14118159484863282, 0.14115927124023436, 0.1410498504638672, 0.14098591613769532, 0.1414351043701172, 0.14136767578125, 0.14105401611328125, 0.14114297485351562, 0.14099737548828126, 0.14109504699707032, 0.14125637817382813, 0.14104386901855467, 0.14164405822753906, 0.1409231719970703, 0.14128099060058594, 0.14119731140136718, 0.1410287628173828, 0.14150083923339843, 0.14118896484375, 0.14140623474121095, 0.1410780487060547, 0.14124520874023438, 0.140906494140625, 0.14107034301757812, 0.14149337768554687, 0.14125555419921876, 0.141459716796875, 0.14138064575195314, 0.14129020690917968, 0.14112358093261718, 0.1411494140625, 0.14130050659179688, 0.14126838684082033, 0.1414068145751953, 0.14136642456054688, 0.141455810546875, 0.1411219482421875, 0.14143618774414063, 0.14122000122070313, 0.14144940185546875, 0.1413504638671875, 0.14166099548339844, 0.14205746459960938, 0.14088552856445313, 0.14123802185058593, 0.14116732788085937, 0.14148809814453125, 0.14142057800292968, 0.14111001586914063, 0.14105331420898437, 0.14058790588378905, 0.14132199096679687, 0.14124986267089842, 0.1409297637939453, 0.14166026306152343, 0.1415801544189453, 0.1410846710205078, 0.14109933471679686, 0.14105158996582032, 0.14124850463867186, 0.1412460174560547, 0.14157868957519532, 0.1410190734863281, 0.14139808654785158, 0.14098136901855468, 0.1408275146484375, 0.14113154602050781, 0.1414126434326172, 0.14120498657226563, 0.14177325439453126, 0.1409249267578125, 0.1407422332763672, 0.141103515625, 0.14103753662109375, 0.14172163391113282, 0.14125465393066405, 0.1415202178955078, 0.14110992431640626, 0.14101475524902343, 0.14113821411132813, 0.14140406799316407, 0.14150869750976564, 0.14212300109863282, 0.14134259033203125, 0.1410952911376953, 0.1411822052001953, 0.1411012725830078, 0.14132048034667968, 0.14128128051757813, 0.141540771484375, 0.1415870361328125, 0.1413233642578125, 0.14125289916992187, 0.14118115234375, 0.1415970916748047, 0.14127513122558594, 0.1414757080078125, 0.14155789184570314, 0.14129971313476564, 0.1411746520996094, 0.1413448944091797, 0.14137753295898436, 0.14144511413574218, 0.14146560668945313, 0.1413017578125, 0.141461181640625, 0.14157037353515625, 0.14140316772460937, 0.14147273254394532, 0.14152499389648437, 0.1415355224609375, 0.14115440368652343, 0.14105430603027344, 
0.1410768585205078, 0.14114201354980468, 0.14118911743164062, 0.14103330993652344, 0.14093959045410157, 0.14133859252929687, 0.14137242126464844, 0.1411961669921875, 0.14122125244140624, 0.14143869018554686, 0.1410592041015625, 0.14174595642089843, 0.14114714050292967, 0.1411287078857422, 0.14108038330078124, 0.14141629028320313, 0.14125823974609375, 0.1409291229248047, 0.14135760498046876, 0.1410474853515625, 0.14117123413085939, 0.14119436645507813, 0.14120640563964842, 0.14140165710449218, 0.1414405059814453, 0.14144607543945312, 0.14124832153320313, 0.14134471130371093, 0.14116685485839844, 0.14116864013671876, 0.14139974975585937, 0.14128477478027343, 0.14142477416992189, 0.14146409606933594, 0.14134707641601563, 0.14145542907714845, 0.14159353637695313, 0.14130476379394533, 0.14162358093261718, 0.14142031860351562, 0.14124237060546874, 0.1410867156982422, 0.1413605499267578, 0.14173654174804687, 0.14172808837890624, 0.14161404418945311, 0.14129740905761717, 0.1413477783203125, 0.1413305206298828, 0.14149746704101562, 0.14171420288085937, 0.14129270935058594, 0.1413824920654297, 0.1411788787841797, 0.14122189331054688, 0.14151065063476562, 0.1415146942138672, 0.14180972290039062, 0.14133485412597657, 0.14142022705078125, 0.14140211486816406, 0.14115020751953125, 0.14143775939941405, 0.14173721313476562, 0.14108937072753908, 0.14117056274414064, 0.1409701690673828, 0.14117269897460938, 0.14116358947753907, 0.14133544921875, 0.14150253295898438, 0.14108262634277344, 0.1408184356689453, 0.14107852172851562, 0.14129971313476564, 0.14138674926757813, 0.14119778442382813, 0.14169346618652343, 0.14154339599609375, 0.14124797058105468, 0.1411589813232422, 0.14185664367675782, 0.1412752685546875, 0.14132633972167968, 0.1414607391357422, 0.14117964172363281, 0.141127685546875, 0.14095542907714845, 0.14141658020019532, 0.14147596740722657, 0.14144610595703125, 0.14137855529785157, 0.14123519897460937, 0.14108924865722655, 0.14104835510253907, 0.14133436584472656, 0.1416848907470703, 0.14157005310058593, 0.1415720977783203, 0.1416171569824219, 0.14143043518066406, 0.1413410186767578, 0.14120355224609374, 0.14171720886230468, 0.14160914611816405, 0.1415068817138672, 0.14204444885253906, 0.1414127655029297, 0.1418275146484375, 0.14153298950195312, 0.14173846435546875, 0.14175830078125, 0.14171180725097657, 0.14144447326660156, 0.141304443359375, 0.1413570556640625, 0.14150656127929687, 0.14165196228027344, 0.14159461975097656, 0.14157618713378906, 0.1415265350341797, 0.14148483276367188, 0.14153494262695313, 0.1411744384765625, 0.1412696990966797, 0.14068531799316406, 0.14130989074707032, 0.14100486755371094, 0.14111669921875, 0.14115708923339843, 0.14146931457519532, 0.14154118347167968, 0.14108285522460937, 0.14133436584472656, 0.14112409973144532, 0.14095974731445313, 0.14117263793945312, 0.14198179626464844, 0.14138983154296875, 0.14141007995605467, 0.14138316345214844, 0.14127381896972657, 0.14123008728027345, 0.14127740478515624, 0.14158416748046876, 0.14147789001464844, 0.14150825500488282, 0.14162281799316406, 0.14142886352539064, 0.14126559448242187, 0.14148812866210939, 0.14190751647949218, 0.1417527618408203, 0.14164787292480469, 0.14117225646972656, 0.1413586883544922, 0.14116543579101562, 0.14146488952636718, 0.14179420471191406, 0.14158210754394532, 0.1416592254638672, 0.14134701538085936, 0.14158717346191407, 0.14171340942382812, 0.1413324737548828, 0.14132415771484375, 0.14199411010742188, 0.14161509704589845, 0.14164492797851563, 0.1412657928466797, 0.1414632568359375, 
0.1413104705810547, 0.14140599060058595, 0.14164178466796876, 0.14155564880371094, 0.14166639709472656, 0.141671875, 0.1415581817626953, 0.14161517333984375, 0.14151609802246093, 0.14178134155273436, 0.14151641845703125, 0.1415933074951172, 0.1413324737548828, 0.1413324737548828, 0.14165536499023437, 0.1418319091796875]",tokens/s,7.084947355310901,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,11138.854912,12227.3792,0.0,11848.9088,11814.752256,s,1,16.331890625,16.331890625,0.0,16.331890625,16.331890625,16.331890625,16.331890625,[16.331890625],,kWh,0.000269950213837501,2.977020798288645e-05,8.404562279201722e-05,0.0003837660446124047,,MB,2093.58848,14033.027072,0.0,13625.196544,13298.00192,s,10,23.36663818359375,2.336663818359375,0.0021511089569970668,2.337625244140625,2.338669384765625,2.3388350585937503,2.33896759765625,"[2.338632568359375, 2.338152099609375, 2.339000732421875, 2.338243408203125, 2.337871337890625, 2.33632177734375, 2.337379150390625, 2.3351865234375, 2.33296484375, 2.3328857421875]",tokens/s,109.55790815460284,kWh,6.795378911708592e-05,7.494725000763435e-06,4.506350827299943e-05,0.00012051202239084878,tokens/kWh,2124269.387578045,MB,2099.965952,14184.022016,0.0,13776.191488,13689.859584,s,10,1365.8407656250001,136.5840765625,0.14099416845937957,136.63813281249998,136.729778125,136.73538906250002,136.73987781250003,"[136.741, 136.71475, 136.72853125, 136.681078125, 136.67503125, 136.601234375, 136.51703125, 136.45, 136.38490625, 136.347203125]",tokens/s,0.46125435398885317,kWh,0.0039742203255445805,0.00043838653352902454,0.0026434413647514004,0.007056048223825005,tokens/kWh,8928.510407182053,,s,630,1365.835356933595,2.167992630053323,0.00245803797327439,2.168363525390625,2.170982055664062,2.1715647216796876,2.172534157714844,"[2.169362548828125, 2.169524169921875, 2.169290283203125, 2.169293212890625, 2.17150390625, 2.17208203125, 2.170980224609375, 2.17135791015625, 2.16968408203125, 2.171004150390625, 2.171822998046875, 2.1700087890625, 2.169798583984375, 2.170569580078125, 2.1727841796875, 2.170962158203125, 2.171938720703125, 2.170955810546875, 2.1710234375, 2.17062109375, 2.170538818359375, 2.170964111328125, 2.17099853515625, 2.170173828125, 2.16871630859375, 2.170280517578125, 2.170905029296875, 2.170300537109375, 2.169556884765625, 2.1686904296875, 2.17009716796875, 2.168494873046875, 2.171104736328125, 2.1698603515625, 2.170408447265625, 2.169908203125, 2.169794677734375, 2.17245068359375, 2.171545654296875, 2.17277783203125, 2.17004931640625, 2.170314697265625, 2.170281494140625, 2.1694814453125, 2.170204345703125, 2.16982763671875, 2.17168896484375, 2.169891845703125, 2.171984375, 2.170481201171875, 2.170078857421875, 2.17049658203125, 2.17175439453125, 2.170583984375, 2.17089013671875, 2.168748046875, 2.1698232421875, 2.170303955078125, 2.170503662109375, 2.170760498046875, 2.169337646484375, 
2.17056982421875, 2.170290283203125, 2.16931982421875, 2.1692529296875, 2.1683291015625, 2.17041455078125, 2.1696865234375, 2.16855517578125, 2.169948486328125, 2.168610107421875, 2.167562255859375, 2.168133544921875, 2.1682509765625, 2.170392333984375, 2.169739501953125, 2.167556396484375, 2.168411865234375, 2.168475341796875, 2.168783447265625, 2.16992578125, 2.170560546875, 2.169288818359375, 2.16925390625, 2.167908203125, 2.17023095703125, 2.16987451171875, 2.1698251953125, 2.1699169921875, 2.17007080078125, 2.168732421875, 2.17103369140625, 2.170037841796875, 2.1694931640625, 2.1693408203125, 2.16977001953125, 2.16997216796875, 2.171922607421875, 2.16950390625, 2.169200439453125, 2.170351806640625, 2.170050537109375, 2.17153125, 2.17074072265625, 2.169933349609375, 2.1697294921875, 2.16966552734375, 2.17122216796875, 2.170271484375, 2.171137939453125, 2.170353515625, 2.1701796875, 2.17076318359375, 2.172542724609375, 2.16906591796875, 2.170171142578125, 2.17135302734375, 2.17271728515625, 2.17183642578125, 2.171159912109375, 2.17287255859375, 2.17251318359375, 2.171107421875, 2.17227880859375, 2.172880615234375, 2.170507080078125, 2.17068896484375, 2.17104150390625, 2.17034228515625, 2.171580322265625, 2.171598876953125, 2.1708017578125, 2.17172216796875, 2.1705146484375, 2.16976220703125, 2.168747802734375, 2.168450927734375, 2.1688330078125, 2.1695283203125, 2.16977197265625, 2.169739013671875, 2.170224853515625, 2.170828857421875, 2.169964599609375, 2.17046435546875, 2.16966552734375, 2.171493896484375, 2.1710830078125, 2.1718798828125, 2.169415771484375, 2.16923876953125, 2.169569580078125, 2.169186767578125, 2.169117919921875, 2.170444580078125, 2.170138671875, 2.171592529296875, 2.1686962890625, 2.169596435546875, 2.168596435546875, 2.168890625, 2.17133349609375, 2.169258056640625, 2.169445556640625, 2.17002001953125, 2.169176513671875, 2.171047607421875, 2.1707880859375, 2.16980419921875, 2.17070458984375, 2.171602294921875, 2.171460205078125, 2.171439208984375, 2.171408447265625, 2.1716806640625, 2.17069482421875, 2.170970947265625, 2.1697451171875, 2.1705810546875, 2.168915283203125, 2.169745849609375, 2.16980859375, 2.17103662109375, 2.1712587890625, 2.171217041015625, 2.16958056640625, 2.1689609375, 2.17033935546875, 2.172760009765625, 2.16794189453125, 2.168345947265625, 2.168537109375, 2.169506103515625, 2.16912109375, 2.16852880859375, 2.169100341796875, 2.16913037109375, 2.1684619140625, 2.168895263671875, 2.169536865234375, 2.16985400390625, 2.170060791015625, 2.16954248046875, 2.170759521484375, 2.169196533203125, 2.1683466796875, 2.1684892578125, 2.1680517578125, 2.169093017578125, 2.16945263671875, 2.16905859375, 2.170333984375, 2.168133544921875, 2.168465087890625, 2.1704296875, 2.170662841796875, 2.169109619140625, 2.169588623046875, 2.167689208984375, 2.168756103515625, 2.169784423828125, 2.16871533203125, 2.169174072265625, 2.169288818359375, 2.169388427734375, 2.171112060546875, 2.1708349609375, 2.16998095703125, 2.169499755859375, 2.170353759765625, 2.17191015625, 2.1708798828125, 2.171993896484375, 2.17151513671875, 2.17060546875, 2.168620849609375, 2.169801025390625, 2.170787841796875, 2.169780029296875, 2.16985986328125, 2.169104736328125, 2.1704267578125, 2.169712890625, 2.170366455078125, 2.16974951171875, 2.16888330078125, 2.169206787109375, 2.170090576171875, 2.168535888671875, 2.169146484375, 2.169555908203125, 2.169720947265625, 2.168891357421875, 2.16675927734375, 2.16760546875, 2.16756640625, 2.168850341796875, 2.168493408203125, 2.168814208984375, 
2.168524658203125, 2.168668212890625, 2.167869140625, 2.169588134765625, 2.167732177734375, 2.167242431640625, 2.168169921875, 2.16750537109375, 2.167734619140625, 2.169392333984375, 2.1687666015625, 2.16872802734375, 2.167601318359375, 2.16911669921875, 2.1690546875, 2.16907958984375, 2.16874853515625, 2.167593017578125, 2.169420166015625, 2.16717626953125, 2.168871337890625, 2.16750537109375, 2.168604736328125, 2.16865966796875, 2.1699580078125, 2.169855712890625, 2.17080419921875, 2.16951708984375, 2.169409423828125, 2.169374755859375, 2.17058935546875, 2.1702998046875, 2.17072021484375, 2.1687197265625, 2.170218505859375, 2.168195068359375, 2.170111572265625, 2.170776123046875, 2.170766845703125, 2.170847900390625, 2.171146240234375, 2.170884033203125, 2.17183544921875, 2.171052978515625, 2.168711181640625, 2.17054833984375, 2.169819091796875, 2.170898193359375, 2.1709560546875, 2.17090869140625, 2.17165576171875, 2.17193115234375, 2.170635498046875, 2.17216796875, 2.171198486328125, 2.171590576171875, 2.166392822265625, 2.1678125, 2.167912353515625, 2.16743115234375, 2.167509033203125, 2.16749072265625, 2.168342529296875, 2.167932373046875, 2.167357666015625, 2.16689013671875, 2.166894775390625, 2.1672587890625, 2.16954345703125, 2.168363037109375, 2.166921142578125, 2.167406005859375, 2.167665283203125, 2.16841015625, 2.168255859375, 2.16693408203125, 2.16949755859375, 2.167058349609375, 2.1681904296875, 2.16927294921875, 2.168059814453125, 2.167815673828125, 2.168355224609375, 2.168860595703125, 2.1682236328125, 2.169315185546875, 2.16764208984375, 2.167444580078125, 2.168672119140625, 2.1676962890625, 2.171187255859375, 2.1680087890625, 2.1676337890625, 2.167380126953125, 2.1674638671875, 2.168512451171875, 2.167825927734375, 2.16818115234375, 2.167401611328125, 2.1679296875, 2.168057373046875, 2.169559326171875, 2.16978173828125, 2.168203857421875, 2.168690673828125, 2.167989501953125, 2.170017578125, 2.16817041015625, 2.168364013671875, 2.168296875, 2.16830615234375, 2.16907568359375, 2.169552978515625, 2.168143798828125, 2.169496826171875, 2.168421142578125, 2.169944091796875, 2.170005615234375, 2.170220458984375, 2.170427490234375, 2.16810498046875, 2.16730615234375, 2.167529541015625, 2.16633349609375, 2.16644921875, 2.166615966796875, 2.16589404296875, 2.16842626953125, 2.166503173828125, 2.165834228515625, 2.166220703125, 2.16660986328125, 2.16643994140625, 2.168268798828125, 2.1664296875, 2.16646240234375, 2.166199951171875, 2.167081298828125, 2.167373779296875, 2.165665771484375, 2.16609375, 2.1672197265625, 2.16670458984375, 2.16618798828125, 2.166564208984375, 2.1669462890625, 2.16717919921875, 2.167109619140625, 2.165676025390625, 2.16823291015625, 2.166917724609375, 2.16533154296875, 2.165361572265625, 2.16673486328125, 2.167796875, 2.167021728515625, 2.16681884765625, 2.16614794921875, 2.167887939453125, 2.1662373046875, 2.168397216796875, 2.16859716796875, 2.166555908203125, 2.167212646484375, 2.168458251953125, 2.167673828125, 2.166921142578125, 2.16628759765625, 2.166884765625, 2.167589111328125, 2.166208740234375, 2.16766796875, 2.16665185546875, 2.166697021484375, 2.1662607421875, 2.16687548828125, 2.16793505859375, 2.16679248046875, 2.1663173828125, 2.1664091796875, 2.166824951171875, 2.166919189453125, 2.165501953125, 2.1651494140625, 2.16407666015625, 2.164482177734375, 2.167457763671875, 2.165520263671875, 2.166302734375, 2.165307373046875, 2.165833740234375, 2.166099609375, 2.166630859375, 2.1645966796875, 2.16576416015625, 2.16566796875, 2.165785888671875, 
2.1655, 2.165670654296875, 2.165544921875, 2.1652744140625, 2.16552783203125, 2.16616552734375, 2.16677685546875, 2.166128662109375, 2.165551025390625, 2.16499609375, 2.16361083984375, 2.167022216796875, 2.1652890625, 2.165182373046875, 2.165781494140625, 2.164755126953125, 2.1667412109375, 2.165505615234375, 2.166676025390625, 2.1669765625, 2.16542626953125, 2.166435791015625, 2.16665087890625, 2.16688037109375, 2.166635986328125, 2.165787109375, 2.166326904296875, 2.167841064453125, 2.16712109375, 2.165900146484375, 2.166140869140625, 2.165682373046875, 2.16465771484375, 2.165961181640625, 2.16589306640625, 2.167142333984375, 2.1649755859375, 2.16568212890625, 2.16656884765625, 2.167205810546875, 2.165231689453125, 2.166363525390625, 2.16429736328125, 2.16599853515625, 2.16683251953125, 2.16555908203125, 2.1658466796875, 2.16557763671875, 2.164621337890625, 2.164137451171875, 2.164543212890625, 2.164142333984375, 2.16470361328125, 2.165547119140625, 2.16657275390625, 2.1632021484375, 2.16575390625, 2.16450244140625, 2.163293212890625, 2.16456298828125, 2.164375244140625, 2.163614013671875, 2.163464111328125, 2.164674560546875, 2.164279296875, 2.16469287109375, 2.16445947265625, 2.164264892578125, 2.1646865234375, 2.16453759765625, 2.166032470703125, 2.163771240234375, 2.16451806640625, 2.163108642578125, 2.1642138671875, 2.1642236328125, 2.16492626953125, 2.165387451171875, 2.163768798828125, 2.164283203125, 2.164267822265625, 2.166134765625, 2.165104736328125, 2.1657763671875, 2.16464990234375, 2.165478515625, 2.16481005859375, 2.16541455078125, 2.165032470703125, 2.165539306640625, 2.164387939453125, 2.164507568359375, 2.166477783203125, 2.1652705078125, 2.16556298828125, 2.165947998046875, 2.165300048828125, 2.16605224609375, 2.165786865234375, 2.16467822265625, 2.16627490234375, 2.164760498046875, 2.16550927734375, 2.16473388671875, 2.164972412109375, 2.1643857421875, 2.164823486328125, 2.16431396484375, 2.165788818359375, 2.165119873046875, 2.16464794921875, 2.164505126953125, 2.163439208984375, 2.16489013671875, 2.16233544921875, 2.16322314453125, 2.16285791015625, 2.163093505859375, 2.16430126953125, 2.163749267578125, 2.164066162109375, 2.162819091796875, 2.163954833984375, 2.163837890625, 2.165333984375, 2.16505712890625, 2.162974365234375, 2.165384033203125, 2.163882080078125, 2.164766357421875, 2.16448193359375, 2.163464599609375, 2.163142578125, 2.163967041015625, 2.16417578125, 2.165642333984375, 2.163696533203125, 2.162642822265625, 2.162888671875, 2.16344580078125, 2.165002197265625, 2.164923828125, 2.16365234375, 2.16443359375, 2.163681396484375, 2.1636767578125, 2.1659365234375, 2.1631240234375, 2.1641806640625, 2.165008544921875, 2.16462744140625, 2.165252197265625, 2.165598388671875, 2.1647626953125, 2.164516845703125, 2.16500439453125, 2.164822021484375, 2.166564208984375, 2.165697021484375, 2.163859375, 2.163810302734375, 2.16506494140625, 2.164789794921875, 2.16618408203125, 2.16356201171875, 2.163326416015625, 2.16323486328125, 2.164265869140625, 2.164746337890625, 2.16414208984375, 2.163759033203125, 2.162353515625, 2.165184814453125, 2.1658955078125]",tokens/s,0.4612561805504866,, 
4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3550.654464,4495.179776,0.0,4116.709376,3980.386816,s,1,9.758755859375,9.758755859375,0.0,9.758755859375,9.758755859375,9.758755859375,9.758755859375,[9.758755859375],,kWh,8.898960568749884e-05,9.809050610969499e-06,2.695057711600015e-05,0.00012574923341446848,,MB,3549.011968,4826.529792,0.0,4418.699264,4245.89568,s,10,6.605035339355468,0.6605035339355468,0.0006230609261115123,0.6603740234375,0.6612575622558594,0.661468197631836,0.6616367059326171,"[0.6597408447265625, 0.6600413818359375, 0.6596790771484375, 0.660224853515625, 0.6602737426757812, 0.6604743041992187, 0.6611260375976562, 0.6612107543945313, 0.6616788330078125, 0.6605855102539062]",tokens/s,387.5830890330724,kWh,1.9281606503906238e-05,2.1255516435658325e-06,1.287422904937502e-05,3.428138719684709e-05,tokens/kWh,7467609.1294095805,MB,3556.94592,4837.015552,0.0,4429.185024,4245.89824,s,10,385.3308124999999,38.53308125,0.0249248607635426,38.5474375,38.55015078125,38.55081171875,38.55134046875,"[38.47213671875, 38.50425390625, 38.5239609375, 38.53500390625, 38.54686328125, 38.55147265625, 38.54991015625, 38.54801171875, 38.55000390625, 38.5491953125]",tokens/s,1.63495879271269,kWh,0.0011238995098256774,0.0001239736496154392,0.0007469280732364251,0.0019948012326775418,tokens/kWh,31582.093979076613,,s,630,385.32706719970645,0.6116302653963603,0.0005920977871824857,0.6116809997558594,0.6123485229492188,0.6124803436279297,0.6127918884277344,"[0.6100867309570313, 0.610197021484375, 0.6103533325195313, 0.6096705932617188, 0.6108343505859375, 0.6095738525390625, 0.6110203857421875, 0.6089094848632812, 0.6107077026367187, 0.610044921875, 0.609010009765625, 0.6115314331054688, 0.60957080078125, 0.6103983154296875, 0.6107708740234375, 0.610209716796875, 0.6111192016601562, 0.6099435424804688, 0.610407470703125, 0.6109091186523438, 0.6098042602539062, 0.6107578125, 0.6106380004882812, 0.6103272094726563, 0.6102745361328125, 0.6112899169921875, 0.6105045166015625, 0.610654296875, 0.6111459350585937, 0.6102752075195312, 0.610801513671875, 0.6112227783203125, 0.6105524291992187, 0.61095556640625, 0.6106604614257812, 0.611110107421875, 0.610850830078125, 0.6102777709960937, 0.6112688598632813, 0.6107890014648437, 0.610859375, 0.6106542358398438, 0.610566162109375, 0.6109616088867188, 0.6111590576171875, 0.610697998046875, 0.6113421630859375, 0.6108383178710938, 0.6102860107421875, 0.6111743774414062, 0.6108426513671875, 0.6111047973632813, 0.610873046875, 0.6104456176757812, 0.6114224243164063, 0.6110021362304687, 0.6109299926757813, 0.61133447265625, 0.61034521484375, 0.6125866088867188, 0.6106936645507812, 0.6112772827148437, 0.6109339599609375, 0.6115963134765625, 0.61043017578125, 0.6112429809570312, 0.6107515258789062, 0.61059912109375, 0.6110541381835938, 0.6098611450195313, 0.611631591796875, 0.6110841674804688, 0.610819580078125, 0.61090234375, 
0.6101548461914063, 0.6113565673828125, 0.6110004272460937, 0.6110596923828125, 0.6109173583984375, 0.6111201171875, 0.6114240112304687, 0.6107047729492188, 0.6108209228515625, 0.6112788696289062, 0.6108262329101563, 0.6119588012695313, 0.61079345703125, 0.61098388671875, 0.6112564086914063, 0.6110984497070312, 0.61134423828125, 0.6106360473632813, 0.6111801147460938, 0.6110778198242187, 0.6114282836914062, 0.6116187744140625, 0.6108777465820312, 0.6109251098632813, 0.6113710327148437, 0.61138330078125, 0.6113792114257812, 0.6106234741210937, 0.611322998046875, 0.6113616943359375, 0.61153076171875, 0.6110494995117187, 0.6110392456054687, 0.6116618041992188, 0.6112166748046876, 0.6115838012695313, 0.6112612915039063, 0.6110740356445312, 0.6114365234375, 0.611292724609375, 0.6114259643554687, 0.6112550659179687, 0.6111948852539062, 0.6115687255859374, 0.61121630859375, 0.6115549926757813, 0.6113662719726562, 0.6114887084960937, 0.6114703979492188, 0.6114799194335937, 0.6118580932617188, 0.61162548828125, 0.6115798950195312, 0.611135498046875, 0.6108765258789063, 0.6112225952148438, 0.6110739135742187, 0.611142822265625, 0.6112203369140625, 0.610957275390625, 0.6111561279296875, 0.6113258056640625, 0.611061767578125, 0.6114877319335937, 0.6107731323242187, 0.6111189575195313, 0.6112454833984375, 0.61106640625, 0.6115083618164062, 0.611123046875, 0.6112849731445312, 0.6114754638671875, 0.6112337646484375, 0.6113956298828125, 0.6112267456054687, 0.611590087890625, 0.611854248046875, 0.6115601806640625, 0.6119142456054687, 0.6109790649414063, 0.6119214477539062, 0.612299560546875, 0.611412109375, 0.6113074340820313, 0.611765869140625, 0.6118466186523438, 0.611181884765625, 0.611301513671875, 0.6116542358398438, 0.6109094848632812, 0.6116719360351562, 0.6122107543945312, 0.611000244140625, 0.611877685546875, 0.6112620239257812, 0.61207763671875, 0.6121326293945313, 0.6109374389648438, 0.6120220947265625, 0.6113857421875, 0.6120875244140624, 0.6114581909179687, 0.6120641479492187, 0.6115491943359375, 0.6118441162109375, 0.6118250732421875, 0.611924560546875, 0.6112235717773438, 0.6115874633789062, 0.6117986450195313, 0.6119608154296875, 0.611535400390625, 0.6116593017578125, 0.6118470458984375, 0.6114503173828125, 0.612738037109375, 0.6118358764648437, 0.6109839477539063, 0.6119075927734375, 0.611090576171875, 0.6118807983398438, 0.611280029296875, 0.6113063354492188, 0.6113076782226563, 0.6112477416992188, 0.6117296142578125, 0.6115123291015625, 0.6111682739257812, 0.611694580078125, 0.6109653930664063, 0.6115628662109375, 0.6117786254882812, 0.6112447509765625, 0.611982666015625, 0.611343017578125, 0.6116022338867187, 0.6114140014648437, 0.6116100463867188, 0.6115374145507813, 0.6111929931640625, 0.61177880859375, 0.6119177856445313, 0.611346435546875, 0.6115819702148437, 0.6109470825195312, 0.6124107666015625, 0.6109736938476562, 0.6119307250976562, 0.6117291870117187, 0.6115064086914063, 0.61217578125, 0.6114890747070313, 0.61170166015625, 0.6121551513671875, 0.6119916381835937, 0.6127471923828125, 0.6110349731445313, 0.6121996459960938, 0.6112449951171876, 0.6121649169921874, 0.612045166015625, 0.6112271118164062, 0.6116300048828125, 0.6117802734375, 0.6122683715820313, 0.6113378295898437, 0.6117656860351562, 0.6114736328125, 0.611730224609375, 0.6118562622070313, 0.611344482421875, 0.6121122436523437, 0.6114164428710938, 0.6118440551757812, 0.6117517700195313, 0.6120194091796874, 0.6115458374023437, 0.6125361938476562, 0.612640380859375, 0.6116910400390625, 0.6108280029296875, 
0.6117782592773438, 0.6113224487304687, 0.612083740234375, 0.6109819946289062, 0.6113761596679688, 0.6123519897460937, 0.6109807739257812, 0.6120018920898438, 0.61100634765625, 0.6117019653320312, 0.6111486206054687, 0.611885009765625, 0.6125772705078125, 0.6108948364257812, 0.6113424682617188, 0.61180810546875, 0.6117007446289062, 0.6113034057617187, 0.6120345458984375, 0.6122250366210937, 0.611577880859375, 0.6113272705078125, 0.6122667236328125, 0.6115052490234375, 0.6119617309570312, 0.6121383056640625, 0.6112713012695312, 0.6125997924804687, 0.6112788696289062, 0.612013671875, 0.6117134399414063, 0.6122352905273437, 0.6120238647460937, 0.6116519775390625, 0.6117332763671876, 0.6118280029296875, 0.6121653442382813, 0.6120798950195312, 0.6116488647460937, 0.6122659301757812, 0.6116287841796875, 0.6124349365234375, 0.6120017700195313, 0.61205908203125, 0.6118953247070312, 0.6121962280273437, 0.6119605712890624, 0.6128274536132813, 0.6122230834960938, 0.6122086181640625, 0.6117847290039062, 0.6124800415039062, 0.6116911010742188, 0.6117849731445313, 0.6125952758789063, 0.6111460571289062, 0.6127905883789062, 0.6115914306640625, 0.6121683349609375, 0.61209716796875, 0.61191796875, 0.612640625, 0.6110637817382812, 0.6115594482421876, 0.612121826171875, 0.6112201538085937, 0.6117783203125, 0.6113529663085937, 0.611819091796875, 0.6113018798828125, 0.6122822265625, 0.6118338623046875, 0.61203662109375, 0.61149951171875, 0.6117914428710938, 0.611531982421875, 0.6117403564453125, 0.6119852294921875, 0.6121980590820313, 0.6109639892578125, 0.6124329833984375, 0.6110625, 0.6117787475585937, 0.611724365234375, 0.6115828857421876, 0.612011962890625, 0.6118659057617187, 0.6120718383789062, 0.6121231689453125, 0.6124175415039063, 0.6121798095703125, 0.6114788818359375, 0.61227294921875, 0.6122002563476563, 0.6121082763671875, 0.6117191772460937, 0.6123735961914063, 0.611915771484375, 0.6119144897460937, 0.6118585815429688, 0.6122119140625, 0.6117993774414062, 0.6126126098632813, 0.6121094360351562, 0.61227099609375, 0.6115380249023438, 0.6126572875976563, 0.6113551025390624, 0.61220458984375, 0.6124115600585938, 0.6120941162109375, 0.6118339233398438, 0.6122511596679687, 0.6117440795898438, 0.6119541015625, 0.6115374145507813, 0.6120458984375, 0.6121747436523437, 0.612713623046875, 0.6117813110351562, 0.6124095458984375, 0.6112662963867187, 0.61240771484375, 0.6112577514648437, 0.6117216796875, 0.61115185546875, 0.6116004028320312, 0.6115552978515625, 0.6116796875, 0.6114078979492188, 0.612432373046875, 0.6116531372070313, 0.6120514526367188, 0.612039794921875, 0.611510498046875, 0.6129464111328125, 0.6108401489257812, 0.612366943359375, 0.611108642578125, 0.612112548828125, 0.6117539672851563, 0.6120386352539062, 0.6122014770507812, 0.610892822265625, 0.6124089965820313, 0.6112544555664062, 0.612296875, 0.6117885131835937, 0.6124813232421875, 0.6114488525390624, 0.6121001586914062, 0.6117293701171875, 0.6118292846679687, 0.6114818725585938, 0.6121261596679688, 0.6116583862304688, 0.611817626953125, 0.6119771728515625, 0.6119376220703125, 0.6120693359375, 0.6117487182617187, 0.6118214111328125, 0.6121790771484374, 0.6123486328125, 0.6122989501953126, 0.6123724975585938, 0.6121533203125, 0.6122147827148438, 0.6122823486328125, 0.6118863525390625, 0.6121823120117188, 0.6119112548828125, 0.6119692993164062, 0.6123930053710938, 0.6113568115234375, 0.6123175659179687, 0.6118001098632813, 0.6119844970703125, 0.6119932250976563, 0.6120860595703125, 0.6121143188476562, 0.6115952758789063, 0.6121318359375, 
0.6116773681640625, 0.6123018188476562, 0.6116881103515625, 0.6114631958007812, 0.6121328735351562, 0.6111272583007813, 0.6119588623046875, 0.61170263671875, 0.6119564208984375, 0.6112684326171876, 0.6118877563476562, 0.6115038452148438, 0.6114746704101562, 0.6119205932617188, 0.6120281372070312, 0.6116987915039063, 0.6114064331054687, 0.6124113159179687, 0.6112781372070313, 0.6120066528320313, 0.611626953125, 0.6123621215820313, 0.61093798828125, 0.6119782104492187, 0.6115983276367187, 0.6121513061523437, 0.61161474609375, 0.6117186889648437, 0.6113179321289063, 0.611909912109375, 0.6120181884765625, 0.6120342407226562, 0.6114204711914063, 0.612114013671875, 0.6120016479492187, 0.612050537109375, 0.6117672729492187, 0.6117969360351563, 0.61220361328125, 0.6111314086914063, 0.6126417846679687, 0.611280029296875, 0.6126109008789062, 0.6116351318359375, 0.6120182495117188, 0.6121041259765625, 0.6115387573242187, 0.6120286254882813, 0.6117396240234375, 0.6120509643554688, 0.61136279296875, 0.6125336303710938, 0.61175439453125, 0.6116763305664062, 0.6122394409179688, 0.61179443359375, 0.6122379760742187, 0.6120364379882812, 0.6118072509765625, 0.6126284790039063, 0.6119752807617187, 0.6128693237304688, 0.6118898315429687, 0.6119874267578125, 0.6124451904296875, 0.6117816162109375, 0.6116331176757812, 0.6122645874023438, 0.6111027221679688, 0.6122823486328125, 0.61180419921875, 0.6120498657226563, 0.6111559448242188, 0.611862548828125, 0.6112052001953125, 0.6119134521484375, 0.61221826171875, 0.6110249633789062, 0.6118604736328125, 0.6112611694335938, 0.6116834106445312, 0.6120723876953125, 0.6109813842773437, 0.6128661499023438, 0.6112119140625, 0.612406494140625, 0.6119605712890624, 0.6113814086914062, 0.6115191040039063, 0.6121307983398437, 0.6115082397460937, 0.6124724731445312, 0.611697021484375, 0.6117130126953125, 0.6124393310546875, 0.6120467529296875, 0.6117588500976563, 0.6121447143554688, 0.6123012084960937, 0.611454345703125, 0.6122400512695313, 0.6113724365234375, 0.6123485107421875, 0.6113218383789063, 0.6121974487304688, 0.6117467041015625, 0.612235107421875, 0.61198291015625, 0.6116659545898437, 0.6121889038085937, 0.6113687744140625, 0.612431884765625, 0.6117457885742188, 0.612025634765625, 0.612070068359375, 0.6119446411132813, 0.61252490234375, 0.6114900512695313, 0.612707275390625, 0.6114993896484375, 0.6124589233398438, 0.6123704223632812, 0.612073486328125, 0.6117560424804688, 0.6119668579101563, 0.6117354736328126, 0.6124805908203125, 0.61143701171875, 0.6127924194335937, 0.6124965209960938, 0.6111691284179688, 0.61187890625, 0.6114877319335937, 0.6118578491210938, 0.6114903564453125, 0.61153076171875, 0.6115978393554687, 0.61136962890625, 0.6120692138671875, 0.6116220092773438, 0.6122158813476563, 0.6112848510742187, 0.6118174438476562, 0.6116864013671875, 0.6121880493164062, 0.611337646484375, 0.6113819580078125, 0.6126708374023437, 0.6112508544921875, 0.6123948974609374, 0.61131787109375, 0.6128312377929688, 0.6115405883789062, 0.6118854370117187, 0.6120525512695313, 0.6114081420898437, 0.6123910522460938, 0.6116823120117187, 0.6123521118164063, 0.6113831787109375, 0.6121242065429687, 0.611533447265625, 0.6117701416015625, 0.6116065063476562, 0.6117473754882813, 0.6116456909179687, 0.6114653930664062, 0.61201611328125, 0.6121922607421875, 0.6118807373046875, 0.612284912109375, 0.6114402465820312, 0.612060791015625, 0.6114349975585938, 0.6126071166992187, 0.6115418701171875, 0.61243798828125, 0.6115164184570312, 0.61290673828125, 0.611835693359375, 0.61220703125, 
0.6122926025390625, 0.6111846313476562, 0.6127697143554688, 0.6120305786132813, 0.6122406616210937, 0.6120680541992187, 0.6117254638671875, 0.6124521484375, 0.6115799560546875, 0.6124517822265625, 0.6120985717773437]",tokens/s,1.6349746841778028,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,837.746688,556.72832,0.0,178.25792,176.52224,s,1,7.51967626953125,7.51967626953125,0.0,7.51967626953125,7.51967626953125,7.51967626953125,7.51967626953125,[7.51967626953125],,kWh,1.982594503750761e-05,2.179734761547903e-06,5.404726546004435e-06,2.741040634505995e-05,,MB,1194.688512,669.974528,0.0,262.144,221.118976,s,10,0.23773827171325684,0.023773827171325684,0.00024283595479606624,0.02371673583984375,0.024118610572814943,0.02418594560623169,0.02423981363296509,"[0.02391481590270996, 0.023476768493652343, 0.024103647232055665, 0.023773056030273437, 0.02366041564941406, 0.024253280639648437, 0.023616479873657226, 0.023656160354614257, 0.02347360038757324, 0.023810047149658203]",tokens/s,10768.1442350506,kWh,6.932403663103739e-07,7.645196220950019e-08,4.059873926176255e-07,1.1756797211374995e-06,tokens/kWh,217746377.17856833,MB,1228.357632,684.654592,0.0,276.824064,221.271552,s,10,13.6162353515625,1.36162353515625,0.004976171477569547,1.3627197875976562,1.3672108764648436,1.369263397216797,1.3709054138183594,"[1.362764404296875, 1.36269970703125, 1.37131591796875, 1.3627398681640626, 1.36317236328125, 1.3562159423828124, 1.355752197265625, 1.3607286376953125, 1.354091552734375, 1.3667547607421875]",tokens/s,46.2682954380416,kWh,3.9575149517440624e-05,4.3647022323524135e-06,2.0418029266581237e-05,6.435788101637427e-05,tokens/kWh,978901.0919108914,,s,630,13.611357091903685,0.021605328717307436,0.0004793603854581813,0.02150827217102051,0.021850230407714846,0.022027892112731935,0.024121036033630375,"[0.02136659240722656, 0.0215164794921875, 0.021698240280151368, 0.021655712127685547, 0.02155523109436035, 0.02168614387512207, 0.02161235237121582, 0.021825983047485353, 0.021884639739990233, 0.02169878387451172, 0.02162518310546875, 0.021489376068115233, 0.021884767532348633, 0.021545440673828124, 0.021544639587402343, 0.021530336380004882, 0.021551551818847655, 0.021600095748901368, 0.021439872741699218, 0.021392000198364257, 0.02143833541870117, 0.021472639083862304, 0.024135967254638672, 0.02312588882446289, 0.021810911178588868, 0.02173776054382324, 0.021477535247802736, 0.02143440055847168, 0.02140985679626465, 0.02182588768005371, 0.021575679779052736, 0.02159187126159668, 0.02151030349731445, 0.021499935150146483, 0.021395423889160155, 0.02136476707458496, 0.021339935302734377, 0.021369056701660158, 0.021441631317138672, 0.02128700828552246, 0.021468032836914064, 0.021498016357421875, 0.021790655136108398, 0.021624223709106445, 0.02154902458190918, 0.021496288299560545, 0.021591232299804686, 0.021506591796875, 0.021518943786621093, 
0.021501056671142577, 0.02148726463317871, 0.021643680572509767, 0.02153558349609375, 0.021646175384521484, 0.021754688262939453, 0.02164735984802246, 0.021940448760986327, 0.021501056671142577, 0.021501663208007813, 0.021425088882446288, 0.021379072189331053, 0.021536991119384764, 0.021421056747436523, 0.02101702308654785, 0.021399999618530275, 0.021557247161865235, 0.021405696868896484, 0.021420320510864257, 0.02151340866088867, 0.02134003257751465, 0.021434240341186524, 0.021394208908081056, 0.02137276840209961, 0.02144476890563965, 0.021656831741333007, 0.02145254325866699, 0.021547552108764648, 0.021483327865600584, 0.021494495391845704, 0.02148086357116699, 0.021369279861450194, 0.021417760848999025, 0.02136300849914551, 0.021350400924682617, 0.021356800079345702, 0.021284704208374024, 0.021188512802124023, 0.021666944503784178, 0.02145552062988281, 0.021422367095947265, 0.021528703689575195, 0.021296384811401368, 0.021418432235717773, 0.02142972755432129, 0.021324447631835938, 0.021401952743530274, 0.021565088272094725, 0.02145020866394043, 0.0215283203125, 0.02165830421447754, 0.021699871063232422, 0.021638336181640624, 0.021610111236572267, 0.02158835220336914, 0.021537471771240234, 0.02157459259033203, 0.021479328155517577, 0.02157708740234375, 0.021626976013183592, 0.021566240310668946, 0.021578655242919922, 0.021658559799194337, 0.02170579147338867, 0.021830528259277344, 0.021774368286132814, 0.022078975677490235, 0.021792512893676758, 0.02187104034423828, 0.021737823486328123, 0.02186240005493164, 0.021937952041625977, 0.021948991775512697, 0.02188047981262207, 0.022009855270385743, 0.024481439590454103, 0.023181695938110352, 0.02150982475280762, 0.02202150344848633, 0.022033119201660158, 0.021868223190307616, 0.02185004806518555, 0.021687904357910157, 0.021854944229125976, 0.022159263610839842, 0.022081920623779297, 0.02169068717956543, 0.02184694480895996, 0.021613311767578126, 0.021553184509277342, 0.02147737693786621, 0.021483776092529296, 0.021738815307617187, 0.02158140754699707, 0.021500736236572265, 0.02176527976989746, 0.02169862365722656, 0.026327936172485352, 0.02262182426452637, 0.021944063186645508, 0.021870687484741212, 0.0217607364654541, 0.021548992156982423, 0.02157548713684082, 0.02147123146057129, 0.02151628875732422, 0.021580127716064452, 0.021536415100097656, 0.02148476791381836, 0.021438880920410155, 0.021467519760131837, 0.021615999221801758, 0.021488256454467773, 0.021593151092529298, 0.021485759735107423, 0.021507135391235353, 0.02160310363769531, 0.02150275230407715, 0.021628480911254883, 0.021714656829833985, 0.02171913528442383, 0.021863487243652342, 0.021599264144897462, 0.021681983947753905, 0.02163603210449219, 0.021661600112915038, 0.021764095306396485, 0.0217391357421875, 0.021729375839233397, 0.02162076759338379, 0.021944000244140626, 0.021716575622558593, 0.021654367446899414, 0.02160985565185547, 0.021559680938720703, 0.02159654426574707, 0.021668895721435547, 0.021605375289916993, 0.021594432830810546, 0.021518335342407227, 0.02150399971008301, 0.021709152221679687, 0.02188902473449707, 0.0218656005859375, 0.021591936111450195, 0.02156835174560547, 0.021468416213989257, 0.021535680770874022, 0.02148054313659668, 0.021408639907836913, 0.02152448081970215, 0.02137843132019043, 0.021545600891113282, 0.021481472015380858, 0.021436447143554686, 0.021485536575317384, 0.02149344062805176, 0.02163539123535156, 0.021420383453369142, 0.021362335205078124, 0.02138047981262207, 0.021594751358032228, 0.02137660789489746, 0.021499391555786132, 
0.02147011184692383, 0.022449600219726563, 0.02164588737487793, 0.02155958366394043, 0.02158393669128418, 0.02158729553222656, 0.021549152374267577, 0.021548671722412108, 0.02177699279785156, 0.021866207122802735, 0.021884191513061525, 0.02242252731323242, 0.021835519790649415, 0.022227264404296874, 0.02184185600280762, 0.021837215423583984, 0.021719776153564452, 0.02168409538269043, 0.021589824676513672, 0.021589599609375, 0.02159881591796875, 0.02153494453430176, 0.02167532730102539, 0.021672351837158203, 0.02173139190673828, 0.021700159072875976, 0.021606847763061522, 0.021892927169799806, 0.021809152603149414, 0.021710304260253905, 0.021492576599121092, 0.021417856216430664, 0.021518335342407227, 0.021376224517822267, 0.021529151916503907, 0.02148111915588379, 0.021357120513916014, 0.02145631980895996, 0.02141417694091797, 0.021029344558715822, 0.021476415634155272, 0.021285823822021484, 0.02128281593322754, 0.021509632110595703, 0.021264352798461915, 0.02138175964355469, 0.02148566436767578, 0.021392351150512696, 0.02136150360107422, 0.021458879470825195, 0.021374912261962892, 0.021343360900878905, 0.02145417594909668, 0.021397151947021485, 0.021501951217651367, 0.02157535934448242, 0.021520544052124023, 0.02142838478088379, 0.02166988754272461, 0.021341663360595703, 0.021469728469848633, 0.021317632675170898, 0.021250272750854494, 0.021408544540405274, 0.021305856704711915, 0.02129929542541504, 0.021445024490356446, 0.021794815063476563, 0.0216494083404541, 0.021599552154541016, 0.021498559951782226, 0.024669408798217773, 0.027470624923706055, 0.021816768646240235, 0.021571647644042968, 0.021488128662109376, 0.021507999420166016, 0.021315744400024414, 0.021516223907470704, 0.021454240798950194, 0.021448448181152345, 0.0213832950592041, 0.02142313575744629, 0.02150592041015625, 0.021527679443359374, 0.021436927795410156, 0.021508544921875, 0.02133286476135254, 0.021478271484375, 0.021581823348999024, 0.021728256225585937, 0.021507200241088868, 0.021416896820068358, 0.021629823684692382, 0.022895904541015626, 0.021436256408691408, 0.0215031681060791, 0.02138083267211914, 0.021462944030761717, 0.0218789119720459, 0.021342496871948242, 0.021495231628417967, 0.021202688217163087, 0.021759647369384766, 0.021487871170043946, 0.021495168685913085, 0.022068191528320312, 0.021354496002197267, 0.021526239395141603, 0.021440799713134766, 0.02154195213317871, 0.021497119903564454, 0.021533632278442384, 0.02234822463989258, 0.021639455795288087, 0.02178691291809082, 0.02157334327697754, 0.021510143280029297, 0.021716991424560548, 0.021518239974975584, 0.021790271759033204, 0.02147724723815918, 0.021378944396972657, 0.02156982421875, 0.02136729621887207, 0.021319583892822267, 0.021343360900878905, 0.02132476806640625, 0.021348352432250976, 0.02162713623046875, 0.02153011131286621, 0.02192745590209961, 0.022114368438720704, 0.02157814407348633, 0.021609920501708985, 0.021959327697753907, 0.02147164726257324, 0.021321216583251954, 0.021495071411132813, 0.021558528900146486, 0.021425888061523436, 0.02142348861694336, 0.02158451271057129, 0.021415775299072265, 0.021557407379150392, 0.0214052791595459, 0.021579296112060546, 0.021452959060668946, 0.02134294319152832, 0.02145894432067871, 0.02146918487548828, 0.02140166473388672, 0.02139743995666504, 0.02135206413269043, 0.021328256607055663, 0.02135990333557129, 0.021315296173095702, 0.021390335083007812, 0.02136649513244629, 0.021463264465332033, 0.021280096054077147, 0.02136899185180664, 0.021541439056396484, 0.021438144683837892, 0.021502016067504882, 
0.021125503540039062, 0.021542911529541017, 0.02150716781616211, 0.021465856552124022, 0.021705984115600586, 0.02132467269897461, 0.021297536849975585, 0.021431615829467773, 0.021381471633911135, 0.021415327072143556, 0.021660255432128905, 0.021360639572143555, 0.023858240127563476, 0.02167683219909668, 0.021663904190063477, 0.021458623886108398, 0.021592639923095704, 0.021266176223754884, 0.021278976440429687, 0.021657344818115234, 0.02128691291809082, 0.021370880126953123, 0.0214932804107666, 0.021445087432861328, 0.02157382392883301, 0.02152364730834961, 0.021480064392089843, 0.021593791961669922, 0.021950784683227538, 0.021729280471801758, 0.021604352951049805, 0.021302911758422853, 0.021516063690185546, 0.02138332748413086, 0.021313472747802733, 0.021348543167114258, 0.021307712554931642, 0.021313472747802733, 0.021487327575683595, 0.02147158432006836, 0.021497856140136717, 0.021367040634155274, 0.0216428165435791, 0.021883327484130858, 0.02157360076904297, 0.021518112182617188, 0.0214866886138916, 0.021397600173950194, 0.021535615921020507, 0.021375200271606446, 0.0216343994140625, 0.021422464370727538, 0.02157155227661133, 0.021391519546508787, 0.02129724884033203, 0.021313119888305664, 0.021440704345703124, 0.02131065559387207, 0.02140243148803711, 0.021535072326660156, 0.021449472427368162, 0.021588895797729494, 0.021469024658203124, 0.02101139259338379, 0.02142790412902832, 0.021323455810546874, 0.021323360443115235, 0.021256927490234376, 0.02135590362548828, 0.02130803108215332, 0.021661088943481444, 0.02154966354370117, 0.021672191619873046, 0.022133663177490236, 0.021733631134033204, 0.021709535598754885, 0.021573087692260743, 0.021472799301147462, 0.021500543594360353, 0.021590272903442384, 0.021389312744140625, 0.021393312454223632, 0.021434783935546875, 0.02144611167907715, 0.021376480102539064, 0.02150271987915039, 0.02147929573059082, 0.021456192016601563, 0.02157859230041504, 0.021972959518432617, 0.02150399971008301, 0.021497856140136717, 0.02151628875732422, 0.021491519927978514, 0.021552608489990233, 0.02160099220275879, 0.0216944637298584, 0.02154521560668945, 0.02156723213195801, 0.02408448028564453, 0.022454208374023437, 0.021556480407714844, 0.021659711837768554, 0.02144767951965332, 0.02155084800720215, 0.021359615325927735, 0.021895647048950195, 0.021415775299072265, 0.02160268783569336, 0.021319999694824218, 0.021606559753417968, 0.021476831436157227, 0.02144732856750488, 0.021390047073364258, 0.021418495178222655, 0.02194175910949707, 0.021502880096435546, 0.02160643196105957, 0.021497983932495118, 0.02175993537902832, 0.021633056640625, 0.021353471755981446, 0.022502368927001953, 0.021356544494628905, 0.021319679260253906, 0.02150297546386719, 0.020975616455078124, 0.021389312744140625, 0.021456895828247072, 0.02142646408081055, 0.021393375396728517, 0.021432096481323243, 0.02130940818786621, 0.021851871490478517, 0.021533088684082033, 0.021405088424682618, 0.021502111434936525, 0.021416255950927734, 0.021691936492919922, 0.021596160888671875, 0.02161097526550293, 0.021569440841674805, 0.021600351333618165, 0.021448703765869142, 0.021496095657348634, 0.02140105628967285, 0.021399808883666993, 0.021622783660888673, 0.02129532814025879, 0.02148534393310547, 0.021506240844726562, 0.021433759689331054, 0.021508512496948243, 0.021340063095092773, 0.02126857566833496, 0.02141900825500488, 0.02148044776916504, 0.021336063385009766, 0.021426368713378906, 0.02143008041381836, 0.021508031845092774, 0.021411455154418946, 0.021389759063720704, 0.021316736221313477, 
0.02183772850036621, 0.02150499153137207, 0.02183286476135254, 0.021707136154174803, 0.02158665657043457, 0.021503744125366212, 0.021475008010864258, 0.021441919326782227, 0.021293760299682617, 0.021324031829833983, 0.021388736724853516, 0.02142880058288574, 0.021581024169921876, 0.02160310363769531, 0.021423360824584962, 0.021594175338745115, 0.021502784729003906, 0.021476703643798827, 0.021461536407470703, 0.021444480895996095, 0.021692768096923828, 0.02169219207763672, 0.021501056671142577, 0.02172198486328125, 0.02150320053100586, 0.0217126407623291, 0.021554624557495117, 0.02150662422180176, 0.02162124824523926, 0.02140310478210449, 0.021394880294799804, 0.02128700828552246, 0.02142220878601074, 0.021569856643676756, 0.021457216262817384, 0.021395807266235353, 0.02435158348083496, 0.02265769577026367, 0.02180009651184082, 0.021558368682861328, 0.02162063980102539, 0.02207539176940918, 0.02153696060180664, 0.021947391510009767, 0.021596128463745118, 0.021631103515625, 0.022167583465576172, 0.02152230453491211, 0.02148249626159668, 0.021597343444824217, 0.021512128829956054, 0.02139155197143555, 0.021371423721313478, 0.02148761558532715, 0.02141209602355957, 0.021384960174560548, 0.021474720001220703, 0.021643871307373046, 0.021642751693725586, 0.02240153694152832, 0.024633472442626952, 0.02183359909057617, 0.02170787239074707, 0.021553056716918945, 0.021473344802856446, 0.02142300796508789, 0.02148464012145996, 0.021558399200439452, 0.022250335693359376, 0.021557600021362304, 0.02152822494506836, 0.021601408004760743, 0.021382335662841798, 0.021621376037597655, 0.021519968032836914, 0.021360960006713867, 0.02138912010192871, 0.021373247146606444, 0.0214835205078125, 0.021461183547973633, 0.021361536026000976, 0.02125257682800293, 0.021729759216308594, 0.022515615463256835, 0.021492000579833984, 0.021348159790039064, 0.0212739200592041, 0.021522495269775392]",tokens/s,46.28487782270711,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2871, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert return t.to( torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1013, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 839, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 199, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in 
launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,4385.583104,4566.482944,0.0,4188.012544,4187.049984,s,1,10.2750205078125,10.2750205078125,0.0,10.2750205078125,10.2750205078125,10.2750205078125,10.2750205078125,[10.2750205078125],,kWh,9.746523692499522e-05,1.0743721955271504e-05,3.126280278799992e-05,0.00013947176166826666,,MB,4391.743488,4962.844672,0.0,4555.014144,4514.269184,s,10,7.850749145507812,0.7850749145507813,0.0029818719380852246,0.7838614196777344,0.789292431640625,0.7898505920410157,0.7902971203613282,"[0.7818594360351563, 0.7877095947265625, 0.7823174438476562, 0.7825433959960938, 0.7844056396484375, 0.7904087524414063, 0.7833171997070313, 0.7865393676757813, 0.7824799194335937, 0.7891683959960938]",tokens/s,326.08353069908344,kWh,2.281054614999978e-05,2.5155927895983764e-06,1.517390102799981e-05,4.0500039967597965e-05,tokens/kWh,6320981.4164334815,MB,4399.607808,4979.621888,0.0,4571.79136,4514.271744,s,10,466.8263046875,46.68263046875,0.015457591777333574,46.686087890625004,46.693671093750005,46.69438828125,46.69496203125,"[46.64066015625, 46.670609375, 46.685453125, 46.69510546875, 46.69034375, 46.69351171875, 46.68563671875, 46.69313671875, 46.6865390625, 46.68530859375]",tokens/s,1.3495383479337797,kWh,0.0013616506224445842,0.00015019915050282654,0.0009056978078910003,0.0024175475808384107,tokens/kWh,26059.466419333705,,s,630,466.81600244140645,0.7409777816530257,0.0005184305986670322,0.7410007019042969,0.7414539123535155,0.7416112060546874,0.7419606964111328,"[0.7400110473632813, 0.7402894897460938, 0.7400440063476562, 0.7403200073242188, 0.7396430053710937, 0.7401265869140625, 0.7395269165039062, 0.7397890625, 0.7398823852539063, 0.73982421875, 0.740220947265625, 0.7401492309570312, 0.7397346801757813, 0.7396583862304688, 0.7403153076171874, 0.7400753784179688, 0.7398667602539063, 0.7393792114257812, 0.7404309692382812, 0.74035400390625, 
0.7401881103515625, 0.7396765747070313, 0.7399342041015625, 0.7405410766601562, 0.7398911743164063, 0.7407472534179688, 0.7407882080078125, 0.7400386352539062, 0.73987890625, 0.7409293212890625, 0.7406143798828125, 0.7407343139648438, 0.7397560424804688, 0.7399384155273437, 0.740581298828125, 0.7402973022460938, 0.740220947265625, 0.740360107421875, 0.7411057739257813, 0.740449462890625, 0.7406700439453126, 0.7400977172851563, 0.7408890991210938, 0.740763671875, 0.7403888549804688, 0.7405721435546875, 0.7406007080078125, 0.7408599853515625, 0.740433349609375, 0.7407367553710937, 0.7409295654296875, 0.7406866455078125, 0.7404031982421875, 0.7403448486328125, 0.7409674072265625, 0.7407156982421875, 0.7408065795898438, 0.740592529296875, 0.7401984252929688, 0.7403945922851562, 0.7403984985351563, 0.741126953125, 0.7402785034179687, 0.7403516235351563, 0.7404179077148437, 0.7406781005859375, 0.7401555786132813, 0.7410413208007812, 0.74025439453125, 0.7404482421875, 0.7400786743164063, 0.74094482421875, 0.7405525512695312, 0.7404586791992187, 0.7402434692382812, 0.7404423217773437, 0.74058935546875, 0.740398681640625, 0.7412412719726562, 0.7406448364257813, 0.7404994506835938, 0.7403804931640625, 0.7406102294921875, 0.7411128540039063, 0.7417538452148438, 0.7404564819335937, 0.7408516845703125, 0.7410953979492187, 0.740627685546875, 0.7406206665039062, 0.740828857421875, 0.741005859375, 0.7405958862304688, 0.7401922607421875, 0.7412469482421875, 0.74111181640625, 0.7408843994140625, 0.7404359741210937, 0.7411138916015625, 0.7408823852539063, 0.7407124633789063, 0.74094384765625, 0.7413555297851563, 0.7404906616210938, 0.7410133666992188, 0.7409793701171875, 0.740929443359375, 0.7408640747070312, 0.7405813598632812, 0.7402782592773437, 0.7413414916992187, 0.7409857788085937, 0.7406785888671875, 0.7406057739257812, 0.7411691284179688, 0.7411315307617188, 0.74105859375, 0.7411226806640625, 0.7413324584960937, 0.7406558227539063, 0.740982666015625, 0.74103369140625, 0.7415252685546875, 0.7407310180664063, 0.7412965698242188, 0.7410989990234375, 0.740869873046875, 0.740468994140625, 0.7411036376953125, 0.740706298828125, 0.7408719482421875, 0.740638916015625, 0.7408571166992187, 0.7408074340820312, 0.7406796875, 0.7405250854492188, 0.7408786010742188, 0.741176025390625, 0.740675048828125, 0.7408501586914062, 0.74096435546875, 0.741158935546875, 0.741158935546875, 0.7413923950195312, 0.740759521484375, 0.7410171508789063, 0.7406097412109375, 0.7411217651367188, 0.7407523803710937, 0.7410889892578125, 0.7411448974609375, 0.740943359375, 0.7407252197265625, 0.7412610473632812, 0.7411159057617187, 0.7411837768554688, 0.7409470825195312, 0.7410205688476562, 0.7412899780273438, 0.740505615234375, 0.7411202392578125, 0.7413648071289063, 0.7410611572265625, 0.7406178588867187, 0.7403484497070313, 0.7415132446289062, 0.7412777099609374, 0.7411558837890625, 0.7409234619140626, 0.7417967529296875, 0.7408416748046875, 0.7407122802734375, 0.7415451049804688, 0.74143994140625, 0.7403668823242188, 0.7410130004882812, 0.7412227783203125, 0.7412770385742188, 0.7410735473632812, 0.7407284545898437, 0.7417527465820313, 0.7415567626953125, 0.7406766967773437, 0.7416492309570313, 0.7416729736328125, 0.7409459228515625, 0.7413637084960938, 0.7415271606445313, 0.7411019287109375, 0.7406522827148437, 0.7408670654296875, 0.7413903198242188, 0.7409971313476562, 0.7406673583984374, 0.7407609252929688, 0.7414934692382813, 0.7408578491210938, 0.7410231323242188, 0.7411513671875, 0.7412428588867187, 0.74096435546875, 
0.7406705322265625, 0.7410676879882813, 0.7414188232421876, 0.741134521484375, 0.74067333984375, 0.740796630859375, 0.7407656860351562, 0.7411671142578125, 0.7410687255859375, 0.740404541015625, 0.7409191284179687, 0.7406130981445312, 0.7412982788085938, 0.7409271240234375, 0.7409646606445313, 0.7411522827148438, 0.7412577514648437, 0.7407513427734375, 0.7421317138671875, 0.7406898193359375, 0.741083251953125, 0.741148681640625, 0.7414967041015625, 0.741117919921875, 0.741251220703125, 0.7408323364257813, 0.7416058349609375, 0.7406943969726563, 0.7411171264648437, 0.7410963134765625, 0.74192236328125, 0.74168505859375, 0.7406979370117187, 0.7414682006835938, 0.7410567016601562, 0.7410836181640625, 0.7407222290039063, 0.7419583129882813, 0.7410271606445312, 0.7411206665039063, 0.74096435546875, 0.7410503540039063, 0.7414824829101563, 0.7414549560546875, 0.7413988037109375, 0.7414192504882813, 0.741542236328125, 0.7408271484375, 0.7461724243164063, 0.7407713623046875, 0.741369873046875, 0.741060791015625, 0.7408067016601563, 0.741339599609375, 0.7412830810546875, 0.7406840209960938, 0.740358642578125, 0.7411712036132813, 0.741201904296875, 0.7406981201171875, 0.7412203369140625, 0.7410372924804688, 0.7412518920898438, 0.74081689453125, 0.74126953125, 0.740822021484375, 0.7409920043945313, 0.7408836669921876, 0.741301025390625, 0.7412136840820313, 0.7412188110351563, 0.7408919067382812, 0.7412188720703125, 0.7414063720703125, 0.7408522338867187, 0.741738525390625, 0.7410501098632812, 0.7414287719726562, 0.7408514404296875, 0.7411904907226563, 0.7410075073242187, 0.7409111328125, 0.7416375732421875, 0.741001708984375, 0.7411466674804688, 0.7413488159179688, 0.7409137573242187, 0.7413770751953125, 0.7412225952148438, 0.7407398071289063, 0.7414620361328125, 0.7413800659179688, 0.7403970336914063, 0.7408448486328125, 0.7415630493164063, 0.7409124755859375, 0.7413480224609375, 0.7406469116210938, 0.741533203125, 0.7410672607421875, 0.7411220703125, 0.7410269165039063, 0.7413400268554687, 0.7415316772460937, 0.740600830078125, 0.74132373046875, 0.7415449829101562, 0.7412333984375, 0.740911376953125, 0.7413792114257812, 0.7409547119140625, 0.7410911254882813, 0.7410734252929687, 0.741003173828125, 0.74100146484375, 0.7405606689453125, 0.7409389038085937, 0.7408321533203125, 0.741001220703125, 0.7408599243164062, 0.7409613647460938, 0.7415075073242188, 0.7406945190429688, 0.7410227661132812, 0.7412909545898437, 0.7417645263671875, 0.7411779174804688, 0.7408681030273437, 0.7413229370117187, 0.7407265625, 0.7409848022460938, 0.7410198364257813, 0.7413305053710938, 0.7408888549804687, 0.7409436645507812, 0.7412243041992187, 0.741204345703125, 0.7410892944335937, 0.7410191650390625, 0.7411611938476562, 0.74098095703125, 0.7409533081054688, 0.7411494750976563, 0.7408394165039063, 0.7410585327148438, 0.7416481323242188, 0.7412449951171876, 0.740628662109375, 0.74061962890625, 0.7409568481445312, 0.7417200927734375, 0.7409229736328125, 0.7412781372070313, 0.7406406860351562, 0.7419451293945313, 0.7410582275390625, 0.7408953247070312, 0.7413330078125, 0.7406510009765624, 0.7417876586914063, 0.7407821044921875, 0.741222412109375, 0.7412777099609374, 0.7410585327148438, 0.741105712890625, 0.7408578491210938, 0.740893798828125, 0.7417701416015625, 0.7412296752929688, 0.740856689453125, 0.7409622802734375, 0.74039501953125, 0.741365234375, 0.7414215698242187, 0.7406849365234375, 0.7411270141601562, 0.7410870361328125, 0.7406585083007813, 0.7405780029296875, 0.74112939453125, 0.7410717163085937, 
0.7406264038085938, 0.7405916137695312, 0.7411134033203125, 0.740692626953125, 0.74117236328125, 0.7411555786132813, 0.7407922973632812, 0.74039501953125, 0.7411712036132813, 0.7411712036132813, 0.7413074951171875, 0.7409837036132813, 0.7411322631835937, 0.7407247314453125, 0.7409061889648437, 0.741374755859375, 0.7408959350585937, 0.7425195922851563, 0.740439453125, 0.7409522094726563, 0.7411123046875, 0.7409574584960937, 0.741372802734375, 0.7410932006835937, 0.7408209838867188, 0.7407821044921875, 0.7407656860351562, 0.74102685546875, 0.7413851928710937, 0.7412572021484375, 0.741064697265625, 0.741011474609375, 0.7408129272460937, 0.7410192260742188, 0.74131640625, 0.7405298461914063, 0.7413767700195313, 0.74063671875, 0.7410964965820312, 0.7412127075195313, 0.7412821655273437, 0.7412142333984375, 0.7414312744140625, 0.7404844360351562, 0.7407822265625, 0.7410919189453125, 0.7412132568359375, 0.7407432250976562, 0.741016357421875, 0.7410842895507812, 0.741087646484375, 0.7406998291015625, 0.7413154907226562, 0.7415840454101562, 0.7410225830078125, 0.7411015625, 0.7414537963867187, 0.740874267578125, 0.7414203491210938, 0.7403264770507813, 0.7410634155273438, 0.7409185180664063, 0.7408353271484375, 0.7404669189453125, 0.7404203491210938, 0.7403519897460937, 0.7420149536132813, 0.7408612670898438, 0.7405206298828125, 0.7407544555664063, 0.7414341430664062, 0.7410803833007813, 0.7410902099609376, 0.7408519287109375, 0.7409415893554687, 0.7404564208984376, 0.7404268188476563, 0.740907958984375, 0.741411865234375, 0.7406228637695312, 0.741372314453125, 0.7408510131835937, 0.7408688354492188, 0.741296142578125, 0.7404994506835938, 0.74149267578125, 0.74127978515625, 0.7410421752929688, 0.7401585693359375, 0.7415073852539062, 0.7416324462890626, 0.7409112548828125, 0.7406940307617188, 0.7410747680664063, 0.7413629760742187, 0.7412233276367187, 0.74068994140625, 0.7412100830078125, 0.7406735229492187, 0.7413104858398437, 0.7413859252929688, 0.7409503173828125, 0.7408230590820313, 0.7416156005859375, 0.741074951171875, 0.7407103881835938, 0.741961669921875, 0.7415742797851562, 0.7415607299804687, 0.7408815307617187, 0.741004150390625, 0.7419085083007813, 0.7408836059570313, 0.7411778564453125, 0.7420112915039062, 0.7413792114257812, 0.7412273559570313, 0.7409903564453125, 0.7410182495117188, 0.74169140625, 0.746708984375, 0.7411466064453125, 0.741240478515625, 0.7410732421875, 0.7407437744140625, 0.740569091796875, 0.7410682983398438, 0.7411778564453125, 0.7406277465820312, 0.7407330322265625, 0.7405321044921875, 0.741503662109375, 0.7409111328125, 0.7410657348632812, 0.74077490234375, 0.7412572021484375, 0.740464599609375, 0.7408700561523438, 0.7406626586914062, 0.7411368408203125, 0.741069091796875, 0.74134326171875, 0.7410131225585938, 0.7410650634765625, 0.7405731811523437, 0.7413833618164063, 0.7407764282226562, 0.7408101196289063, 0.7409360961914062, 0.7406024169921875, 0.7416668090820312, 0.7409581909179688, 0.740361572265625, 0.7412241821289063, 0.7413707275390625, 0.7408877563476562, 0.7406815795898437, 0.74096728515625, 0.7412142944335938, 0.7409121704101562, 0.7410260620117187, 0.74164501953125, 0.7411445922851563, 0.741074951171875, 0.7407349853515625, 0.7418388671875, 0.7409418334960938, 0.7413053588867188, 0.7412003173828124, 0.7406268310546875, 0.741662841796875, 0.7410089721679688, 0.7414379272460937, 0.7410524291992188, 0.741074951171875, 0.7409862670898437, 0.741231201171875, 0.7412080688476562, 0.7415582885742188, 0.7415643920898437, 0.7411831665039063, 0.740969970703125, 
0.7412069091796875, 0.7410032348632812, 0.7411319580078125, 0.7406467895507812, 0.7410001831054688, 0.7411732177734375, 0.7405772705078125, 0.7407882080078125, 0.74119580078125, 0.7415930786132813, 0.7405238647460938, 0.74088671875, 0.7413165893554687, 0.740890625, 0.7410768432617187, 0.7412532958984375, 0.7409111328125, 0.7414476928710938, 0.740789794921875, 0.740991455078125, 0.7406918334960938, 0.7410219116210938, 0.7410482788085937, 0.74030078125, 0.74102783203125, 0.7408057861328124, 0.7415857543945312, 0.7404413452148437, 0.7403440551757813, 0.7411328125, 0.7417835693359375, 0.741223876953125, 0.7406965942382813, 0.7409930419921875, 0.7410515747070312, 0.7409672241210937, 0.7407218627929687, 0.7408848876953125, 0.7415095825195313, 0.7411261596679688, 0.7403001098632812, 0.740924072265625, 0.7412849731445312, 0.74093994140625, 0.7412880249023438, 0.7411759643554687, 0.7412305908203125, 0.7412200317382812, 0.7410457763671875, 0.74151953125, 0.7411492919921875, 0.7410728759765625, 0.741060302734375, 0.7409923706054687, 0.7410328979492188, 0.7411978149414062, 0.7412183227539062, 0.7411544189453125, 0.7412242431640625, 0.7409833984375, 0.7409845581054687, 0.7409912109375, 0.7409848022460938, 0.7414599609375, 0.7411077270507812, 0.74080419921875]",tokens/s,1.349568131137656,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1141, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 944, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 677, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 500, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl 
return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2871, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply 
self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert return t.to( torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3809.091584,4375.642112,0.0,3997.171712,3878.257152,s,1,10.1610673828125,10.1610673828125,0.0,10.1610673828125,10.1610673828125,10.1610673828125,10.1610673828125,[10.1610673828125],,kWh,9.453367005832356e-05,1.0420346157089993e-05,3.048863550200731e-05,0.00013544265171742086,,MB,2068.639744,4862.181376,0.0,4454.350848,4371.844096,s,10,6.675686889648437,0.6675686889648438,0.001182260049307542,0.6671465148925781,0.6692501159667968,0.6694332855224608,0.6695798211669921,"[0.6663990478515625, 0.665687744140625, 0.668204345703125, 0.6683736572265625, 0.669616455078125, 0.6692094116210937, 0.66723583984375, 0.666953125, 0.6670571899414063, 0.6669500732421875]",tokens/s,383.48113719497974,kWh,1.942514973437639e-05,2.142278729064239e-06,1.2911815885000184e-05,3.4479244348440813e-05,tokens/kWh,7424756.6858748915,MB,2082.807808,5063.507968,0.0,4655.67744,4530.328576,s,10,392.17560546875006,39.217560546875,0.04674525619346836,39.191431640625,39.287291015625,39.2940673828125,39.2994884765625,"[39.161171875, 39.1854765625, 39.26026171875, 39.28578515625, 39.30084375, 39.2348046875, 39.185828125, 39.19703515625, 39.18309375, 39.1813046875]",tokens/s,1.6064232226963457,kWh,0.0011423082741539557,0.00012600511666804526,0.0007596047187943987,0.0020279181096164,tokens/kWh,31066.343212407653,,s,630,392.1716875610349,0.6224947421603733,0.000999651843105283,0.6221973571777344,0.6240720092773437,0.6245301025390625,0.6251899475097656,"[0.6208662719726562, 0.6207017211914062, 0.6207979125976563, 0.620652587890625, 0.6211030883789063, 0.6210949096679688, 0.6213795776367188, 0.6215086059570313, 0.6209710693359375, 0.620970947265625, 0.6212689208984375, 0.6219121704101562, 0.621631103515625, 0.6217035522460937, 0.6210741577148438, 0.6216338500976563, 0.621465576171875, 0.6212767944335937, 0.6213320922851563, 0.6221156005859375, 0.621531005859375, 0.6213934326171875, 0.6221089477539062, 0.6209555053710938, 0.6218368530273437, 0.6214320068359375, 0.6214171142578125, 0.621451416015625, 0.6215410766601562, 0.6220205688476562, 0.6226742553710938, 0.6227169189453124, 0.621549560546875, 0.6219837646484375, 0.6218383178710938, 0.6212990112304687, 0.621251220703125, 0.6218731689453125, 0.6219625244140625, 0.6210096435546875, 0.6220963745117187, 0.6212274169921875, 0.6215418090820313, 0.6216337890625, 0.6213214111328125, 0.6221744384765625, 0.6212382202148438, 0.6219413452148438, 0.6216642456054687, 0.6220770263671875, 0.6223819580078125, 0.621633544921875, 0.6220308227539062, 0.6218731079101563, 0.621556884765625, 
0.6221239624023438, 0.6228521118164062, 0.6217460327148437, 0.6216721801757813, 0.6218858642578124, 0.6213114013671875, 0.6218358764648437, 0.6216796264648438, 0.6216312255859375, 0.6215479125976563, 0.62104296875, 0.6209993896484375, 0.6224806518554687, 0.6214332275390625, 0.6217445068359375, 0.6213316040039063, 0.6214786376953125, 0.6217033081054687, 0.6214041748046875, 0.621117431640625, 0.62139599609375, 0.6212894897460938, 0.6211517944335937, 0.6212304077148437, 0.6213383178710937, 0.6229478149414063, 0.62127783203125, 0.6217813110351562, 0.6214799194335937, 0.621645751953125, 0.6212383422851563, 0.6216724243164062, 0.622635009765625, 0.621991943359375, 0.62181298828125, 0.6215087890625, 0.621041748046875, 0.6216663818359375, 0.622186279296875, 0.6211631469726563, 0.6220943603515625, 0.621770751953125, 0.6214201049804687, 0.6217911376953125, 0.6219002685546875, 0.6219304809570313, 0.6224404296875, 0.6215733642578125, 0.622050048828125, 0.622199951171875, 0.6227362670898438, 0.6235402221679688, 0.621139404296875, 0.6225966796875, 0.6220348510742187, 0.6215162963867188, 0.62206005859375, 0.6221367797851562, 0.6229017944335937, 0.6219788818359375, 0.6231944580078125, 0.622035400390625, 0.6224629516601563, 0.6234024047851563, 0.62279736328125, 0.62261865234375, 0.6231654663085937, 0.6234412841796875, 0.6231921997070312, 0.6231817626953124, 0.6234131469726563, 0.6228252563476563, 0.623083740234375, 0.62229296875, 0.6221814575195312, 0.6223729858398438, 0.6235595092773437, 0.6238056030273438, 0.6229471435546875, 0.6226575317382812, 0.6221455078125, 0.6221781005859375, 0.621883056640625, 0.6221412353515625, 0.6218841552734375, 0.6221474609375, 0.6222430419921875, 0.6224573974609375, 0.6244620361328125, 0.6231836547851562, 0.6232293090820312, 0.6236917724609375, 0.623435546875, 0.6235765991210938, 0.621744873046875, 0.6233309326171875, 0.6228955078125, 0.6224219360351563, 0.6232003173828125, 0.6229783935546875, 0.6228689575195312, 0.6229219970703125, 0.62275927734375, 0.62302685546875, 0.6236508178710938, 0.6232855834960938, 0.62368017578125, 0.6235176391601562, 0.6234476928710937, 0.62329248046875, 0.6232743530273438, 0.6232473754882812, 0.6228970336914063, 0.6231710815429687, 0.6233646850585938, 0.6227291870117188, 0.623120361328125, 0.6228684692382812, 0.6239575805664063, 0.6243536987304688, 0.6232473754882812, 0.624051513671875, 0.6230247192382813, 0.6234602661132812, 0.6252095947265625, 0.6237081298828125, 0.6246498413085938, 0.6240689697265625, 0.6241747436523437, 0.6232557983398438, 0.6239718627929688, 0.6235609741210938, 0.623882568359375, 0.6232124633789062, 0.6231897583007813, 0.6230674438476562, 0.6230609741210937, 0.624166748046875, 0.6237717895507813, 0.6243942260742188, 0.6245888061523438, 0.625301513671875, 0.6247157592773438, 0.6230240478515625, 0.625141845703125, 0.6228869018554688, 0.6232366943359375, 0.62269873046875, 0.6231851806640625, 0.6220228271484375, 0.622017333984375, 0.6219386596679688, 0.6229503784179687, 0.6238351440429688, 0.62455810546875, 0.62429736328125, 0.6225447387695312, 0.623547119140625, 0.6232158203125, 0.6232850341796875, 0.6230805053710937, 0.6226461791992187, 0.62251171875, 0.62471533203125, 0.6247340698242188, 0.6254151000976562, 0.623763427734375, 0.6232801513671875, 0.62427685546875, 0.6225496826171875, 0.6223196411132812, 0.6223701171875, 0.6233524780273437, 0.6226534423828125, 0.6228392944335938, 0.6217301635742187, 0.62282568359375, 0.6233374633789063, 0.6248344116210938, 0.624234619140625, 0.6246707153320312, 0.624437255859375, 
0.6261248168945313, 0.624300048828125, 0.6247915649414062, 0.6247709350585937, 0.623346923828125, 0.62327490234375, 0.6231838989257813, 0.6232733764648437, 0.624319091796875, 0.623203369140625, 0.6235957641601563, 0.6232378540039063, 0.6230384521484374, 0.6235934448242187, 0.62410546875, 0.624552734375, 0.622970703125, 0.6232434692382812, 0.6235320434570313, 0.6216826171875, 0.6229218139648437, 0.6228759765625, 0.622852783203125, 0.6244249267578125, 0.6225879516601562, 0.6232387084960938, 0.6226347045898437, 0.622448974609375, 0.6230695190429687, 0.6228805541992187, 0.62324755859375, 0.6231512451171874, 0.6230609741210937, 0.623164794921875, 0.6216956176757813, 0.6223012084960937, 0.623686767578125, 0.6242169189453125, 0.624099365234375, 0.623435791015625, 0.6225490112304688, 0.6222274780273438, 0.6235869140625, 0.623337890625, 0.6240316772460938, 0.6234563598632813, 0.62336962890625, 0.6241322021484375, 0.6243677978515625, 0.6238907470703124, 0.6246154174804688, 0.62384130859375, 0.6250864868164062, 0.6254507446289063, 0.6248143920898438, 0.6251089477539062, 0.6250797119140625, 0.6246277465820312, 0.6248864135742187, 0.6241300659179687, 0.6247028198242187, 0.62443994140625, 0.62450244140625, 0.6245584106445312, 0.6252871704101562, 0.6252933349609375, 0.6248130493164062, 0.6243065185546876, 0.6248900756835938, 0.6244520263671876, 0.624332275390625, 0.624459716796875, 0.6245643920898437, 0.6243065185546876, 0.6236464233398438, 0.6242077026367188, 0.624173583984375, 0.622940185546875, 0.6232531127929688, 0.623549072265625, 0.6243225708007812, 0.6241682739257812, 0.623501708984375, 0.6244286499023437, 0.6234568481445313, 0.6228500366210937, 0.62369189453125, 0.622315185546875, 0.6235504760742188, 0.6238941650390625, 0.622547607421875, 0.6224956665039062, 0.6225897216796875, 0.6231893920898437, 0.6223553466796875, 0.6224359130859375, 0.6219324951171875, 0.6216434326171875, 0.6220637817382813, 0.622115478515625, 0.621849609375, 0.6217793579101563, 0.6230022583007813, 0.622055419921875, 0.6230033569335938, 0.6232311401367188, 0.6235029296875, 0.6232695922851562, 0.6238994750976562, 0.6235864868164063, 0.6242169189453125, 0.62344384765625, 0.62461962890625, 0.6235402221679688, 0.6238945922851562, 0.6230337524414062, 0.6217464599609375, 0.6219135131835938, 0.6227423706054688, 0.6214451293945312, 0.6217722778320313, 0.6224737548828125, 0.6223883666992187, 0.6219784545898438, 0.6218731689453125, 0.6218629150390625, 0.6218421020507813, 0.6229135131835938, 0.6222031860351562, 0.6221757202148438, 0.6219984130859375, 0.6235321044921875, 0.6232987670898438, 0.62225, 0.6229113159179688, 0.6220924682617187, 0.6214716186523438, 0.6222839965820313, 0.6220870971679687, 0.622716552734375, 0.6221619262695313, 0.62134326171875, 0.6216268920898438, 0.62130224609375, 0.6210475463867188, 0.621955322265625, 0.6211614990234375, 0.6220830688476563, 0.6220595092773438, 0.62343359375, 0.62227880859375, 0.6221425170898438, 0.6217224731445312, 0.6216438598632813, 0.6211522827148438, 0.62152294921875, 0.6214039916992188, 0.62175146484375, 0.6217625732421875, 0.6218528442382812, 0.6212329711914063, 0.6213324584960938, 0.6222576904296875, 0.6224789428710937, 0.6215234985351562, 0.6221717529296875, 0.6216543579101562, 0.6217588500976563, 0.62298876953125, 0.621654541015625, 0.6225194091796875, 0.621765380859375, 0.6218440551757812, 0.6219963989257813, 0.621807861328125, 0.6218424072265625, 0.621813232421875, 0.6216232299804687, 0.6219638671875, 0.6222356567382813, 0.6216294555664063, 0.622065673828125, 0.6220779418945312, 
0.6213734130859375, 0.621981689453125, 0.6218458862304688, 0.6219042358398438, 0.6228216552734375, 0.62259814453125, 0.622703857421875, 0.6227461547851563, 0.6227069702148438, 0.6219407348632813, 0.6222471313476563, 0.6224263305664063, 0.6222280883789062, 0.6221859130859375, 0.6219306640625, 0.62283349609375, 0.621927001953125, 0.6228889770507813, 0.6222005615234375, 0.6224120483398438, 0.6220709228515625, 0.62156201171875, 0.6224703979492188, 0.6221766357421875, 0.622147216796875, 0.6220294799804688, 0.6219849243164063, 0.6213472900390625, 0.6218675537109375, 0.6220467529296875, 0.6216909790039062, 0.6217915649414063, 0.6212394409179688, 0.62157080078125, 0.6216124877929687, 0.6215706176757813, 0.62171923828125, 0.6212115478515625, 0.6219024047851562, 0.6219141235351563, 0.6221842041015625, 0.6226741333007813, 0.6221475219726562, 0.62283984375, 0.6219939575195312, 0.621854736328125, 0.6222540893554688, 0.621770263671875, 0.6217395629882813, 0.6221505737304688, 0.6216661987304688, 0.6218499145507812, 0.6217726440429687, 0.6218966674804688, 0.622606201171875, 0.6219019775390625, 0.6216640625, 0.6225096435546875, 0.622348876953125, 0.6223441772460937, 0.62247119140625, 0.622202880859375, 0.6227476196289062, 0.6222908935546875, 0.6219326171875, 0.6220206298828125, 0.6229647216796875, 0.6225469360351562, 0.62230322265625, 0.6233429565429688, 0.6229756469726563, 0.62355859375, 0.6229109497070312, 0.6216934814453124, 0.62272265625, 0.622436767578125, 0.6221782836914063, 0.621923828125, 0.6228751831054687, 0.6234577026367187, 0.6220641479492187, 0.6225791015625, 0.6223378295898437, 0.6220719604492188, 0.6216929321289062, 0.6215291137695312, 0.6213193359375, 0.62173681640625, 0.62162255859375, 0.621828857421875, 0.6211678466796875, 0.621646240234375, 0.6214249877929687, 0.6218397827148437, 0.621078857421875, 0.621701171875, 0.6215374755859375, 0.6221947631835938, 0.6217788696289063, 0.6222756958007812, 0.6222693481445313, 0.6219939575195312, 0.6229188232421875, 0.6223552856445312, 0.6213916625976562, 0.6219757690429687, 0.6217498168945312, 0.6219063720703125, 0.6214553833007812, 0.6221741943359375, 0.6214533081054687, 0.6217052001953125, 0.621414306640625, 0.621573486328125, 0.6216115112304688, 0.6217853393554688, 0.623085205078125, 0.6220038452148438, 0.6223834228515625, 0.622569091796875, 0.6228565673828125, 0.6226044311523438, 0.6217865600585938, 0.622486328125, 0.6222801513671875, 0.6225289306640625, 0.6220162963867187, 0.6221029052734375, 0.6229155883789063, 0.62178271484375, 0.6214864501953125, 0.6217922973632812, 0.6217655639648437, 0.6219755249023438, 0.6215924072265625, 0.6230631103515625, 0.6217892456054688, 0.6223544311523438, 0.62274755859375, 0.6217227783203125, 0.621640625, 0.6221250610351563, 0.6218157958984375, 0.621573486328125, 0.6218594360351563, 0.6215670166015625, 0.6223014526367188, 0.6217172241210938, 0.6220453491210938, 0.6212354736328125, 0.621681396484375, 0.621613037109375, 0.6219480590820312, 0.6224996948242187, 0.6217942504882813, 0.6217050170898437, 0.6220220336914063, 0.6217483520507813, 0.6217632446289062, 0.6217789306640625, 0.6213734130859375, 0.6209495239257813, 0.6216151123046875, 0.6211727294921875, 0.6215557250976562, 0.6225057373046875, 0.6211812133789063, 0.6214102783203125, 0.6219301147460937, 0.6214495849609375, 0.6215593872070313, 0.6216724243164062, 0.6226577758789062, 0.6225548706054688, 0.6222279663085938, 0.6219957885742188, 0.6224058837890625, 0.6217394409179687, 0.6216561889648438, 0.6219207153320313, 0.6218521118164062, 0.6216215209960938, 
0.621865234375, 0.621643798828125, 0.6220712890625, 0.6220040283203125, 0.6222424926757812, 0.6220428466796875, 0.6224735107421875, 0.6216294555664063, 0.622002197265625, 0.6223499145507813, 0.6226825561523438, 0.6219522705078125, 0.622004150390625, 0.6220866088867187, 0.6224613647460937, 0.621686767578125, 0.6215733642578125, 0.6223180541992187, 0.6216582641601562, 0.6230263061523438, 0.6226472778320312, 0.6219386596679688, 0.6220728759765625, 0.622066650390625, 0.62205517578125, 0.6220638427734375, 0.6219235229492187, 0.62180224609375]",tokens/s,1.6064392713253954,, 4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, 
in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,2163.884032,2194.604032,0.0,1816.133632,1727.29344,s,1,9.0174072265625,9.0174072265625,0.0,9.0174072265625,9.0174072265625,9.0174072265625,9.0174072265625,[9.0174072265625],,kWh,5.976673873331038e-05,6.585365099496029e-06,1.910834862001165e-05,8.546045245281806e-05,,MB,2234.83904,2406.416384,0.0,1998.585856,1980.448768,s,10,3.2212420043945316,0.32212420043945317,0.00027432602111490955,0.3221717987060547,0.3224371398925781,0.32244046325683595,0.3224431219482422,"[0.32209222412109373, 0.32156170654296873, 0.32244378662109374, 0.3224364013671875, 0.3221385803222656, 0.32232028198242185, 0.3222109985351562, 0.32169442749023436, 0.32217874145507813, 0.32216485595703126]",tokens/s,794.7245182161284,kWh,9.410542486587303e-06,1.037575628221308e-06,6.269536265624508e-06,1.6717654380433117e-05,tokens/kWh,15313153.040155603,MB,2248.503296,2597.257216,0.0,2189.426688,2078.022144,s,10,177.882412109375,17.7882412109375,0.020592925819032642,17.790093749999997,17.8097537109375,17.81297841796875,17.81555818359375,"[17.74033203125, 17.77685546875, 17.772400390625, 17.789318359375, 17.816203125, 17.78546484375, 17.790869140625, 17.7986328125, 17.803298828125, 17.809037109375]",tokens/s,3.541665488618573,kWh,0.0005192048609513302,5.727199115571418e-05,0.0003450639392453764,0.0009215407913524209,tokens/kWh,68363.76706400963,,s,630,177.87812567138693,0.28234623122442337,0.0005002065340032206,0.2823446197509766,0.28293623046875,0.2831180969238281,0.2836704473876953,"[0.2817083740234375, 0.281108642578125, 0.28102655029296875, 0.28117919921875, 0.28116668701171876, 0.2813500671386719, 0.28130703735351564, 0.2817966003417969, 0.28123318481445314, 0.28118856811523435, 0.28105682373046875, 0.2815637817382812, 
0.2812744445800781, 0.28089865112304685, 0.2817504272460937, 0.28136038208007813, 0.28095693969726565, 0.2816244812011719, 0.2815550537109375, 0.2813931579589844, 0.280936767578125, 0.2819615783691406, 0.2811295166015625, 0.2813604431152344, 0.2813358154296875, 0.28161227416992185, 0.2817576904296875, 0.2811202392578125, 0.2818810729980469, 0.28143618774414064, 0.2814886474609375, 0.2817134094238281, 0.2818887023925781, 0.2820078735351563, 0.28157867431640626, 0.2822633972167969, 0.28165399169921873, 0.28145050048828124, 0.28195550537109376, 0.2812294921875, 0.2821034851074219, 0.2815816345214844, 0.2820033264160156, 0.2816337890625, 0.28149554443359376, 0.28178369140625, 0.28162832641601565, 0.28182830810546877, 0.28160409545898435, 0.2818009033203125, 0.282184814453125, 0.2814942321777344, 0.28167578125, 0.2819154052734375, 0.28225762939453125, 0.2818082580566406, 0.2819129638671875, 0.2820143127441406, 0.2816607360839844, 0.28195724487304685, 0.28182733154296874, 0.28184173583984373, 0.2815572814941406, 0.2825162048339844, 0.2816000061035156, 0.28129278564453125, 0.28238134765625, 0.28212322998046874, 0.28228607177734377, 0.28375567626953124, 0.28194082641601564, 0.2814460754394531, 0.2823106994628906, 0.28207748413085937, 0.28189495849609375, 0.2820966491699219, 0.28307351684570314, 0.28216644287109377, 0.2815824279785156, 0.2825994262695313, 0.2821038208007812, 0.2816468200683594, 0.2814876403808594, 0.28208740234375, 0.2819433898925781, 0.28198684692382814, 0.28177877807617185, 0.2824645690917969, 0.2820888061523438, 0.28176986694335937, 0.2818728332519531, 0.282263916015625, 0.2813644714355469, 0.2818531494140625, 0.282159912109375, 0.28162213134765623, 0.2818133850097656, 0.281831298828125, 0.2822569580078125, 0.2817108154296875, 0.28234378051757814, 0.28229971313476565, 0.28194680786132814, 0.2821629638671875, 0.282089599609375, 0.28266885375976564, 0.2821143493652344, 0.28176177978515626, 0.28232809448242185, 0.28198574829101564, 0.282744140625, 0.2830360107421875, 0.2825934448242188, 0.2832596130371094, 0.28283056640625, 0.28232498168945314, 0.2819215393066406, 0.28268655395507813, 0.2822210693359375, 0.2829930419921875, 0.28259872436523437, 0.28216085815429687, 0.2821068420410156, 0.28219186401367186, 0.28174951171875, 0.28200244140625, 0.2818103637695312, 0.2818050537109375, 0.28194964599609373, 0.28164801025390623, 0.2816470947265625, 0.28212225341796876, 0.28157131958007814, 0.28144024658203126, 0.28197479248046875, 0.2823550415039062, 0.28141839599609375, 0.28154266357421875, 0.28250521850585936, 0.2820546569824219, 0.2807807922363281, 0.2821868591308594, 0.28201666259765623, 0.2817181396484375, 0.2816468200683594, 0.2822828063964844, 0.2825413818359375, 0.2814042053222656, 0.28236123657226564, 0.28229693603515627, 0.2812333984375, 0.28171878051757815, 0.2826264038085938, 0.2821457824707031, 0.281993896484375, 0.2820177307128906, 0.28194412231445315, 0.28198910522460935, 0.2825441284179688, 0.2827960205078125, 0.2821397399902344, 0.28224371337890625, 0.2820672302246094, 0.28260906982421874, 0.2822496643066406, 0.2821588745117187, 0.2823231811523437, 0.2821611328125, 0.28179608154296876, 0.2822761535644531, 0.2825519104003906, 0.28220291137695314, 0.28239599609375, 0.28195034790039064, 0.28220880126953124, 0.2816912536621094, 0.28253070068359376, 0.2816629943847656, 0.28212066650390627, 0.28200961303710936, 0.28256051635742185, 0.2823777770996094, 0.2829818420410156, 0.282862548828125, 0.28231884765625, 0.28236764526367186, 0.2824459228515625, 0.28264678955078126, 
0.28200732421875, 0.2823489990234375, 0.28172946166992185, 0.2823695373535156, 0.28246728515625, 0.28236184692382815, 0.28178411865234376, 0.2822180480957031, 0.2826302185058594, 0.28216717529296875, 0.28230303955078123, 0.28263436889648436, 0.28219390869140626, 0.2823474426269531, 0.28219732666015623, 0.2821455383300781, 0.2820280456542969, 0.2820444030761719, 0.28189697265625, 0.28263177490234376, 0.282442138671875, 0.28246426391601565, 0.28231436157226564, 0.28231512451171875, 0.28233053588867185, 0.28185836791992186, 0.28206314086914064, 0.28238027954101563, 0.2822102966308594, 0.2821610412597656, 0.282501220703125, 0.2822912902832031, 0.2823437194824219, 0.28220660400390624, 0.28217788696289064, 0.28252175903320315, 0.2824208068847656, 0.28231231689453123, 0.28185836791992186, 0.28272256469726564, 0.28227789306640627, 0.2822451171875, 0.28217138671875, 0.28220416259765624, 0.28259686279296875, 0.28197296142578127, 0.28252093505859377, 0.2827130126953125, 0.28260107421875, 0.2826029663085938, 0.2827558898925781, 0.28268142700195314, 0.2825068359375, 0.28281671142578124, 0.2828082580566406, 0.2827901611328125, 0.2826642761230469, 0.28224517822265627, 0.28231536865234375, 0.28244378662109376, 0.2823638916015625, 0.28244992065429686, 0.2825603332519531, 0.283182373046875, 0.2824149169921875, 0.28214898681640627, 0.28238482666015624, 0.2825198974609375, 0.28207839965820314, 0.28243026733398435, 0.2831707458496094, 0.28252578735351563, 0.2823947448730469, 0.28252685546875, 0.2841689147949219, 0.28261346435546875, 0.2823191223144531, 0.28272845458984375, 0.28269329833984375, 0.28300521850585936, 0.28303466796875, 0.28245709228515625, 0.2824744873046875, 0.281973876953125, 0.28299066162109376, 0.28248556518554685, 0.2822279357910156, 0.2835259094238281, 0.2836643371582031, 0.2839423828125, 0.2836729431152344, 0.28421142578125, 0.2832015380859375, 0.2830908203125, 0.2829354248046875, 0.28259231567382814, 0.2824910278320312, 0.2826756591796875, 0.2829981689453125, 0.28238104248046875, 0.2826528625488281, 0.28226971435546877, 0.2829794006347656, 0.282104736328125, 0.28296807861328127, 0.2830824279785156, 0.28277996826171875, 0.28335104370117187, 0.28256869506835935, 0.2826506042480469, 0.2839163208007813, 0.2831787414550781, 0.2821790161132812, 0.282032958984375, 0.28291915893554687, 0.28312551879882814, 0.2829066162109375, 0.28294964599609373, 0.2832795104980469, 0.28237298583984377, 0.28226153564453127, 0.28324884033203124, 0.28287667846679687, 0.2823016052246094, 0.2827273254394531, 0.282625, 0.2823463134765625, 0.2826581726074219, 0.28176394653320314, 0.2821018981933594, 0.2824953918457031, 0.28187026977539065, 0.28230593872070314, 0.2822806091308594, 0.2824806518554687, 0.281499755859375, 0.2825419921875, 0.28195135498046875, 0.28178289794921874, 0.2821245422363281, 0.28252569580078124, 0.28158108520507813, 0.28236846923828124, 0.2817228698730469, 0.28231884765625, 0.28214019775390625, 0.2818748474121094, 0.2822943115234375, 0.28198092651367185, 0.2826136779785156, 0.28222476196289065, 0.2823065490722656, 0.2823987121582031, 0.2818655700683594, 0.28213723754882813, 0.2818925476074219, 0.2822043762207031, 0.2822302551269531, 0.28203631591796874, 0.28266143798828125, 0.2824294128417969, 0.2824390869140625, 0.28216995239257814, 0.2819297180175781, 0.28267111206054685, 0.2821983337402344, 0.2819841003417969, 0.28223959350585937, 0.28231475830078123, 0.28229632568359375, 0.28255587768554685, 0.28281704711914063, 0.28269158935546873, 0.2822287292480469, 0.2822830505371094, 0.2823976745605469, 
0.28256997680664064, 0.2825523071289063, 0.2826650390625, 0.282540771484375, 0.282830810546875, 0.2821959533691406, 0.28281964111328123, 0.2822872314453125, 0.2828547668457031, 0.28284771728515623, 0.282723876953125, 0.28235415649414064, 0.28240869140625, 0.2825396728515625, 0.28260894775390627, 0.28239041137695314, 0.28224798583984373, 0.2827673645019531, 0.28229428100585935, 0.2820782775878906, 0.2822442626953125, 0.2819520263671875, 0.2820889892578125, 0.2825404968261719, 0.282499267578125, 0.28215277099609376, 0.28228512573242187, 0.28275830078125, 0.2826625671386719, 0.28218994140625, 0.28231884765625, 0.28244610595703123, 0.2819981079101562, 0.2818420715332031, 0.282692138671875, 0.282208251953125, 0.2823740844726563, 0.28222061157226563, 0.28262374877929686, 0.28240887451171875, 0.28204629516601565, 0.28255691528320315, 0.28304940795898437, 0.28197909545898436, 0.28234378051757814, 0.28224920654296876, 0.2820809326171875, 0.2819485168457031, 0.2823261413574219, 0.2826187438964844, 0.2821982116699219, 0.2824906005859375, 0.282846923828125, 0.2824277954101562, 0.2825990295410156, 0.2828082580566406, 0.28278585815429685, 0.2824396667480469, 0.28202023315429686, 0.2823386840820313, 0.28228607177734377, 0.282679931640625, 0.2820782775878906, 0.28205517578125, 0.2826039733886719, 0.2818924865722656, 0.28278207397460936, 0.2825441284179688, 0.2825146179199219, 0.2823456115722656, 0.28275888061523435, 0.28253692626953125, 0.2823302612304687, 0.2827496643066406, 0.2826689147949219, 0.2818419189453125, 0.2827179565429688, 0.2824683532714844, 0.28237203979492187, 0.2814505615234375, 0.28211163330078126, 0.2820980224609375, 0.2821591186523438, 0.28242718505859377, 0.28200775146484375, 0.2821791687011719, 0.28169052124023436, 0.2829434814453125, 0.282093017578125, 0.2827833862304687, 0.28231283569335935, 0.28199798583984376, 0.28288214111328125, 0.282334228515625, 0.2829158020019531, 0.28237109375, 0.28195120239257815, 0.28232235717773435, 0.28237677001953126, 0.28265472412109377, 0.2826524658203125, 0.282267333984375, 0.2826734619140625, 0.2824541015625, 0.283025390625, 0.28260562133789063, 0.28247674560546876, 0.2826320495605469, 0.28254443359375, 0.28234735107421877, 0.282380126953125, 0.28298239135742187, 0.282345458984375, 0.282208251953125, 0.2828554382324219, 0.2826026306152344, 0.28202203369140627, 0.2827025451660156, 0.28374346923828125, 0.28333273315429686, 0.28216339111328126, 0.28254568481445314, 0.28293011474609375, 0.2825252380371094, 0.2828234252929688, 0.282904296875, 0.2824818420410156, 0.2824889831542969, 0.28289688110351563, 0.2820088195800781, 0.28275912475585935, 0.28214938354492186, 0.283443603515625, 0.28240280151367186, 0.28236163330078123, 0.28312188720703124, 0.282489990234375, 0.282265625, 0.28280435180664065, 0.2829030151367187, 0.28249981689453124, 0.2827157897949219, 0.2818607788085937, 0.28287387084960935, 0.2825871276855469, 0.28198211669921874, 0.28234225463867185, 0.2826886901855469, 0.28228436279296876, 0.2823562316894531, 0.28245303344726563, 0.2820813293457031, 0.2823396911621094, 0.2829031066894531, 0.28270797729492186, 0.2821275024414063, 0.2832491455078125, 0.28295849609375, 0.28206051635742185, 0.2828975219726563, 0.282876708984375, 0.28224725341796875, 0.2821158447265625, 0.28251162719726564, 0.2826855163574219, 0.28227783203125, 0.28352947998046873, 0.28261312866210936, 0.28258547973632814, 0.28250323486328127, 0.28231478881835936, 0.283111328125, 0.28280416870117187, 0.282333251953125, 0.28235589599609373, 0.28247406005859377, 0.28249728393554685, 
0.28256256103515626, 0.28277120971679687, 0.28252108764648437, 0.2828787231445313, 0.28241510009765625, 0.2828995666503906, 0.282104736328125, 0.2827202453613281, 0.283009033203125, 0.2821937561035156, 0.28226309204101563, 0.2824374694824219, 0.28251800537109373, 0.2822692260742187, 0.2821905517578125, 0.28261135864257814, 0.2824639892578125, 0.28300875854492186, 0.2826269226074219, 0.2827056579589844, 0.28254635620117186, 0.28302133178710936, 0.2829619445800781, 0.2832652587890625, 0.2830226135253906, 0.2830750732421875, 0.2827343139648438, 0.2820447082519531, 0.2826931762695313, 0.2829349365234375, 0.2825732421875, 0.2821565246582031, 0.28265933227539064, 0.28219830322265627, 0.2824922180175781, 0.2822982177734375, 0.28269049072265623, 0.2825780944824219, 0.28191806030273436, 0.2827262268066406, 0.28285516357421875, 0.282481201171875, 0.282574951171875, 0.28316058349609374, 0.2822366027832031, 0.28232736206054687, 0.2828328857421875, 0.28230859375, 0.28245196533203126, 0.28311346435546875, 0.2831810607910156, 0.2820157470703125, 0.28297354125976565, 0.2824956359863281, 0.28224920654296876, 0.28210519409179685, 0.28260140991210936, 0.28277188110351564, 0.2822618103027344, 0.2835672302246094, 0.28298330688476564, 0.2829537353515625, 0.28221600341796876, 0.2828394775390625, 0.2828790588378906, 0.2829869689941406, 0.28328958129882814, 0.2826731262207031, 0.28282318115234373, 0.28256979370117186, 0.28257949829101564, 0.28313018798828127, 0.28251962280273435, 0.28293533325195314, 0.283202880859375, 0.2822878723144531, 0.2829209594726563, 0.28301806640625, 0.2826587829589844, 0.282595458984375, 0.2827507629394531, 0.28313739013671874, 0.28288912963867185, 0.282435546875, 0.2828511657714844, 0.2827154235839844, 0.2824917907714844, 0.2834964599609375, 0.28251898193359376]",tokens/s,3.541750834298009,, 4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,4376.895488,4566.482944,0.0,4188.012544,4187.049984,s,1,10.3305439453125,10.3305439453125,0.0,10.3305439453125,10.3305439453125,10.3305439453125,10.3305439453125,[10.3305439453125],,kWh,9.650066008333719e-05,1.0637245674047769e-05,3.166196977399949e-05,0.00013879987553138445,,MB,4312.846336,4962.844672,0.0,4555.014144,4514.269184,s,10,7.85486279296875,0.785486279296875,0.0024385657671590765,0.7846548767089844,0.7884500244140625,0.7891213439941406,0.7896583996582032,"[0.78331787109375, 0.7868609619140625, 0.7877884521484375, 0.7831114501953125, 0.7833239135742187, 0.783201416015625, 0.78598583984375, 0.7883008422851563, 0.7831793823242188, 0.7897926635742187]",tokens/s,325.9127584369232,kWh,2.2828943265063682e-05,2.516557984143609e-06,1.5149542034153764e-05,4.049504328336105e-05,tokens/kWh,6321761.362462538,MB,4320.833536,4979.621888,0.0,4571.79136,4514.271744,s,10,466.9446015625,46.69446015625,0.009211138575253765,46.69581640625,46.701953515625,46.704838085937496,46.7071457421875,"[46.67080078125, 46.69037109375, 
46.7013125, 46.69963671875, 46.69367578125, 46.69544140625, 46.69619140625, 46.70772265625, 46.69769140625, 46.6917578125]",tokens/s,1.3491964526238884,kWh,0.0013617363878057702,0.0001502109238077487,0.0009058930003548463,0.0024178403119683653,tokens/kWh,26056.31136520826,,s,630,466.9331598510745,0.7411637457953559,0.0004398744652296437,0.7411553955078125,0.7415886169433593,0.741754150390625,0.7420652239990234,"[0.7400548706054687, 0.7412684326171874, 0.7400137939453125, 0.7404874267578125, 0.7405681762695312, 0.7407911376953125, 0.740642333984375, 0.7402091674804687, 0.740773193359375, 0.7406885986328124, 0.74061376953125, 0.7400984497070312, 0.7404476928710938, 0.7406184692382812, 0.7407650146484375, 0.7409305419921876, 0.7405218505859374, 0.7407485961914062, 0.7407849731445313, 0.740691650390625, 0.7404711303710938, 0.7407369995117188, 0.7407920532226563, 0.740763916015625, 0.740427734375, 0.7405476684570312, 0.7409570922851563, 0.7412613525390624, 0.74046875, 0.7403253784179687, 0.7409314575195313, 0.7408845825195313, 0.7408425903320313, 0.7404266967773437, 0.7407796020507813, 0.7410220947265626, 0.7409541015625, 0.7407222900390625, 0.7410399169921875, 0.7417101440429688, 0.740493408203125, 0.7410515747070312, 0.7407597045898437, 0.741287841796875, 0.740619140625, 0.7408491821289063, 0.7407881469726563, 0.7413148803710937, 0.7415169067382813, 0.7405165405273437, 0.7405834350585937, 0.7412981567382813, 0.74104150390625, 0.7410548095703124, 0.740962646484375, 0.741011474609375, 0.7411691284179688, 0.7408289794921875, 0.7414130859375, 0.7407860107421875, 0.741963134765625, 0.7403405151367187, 0.74083740234375, 0.7407962646484375, 0.7412327880859375, 0.7407361450195312, 0.7408401489257812, 0.7409166870117188, 0.7415664672851563, 0.7402646484375, 0.7407185668945313, 0.7409295654296875, 0.7411096801757813, 0.7403663940429688, 0.7409575805664063, 0.7412445678710937, 0.7411536865234375, 0.7405241088867187, 0.7409991455078125, 0.7411691284179688, 0.741085205078125, 0.7407554321289063, 0.740642822265625, 0.7411220703125, 0.7422764892578125, 0.7411022338867187, 0.74110498046875, 0.7409793701171875, 0.7408450317382812, 0.7406897583007812, 0.7414053955078125, 0.7411097412109375, 0.7411240844726562, 0.7409183349609375, 0.7414363403320312, 0.7412568359375, 0.7411265258789063, 0.7414898071289062, 0.7413748168945312, 0.7415459594726562, 0.7411568603515625, 0.7411302490234375, 0.7410421752929688, 0.74123876953125, 0.7412080078125, 0.7415253295898437, 0.7408212280273437, 0.741011474609375, 0.741369873046875, 0.741525390625, 0.7403992309570312, 0.741412841796875, 0.7414989013671875, 0.7410319213867187, 0.741306396484375, 0.7412183227539062, 0.741035888671875, 0.7411995239257813, 0.7411327514648437, 0.7411178588867188, 0.74104638671875, 0.7415275268554687, 0.7411786499023437, 0.7417346801757813, 0.7408972778320313, 0.7411565551757813, 0.74149169921875, 0.7405444946289063, 0.7411159057617187, 0.7413350219726562, 0.7413760375976562, 0.7407493286132812, 0.7412242431640625, 0.7418446044921875, 0.7410775756835938, 0.7404031982421875, 0.7409903564453125, 0.7414483032226562, 0.7410680541992187, 0.74062451171875, 0.7414396362304687, 0.741398193359375, 0.7410178833007812, 0.7408229370117188, 0.7414319458007812, 0.7412367553710938, 0.7406735229492187, 0.7419535522460937, 0.741868896484375, 0.740987548828125, 0.7405936889648438, 0.7413206787109375, 0.7413677978515625, 0.741185546875, 0.7414883422851563, 0.7411674194335938, 0.74111181640625, 0.7407308959960938, 0.741491943359375, 0.7411206665039063, 
0.7415253295898437, 0.7414500732421875, 0.7409674682617188, 0.7418802490234375, 0.7409812622070312, 0.7410646362304687, 0.7412675170898437, 0.7410360107421875, 0.741416015625, 0.7407230224609375, 0.74164013671875, 0.7409830322265625, 0.7409074096679688, 0.7415291748046875, 0.7415167846679688, 0.7413599853515624, 0.7414026489257812, 0.741384765625, 0.7411292724609375, 0.741028564453125, 0.7413885498046875, 0.7414886474609375, 0.7408046264648438, 0.7417279663085937, 0.7412145385742187, 0.7414307250976563, 0.7414728393554687, 0.7443988647460937, 0.7417261962890624, 0.74151123046875, 0.74071875, 0.7410575561523437, 0.7411285400390625, 0.7408866577148437, 0.7407396240234375, 0.7418423461914062, 0.74116357421875, 0.740701904296875, 0.7411981201171876, 0.7411087646484374, 0.7412780151367188, 0.7409827880859375, 0.7410835571289063, 0.7407659301757813, 0.741185546875, 0.7409679565429688, 0.74139697265625, 0.7413186645507812, 0.7406237182617188, 0.741683837890625, 0.7403906860351562, 0.7412840576171875, 0.7410258178710938, 0.7409801635742187, 0.74120361328125, 0.7415632934570312, 0.7408532104492187, 0.741116455078125, 0.7411136474609376, 0.7415145874023438, 0.7412662963867187, 0.7409889526367187, 0.7410872192382812, 0.7411751708984375, 0.7410032348632812, 0.7412933349609375, 0.7412293701171875, 0.741222412109375, 0.7412467651367187, 0.740702392578125, 0.7414190063476562, 0.7414312744140625, 0.741305908203125, 0.7415014038085938, 0.7416770629882813, 0.7416893310546875, 0.7411199951171875, 0.7407368774414063, 0.7420499267578125, 0.7413145751953125, 0.7412017211914063, 0.7411181640625, 0.741326171875, 0.7411607666015625, 0.7411056518554687, 0.7413658447265625, 0.741214111328125, 0.74111474609375, 0.7406795043945312, 0.7464931640625, 0.7411426391601562, 0.7410634765625, 0.7404827880859375, 0.7409974365234375, 0.7406197509765625, 0.7408665771484375, 0.7403984375, 0.7413561401367188, 0.7419658203125, 0.7407144165039062, 0.7410791015625, 0.7407882080078125, 0.7407738647460937, 0.7412030029296875, 0.741058837890625, 0.7410695190429688, 0.7415043334960938, 0.7412430419921875, 0.7411819458007812, 0.740724609375, 0.7415598754882813, 0.7413863525390625, 0.74105615234375, 0.740856689453125, 0.7412633666992188, 0.7410808715820313, 0.7409461059570313, 0.7410293579101562, 0.7410462646484375, 0.7412589111328125, 0.7407010498046875, 0.7408455810546875, 0.7406441650390625, 0.7417471313476562, 0.7409865112304688, 0.7412426147460938, 0.74111181640625, 0.7409611206054687, 0.7407144165039062, 0.7407513427734375, 0.7415823364257812, 0.7408370361328125, 0.7415029907226562, 0.740903564453125, 0.7415789794921875, 0.74149072265625, 0.7410846557617188, 0.741435791015625, 0.7410320434570312, 0.7414353637695312, 0.741074951171875, 0.7409930419921875, 0.7410699462890625, 0.7412860107421875, 0.7413436279296876, 0.7414398803710938, 0.7414678955078124, 0.7416117553710937, 0.74132421875, 0.7413458862304687, 0.7408488159179687, 0.7418765258789063, 0.7413718872070313, 0.7415316772460937, 0.7410808715820313, 0.741275390625, 0.7411348876953125, 0.7408580322265625, 0.7411875610351563, 0.7405813598632812, 0.7406832275390625, 0.7408024291992188, 0.7411677856445312, 0.74102783203125, 0.74060595703125, 0.741514404296875, 0.7412142944335938, 0.740965087890625, 0.7410953979492187, 0.7418016357421875, 0.7406708374023437, 0.7412825317382813, 0.741028076171875, 0.7408599243164062, 0.741296142578125, 0.7411630249023438, 0.7415357666015625, 0.7415214233398437, 0.74069921875, 0.7410963745117187, 0.7411138305664062, 0.741212158203125, 
0.7413514404296875, 0.7405642700195313, 0.741257080078125, 0.7412816162109375, 0.7410144653320313, 0.7413077392578125, 0.7413639526367187, 0.7413846435546875, 0.74137939453125, 0.7410100708007813, 0.7410584106445313, 0.7407945556640625, 0.7417665405273437, 0.740880859375, 0.7411976928710937, 0.7413947143554688, 0.7411568603515625, 0.741291748046875, 0.7409461669921875, 0.7417200927734375, 0.741274658203125, 0.7410894775390625, 0.7413556518554687, 0.7417534790039062, 0.74169140625, 0.7408414916992188, 0.7413616943359375, 0.74169873046875, 0.7417782592773438, 0.7409581909179688, 0.7410831298828126, 0.7413800659179688, 0.741080322265625, 0.741001953125, 0.7416782836914062, 0.7413665771484375, 0.74156640625, 0.7408585815429688, 0.7410933837890625, 0.7409397583007813, 0.740996826171875, 0.7409339599609375, 0.7410524291992188, 0.7409766235351563, 0.7410626831054687, 0.7407861938476562, 0.741518310546875, 0.7409890747070312, 0.7406717529296875, 0.741853759765625, 0.7413754272460937, 0.7404221801757812, 0.7417546997070312, 0.7414581298828125, 0.7407673950195313, 0.7408295288085938, 0.7406038818359375, 0.7417908935546875, 0.7411633911132812, 0.7414458618164063, 0.7409154052734375, 0.7411790771484374, 0.7408560791015625, 0.741474365234375, 0.740706298828125, 0.741369873046875, 0.7410830688476563, 0.74087841796875, 0.7412449340820313, 0.7413771362304687, 0.7411264038085937, 0.7408414916992188, 0.7414598999023437, 0.741509765625, 0.7406919555664062, 0.7413411865234375, 0.7411691284179688, 0.7416173706054687, 0.7413078002929687, 0.741866455078125, 0.7411542358398437, 0.7416036987304687, 0.7412963256835937, 0.740874267578125, 0.741128173828125, 0.7413411865234375, 0.74148046875, 0.7407715454101562, 0.7412323608398438, 0.741462646484375, 0.7415228881835938, 0.7410775756835938, 0.7413575439453125, 0.7420906372070313, 0.7412470703125, 0.7411732177734375, 0.7411302490234375, 0.7413616943359375, 0.7414722290039063, 0.7408867797851563, 0.7420254516601562, 0.7411176147460937, 0.7410732421875, 0.7404628295898438, 0.7416627197265625, 0.741654541015625, 0.741158935546875, 0.7411773681640625, 0.7414108276367187, 0.7412153930664063, 0.7408607788085938, 0.7411317749023437, 0.741876220703125, 0.7417835693359375, 0.7407921142578126, 0.7413551025390624, 0.74128857421875, 0.741025390625, 0.74086962890625, 0.7408670043945312, 0.7433052368164063, 0.741336181640625, 0.741034912109375, 0.74132275390625, 0.7412633666992188, 0.74140673828125, 0.7411273803710937, 0.7411863403320312, 0.7417216796875, 0.741171630859375, 0.74113818359375, 0.74107275390625, 0.7415269775390625, 0.7411803588867187, 0.7416565551757812, 0.7418121948242188, 0.7415275268554687, 0.7414517822265625, 0.7410155639648438, 0.7417405395507812, 0.7411896362304687, 0.7407821044921875, 0.7414673461914062, 0.7416921997070313, 0.7413944091796875, 0.7407239379882813, 0.7417884521484375, 0.7412769165039063, 0.7408805541992187, 0.7412127685546875, 0.7414353637695312, 0.7416436767578125, 0.7408827514648437, 0.741300537109375, 0.7416688842773438, 0.74085302734375, 0.7411145629882813, 0.741289306640625, 0.7416286010742188, 0.7413043212890625, 0.7413575439453125, 0.7412879028320313, 0.740908203125, 0.7408115844726563, 0.7414262084960938, 0.7415858764648438, 0.740864013671875, 0.7411909790039063, 0.7413412475585938, 0.7407028198242187, 0.7415029907226562, 0.7410174560546875, 0.7412430419921875, 0.7408756713867187, 0.7415670776367187, 0.7412177124023438, 0.7410222778320312, 0.7408429565429687, 0.7411594848632812, 0.7414948120117187, 0.740800537109375, 0.7413162231445313, 
0.7411687622070312, 0.7415235595703125, 0.7405779418945313, 0.7415884399414062, 0.7405798950195313, 0.7411806030273438, 0.7407767944335938, 0.7414476928710938, 0.7411036376953125, 0.7408927001953125, 0.7414456176757812, 0.741211181640625, 0.740918212890625, 0.7409500122070313, 0.7412920532226562, 0.741231689453125, 0.7421244506835938, 0.7411846313476562, 0.7412574462890625, 0.7411528930664063, 0.7410980224609375, 0.740861328125, 0.7416243896484375, 0.7414328002929688, 0.7416283569335937, 0.741484619140625, 0.74087158203125, 0.7415220336914062, 0.7411978149414062, 0.7420714721679688, 0.7412723999023437, 0.7412981567382813, 0.7415902099609375, 0.7411629638671875, 0.7409154663085937, 0.7412301635742188, 0.7416303100585937, 0.7414974975585937, 0.7410515747070312, 0.7414230346679688, 0.7410341796875, 0.741006103515625, 0.741768310546875, 0.7406672973632813, 0.7406749877929687, 0.7412117919921875, 0.7410226440429688, 0.7402691650390625, 0.7408823852539063, 0.74189306640625, 0.7408681030273437, 0.74082470703125, 0.7407844848632813, 0.741823974609375, 0.7409332885742187, 0.7408006591796875, 0.7410634155273438, 0.7409213256835937, 0.7413677978515625, 0.7405787963867188, 0.7414502563476563, 0.741238525390625, 0.7408397216796875, 0.7412633666992188, 0.7413800659179688, 0.741254150390625, 0.74080908203125, 0.74107275390625, 0.7413040771484375, 0.7409694213867187, 0.7408125610351562, 0.74138037109375, 0.7410497436523438, 0.741210693359375, 0.7412342529296875, 0.741245361328125, 0.741232666015625, 0.7413637084960938, 0.7414763793945313, 0.741001220703125, 0.7413013916015625, 0.7409385986328125, 0.7413507690429687, 0.74101123046875, 0.7411248779296875, 0.7411139526367188, 0.7416995849609375, 0.7412953491210937, 0.7412572631835938, 0.74071728515625, 0.7409677734375, 0.7418569946289062, 0.7414486694335938, 0.7406571655273437, 0.7415848999023438, 0.7412244262695312, 0.741185546875, 0.7410769653320313, 0.7414312744140625, 0.7413309326171875, 0.7411056518554687, 0.7408967895507812, 0.7413881225585938, 0.7412020874023437, 0.7409971313476562, 0.740896240234375]",tokens/s,1.349229513279663,, 4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1472.057344,1326.383104,0.0,947.912704,945.250304,s,1,8.178099609375,8.178099609375,0.0,8.178099609375,8.178099609375,8.178099609375,8.178099609375,[8.178099609375],,kWh,4.011619602085451e-05,4.417830343113296e-06,1.32619550540225e-05,5.7795981417990304e-05,,MB,1331.990528,1519.321088,0.0,1111.49056,1098.82368,s,10,1.6119050292968748,0.16119050292968748,0.0005511915126697848,0.16121893310546875,0.16174463500976563,0.16185521240234377,0.16194367431640624,"[0.1598438720703125, 0.16109616088867187, 0.16131968688964843, 0.16081129455566406, 0.1617200622558594, 0.161114013671875, 0.16135888671875, 0.16111817932128905, 0.16196578979492188, 
0.16155708312988282]",tokens/s,1588.1828975474389,kWh,4.707219859811589e-06,5.191210867039741e-07,3.1204326038710095e-06,8.346773550386572e-06,tokens/kWh,30670533.764288314,MB,1345.765376,1653.538816,0.0,1245.708288,1164.242432,s,10,89.03407812500001,8.903407812500001,0.010136610228676235,8.90321484375,8.9131953125,8.9178447265625,8.9215642578125,"[8.887240234375, 8.8903544921875, 8.8948603515625, 8.9013642578125, 8.90372265625, 8.90270703125, 8.9090263671875, 8.910146484375, 8.912162109375, 8.922494140625]",tokens/s,7.075942305097012,kWh,0.000260164826486438,2.8697575803232348e-05,0.0001730542361135283,0.00046191663840319856,tokens/kWh,136388.24576180012,,s,630,89.03100794982909,0.14131906023782398,0.0002831820943250541,0.14131715393066407,0.14165845794677734,0.1417887367248535,0.14212909545898436,"[0.14145948791503907, 0.14063404846191407, 0.14118077087402345, 0.14077389526367187, 0.14100070190429687, 0.14090165710449218, 0.1408253173828125, 0.1409248046875, 0.1407427215576172, 0.1410826873779297, 0.14081024169921874, 0.14104165649414063, 0.14084259033203125, 0.14099270629882812, 0.14092218017578126, 0.14089222717285158, 0.1408658905029297, 0.1405506591796875, 0.14191206359863281, 0.14092825317382812, 0.14114483642578124, 0.1407093505859375, 0.14122364807128907, 0.1407639617919922, 0.1410618896484375, 0.14123388671875, 0.14120109558105468, 0.14125914001464843, 0.14094154357910157, 0.14102940368652345, 0.14082275390625, 0.14112265014648437, 0.1410016326904297, 0.14109405517578125, 0.14125535583496093, 0.14095747375488282, 0.14095747375488282, 0.14070387268066406, 0.14129808044433595, 0.141316162109375, 0.14115635681152344, 0.14103961181640626, 0.14105599975585936, 0.14100070190429687, 0.1411903076171875, 0.14090675354003906, 0.14128182983398438, 0.1409761962890625, 0.1414606475830078, 0.14112582397460938, 0.14143350219726564, 0.14087484741210937, 0.1413939208984375, 0.14115933227539063, 0.14100405883789063, 0.14126153564453126, 0.14099430847167968, 0.14152114868164062, 0.14120072937011718, 0.14130181884765625, 0.1408948211669922, 0.14096989440917967, 0.14135836791992187, 0.14088572692871093, 0.1407283172607422, 0.140797607421875, 0.14076358032226563, 0.14064128112792967, 0.14102015686035158, 0.14123622131347657, 0.14100889587402343, 0.14116864013671876, 0.14103347778320313, 0.14112733459472657, 0.140813720703125, 0.14087420654296876, 0.14126124572753906, 0.1410738525390625, 0.1411774444580078, 0.14085894775390626, 0.14095199584960938, 0.14083482360839844, 0.14101298522949218, 0.1409863739013672, 0.1407665557861328, 0.1413351287841797, 0.14102125549316405, 0.14103961181640626, 0.1418887939453125, 0.14091746520996093, 0.14107565307617187, 0.14160263061523437, 0.1410345001220703, 0.14137957763671874, 0.14093927001953124, 0.14087577819824218, 0.14114163208007813, 0.14073663330078126, 0.14112159729003906, 0.14095936584472657, 0.141291259765625, 0.1413738250732422, 0.1412646026611328, 0.14135165405273437, 0.14123622131347657, 0.1409944610595703, 0.14084719848632812, 0.14152703857421875, 0.14121778869628906, 0.14128742980957032, 0.14146697998046875, 0.14108303833007813, 0.14127714538574218, 0.14101123046875, 0.14129766845703126, 0.14123826599121095, 0.1412954559326172, 0.1415836181640625, 0.14099539184570312, 0.14113095092773437, 0.14120643615722656, 0.1411658935546875, 0.14150930786132812, 0.14132838439941406, 0.14111279296875, 0.14087632751464843, 0.14100889587402343, 0.14088534545898437, 0.140742431640625, 0.14105279541015625, 0.1409410858154297, 0.14057455444335937, 0.140972412109375, 
0.1408675842285156, 0.14127529907226563, 0.140922119140625, 0.14108230590820312, 0.14098728942871094, 0.14095881652832032, 0.14126991271972655, 0.14094070434570313, 0.1412122497558594, 0.14120140075683593, 0.14098335266113282, 0.14122285461425782, 0.14086537170410157, 0.14103363037109376, 0.14096588134765625, 0.14114405822753906, 0.1411051483154297, 0.14105804443359374, 0.14094950866699218, 0.14117478942871095, 0.14093516540527343, 0.14135910034179688, 0.14086697387695313, 0.14140617370605468, 0.1413658905029297, 0.14137753295898436, 0.14078073120117188, 0.14133740234375, 0.14125430297851563, 0.14134716796875, 0.1413137969970703, 0.14140786743164063, 0.1412918701171875, 0.14121603393554688, 0.14110086059570312, 0.14103570556640624, 0.14148403930664064, 0.14160896301269532, 0.14127923583984375, 0.1415755157470703, 0.14136131286621093, 0.14128134155273436, 0.14130630493164062, 0.1414983673095703, 0.14163885498046874, 0.14148236083984375, 0.14129930114746095, 0.14142684936523436, 0.14081878662109376, 0.14149375915527343, 0.14150743103027344, 0.14132838439941406, 0.14153114318847657, 0.1415167999267578, 0.1410723876953125, 0.1412458953857422, 0.1413120574951172, 0.14130975341796875, 0.14153952026367186, 0.14124761962890625, 0.14087033081054687, 0.14102537536621093, 0.14111549377441407, 0.1412437744140625, 0.1412491455078125, 0.14141798400878905, 0.14094140625, 0.14138575744628906, 0.14074844360351563, 0.14118576049804688, 0.141338623046875, 0.14120889282226562, 0.14171820068359375, 0.14092652893066407, 0.14119923400878906, 0.14101356506347656, 0.14121165466308594, 0.14130995178222655, 0.1415244140625, 0.14148637390136717, 0.14099017333984376, 0.14118771362304688, 0.14125254821777344, 0.1413441925048828, 0.1413570556640625, 0.14167507934570311, 0.1414082489013672, 0.14083686828613282, 0.14121888732910157, 0.14102330017089842, 0.14108761596679686, 0.14108876037597656, 0.14177676391601562, 0.14139610290527344, 0.14163558959960937, 0.14149221801757814, 0.1413080596923828, 0.14131350708007812, 0.14142604064941405, 0.14151373291015626, 0.1414710693359375, 0.14130447387695313, 0.14119322204589843, 0.1412434539794922, 0.14107084655761717, 0.1411844787597656, 0.14153555297851564, 0.1413782043457031, 0.14137344360351561, 0.14123008728027345, 0.1413570556640625, 0.14129560852050782, 0.14134585571289063, 0.14143994140625, 0.14125260925292968, 0.14129766845703126, 0.14135267639160157, 0.14133071899414062, 0.14153523254394532, 0.14170620727539063, 0.14111888122558594, 0.14121014404296875, 0.1411152648925781, 0.1412181091308594, 0.14105996704101562, 0.1413324737548828, 0.14158029174804687, 0.14123213195800782, 0.14176255798339843, 0.14107034301757812, 0.14107571411132813, 0.1408577880859375, 0.14136761474609374, 0.14135501098632813, 0.14120652770996095, 0.1410795593261719, 0.14096588134765625, 0.1412196807861328, 0.14103388977050782, 0.1415078125, 0.14150096130371093, 0.14145330810546874, 0.1414593963623047, 0.14109458923339843, 0.14106866455078124, 0.141127685546875, 0.14121075439453126, 0.14146444702148436, 0.1415925750732422, 0.14147523498535156, 0.14118563842773438, 0.14137554931640625, 0.14125459289550782, 0.14156390380859374, 0.14137957763671874, 0.14138163757324218, 0.14120057678222656, 0.14118380737304687, 0.14109286499023438, 0.14136026000976562, 0.14123858642578124, 0.14148051452636717, 0.14127923583984375, 0.14153318786621094, 0.1418035125732422, 0.14136729431152345, 0.14140594482421875, 0.14169523620605468, 0.14147715759277343, 0.14160684204101562, 0.14127781677246093, 0.14146546936035156, 
0.14147772216796875, 0.14130221557617187, 0.1414368896484375, 0.141285400390625, 0.1415166778564453, 0.1412359619140625, 0.14129600524902344, 0.1411788787841797, 0.14131365966796874, 0.14122227478027344, 0.14144685363769532, 0.14057533264160157, 0.14106214904785155, 0.14102313232421876, 0.1410397186279297, 0.14097389221191406, 0.14116064453125, 0.14121778869628906, 0.14116249084472657, 0.14119241333007812, 0.14129014587402344, 0.1412691192626953, 0.140943359375, 0.14146131896972655, 0.14128076171875, 0.14126150512695312, 0.1416697540283203, 0.14129817199707032, 0.14105625915527345, 0.1409814453125, 0.14132704162597656, 0.14131814575195312, 0.1410846710205078, 0.141053955078125, 0.1413017578125, 0.14129273986816407, 0.14124281311035156, 0.1413492431640625, 0.1412147216796875, 0.1410463104248047, 0.14133091735839845, 0.1413119354248047, 0.14121171569824217, 0.1411112976074219, 0.1413668212890625, 0.14136679077148437, 0.1416386260986328, 0.14119635009765624, 0.14147471618652344, 0.14152006530761718, 0.1413456268310547, 0.14147357177734374, 0.1412395782470703, 0.14143174743652343, 0.14132342529296876, 0.14156787109375, 0.14134127807617186, 0.1413492431640625, 0.14145481872558593, 0.1411212158203125, 0.14166717529296874, 0.14157005310058593, 0.1414956817626953, 0.14128192138671875, 0.14131382751464844, 0.14174435424804688, 0.1415755157470703, 0.14143760681152343, 0.1418260498046875, 0.14139744567871093, 0.14164784240722655, 0.14133670043945312, 0.14132392883300782, 0.14165811157226563, 0.1410662384033203, 0.14105923461914063, 0.14149449157714844, 0.14112422180175782, 0.1410908203125, 0.1413035888671875, 0.1414105224609375, 0.1412912902832031, 0.14114633178710936, 0.14122979736328126, 0.14147132873535156, 0.1413987274169922, 0.14137548828125, 0.14150860595703124, 0.14153446960449217, 0.14137831115722657, 0.1412833251953125, 0.14143487548828124, 0.14144717407226562, 0.14104701232910155, 0.14170806884765624, 0.14126255798339843, 0.14171165466308594, 0.14156185913085936, 0.14114521789550782, 0.14112448120117188, 0.14133978271484374, 0.14135952758789064, 0.14126332092285157, 0.14150186157226563, 0.14135562133789062, 0.14144102478027343, 0.1411604461669922, 0.14132838439941406, 0.14138983154296875, 0.14135090637207032, 0.14146258544921875, 0.14141445922851562, 0.14108546447753906, 0.1413159942626953, 0.14144432067871093, 0.1418162841796875, 0.14188800048828126, 0.14148512268066407, 0.1415213165283203, 0.14119290161132814, 0.14145826721191407, 0.14137139892578124, 0.14140573120117186, 0.14166883850097656, 0.14143487548828124, 0.14152432250976563, 0.1412425994873047, 0.14132269287109375, 0.14122189331054688, 0.14141644287109376, 0.1420226593017578, 0.14211891174316407, 0.1415925750732422, 0.1414307861328125, 0.14179122924804688, 0.14129379272460937, 0.1421332550048828, 0.14138819885253906, 0.14116876220703126, 0.141025146484375, 0.14109849548339845, 0.1408599090576172, 0.14175628662109374, 0.1413796844482422, 0.14152093505859376, 0.14128034973144532, 0.14126908874511718, 0.14096263122558594, 0.14094744873046874, 0.14207589721679686, 0.14147366333007813, 0.14132850646972656, 0.14128073120117188, 0.14121533203125, 0.1412303009033203, 0.1411808624267578, 0.14171420288085937, 0.14133042907714843, 0.14161305236816407, 0.14135699462890625, 0.14132640075683595, 0.14123826599121095, 0.1412095947265625, 0.141486083984375, 0.14173954772949218, 0.14187362670898437, 0.1414819793701172, 0.141366943359375, 0.14103538513183594, 0.14115213012695313, 0.14150306701660156, 0.14143600463867187, 0.14144195556640626, 
0.14122393798828126, 0.1413507537841797, 0.14107049560546875, 0.14114405822753906, 0.14135910034179688, 0.14179737854003907, 0.14163555908203124, 0.14138348388671876, 0.1414575653076172, 0.14106629943847657, 0.14138291931152344, 0.14153919982910157, 0.1421853485107422, 0.14193827819824217, 0.14152691650390625, 0.1418429718017578, 0.1414737548828125, 0.1414469451904297, 0.1414263000488281, 0.14154960632324218, 0.14178569030761717, 0.141623291015625, 0.14146322631835936, 0.1412360382080078, 0.14148031616210938, 0.14157632446289062, 0.1414430694580078, 0.14132147216796875, 0.14133938598632811, 0.14099046325683592, 0.14133453369140625, 0.14104371643066407, 0.14164787292480469, 0.1412768249511719, 0.14141270446777343, 0.14125033569335937, 0.141218017578125, 0.14125465393066405, 0.14102117919921875, 0.14165811157226563, 0.14155775451660157, 0.14152499389648437, 0.14134271240234375, 0.1411399688720703, 0.1412689971923828, 0.14137139892578124, 0.14142892456054687, 0.141627197265625, 0.14138914489746093, 0.14128195190429688, 0.14127308654785156, 0.14122393798828126, 0.1413570556640625, 0.14147517395019532, 0.14134701538085936, 0.141489990234375, 0.14188188171386718, 0.14152716064453125, 0.14145535278320312, 0.1416165771484375, 0.141664794921875, 0.141548828125, 0.14149913024902344, 0.14151589965820313, 0.14165020751953125, 0.14187171936035156, 0.14137344360351561, 0.14165402221679688, 0.14144102478027343, 0.14138163757324218, 0.14151174926757812, 0.14158944702148438, 0.14166426086425782, 0.14127513122558594, 0.14161882019042968, 0.1414470672607422, 0.1414599304199219, 0.14133859252929687, 0.14159858703613282, 0.14153334045410157, 0.1415147247314453, 0.1416859588623047, 0.14156402587890626, 0.14190870666503907, 0.14160281372070313, 0.1415535430908203, 0.1415230712890625, 0.14143283081054686, 0.14166157531738283, 0.14140249633789062, 0.14101913452148437, 0.1411907501220703, 0.14109432983398437, 0.14126588439941407, 0.1412947540283203, 0.14145417785644532, 0.1417216033935547, 0.14114154052734376, 0.14110357666015624, 0.14177894592285156, 0.14144102478027343, 0.14146969604492188, 0.14183815002441405, 0.14160914611816405, 0.14141439819335938, 0.14153932189941407, 0.14139596557617187, 0.1414713592529297, 0.14152128601074218, 0.1418354949951172, 0.14162818908691407, 0.14148812866210939, 0.14158642578125, 0.14165577697753906, 0.1413962860107422, 0.14158026123046874, 0.14171884155273437, 0.1416956787109375, 0.14155775451660157, 0.14166835021972657, 0.14216192626953125, 0.14160415649414063, 0.14148268127441407, 0.14198080444335937, 0.14197616577148436, 0.14152668762207032, 0.14170994567871092, 0.14188934326171876, 0.14164601135253907, 0.14136524963378908, 0.1416458282470703, 0.1414998779296875, 0.14226486206054687, 0.14176856994628906, 0.1414739227294922, 0.14199349975585937, 0.1415172119140625, 0.14173802185058593, 0.14159181213378907, 0.14214810180664061, 0.14181800842285155, 0.141580322265625, 0.14169097900390626, 0.14160850524902344, 0.14166819763183594, 0.14131056213378906, 0.14222950744628907, 0.1419325408935547, 0.14162124633789064, 0.14177484130859375, 0.1416510467529297, 0.14232669067382814]",tokens/s,7.076186314267256,, 
4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,11136.073728,12227.3792,0.0,11848.9088,11814.752256,s,1,16.2592177734375,16.2592177734375,0.0,16.2592177734375,16.2592177734375,16.2592177734375,16.2592177734375,[16.2592177734375],,kWh,0.00026480793421250536,2.9202987075294415e-05,8.6119513340005e-05,0.0003801304346278048,,MB,2084.450304,14033.027072,0.0,13625.196544,13298.00192,s,10,23.3345458984375,2.33345458984375,0.0006364246180502187,2.3333216552734375,2.33433662109375,2.3344462646484376,2.3345339794921878,"[2.33269580078125, 2.333192138671875, 2.332452392578125, 2.334312255859375, 2.334555908203125, 2.333282958984375, 2.333640625, 2.333096923828125, 2.33395654296875, 2.3333603515625]",tokens/s,109.70858447994995,kWh,6.801452597916902e-05,7.501730836656543e-06,4.510703608559819e-05,0.00012062329290142372,tokens/kWh,2122309.8279136634,MB,2088.77568,14184.022016,0.0,13776.191488,13689.859584,s,10,1365.1401875000001,136.51401875,0.043220834864750195,136.508390625,136.56333750000002,136.5687234375,136.57303218750002,"[136.474234375, 136.480890625, 136.51365625, 136.574109375, 136.554078125, 136.443140625, 136.558046875, 136.562140625, 136.476765625, 136.503125]",tokens/s,0.4614910657298337,kWh,0.003982938044971668,0.0004393487290448445,0.0026491697860008046,0.0070714565600173175,tokens/kWh,8909.05564720682,,s,630,1365.1345581054693,2.1668802509610616,0.001184149768945829,2.1668848876953124,2.1684020751953126,2.1688128173828125,2.1695393676757813,"[2.16586181640625, 2.165533447265625, 2.16496337890625, 2.164727783203125, 2.166044677734375, 2.166136474609375, 2.165833251953125, 2.164509033203125, 2.164980224609375, 2.16477294921875, 2.1657529296875, 2.165496826171875, 2.164805419921875, 2.1644208984375, 2.1651064453125, 2.166121826171875, 2.16661279296875, 2.165902587890625, 2.1653994140625, 2.167724609375, 2.1663828125, 2.166310791015625, 2.165684326171875, 2.16618798828125, 2.166972412109375, 2.165771240234375, 2.16618017578125, 2.16656689453125, 2.16574853515625, 2.16681884765625, 2.166228515625, 2.166172119140625, 2.166724609375, 2.167048095703125, 2.165712158203125, 2.165650146484375, 2.16532275390625, 2.166369140625, 2.167191650390625, 2.167224365234375, 2.167033935546875, 2.1665341796875, 2.16774658203125, 2.167560302734375, 2.16664892578125, 2.16663671875, 2.16684326171875, 2.167447509765625, 2.167486328125, 2.16671044921875, 2.167170166015625, 2.16604541015625, 2.1677705078125, 2.166477783203125, 2.166822265625, 2.1664443359375, 2.16675537109375, 2.166044189453125, 2.166268310546875, 2.16554052734375, 2.1667373046875, 2.16703173828125, 2.166902099609375, 2.164823974609375, 2.16512109375, 2.166529296875, 2.166343994140625, 2.16569873046875, 2.166558837890625, 2.167146484375, 2.165773681640625, 2.166070068359375, 2.16474169921875, 2.165238037109375, 2.166134521484375, 2.165922119140625, 2.16620849609375, 2.16500732421875, 
2.166781005859375, 2.165747802734375, 2.166453369140625, 2.165948974609375, 2.16657958984375, 2.166655029296875, 2.166045654296875, 2.165488525390625, 2.1654794921875, 2.166298583984375, 2.16550146484375, 2.1659345703125, 2.165116455078125, 2.16589892578125, 2.166954833984375, 2.16605224609375, 2.167164794921875, 2.166157958984375, 2.165796875, 2.16744970703125, 2.1675908203125, 2.166740966796875, 2.166281494140625, 2.16719140625, 2.1674482421875, 2.166792236328125, 2.167214111328125, 2.166931396484375, 2.16813525390625, 2.166753173828125, 2.1673251953125, 2.166135009765625, 2.166437744140625, 2.1670419921875, 2.165987060546875, 2.16810302734375, 2.1652685546875, 2.16776904296875, 2.166192138671875, 2.1670849609375, 2.16669775390625, 2.16688671875, 2.16671630859375, 2.1665361328125, 2.166640625, 2.166073486328125, 2.165718994140625, 2.165845947265625, 2.1654443359375, 2.16521337890625, 2.1656064453125, 2.164690185546875, 2.165152099609375, 2.165153564453125, 2.166433349609375, 2.16533056640625, 2.16590087890625, 2.165445068359375, 2.16591162109375, 2.166353515625, 2.166974853515625, 2.166444091796875, 2.16614501953125, 2.1657333984375, 2.166519775390625, 2.165841552734375, 2.165972412109375, 2.165611572265625, 2.165755615234375, 2.16555322265625, 2.16642724609375, 2.165973388671875, 2.166176025390625, 2.167818359375, 2.16762939453125, 2.167644287109375, 2.16604296875, 2.167754638671875, 2.167920166015625, 2.167478759765625, 2.16732421875, 2.167259765625, 2.168190185546875, 2.167671630859375, 2.167791015625, 2.167031982421875, 2.168864990234375, 2.1674599609375, 2.167080810546875, 2.16809423828125, 2.16811328125, 2.1680869140625, 2.167527587890625, 2.167656005859375, 2.167390869140625, 2.166988037109375, 2.168318603515625, 2.167972900390625, 2.1684541015625, 2.167140380859375, 2.168670166015625, 2.1695263671875, 2.16701123046875, 2.168231201171875, 2.167736572265625, 2.16655078125, 2.166257568359375, 2.166305908203125, 2.166731689453125, 2.167017578125, 2.16652099609375, 2.164148193359375, 2.165614501953125, 2.16821533203125, 2.167324951171875, 2.166708251953125, 2.1669375, 2.16767919921875, 2.1668359375, 2.1665556640625, 2.16671630859375, 2.1664638671875, 2.167177978515625, 2.167428955078125, 2.1676435546875, 2.16705712890625, 2.166687744140625, 2.16604052734375, 2.167203857421875, 2.166679443359375, 2.167146484375, 2.16696142578125, 2.167349365234375, 2.1678125, 2.167871826171875, 2.167183349609375, 2.16703173828125, 2.16832421875, 2.1674189453125, 2.167361572265625, 2.168323974609375, 2.167330810546875, 2.167510009765625, 2.167006103515625, 2.167711669921875, 2.1676904296875, 2.1678779296875, 2.1690947265625, 2.16873583984375, 2.168320068359375, 2.16975146484375, 2.1685224609375, 2.169452880859375, 2.169634521484375, 2.169194580078125, 2.168541259765625, 2.169198974609375, 2.168323974609375, 2.16812890625, 2.169175048828125, 2.168743408203125, 2.16881201171875, 2.166730712890625, 2.1686845703125, 2.16939892578125, 2.168643798828125, 2.169746826171875, 2.168140625, 2.168422119140625, 2.167958740234375, 2.16957373046875, 2.168743896484375, 2.16764599609375, 2.169199462890625, 2.1681357421875, 2.165767822265625, 2.166407470703125, 2.167010986328125, 2.1682080078125, 2.167797607421875, 2.1663662109375, 2.16692333984375, 2.166476806640625, 2.166065185546875, 2.16653759765625, 2.167151123046875, 2.167858154296875, 2.166207275390625, 2.16652197265625, 2.167060302734375, 2.16585009765625, 2.16644970703125, 2.16733544921875, 2.166739013671875, 2.16699853515625, 2.166995361328125, 2.1677744140625, 
2.166822998046875, 2.166467529296875, 2.16658935546875, 2.166917236328125, 2.167504638671875, 2.166527587890625, 2.1661435546875, 2.167434326171875, 2.16802099609375, 2.1672333984375, 2.166693359375, 2.166917724609375, 2.166367431640625, 2.167483154296875, 2.167582763671875, 2.167276611328125, 2.167583251953125, 2.168227294921875, 2.16863818359375, 2.168342529296875, 2.167855224609375, 2.1689814453125, 2.169544677734375, 2.16923876953125, 2.1683740234375, 2.167326416015625, 2.168219482421875, 2.168541748046875, 2.16815771484375, 2.168617431640625, 2.168645751953125, 2.169489013671875, 2.167476806640625, 2.1682646484375, 2.168379150390625, 2.1695771484375, 2.1693896484375, 2.16714453125, 2.16802099609375, 2.168919677734375, 2.1644375, 2.16413525390625, 2.16406494140625, 2.164509765625, 2.165660400390625, 2.167010986328125, 2.16608935546875, 2.165963623046875, 2.16654248046875, 2.164913818359375, 2.164343017578125, 2.16390869140625, 2.163916748046875, 2.164716796875, 2.165203125, 2.164300048828125, 2.16482861328125, 2.16473583984375, 2.165071044921875, 2.16362890625, 2.1636845703125, 2.16490380859375, 2.165056396484375, 2.16562841796875, 2.163931640625, 2.164023193359375, 2.165086181640625, 2.165473388671875, 2.165923828125, 2.165321533203125, 2.166739013671875, 2.16607958984375, 2.1651669921875, 2.167163818359375, 2.16523974609375, 2.1653134765625, 2.16590478515625, 2.165904052734375, 2.166857177734375, 2.167341552734375, 2.16569873046875, 2.167005126953125, 2.165802001953125, 2.165296142578125, 2.16627001953125, 2.166312744140625, 2.166623779296875, 2.166095947265625, 2.16608740234375, 2.166962890625, 2.1675576171875, 2.166712890625, 2.166293701171875, 2.16719580078125, 2.167150634765625, 2.167144775390625, 2.16835498046875, 2.166561279296875, 2.16798828125, 2.1664521484375, 2.166530029296875, 2.166826904296875, 2.1669375, 2.166547607421875, 2.16634033203125, 2.16666650390625, 2.16660986328125, 2.167008056640625, 2.166205810546875, 2.166114990234375, 2.166304931640625, 2.166407958984375, 2.167910888671875, 2.16789453125, 2.16703564453125, 2.1695634765625, 2.167732177734375, 2.166179931640625, 2.16768017578125, 2.167562744140625, 2.167038330078125, 2.16815380859375, 2.166386962890625, 2.1661962890625, 2.1672734375, 2.166528076171875, 2.16612451171875, 2.16584814453125, 2.16689208984375, 2.167228759765625, 2.167357421875, 2.16840185546875, 2.168848388671875, 2.168018798828125, 2.1668857421875, 2.1680400390625, 2.167444580078125, 2.166825927734375, 2.166277587890625, 2.1680703125, 2.16814208984375, 2.16769873046875, 2.16819287109375, 2.168285888671875, 2.168908935546875, 2.16764697265625, 2.168127197265625, 2.168122802734375, 2.168683349609375, 2.16888330078125, 2.168537109375, 2.168404052734375, 2.168764404296875, 2.1684013671875, 2.166385009765625, 2.16930908203125, 2.168162353515625, 2.1688134765625, 2.16937255859375, 2.167416748046875, 2.16837744140625, 2.169112548828125, 2.1670537109375, 2.16781884765625, 2.16823193359375, 2.167018798828125, 2.166949951171875, 2.1668740234375, 2.16616357421875, 2.166748291015625, 2.167088134765625, 2.167986083984375, 2.16764599609375, 2.1668125, 2.167314697265625, 2.166613525390625, 2.16709912109375, 2.16738037109375, 2.167388427734375, 2.1675126953125, 2.16755224609375, 2.167644287109375, 2.166744384765625, 2.167925048828125, 2.168575927734375, 2.1672841796875, 2.16865771484375, 2.168525146484375, 2.1678818359375, 2.16807421875, 2.167703369140625, 2.16839794921875, 2.169016357421875, 2.169155517578125, 2.16738720703125, 2.1685380859375, 
2.167985107421875, 2.1684755859375, 2.168439453125, 2.16862255859375, 2.16863818359375, 2.167289794921875, 2.167142333984375, 2.16776708984375, 2.16764013671875, 2.1680947265625, 2.168289306640625, 2.167946533203125, 2.1689287109375, 2.168463623046875, 2.16720703125, 2.16830029296875, 2.16787548828125, 2.1665458984375, 2.167499267578125, 2.166310791015625, 2.168385498046875, 2.166969970703125, 2.1669541015625, 2.1665302734375, 2.167193115234375, 2.166907470703125, 2.168440673828125, 2.166923095703125, 2.166546875, 2.16703564453125, 2.168654052734375, 2.167615478515625, 2.167334716796875, 2.165754150390625, 2.16547900390625, 2.165604248046875, 2.165964599609375, 2.164871337890625, 2.1648740234375, 2.165676025390625, 2.166201416015625, 2.16602734375, 2.165337646484375, 2.166315185546875, 2.164508544921875, 2.164201904296875, 2.1656923828125, 2.165769775390625, 2.165017333984375, 2.1645595703125, 2.16470947265625, 2.165456787109375, 2.165345703125, 2.165402099609375, 2.166044677734375, 2.16509814453125, 2.166001953125, 2.165135498046875, 2.16656884765625, 2.164768798828125, 2.165671875, 2.166693359375, 2.166614501953125, 2.168379150390625, 2.167519287109375, 2.166951904296875, 2.16780712890625, 2.166045654296875, 2.16529296875, 2.16745166015625, 2.166091064453125, 2.16620068359375, 2.16697900390625, 2.16651171875, 2.167469970703125, 2.167701416015625, 2.1664912109375, 2.165646728515625, 2.166743408203125, 2.16632763671875, 2.16810693359375, 2.168990966796875, 2.1682939453125, 2.168727783203125, 2.1678369140625, 2.16667333984375, 2.167680908203125, 2.167146728515625, 2.1657333984375, 2.1679345703125, 2.165698974609375, 2.166783203125, 2.1666943359375, 2.165668212890625, 2.16674609375, 2.16652001953125, 2.164810302734375, 2.1640908203125, 2.16560009765625, 2.16654833984375, 2.16613720703125, 2.166560546875, 2.165886962890625, 2.165761474609375, 2.16423388671875, 2.164981689453125, 2.16667431640625, 2.16605078125, 2.166181884765625, 2.16501806640625, 2.166455078125, 2.165868408203125, 2.166765625, 2.167066650390625, 2.1680078125, 2.1658408203125, 2.166343505859375, 2.1672490234375, 2.167269287109375, 2.1657314453125, 2.16593212890625, 2.166884033203125, 2.166148681640625, 2.167370361328125, 2.166601806640625, 2.1672099609375, 2.1672568359375, 2.167049560546875, 2.16719384765625, 2.16667822265625, 2.167221923828125, 2.16730859375, 2.167490478515625, 2.166222412109375, 2.167810302734375, 2.166908935546875, 2.167332275390625, 2.166823486328125, 2.167975830078125, 2.16768408203125, 2.168091064453125, 2.167548583984375, 2.16677783203125, 2.166906494140625, 2.16699951171875, 2.16765966796875, 2.167762939453125, 2.166803466796875, 2.1678857421875, 2.166867919921875, 2.16678515625, 2.167721923828125, 2.166592529296875, 2.1664208984375, 2.167431884765625, 2.166370361328125, 2.16732861328125, 2.166756591796875, 2.16758544921875]",tokens/s,0.46149296877687485,, 4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3554.562048,4495.179776,0.0,4116.709376,3980.386816,s,1,9.8548896484375,9.8548896484375,0.0,9.8548896484375,9.8548896484375,9.8548896484375,9.8548896484375,[9.8548896484375],,kWh,9.09966390958336e-05,1.0026190653853335e-05,3.093585808200405e-05,0.000131958687831691,,MB,3485.7984,4826.529792,0.0,4418.699264,4245.89568,s,10,6.620557922363281,0.662055792236328,0.000751265648237151,0.6620900573730468,0.6628945556640625,0.6629605834960938,0.6630134057617187,"[0.6616687622070313, 0.6617026977539062, 0.663026611328125, 0.66051611328125, 0.6628349609375, 0.6612864379882812, 0.6621630859375, 0.6620170288085937, 0.6624623413085937, 0.6628798828125]",tokens/s,386.67436038172747,kWh,1.9296033496614424e-05,2.1279812942374758e-06,1.2802249825124906e-05,3.42262646159768e-05,tokens/kWh,7479635.971741402,MB,3489.87392,4837.015552,0.0,4429.185024,4245.89824,s,10,385.57617187499994,38.5576171875,0.010987278465840647,38.560576171875,38.567292968749996,38.567697265625,38.568020703125,"[38.53258984375, 38.54215234375, 38.55772265625, 38.5681015625, 38.5658203125, 38.5649296875, 38.5565, 38.560171875, 38.56098046875, 38.567203125]",tokens/s,1.6339183952587188,kWh,0.001124747326490886,0.00012406806433800434,0.0007475806918140751,0.0019963960826429655,tokens/kWh,31556.86416524936,,s,630,385.5724367675785,0.6120197409009176,0.0004554953714293266,0.6120278625488281,0.6126125122070313,0.6127261108398437,0.6130161370849609,"[0.6121142578125, 0.6112144165039063, 0.612133056640625, 0.6109717407226563, 0.6118790283203125, 0.6111687622070312, 0.6118563842773438, 0.6111764526367187, 0.611968994140625, 0.6111453247070312, 0.6115288696289063, 0.6115491333007812, 0.61184814453125, 0.6115392456054688, 0.6113744506835938, 0.6116513061523438, 0.6119515380859375, 0.6112620239257812, 0.6114207763671875, 0.6118275146484375, 0.61140771484375, 0.6112462158203125, 0.6116078491210938, 0.6112278442382812, 0.6117545166015625, 0.6113173828125, 0.6115408325195313, 0.6119054565429688, 0.61109716796875, 0.611557373046875, 0.6114898071289062, 0.6120910034179687, 0.6114508666992188, 0.6112526245117188, 0.6114199829101562, 0.6118121337890625, 0.6114620361328125, 0.61165380859375, 0.6115807495117187, 0.6114669799804687, 0.6115678100585937, 0.6115462036132813, 0.6115228881835938, 0.6111729736328125, 0.611970703125, 0.6116990356445312, 0.6120406494140626, 0.611641357421875, 0.6119807739257812, 0.6115968017578125, 0.6114283447265625, 0.6122384643554688, 0.6113780517578125, 0.612173828125, 0.6114588012695312, 0.6116984252929687, 0.6117297973632813, 0.61170703125, 0.6120755004882813, 0.61180126953125, 0.6120068359375, 0.612145263671875, 0.611707763671875, 0.611455078125, 0.6119686889648438, 0.6116396484375, 0.611403564453125, 0.6110066528320313, 0.61208984375, 0.6113250732421875, 0.6116536254882813, 0.6114210815429687, 0.611715087890625, 0.6115339965820312, 0.6111299438476563, 0.6121720581054687, 0.6109406127929687, 0.6123619384765625, 0.6113341064453125, 0.6119102783203125, 0.6113526000976562, 0.611614013671875, 0.6116974487304687, 0.611786376953125, 0.6113773803710938, 0.6118853149414063, 0.611208984375, 0.6120120239257812, 0.6114317626953125, 0.6120004272460937, 0.6119423828125, 0.6115693969726562, 0.6115078735351562, 0.6117171020507812, 0.6123374633789063, 0.6121275024414062, 0.6108787231445313, 0.6121347045898438, 0.6113515014648437, 0.61224267578125, 0.6114926147460937, 0.6115780029296874, 0.611550537109375, 0.6118218383789062, 0.61205126953125, 0.6117457885742188, 
0.6117621459960938, 0.6116024169921875, 0.6124619140625, 0.6115656127929687, 0.6119385986328125, 0.6118463134765625, 0.612028564453125, 0.6117929077148437, 0.6125826416015625, 0.6117527465820313, 0.6122456665039062, 0.6116406860351562, 0.6126637573242187, 0.6123397216796875, 0.6117594604492187, 0.6127886352539063, 0.6115760498046875, 0.6121256713867187, 0.6121380004882813, 0.6117232666015625, 0.6116925659179687, 0.6115901489257812, 0.6112005615234375, 0.6118138427734375, 0.6111497192382812, 0.6115753173828125, 0.6116255493164062, 0.6120460205078125, 0.6120271606445312, 0.6112542724609376, 0.6120878295898438, 0.6115795288085938, 0.6120797729492188, 0.6120482788085938, 0.6121416015625, 0.6114710693359375, 0.6121723022460938, 0.6112501831054687, 0.6116390991210937, 0.6114646606445312, 0.6126207275390625, 0.6116801147460937, 0.6122869873046874, 0.61224755859375, 0.6117556762695312, 0.6119776000976562, 0.6119215698242187, 0.6119111938476562, 0.61240380859375, 0.6120157470703125, 0.612395751953125, 0.6117763061523438, 0.6126807861328125, 0.61121630859375, 0.6124871826171875, 0.6119192504882812, 0.6123485717773437, 0.612099365234375, 0.6127904663085938, 0.611822021484375, 0.6121922607421875, 0.6124646606445312, 0.6118911743164063, 0.61201416015625, 0.6117307739257812, 0.612303466796875, 0.61236962890625, 0.6120701904296875, 0.612619384765625, 0.6118327026367187, 0.612288330078125, 0.612212890625, 0.61239013671875, 0.6121643676757812, 0.6127042846679688, 0.61183349609375, 0.6126124877929687, 0.6126674194335937, 0.611536376953125, 0.6123505249023438, 0.612005859375, 0.6123560791015625, 0.6124705200195313, 0.6131610717773438, 0.6112392578125, 0.6117443237304687, 0.6113894653320312, 0.6120325317382812, 0.6118903198242187, 0.6113043212890625, 0.6120017700195313, 0.6121266479492188, 0.61219775390625, 0.6118837280273437, 0.6123126831054687, 0.6115352172851563, 0.6124722290039063, 0.611629638671875, 0.612042724609375, 0.6121021118164063, 0.6123438110351562, 0.6116590576171875, 0.6119451293945313, 0.6118370971679687, 0.6123292846679688, 0.6120560302734375, 0.6118502197265625, 0.612443115234375, 0.6120396728515625, 0.6123577880859375, 0.6119038696289063, 0.6128283081054687, 0.6119330444335938, 0.612441650390625, 0.6119649047851563, 0.6122930908203125, 0.6122843627929687, 0.6123295288085937, 0.6125045776367187, 0.6127236938476562, 0.6114489135742187, 0.61211572265625, 0.6123500366210938, 0.6122659301757812, 0.6124222412109375, 0.6124031982421875, 0.6122719116210937, 0.6120572509765625, 0.6123601684570312, 0.61216064453125, 0.6124381713867187, 0.6126107788085937, 0.6120919189453125, 0.6125908203125, 0.6119821166992188, 0.6124503173828125, 0.6126817016601562, 0.6120588989257812, 0.6132062377929688, 0.6120140991210937, 0.6124320068359375, 0.6122822265625, 0.6124273071289063, 0.612599365234375, 0.6120205078125, 0.61283935546875, 0.6122119140625, 0.6117545166015625, 0.611811279296875, 0.6120430908203125, 0.611078125, 0.61248876953125, 0.6120595703125, 0.612294677734375, 0.6119854125976563, 0.6123458862304687, 0.6114295043945313, 0.611914794921875, 0.612179443359375, 0.6117108154296875, 0.6124776611328125, 0.6116636962890625, 0.6123334350585937, 0.6115018310546875, 0.6118132934570313, 0.612077392578125, 0.6123218994140625, 0.6119627685546875, 0.6126206665039062, 0.6122882690429687, 0.6115143432617187, 0.6121533203125, 0.6119895629882812, 0.6120072021484375, 0.6114556884765625, 0.6126775512695313, 0.61162890625, 0.6123578491210937, 0.6120326538085937, 0.6123626708984375, 0.6125403442382813, 0.612054443359375, 
0.6119348754882813, 0.6121572265625, 0.6124013671875, 0.612495361328125, 0.6122327880859375, 0.6121760864257813, 0.6119195556640625, 0.6124318237304688, 0.6122091674804687, 0.6119915771484375, 0.6124253540039063, 0.6124895629882813, 0.6127388916015625, 0.6125733642578125, 0.6124769287109375, 0.6121710815429687, 0.6117905883789062, 0.6126417236328126, 0.6126325073242187, 0.6121554565429688, 0.6125913696289063, 0.61184228515625, 0.6124534912109375, 0.6122402954101562, 0.6122333374023438, 0.612042236328125, 0.6128357543945312, 0.612595703125, 0.6127513427734375, 0.6109691162109375, 0.6123616943359375, 0.6117359619140625, 0.612026611328125, 0.6123339233398437, 0.610981689453125, 0.6121719970703124, 0.6120570678710937, 0.61194677734375, 0.6122832641601562, 0.611919921875, 0.6118634033203125, 0.6124083862304688, 0.6119985961914063, 0.61241357421875, 0.6116821899414062, 0.61247900390625, 0.611831787109375, 0.611758056640625, 0.6124605712890625, 0.61218408203125, 0.6115502319335937, 0.6121154174804687, 0.6121103515625, 0.6117601318359375, 0.6125892333984375, 0.61183349609375, 0.6121336669921875, 0.6117366943359375, 0.612065185546875, 0.61258837890625, 0.6118395385742188, 0.6126959838867188, 0.6116390380859374, 0.6129447021484375, 0.6121799926757813, 0.6119232788085938, 0.611804931640625, 0.6121580810546875, 0.6127963256835938, 0.6115818481445312, 0.6126958618164062, 0.6120201416015625, 0.6121695556640625, 0.61252197265625, 0.6119035034179687, 0.6121950073242187, 0.6117481079101562, 0.6126157836914062, 0.6124855346679687, 0.6115591430664062, 0.6131427612304687, 0.6119464721679687, 0.612766845703125, 0.6125670776367188, 0.61140869140625, 0.6127734985351563, 0.6117210083007812, 0.6126946411132812, 0.6123840942382812, 0.6119880981445313, 0.611838134765625, 0.6118623657226563, 0.6119669189453125, 0.6112620849609375, 0.6125039672851562, 0.6112794189453125, 0.6130339965820313, 0.6112164916992188, 0.6126127319335938, 0.6115816040039063, 0.6117484741210938, 0.61230078125, 0.61129931640625, 0.6126282348632812, 0.6112975463867187, 0.6116823120117187, 0.6121248168945312, 0.61120703125, 0.6125687255859374, 0.6120260620117187, 0.6120589599609375, 0.6121654052734375, 0.6110865478515625, 0.6126713256835937, 0.6112509155273438, 0.6119139404296875, 0.6123639526367187, 0.6120186157226563, 0.6126338500976563, 0.6111279907226562, 0.6127632446289063, 0.6112950439453125, 0.6119976196289062, 0.6120557250976563, 0.6124134521484375, 0.6123069458007813, 0.6116843872070312, 0.6122720336914063, 0.611460693359375, 0.612424072265625, 0.6122516479492187, 0.612010009765625, 0.6124620361328125, 0.6112745361328125, 0.6120944213867188, 0.6120062255859375, 0.6122250366210937, 0.6117908325195313, 0.6119178466796875, 0.6122025146484374, 0.6119874267578125, 0.611997802734375, 0.6123902587890625, 0.6122452392578125, 0.6121665649414062, 0.6117449951171875, 0.6119155883789062, 0.6119039306640625, 0.6129322509765625, 0.6121162719726563, 0.612421630859375, 0.6124832763671875, 0.6116205444335937, 0.6125343627929688, 0.6110094604492188, 0.6124031982421875, 0.6119382934570312, 0.611999755859375, 0.61218603515625, 0.610879150390625, 0.61291259765625, 0.6114223022460937, 0.6118695068359375, 0.6122926635742187, 0.611399658203125, 0.6122921752929688, 0.6110989379882813, 0.6120439453125, 0.6116995239257812, 0.6118563842773438, 0.6123151245117188, 0.6112803955078125, 0.612638427734375, 0.6121705322265625, 0.611858154296875, 0.6119235229492187, 0.6121417236328125, 0.6124308471679687, 0.6114147338867187, 0.6120963134765625, 0.6115348510742188, 
0.6124412231445312, 0.6118445434570312, 0.6117661743164062, 0.612811279296875, 0.6112337646484375, 0.6126388549804688, 0.6118950805664063, 0.612406982421875, 0.6118363037109374, 0.6121062622070312, 0.6122528686523437, 0.6123179931640625, 0.6118911743164063, 0.6126052856445312, 0.6117557373046875, 0.612244140625, 0.612090087890625, 0.6121342163085938, 0.612966552734375, 0.6113693237304687, 0.6125150146484375, 0.611779541015625, 0.61260546875, 0.6122430419921875, 0.6119534301757813, 0.6121980590820313, 0.612149169921875, 0.6125381469726563, 0.6121417846679688, 0.612169677734375, 0.61263671875, 0.611600341796875, 0.6127117919921875, 0.6122625732421875, 0.6120914916992187, 0.6121656494140625, 0.6111639404296875, 0.6126246337890625, 0.6118850708007812, 0.6122250366210937, 0.61163134765625, 0.6121688842773437, 0.6111559448242188, 0.6125655517578125, 0.6118174438476562, 0.6122291259765625, 0.6118236083984375, 0.6115594482421876, 0.611704833984375, 0.6121345825195312, 0.6124312744140625, 0.6116069946289062, 0.6121016235351563, 0.6124471435546875, 0.611276123046875, 0.6121101684570313, 0.6114844970703125, 0.6124954833984375, 0.6115463256835938, 0.6123585205078125, 0.612188232421875, 0.6113446655273438, 0.6121123657226563, 0.6114365234375, 0.6124310302734375, 0.6119739379882813, 0.6115614624023438, 0.6124237060546875, 0.61134814453125, 0.6127559204101563, 0.6119259033203125, 0.6119415893554687, 0.61205712890625, 0.6115681762695313, 0.612296875, 0.6123171997070312, 0.61203662109375, 0.6125028686523437, 0.6117075805664063, 0.6124912719726563, 0.6118809814453126, 0.6126448364257813, 0.61213037109375, 0.6118486328125, 0.6125650024414062, 0.6119321899414063, 0.6125468139648438, 0.6120425415039062, 0.6125135498046875, 0.6115678100585937, 0.61297216796875, 0.6116644287109375, 0.612972412109375, 0.6127227172851563, 0.6117510986328125, 0.6131720581054687, 0.6115921630859374, 0.6129044799804687, 0.6119600830078125, 0.6125370483398438, 0.61171435546875, 0.6126210327148438, 0.6113599243164063, 0.6115885620117187, 0.6133087158203125, 0.6110249633789062, 0.6127280883789062, 0.6117313842773437, 0.6121046142578125, 0.61195849609375, 0.6116296997070313, 0.6124127197265625, 0.6114652099609375, 0.61193701171875, 0.612396728515625, 0.6118967895507812, 0.6119945068359375, 0.6115444946289063, 0.6119818725585937, 0.6122965698242188, 0.6124524536132813, 0.6119517211914063, 0.6124677734375, 0.611462158203125, 0.6120108642578125, 0.612780029296875, 0.6114529418945313, 0.6124046630859376, 0.611566162109375, 0.6125131225585938, 0.6121314697265625, 0.6122491455078125, 0.6122808227539063, 0.6119035034179687, 0.6121082763671875, 0.611983154296875, 0.6118319702148437, 0.6126550903320312, 0.612560791015625, 0.6118763427734375, 0.6122172241210937, 0.6117850341796875, 0.6125855712890625, 0.6121488647460938, 0.6121957397460938, 0.6129197387695312, 0.611925537109375, 0.612774658203125, 0.6122659912109375, 0.6121143798828125, 0.6127218627929687, 0.6118162841796875, 0.6126605224609375, 0.6120098266601562, 0.6125699462890625, 0.6128468627929687, 0.6122276000976562, 0.6131796875, 0.61227197265625, 0.612358154296875, 0.6123499755859375]",tokens/s,1.6339342233110976,, 
4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,853.331968,556.72832,0.0,178.25792,176.52224,s,1,7.511060546875,7.511060546875,0.0,7.511060546875,7.511060546875,7.511060546875,7.511060546875,[7.511060546875],,kWh,2.013667340417366e-05,2.214166881086207e-06,6.45611627600462e-06,2.8806956561264485e-05,,MB,1176.674304,669.974528,0.0,262.144,221.118976,s,10,0.2373123188018799,0.02373123188018799,7.72173514862464e-05,0.023694639205932615,0.023846604537963865,0.023847382164001465,0.023848004264831545,"[0.023846431732177733, 0.023769792556762696, 0.023693119049072266, 0.02362953567504883, 0.023674528121948243, 0.023655296325683594, 0.023848159790039063, 0.02381488037109375, 0.023684415817260742, 0.023696159362792967]",tokens/s,10787.472023891078,kWh,6.970796484486802e-07,7.687053776317147e-08,4.0972453886394427e-07,1.183674725075796e-06,tokens/kWh,216275632.63514575,MB,1211.006976,684.654592,0.0,276.824064,221.271552,s,10,13.6217958984375,1.3621795898437499,0.005329277517955597,1.3617802124023437,1.3692102783203126,1.3709354614257812,1.3723156079101562,"[1.3575599365234374, 1.3600166015625, 1.3583939208984375, 1.368826904296875, 1.3543009033203126, 1.37266064453125, 1.3644005126953125, 1.3580308837890624, 1.364061767578125, 1.3635438232421875]",tokens/s,46.24940827899681,kWh,3.945427498654954e-05,4.3513940986203914e-06,2.053856999753807e-05,6.4344239082708e-05,tokens/kWh,979108.6334709761,,s,630,13.616925313949594,0.02161416716499934,0.00044554585507595816,0.0215066556930542,0.021869215393066407,0.0222325496673584,0.023323972034454347,"[0.021236480712890624, 0.021513376235961914, 0.021559551239013673, 0.021807424545288084, 0.021418655395507812, 0.021406335830688475, 0.021258880615234375, 0.021897600173950194, 0.021334016799926758, 0.02127462387084961, 0.021506048202514647, 0.021445632934570313, 0.021560319900512694, 0.021444896697998046, 0.021406431198120118, 0.021496511459350585, 0.021620447158813477, 0.02152262306213379, 0.021377023696899415, 0.021294591903686523, 0.02125686454772949, 0.021400192260742187, 0.021376031875610352, 0.021254976272583007, 0.021309215545654295, 0.02146099281311035, 0.02182143974304199, 0.021465087890625, 0.02154105567932129, 0.02138297653198242, 0.0215285758972168, 0.021423744201660155, 0.021451135635375977, 0.021352767944335938, 0.021373823165893556, 0.0214653434753418, 0.021315488815307617, 0.021363168716430663, 0.021469600677490236, 0.024030271530151366, 0.0217542724609375, 0.021818784713745116, 0.021697439193725587, 0.021587167739868164, 0.02165635108947754, 0.021979455947875978, 0.021725215911865235, 0.02145987129211426, 0.02147439956665039, 0.021526176452636717, 0.022349855422973634, 0.021488832473754882, 0.021376895904541015, 0.02139638328552246, 0.02154911994934082, 0.02137696075439453, 0.021604352951049805, 0.0214769287109375, 0.021598175048828126, 0.02154745674133301, 0.021344287872314453, 
0.021366783142089844, 0.021515615463256837, 0.02103910446166992, 0.021428096771240236, 0.021523616790771485, 0.02137392044067383, 0.021559551239013673, 0.02133580780029297, 0.021301248550415038, 0.021358591079711914, 0.021294240951538087, 0.021968000411987303, 0.02149087905883789, 0.02148828887939453, 0.021571424484252928, 0.021542783737182616, 0.021405759811401366, 0.021499839782714844, 0.02141404724121094, 0.021464672088623047, 0.022016416549682616, 0.021377023696899415, 0.021331968307495116, 0.02183782386779785, 0.021456159591674805, 0.021559616088867188, 0.021505983352661132, 0.021764575958251955, 0.021630975723266603, 0.02229574394226074, 0.02196361541748047, 0.02179836845397949, 0.021625343322753905, 0.021481472015380858, 0.022567264556884764, 0.021362079620361327, 0.021455488204956054, 0.021685888290405273, 0.02166783905029297, 0.0214334716796875, 0.02133046340942383, 0.021445247650146486, 0.021576704025268553, 0.021385951995849608, 0.02145075225830078, 0.02139468765258789, 0.021360992431640625, 0.02156716728210449, 0.021318368911743164, 0.021366304397583007, 0.02145280075073242, 0.021376703262329103, 0.021602815628051757, 0.021485855102539062, 0.0213090877532959, 0.0218767032623291, 0.02401113510131836, 0.021763200759887694, 0.02183660888671875, 0.02154435157775879, 0.02144937515258789, 0.021519519805908202, 0.021488447189331055, 0.021364063262939454, 0.02138591957092285, 0.021095584869384766, 0.02143539237976074, 0.021462879180908202, 0.02152681541442871, 0.021454111099243164, 0.021505823135375978, 0.021439456939697267, 0.021528255462646483, 0.0215284481048584, 0.021602399826049806, 0.02222457695007324, 0.021737695693969727, 0.02164339256286621, 0.021497856140136717, 0.02161782455444336, 0.022481760025024413, 0.021597471237182617, 0.021271263122558594, 0.02153267288208008, 0.02141798400878906, 0.021300928115844726, 0.021309440612792968, 0.02148080062866211, 0.02167087936401367, 0.02307891273498535, 0.023346271514892578, 0.021427104949951172, 0.021526527404785157, 0.02138966369628906, 0.021448352813720702, 0.021378175735473633, 0.021322175979614257, 0.021395904541015625, 0.021366336822509765, 0.021342655181884766, 0.021215232849121093, 0.021369888305664064, 0.02134934425354004, 0.021639232635498048, 0.021739200592041017, 0.02140166473388672, 0.021434560775756836, 0.021565216064453125, 0.02146892738342285, 0.02161712074279785, 0.02147020721435547, 0.021308095932006835, 0.02142585563659668, 0.021338367462158205, 0.021242176055908203, 0.021329631805419923, 0.02126268768310547, 0.02137049674987793, 0.021432479858398436, 0.02135001564025879, 0.02145955276489258, 0.021856607437133788, 0.02143824005126953, 0.02148543930053711, 0.021528863906860353, 0.021448768615722657, 0.02254198455810547, 0.021444608688354492, 0.021233823776245116, 0.02158470344543457, 0.02138092803955078, 0.021612543106079102, 0.021438432693481446, 0.021710847854614256, 0.02139548873901367, 0.02126233673095703, 0.021428224563598632, 0.021377023696899415, 0.021393312454223632, 0.02146633529663086, 0.021441408157348633, 0.021370880126953123, 0.022338655471801756, 0.02152332878112793, 0.021620927810668947, 0.021360479354858398, 0.021301248550415038, 0.021393407821655275, 0.021346303939819337, 0.021821760177612306, 0.021613279342651368, 0.021425119400024416, 0.02141798400878906, 0.021368831634521485, 0.021389312744140625, 0.021393184661865235, 0.021822975158691405, 0.028106592178344728, 0.023689855575561525, 0.02150614356994629, 0.021493919372558595, 0.02158415985107422, 0.021788543701171875, 0.021432159423828125, 
0.02142972755432129, 0.021537151336669922, 0.02193833541870117, 0.021755903244018555, 0.021513856887817383, 0.02152662467956543, 0.02160691261291504, 0.022642656326293944, 0.02157548713684082, 0.02149996757507324, 0.021469120025634766, 0.021436416625976562, 0.0213602237701416, 0.021483936309814454, 0.02154003143310547, 0.021692480087280273, 0.022926080703735353, 0.021868383407592774, 0.021767936706542968, 0.021552799224853515, 0.02137785530090332, 0.021379072189331053, 0.021987552642822265, 0.02208745574951172, 0.021632160186767578, 0.02144540786743164, 0.021427871704101563, 0.02114784049987793, 0.021381919860839843, 0.021385215759277345, 0.021882688522338867, 0.021602495193481445, 0.021585376739501953, 0.02145510482788086, 0.02143052864074707, 0.021573663711547852, 0.02146892738342285, 0.021491552352905275, 0.021446815490722658, 0.021419296264648436, 0.021679296493530273, 0.021521663665771483, 0.021440927505493163, 0.02150003242492676, 0.02126790428161621, 0.02143289566040039, 0.021368928909301758, 0.02165670394897461, 0.021641151428222656, 0.02160111999511719, 0.02149190330505371, 0.021603488922119142, 0.021748384475708007, 0.022570112228393554, 0.021510208129882812, 0.02151299285888672, 0.021689952850341795, 0.02138912010192871, 0.021396095275878907, 0.021436256408691408, 0.02161680030822754, 0.021418176651000976, 0.02135379219055176, 0.021361152648925782, 0.02136787223815918, 0.021292255401611327, 0.02136649513244629, 0.021348512649536133, 0.021396928787231446, 0.021399423599243163, 0.02142892837524414, 0.021361759185791016, 0.02150716781616211, 0.02141276741027832, 0.021352575302124022, 0.021504447937011718, 0.021362016677856446, 0.021740352630615235, 0.02172480010986328, 0.021442592620849608, 0.021463424682617187, 0.02154412841796875, 0.021480255126953125, 0.021506048202514647, 0.02141798400878906, 0.021415935516357423, 0.021483295440673827, 0.021291231155395506, 0.021380447387695314, 0.021363391876220703, 0.021203104019165038, 0.021699424743652343, 0.021726112365722656, 0.022511199951171876, 0.021791231155395507, 0.021799999237060545, 0.021648319244384765, 0.021608448028564452, 0.022517759323120116, 0.02173734474182129, 0.02153913688659668, 0.021912895202636718, 0.02168502426147461, 0.021475040435791015, 0.021719039916992186, 0.02185215950012207, 0.021931615829467774, 0.021713312149047852, 0.02166374397277832, 0.021796863555908205, 0.021571584701538086, 0.021753856658935547, 0.021683584213256835, 0.021701248168945312, 0.021776384353637695, 0.02178665542602539, 0.021830751419067384, 0.02181193542480469, 0.021757120132446288, 0.0218239688873291, 0.021856767654418945, 0.0216760311126709, 0.021751808166503905, 0.022245376586914063, 0.021845760345458983, 0.021792831420898436, 0.021808544158935548, 0.021952384948730468, 0.021726112365722656, 0.021707839965820312, 0.021689504623413087, 0.02170217514038086, 0.02162099266052246, 0.021502208709716798, 0.021669408798217774, 0.021523040771484377, 0.021489280700683594, 0.021467424392700194, 0.02176585578918457, 0.021926048278808594, 0.02217763137817383, 0.02186412811279297, 0.021649696350097655, 0.021668224334716796, 0.022607519149780275, 0.021911104202270507, 0.021637920379638673, 0.021941919326782227, 0.021501920700073243, 0.02174569511413574, 0.021964319229125978, 0.022018207550048827, 0.02172934341430664, 0.021215103149414063, 0.021654848098754884, 0.021450944900512695, 0.021463680267333984, 0.021476831436157227, 0.021384832382202148, 0.021523584365844728, 0.021538591384887694, 0.021530176162719728, 0.021807552337646485, 0.02147942352294922, 
0.021550592422485353, 0.021504608154296875, 0.021501663208007813, 0.021645824432373048, 0.021761375427246092, 0.022051359176635744, 0.021648256301879883, 0.021705663681030274, 0.021592063903808592, 0.02143779182434082, 0.021566112518310546, 0.021395776748657228, 0.023150527954101562, 0.021544704437255858, 0.021659872055053712, 0.02168832015991211, 0.02164486312866211, 0.021528255462646483, 0.021572128295898437, 0.021745920181274414, 0.021972736358642577, 0.021752864837646484, 0.02178761672973633, 0.021981472015380858, 0.021750848770141603, 0.022692512512207032, 0.021782527923583983, 0.021788288116455078, 0.02184185600280762, 0.02171129608154297, 0.02167807960510254, 0.02164735984802246, 0.021727455139160155, 0.021724832534790038, 0.02165977668762207, 0.02154300880432129, 0.02133907127380371, 0.0214619197845459, 0.021438207626342774, 0.02154521560668945, 0.02156675148010254, 0.021545759201049806, 0.022007871627807617, 0.02141779136657715, 0.0214836483001709, 0.021448703765869142, 0.021372991561889647, 0.021419904708862306, 0.021506111145019532, 0.021420095443725588, 0.02153392028808594, 0.02195884895324707, 0.021135200500488283, 0.02152448081970215, 0.021420032501220702, 0.021526432037353514, 0.021352319717407225, 0.021341888427734376, 0.02133660888671875, 0.021372095108032226, 0.02150275230407715, 0.021373151779174804, 0.02140550422668457, 0.021366783142089844, 0.021950464248657226, 0.021575679779052736, 0.02169241523742676, 0.02153171157836914, 0.02156844711303711, 0.02156915283203125, 0.02149603271484375, 0.022657344818115235, 0.021507904052734374, 0.021608320236206055, 0.021773632049560548, 0.021376895904541015, 0.02155404853820801, 0.021467231750488282, 0.02162892723083496, 0.021494943618774413, 0.02142207908630371, 0.02140598487854004, 0.021397600173950194, 0.021281343460083008, 0.021360864639282228, 0.02146886444091797, 0.021516223907470704, 0.02133852767944336, 0.021405471801757812, 0.021343488693237305, 0.02146588706970215, 0.021483360290527345, 0.02125388717651367, 0.021575935363769533, 0.021437503814697265, 0.021586143493652343, 0.021332704544067382, 0.02195043182373047, 0.02157904052734375, 0.021408096313476562, 0.02149043273925781, 0.021531679153442382, 0.021385568618774414, 0.021508544921875, 0.021430112838745116, 0.02132371139526367, 0.023760351181030273, 0.022306880950927734, 0.02149190330505371, 0.02148387145996094, 0.02141321563720703, 0.02173766326904297, 0.021580255508422852, 0.021537952423095703, 0.021437280654907225, 0.02126233673095703, 0.022544384002685547, 0.02185625648498535, 0.021630975723266603, 0.021977088928222657, 0.021675519943237305, 0.021795232772827147, 0.021745792388916017, 0.0215817928314209, 0.021440511703491212, 0.021544223785400392, 0.0216375675201416, 0.02151775932312012, 0.0219451847076416, 0.021585119247436522, 0.021475263595581055, 0.021744543075561524, 0.02140889549255371, 0.021465919494628907, 0.02141916847229004, 0.02146828842163086, 0.02176358413696289, 0.021487775802612304, 0.02141539192199707, 0.021592863082885744, 0.02155939292907715, 0.0215629768371582, 0.021763872146606446, 0.021606752395629883, 0.021607744216918946, 0.0216746883392334, 0.021718175888061523, 0.02150079917907715, 0.021529823303222655, 0.021543872833251952, 0.0215163516998291, 0.021417728424072267, 0.021360767364501952, 0.021480960845947264, 0.02174835205078125, 0.021801887512207033, 0.02326937675476074, 0.02180624008178711, 0.021788511276245117, 0.021608287811279298, 0.021486719131469725, 0.0214619197845459, 0.02257302474975586, 0.02166169548034668, 0.021725343704223632, 
0.021858144760131835, 0.021579519271850586, 0.021688255310058593, 0.02163030433654785, 0.021555583953857423, 0.02142064094543457, 0.021446016311645506, 0.021441375732421875, 0.02157779121398926, 0.02131318473815918, 0.021421344757080078, 0.021473440170288086, 0.021369216918945312, 0.02115167999267578, 0.021372480392456053, 0.0214021110534668, 0.024878175735473632, 0.022911903381347656, 0.022460735321044922, 0.02148748779296875, 0.021491519927978514, 0.02141766357421875, 0.02157756805419922, 0.022239072799682617, 0.021631616592407227, 0.021651456832885742, 0.02146895980834961, 0.021473056793212892, 0.02152284812927246, 0.021473087310791016, 0.0215568962097168, 0.021619264602661132, 0.02139571189880371, 0.021346303939819337, 0.021520063400268553, 0.021483232498168945, 0.02184409523010254, 0.021637088775634767, 0.021454912185668945, 0.02143657684326172, 0.0214998722076416, 0.02136604881286621, 0.02233622360229492, 0.021698688507080077, 0.02163443183898926, 0.022107967376708983, 0.02165017509460449, 0.021835775375366212, 0.021809152603149414, 0.02177129554748535, 0.021502944946289064, 0.0214400634765625, 0.02154854393005371, 0.021502431869506837, 0.021457376480102538, 0.021403167724609377, 0.021461471557617188, 0.021397504806518555, 0.021450239181518553, 0.021422880172729492, 0.021454559326171876, 0.021368831634521485, 0.021394464492797853, 0.021459936141967773, 0.02132809638977051, 0.021341344833374024, 0.02138175964355469, 0.021444896697998046, 0.02145587158203125, 0.021709440231323242, 0.021944063186645508, 0.02167030334472656, 0.021497695922851563, 0.021485183715820314, 0.02149737548828125, 0.021431520462036134]",tokens/s,46.2659510480394,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", 
line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2871, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert return t.to( torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1013, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 839, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 199, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in 
launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,4384.014336,4566.482944,0.0,4188.012544,4187.049984,s,1,10.3226669921875,10.3226669921875,0.0,10.3226669921875,10.3226669921875,10.3226669921875,10.3226669921875,[10.3226669921875],,kWh,9.785378997917177e-05,1.0785661533696069e-05,3.2396137028004035e-05,0.00014103558854087187,,MB,4337.463296,4962.844672,0.0,4555.014144,4514.269184,s,10,7.85943878173828,0.7859438781738282,0.003248193699247347,0.7875285034179688,0.7886953369140625,0.7889753234863282,0.7891993127441406,"[0.78, 0.7875231323242188, 0.7826694946289062, 0.7807692260742187, 0.7872844848632813, 0.7876341552734375, 0.788135986328125, 0.7892553100585937, 0.7875338745117187, 0.7886331176757813]",tokens/s,325.72300276048486,kWh,2.281208450769251e-05,2.5157658233373223e-06,1.5158153152154578e-05,4.048600348318441e-05,tokens/kWh,6323172.898661828,MB,4346.912768,4979.621888,0.0,4571.79136,4514.271744,s,10,466.6024882812499,46.660248828125,0.011068309855742515,46.662541015625,46.666687499999995,46.672259765625,46.676717578125,"[46.632265625, 46.65224609375, 46.66014453125, 46.6612578125, 46.6629609375, 46.663859375, 46.66212109375, 46.6643515625, 46.66544921875, 46.67783203125]",tokens/s,1.3501856844369426,kWh,0.0013613940065339734,0.000150172057835159,0.0009055239445040442,0.0024170900088731767,tokens/kWh,26064.39965773967,,s,630,466.5971835937501,0.7406304501488096,0.0003842440844463258,0.7406415100097656,0.741086279296875,0.7412395568847657,0.7414922277832031,"[0.7400195922851562, 0.7400505981445312, 0.7398154907226563, 0.739328857421875, 0.7396557006835938, 0.7402882080078125, 0.739842529296875, 0.7398804931640625, 0.7397766723632813, 0.7399896240234375, 0.739989501953125, 0.7401123046875, 0.7397409057617188, 0.7400776977539063, 0.7401336059570313, 0.7396638793945313, 0.7399824829101562, 0.7398463745117188, 0.740166259765625, 0.7398889770507813, 
0.7399959716796874, 0.740632080078125, 0.7399937133789063, 0.740153564453125, 0.740021728515625, 0.7406412963867187, 0.7402557373046875, 0.7396383056640625, 0.7405346069335937, 0.74044873046875, 0.7398190307617187, 0.7404366455078125, 0.7408128051757813, 0.7404246826171875, 0.740150146484375, 0.7401944580078125, 0.7401615600585938, 0.7403519897460937, 0.739842041015625, 0.740030029296875, 0.7403851928710937, 0.7400409545898438, 0.7400709228515625, 0.740415771484375, 0.74038623046875, 0.7406367797851563, 0.7403157958984375, 0.7405259399414063, 0.7403190307617188, 0.7402066650390625, 0.7406757202148437, 0.74048681640625, 0.7396448974609375, 0.740488037109375, 0.7407861938476562, 0.7405541381835937, 0.739809814453125, 0.740874267578125, 0.740378662109375, 0.7405140380859375, 0.7401420288085937, 0.74058544921875, 0.740694580078125, 0.7398834228515625, 0.740550048828125, 0.7404017944335938, 0.740384765625, 0.7400115966796875, 0.7401517333984375, 0.7411056518554687, 0.7402147827148438, 0.7400131225585938, 0.7413422241210937, 0.7399955444335937, 0.7396536254882813, 0.7408514404296875, 0.7402305908203125, 0.740455078125, 0.7398401489257812, 0.7406387329101562, 0.7405711059570312, 0.7406399536132813, 0.7395582275390625, 0.74058544921875, 0.7408004760742187, 0.7407980346679688, 0.739641845703125, 0.7406991577148437, 0.7408834228515625, 0.7402168579101562, 0.739751220703125, 0.7406128540039062, 0.740921142578125, 0.7402518310546875, 0.74022705078125, 0.7406018676757813, 0.7409004516601563, 0.7403721313476562, 0.740874755859375, 0.7404523315429687, 0.7409985961914063, 0.7404183349609375, 0.7406793823242187, 0.7401761474609375, 0.7410494384765625, 0.7407820434570312, 0.7404300537109375, 0.740877197265625, 0.7407839965820312, 0.7402352905273437, 0.7404441528320312, 0.7406465454101563, 0.7406863403320313, 0.740124755859375, 0.7403067016601562, 0.7408099365234375, 0.7403179931640625, 0.7408599243164062, 0.7406141357421875, 0.7409703369140626, 0.740460693359375, 0.7409868774414062, 0.7406669311523437, 0.7407661743164062, 0.7410028076171875, 0.74051953125, 0.740729248046875, 0.7405772705078125, 0.7405260620117188, 0.740441162109375, 0.7405618286132812, 0.7406755981445312, 0.739999755859375, 0.7403212890625, 0.7403572998046875, 0.7405899658203124, 0.739885498046875, 0.7405383911132812, 0.74066943359375, 0.7408038940429688, 0.740121337890625, 0.7403797607421875, 0.7404747314453125, 0.7406744995117187, 0.7401647338867188, 0.7403672485351562, 0.7405545654296875, 0.7406163940429688, 0.740849853515625, 0.74037841796875, 0.7402281494140625, 0.7402581176757812, 0.7403209838867187, 0.7402026977539062, 0.74078076171875, 0.7406192626953125, 0.7403645629882812, 0.74097021484375, 0.740475341796875, 0.7406515502929687, 0.7405623779296875, 0.7410548095703124, 0.740423828125, 0.7406408081054687, 0.7409862060546875, 0.7403770141601562, 0.7402435302734375, 0.7409392700195313, 0.7409180297851562, 0.7407205810546875, 0.7408023071289063, 0.7407756958007813, 0.7408173217773437, 0.7409131469726562, 0.7404827880859375, 0.7408663330078125, 0.7405254516601563, 0.7406817016601562, 0.7409219970703125, 0.7408558349609375, 0.7406814575195313, 0.7406544799804687, 0.7410221557617187, 0.7406570434570312, 0.7411595458984375, 0.7401569213867187, 0.74084814453125, 0.74292431640625, 0.7409152221679688, 0.741015625, 0.7407022094726563, 0.7403618774414062, 0.73979931640625, 0.7403438110351562, 0.7413043212890625, 0.740691650390625, 0.7397128295898437, 0.7405382690429687, 0.7412589721679688, 0.7402750244140625, 0.7407606201171875, 
0.7405452270507813, 0.7407579956054687, 0.7404172973632812, 0.7403253784179687, 0.7409540405273437, 0.7409331665039063, 0.7402005004882812, 0.7401491088867187, 0.7404910888671875, 0.7404653930664062, 0.7410072631835938, 0.7403982543945312, 0.7402630615234375, 0.7406876831054687, 0.7406591186523438, 0.7409129028320313, 0.740784423828125, 0.7405977783203125, 0.7408927001953125, 0.740636474609375, 0.7404173583984375, 0.7406964721679687, 0.7409541015625, 0.7407647705078125, 0.7403014526367188, 0.7405775146484375, 0.7406959838867188, 0.7411856079101562, 0.7405547485351562, 0.7409561767578124, 0.74051953125, 0.7405894775390625, 0.7410322265625, 0.7404525146484375, 0.740384033203125, 0.74058154296875, 0.7407244262695313, 0.7407064208984375, 0.7408749389648438, 0.7411825561523437, 0.7406376342773437, 0.7403233032226563, 0.7412362670898438, 0.7411880493164062, 0.7403493041992187, 0.7403728637695313, 0.7409031982421875, 0.7412422485351563, 0.7403444213867187, 0.74039501953125, 0.7407388916015625, 0.7407108764648438, 0.7405159301757812, 0.7409315795898438, 0.7405479125976563, 0.7402473754882812, 0.7405596923828125, 0.7411990966796875, 0.7401375122070313, 0.74032568359375, 0.7403480834960937, 0.7403026733398438, 0.7407575073242187, 0.7404391479492187, 0.7401747436523437, 0.7403162841796875, 0.7408889770507813, 0.7405757446289063, 0.7406051635742188, 0.7404649658203125, 0.7399645385742187, 0.7404735107421875, 0.7410853271484374, 0.740697509765625, 0.740595458984375, 0.740115234375, 0.7407564697265625, 0.7409378051757812, 0.7409120483398437, 0.7404903564453125, 0.740961181640625, 0.7407882080078125, 0.740537353515625, 0.7407932739257812, 0.7409603271484375, 0.7407615966796876, 0.7405029296875, 0.7403587646484375, 0.7408844604492187, 0.7408968505859375, 0.7407646484375, 0.7403816528320313, 0.7410360107421875, 0.740190185546875, 0.741265380859375, 0.7407677001953125, 0.7410004272460937, 0.74047119140625, 0.7410343017578125, 0.7408720703125, 0.7408909301757812, 0.7405621948242187, 0.7408171997070313, 0.740706787109375, 0.7409868774414062, 0.7408309936523437, 0.7405748291015625, 0.7406619262695312, 0.7407206420898438, 0.7407114868164062, 0.7407252197265625, 0.7413704833984375, 0.7406524047851563, 0.7408787841796876, 0.7406328125, 0.7405247802734375, 0.7409304809570313, 0.7407001342773437, 0.7402608032226563, 0.7400745239257812, 0.7405209350585937, 0.7406243896484375, 0.74045849609375, 0.7405711059570312, 0.7408148193359375, 0.7404664916992187, 0.7404846801757813, 0.7408441772460937, 0.7406868896484375, 0.7407542724609375, 0.7403439331054688, 0.7405230712890625, 0.740754150390625, 0.74096630859375, 0.740278564453125, 0.7405875244140625, 0.740833251953125, 0.7408721923828125, 0.7404459228515625, 0.7403925170898438, 0.74037841796875, 0.7408239135742187, 0.7405977783203125, 0.7402426147460938, 0.7409152221679688, 0.7409365234375, 0.7402453002929688, 0.7410056762695313, 0.7410706787109375, 0.74068994140625, 0.740447509765625, 0.7406742553710938, 0.7407472534179688, 0.74175927734375, 0.7406363525390625, 0.7406817626953125, 0.74133642578125, 0.7406576538085937, 0.7406693725585938, 0.7409738159179687, 0.7410184936523437, 0.7404336547851562, 0.7407271728515625, 0.7404195556640625, 0.7414859008789062, 0.7402501831054688, 0.7405077514648437, 0.7404359741210937, 0.741105224609375, 0.7406533203125, 0.7404771118164063, 0.74070654296875, 0.7406445922851562, 0.7408680419921875, 0.7405767822265625, 0.7410755004882813, 0.74109130859375, 0.7405181274414062, 0.7403685913085938, 0.7405951538085938, 0.7404239501953125, 
0.7404251098632812, 0.7405381469726563, 0.740688232421875, 0.7403665161132813, 0.7406198120117188, 0.740737548828125, 0.7408297119140625, 0.7404478759765625, 0.7403399047851562, 0.7411488037109375, 0.7406000366210937, 0.740068115234375, 0.7401787719726562, 0.7406900024414063, 0.7399046020507812, 0.7412335815429687, 0.740421630859375, 0.7405916137695312, 0.7405240478515625, 0.7406632690429688, 0.7404830932617188, 0.7408414916992188, 0.7406366577148438, 0.7406956176757813, 0.7402152099609375, 0.7408025512695312, 0.741148681640625, 0.7409144897460938, 0.7405612182617187, 0.74076318359375, 0.7404671630859375, 0.7405343017578125, 0.7407120361328124, 0.7408722534179687, 0.7407449951171875, 0.7402197875976563, 0.7407444458007812, 0.7409302978515625, 0.7409007568359375, 0.74053857421875, 0.741087158203125, 0.7408059692382812, 0.7411715087890625, 0.74035205078125, 0.7410333251953125, 0.7403152465820313, 0.740950927734375, 0.7404436645507813, 0.7409464111328125, 0.7409313354492187, 0.7408642578125, 0.7404564208984376, 0.7408292236328125, 0.7408162841796875, 0.7409790649414062, 0.7411129760742188, 0.7406704711914063, 0.7402434692382812, 0.7409337768554688, 0.7404783935546875, 0.7400045166015625, 0.740599609375, 0.7409668579101563, 0.7405609130859375, 0.74033740234375, 0.7406492309570313, 0.7410005493164062, 0.7402352294921875, 0.7402005004882812, 0.7404649047851563, 0.7413662109375, 0.74050537109375, 0.7403521728515625, 0.7408446655273437, 0.7406520385742188, 0.7407183227539063, 0.7406736450195313, 0.740123779296875, 0.74053515625, 0.7404544067382812, 0.7410208129882813, 0.7403468017578125, 0.7404906005859375, 0.7409271850585938, 0.7403507080078126, 0.7404238891601562, 0.7406466064453125, 0.7406123657226562, 0.7408271484375, 0.7403028564453125, 0.7402880249023438, 0.7413002319335937, 0.740756103515625, 0.7403969116210938, 0.7409766235351563, 0.741285888671875, 0.7410333862304688, 0.740450927734375, 0.74140283203125, 0.7407388305664062, 0.7408353271484375, 0.7404111328125, 0.7407590942382812, 0.7405862426757812, 0.7411541137695312, 0.7404346313476563, 0.7409149780273437, 0.7412155151367188, 0.7401030883789063, 0.7410667724609376, 0.7412072143554688, 0.7405166625976562, 0.74062353515625, 0.741194580078125, 0.7408453979492188, 0.7408536987304688, 0.7405017700195312, 0.74096630859375, 0.7404544067382812, 0.741702880859375, 0.7403590087890625, 0.7408206787109375, 0.7402210083007813, 0.7412155151367188, 0.74056689453125, 0.7406417236328126, 0.7403038940429687, 0.7407339477539062, 0.7400856323242188, 0.7402881469726562, 0.7404854736328125, 0.7403048706054688, 0.7405787353515625, 0.740346435546875, 0.7411138305664062, 0.7401697387695313, 0.7402978515625, 0.7403488159179688, 0.740921142578125, 0.7409185180664063, 0.7402034912109375, 0.7403060302734376, 0.7403733520507813, 0.7407513427734375, 0.7413466796875, 0.7407335815429688, 0.740063232421875, 0.7410543212890625, 0.7406931762695312, 0.741095458984375, 0.7402077026367188, 0.7405111694335937, 0.740576904296875, 0.7409139404296875, 0.74109130859375, 0.7403963012695313, 0.7408911743164063, 0.7407228393554688, 0.740656494140625, 0.7409578857421875, 0.7406044311523438, 0.7414297485351562, 0.741580810546875, 0.740294677734375, 0.7404707641601562, 0.7409067993164062, 0.7414436645507813, 0.740173828125, 0.740737060546875, 0.7412010498046875, 0.7408501586914062, 0.7404363403320312, 0.7409541015625, 0.7410216674804687, 0.7404930419921875, 0.74058984375, 0.7407490844726563, 0.7411278686523437, 0.740725341796875, 0.7408616333007813, 0.7412691650390625, 
0.7407438354492187, 0.7408267211914062, 0.741392822265625, 0.740957275390625, 0.7410277709960937, 0.7412838745117187, 0.7407637329101563, 0.7409290771484375, 0.7401904907226563, 0.7408988037109375, 0.740469970703125, 0.7405146484375, 0.7407565307617188, 0.74098583984375, 0.7407430419921875, 0.7400154418945313, 0.7405875854492188, 0.7409999389648437, 0.7410193481445313, 0.7408694458007813, 0.7402786865234375, 0.740968505859375, 0.7406876220703125, 0.7410165405273438, 0.7406650390625, 0.7407493896484375, 0.7404891967773437, 0.7407401733398438, 0.7408728637695312, 0.740468994140625, 0.7417009887695313, 0.7409425048828125, 0.7408291625976563, 0.740797607421875, 0.7413209228515625, 0.740708984375, 0.7406807861328125, 0.7412581787109375, 0.7409722900390625, 0.7409625244140625, 0.7412992553710938, 0.7411414184570313, 0.7412796630859375, 0.7408763427734375, 0.7417642211914063, 0.74086083984375, 0.7408189697265625, 0.7406930541992187, 0.7413052978515625, 0.7410216674804687, 0.7411827392578125, 0.741086181640625, 0.7409776611328125, 0.7408381958007813, 0.7414948120117187, 0.7412696533203125, 0.740348876953125, 0.740801025390625, 0.7411981811523437, 0.7411402587890625, 0.7410465087890625, 0.740831298828125, 0.7412284545898438, 0.7412449951171876, 0.740482177734375, 0.7411937255859375, 0.7407173461914063]",tokens/s,1.350201034536289,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1141, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 944, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 677, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 500, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl 
return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2871, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply 
self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert return t.to( torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,11136.79872,12227.3792,0.0,11848.9088,11814.752256,s,1,16.58408203125,16.58408203125,0.0,16.58408203125,16.58408203125,16.58408203125,16.58408203125,[16.58408203125],,kWh,0.00027038696720004126,2.9818376568283732e-05,8.865979315003658e-05,0.0003888651369183616,,MB,2071.98208,14033.027072,0.0,13625.196544,13297.870848,s,10,22.904132568359376,2.2904132568359374,0.0012868454753217756,2.2905863037109375,2.2922088623046877,2.292209411621094,2.292209851074219,"[2.292208740234375, 2.2922099609375, 2.29136376953125, 2.291201171875, 2.29030224609375, 2.288611328125, 2.28920458984375, 2.290870361328125, 2.288856201171875, 2.28930419921875]",tokens/s,111.77022279099448,kWh,6.670982737708072e-05,7.357757209983291e-06,4.435417437220202e-05,0.00011842175895926602,tokens/kWh,2161764.8838340365,MB,2079.465472,14335.01696,0.0,13927.186432,13689.870848,s,10,1355.52684375,135.55268437499998,0.057629119548384934,135.5256875,135.6300109375,135.65389609375,135.67300421875,"[135.67778125, 135.624703125, 135.585671875, 135.569046875, 135.532953125, 135.497828125, 135.507578125, 135.518421875, 135.51459375, 135.498265625]",tokens/s,0.4647639424514348,kWh,0.003951603366722094,0.0004358923515490394,0.0026285220194825986,0.0070160177377537325,tokens/kWh,8979.452782878832,,s,630,1355.5206533203132,2.151620084635417,0.0010979530220220387,2.1514691162109374,2.153191381835937,2.1536974487304685,2.15430625,"[2.15374072265625, 2.152707763671875, 2.152853271484375, 2.152304443359375, 2.15255126953125, 2.15317822265625, 2.15230126953125, 2.15374462890625, 2.152818603515625, 2.153167724609375, 2.15337890625, 2.1532265625, 2.153262939453125, 2.153539794921875, 2.153477294921875, 2.152826904296875, 2.15331640625, 2.153724609375, 2.15341259765625, 2.153799560546875, 2.153301513671875, 
2.15342529296875, 2.152824951171875, 2.1533818359375, 2.1532939453125, 2.153029052734375, 2.153637939453125, 2.15375732421875, 2.153396240234375, 2.154084228515625, 2.15372607421875, 2.154080322265625, 2.153596923828125, 2.15418798828125, 2.153638671875, 2.153900146484375, 2.15345556640625, 2.1538623046875, 2.1537548828125, 2.153672607421875, 2.15450830078125, 2.153498779296875, 2.15490625, 2.1540546875, 2.154407958984375, 2.154281982421875, 2.153531005859375, 2.1537177734375, 2.153956787109375, 2.15450927734375, 2.153934814453125, 2.15406591796875, 2.15435888671875, 2.1535478515625, 2.154036376953125, 2.1539912109375, 2.1544140625, 2.1534453125, 2.153170166015625, 2.154316162109375, 2.154031494140625, 2.1533955078125, 2.15377197265625, 2.153185302734375, 2.15213427734375, 2.152380126953125, 2.152184326171875, 2.1523232421875, 2.152303955078125, 2.151925537109375, 2.152924072265625, 2.15286083984375, 2.152084228515625, 2.1526240234375, 2.151804931640625, 2.15241943359375, 2.1530869140625, 2.152595458984375, 2.15252392578125, 2.152662841796875, 2.152728759765625, 2.152658935546875, 2.15320361328125, 2.1518603515625, 2.152887451171875, 2.152725341796875, 2.15288427734375, 2.15361767578125, 2.1521552734375, 2.152619873046875, 2.153019287109375, 2.15226171875, 2.1537685546875, 2.152908935546875, 2.152489990234375, 2.152644775390625, 2.15259326171875, 2.152498046875, 2.152840576171875, 2.152521728515625, 2.152732666015625, 2.153362060546875, 2.153743896484375, 2.152993408203125, 2.152506591796875, 2.153020263671875, 2.154104736328125, 2.152980712890625, 2.152964111328125, 2.152489990234375, 2.15295458984375, 2.152462646484375, 2.153101318359375, 2.15288818359375, 2.152739990234375, 2.153175537109375, 2.152466796875, 2.153329833984375, 2.152919921875, 2.152263671875, 2.15319091796875, 2.15310595703125, 2.153195556640625, 2.152091552734375, 2.15292919921875, 2.153471923828125, 2.152052734375, 2.15093359375, 2.151443603515625, 2.151208740234375, 2.151665771484375, 2.15092431640625, 2.151182373046875, 2.151501953125, 2.151745361328125, 2.151505859375, 2.151245849609375, 2.15233251953125, 2.151381103515625, 2.1523515625, 2.1514228515625, 2.151843017578125, 2.15215576171875, 2.15196484375, 2.151256103515625, 2.15206494140625, 2.1517880859375, 2.1520634765625, 2.1513359375, 2.152428466796875, 2.15237939453125, 2.152940673828125, 2.151484375, 2.15225537109375, 2.1527265625, 2.152265625, 2.153132080078125, 2.15225244140625, 2.152393798828125, 2.152676513671875, 2.152208984375, 2.152499267578125, 2.15231884765625, 2.151991455078125, 2.152485107421875, 2.152724365234375, 2.152110107421875, 2.1523330078125, 2.15228466796875, 2.15233544921875, 2.15267919921875, 2.152990966796875, 2.1522646484375, 2.15244921875, 2.152156982421875, 2.153568359375, 2.15230419921875, 2.152249755859375, 2.151930908203125, 2.15242138671875, 2.152317138671875, 2.153120361328125, 2.152474609375, 2.152212646484375, 2.152177001953125, 2.152445556640625, 2.152672119140625, 2.1532548828125, 2.15179052734375, 2.150914794921875, 2.1508173828125, 2.151113037109375, 2.15136474609375, 2.151604248046875, 2.151395263671875, 2.15161767578125, 2.15109716796875, 2.151607421875, 2.150910888671875, 2.15164306640625, 2.151921630859375, 2.15119775390625, 2.151859130859375, 2.1525478515625, 2.1510517578125, 2.15236376953125, 2.151751220703125, 2.151856689453125, 2.15140966796875, 2.1511865234375, 2.153145751953125, 2.151104736328125, 2.151987548828125, 2.15233251953125, 2.1515537109375, 2.150688720703125, 2.150803466796875, 2.15117822265625, 
2.152405029296875, 2.152108154296875, 2.151814697265625, 2.1515673828125, 2.15140625, 2.15215478515625, 2.151510009765625, 2.15209521484375, 2.152355712890625, 2.152592041015625, 2.152163330078125, 2.15205859375, 2.15209375, 2.15196484375, 2.152599853515625, 2.151763427734375, 2.151579345703125, 2.152060791015625, 2.15278662109375, 2.1517724609375, 2.153413818359375, 2.151795654296875, 2.152584716796875, 2.152, 2.15275537109375, 2.152795654296875, 2.151948486328125, 2.152388671875, 2.15271533203125, 2.151862060546875, 2.1522646484375, 2.152122314453125, 2.152468505859375, 2.152443359375, 2.15089306640625, 2.150482177734375, 2.15049267578125, 2.15028125, 2.150667236328125, 2.151060546875, 2.15079052734375, 2.15144873046875, 2.150859130859375, 2.1505966796875, 2.15177001953125, 2.15014404296875, 2.151184326171875, 2.1510185546875, 2.1508076171875, 2.151145751953125, 2.15081103515625, 2.1512626953125, 2.150821044921875, 2.1516171875, 2.150609130859375, 2.15122900390625, 2.15109326171875, 2.1516748046875, 2.15079931640625, 2.151235595703125, 2.150504150390625, 2.151947998046875, 2.15126904296875, 2.15168408203125, 2.15206103515625, 2.151129150390625, 2.15129248046875, 2.15198046875, 2.151365478515625, 2.151548583984375, 2.15148291015625, 2.1516533203125, 2.1513544921875, 2.151612548828125, 2.150863525390625, 2.15126123046875, 2.15179052734375, 2.1515458984375, 2.151701904296875, 2.151739990234375, 2.15112109375, 2.15173876953125, 2.151557861328125, 2.1519248046875, 2.1510234375, 2.15153662109375, 2.151571533203125, 2.15143212890625, 2.152637939453125, 2.151794677734375, 2.1523251953125, 2.15185400390625, 2.151250244140625, 2.1512763671875, 2.151165771484375, 2.15204638671875, 2.15148388671875, 2.150702392578125, 2.14983154296875, 2.15018896484375, 2.149712158203125, 2.150517578125, 2.149858154296875, 2.150455322265625, 2.14996484375, 2.149432373046875, 2.150497802734375, 2.149911865234375, 2.150378662109375, 2.14996923828125, 2.150060302734375, 2.14969140625, 2.150126708984375, 2.150521728515625, 2.15058837890625, 2.149822265625, 2.149945556640625, 2.150328369140625, 2.15119677734375, 2.149961669921875, 2.15074609375, 2.15079931640625, 2.15058642578125, 2.150466796875, 2.15084619140625, 2.150916748046875, 2.15092041015625, 2.151034912109375, 2.150868896484375, 2.151144775390625, 2.151258544921875, 2.15111474609375, 2.15045556640625, 2.150823974609375, 2.150533203125, 2.1515546875, 2.1513095703125, 2.151034912109375, 2.15058837890625, 2.15149560546875, 2.151301025390625, 2.150927978515625, 2.151443115234375, 2.15052294921875, 2.15045654296875, 2.15022265625, 2.152161376953125, 2.1504658203125, 2.15101416015625, 2.15065185546875, 2.151403076171875, 2.1523974609375, 2.151395263671875, 2.150956787109375, 2.152105224609375, 2.150964111328125, 2.152171630859375, 2.151329833984375, 2.151540771484375, 2.151630615234375, 2.14989013671875, 2.14916650390625, 2.149771728515625, 2.14993310546875, 2.149523681640625, 2.15041357421875, 2.149125, 2.150731689453125, 2.15006982421875, 2.150005126953125, 2.149689453125, 2.15046728515625, 2.150459228515625, 2.150856689453125, 2.150032958984375, 2.151127197265625, 2.150333251953125, 2.150701171875, 2.14982568359375, 2.150210205078125, 2.15022412109375, 2.1509560546875, 2.15025439453125, 2.151168701171875, 2.15073583984375, 2.15011328125, 2.15008203125, 2.1519951171875, 2.150269775390625, 2.1510556640625, 2.150285400390625, 2.151147216796875, 2.150312255859375, 2.151153564453125, 2.151604248046875, 2.151036865234375, 2.15113330078125, 2.151172119140625, 
2.15152978515625, 2.15193408203125, 2.151918212890625, 2.150645751953125, 2.15139111328125, 2.1517958984375, 2.1515087890625, 2.151620849609375, 2.151088134765625, 2.151255126953125, 2.150869873046875, 2.152009765625, 2.151751708984375, 2.151941650390625, 2.15211962890625, 2.152193115234375, 2.15191748046875, 2.151174072265625, 2.151095458984375, 2.15147412109375, 2.1513125, 2.1514921875, 2.152072998046875, 2.152081787109375, 2.151716796875, 2.15088427734375, 2.149719482421875, 2.150300048828125, 2.150454833984375, 2.15006640625, 2.151052734375, 2.149905029296875, 2.15017822265625, 2.1502412109375, 2.15087451171875, 2.15023193359375, 2.15002783203125, 2.150787109375, 2.150412353515625, 2.150989501953125, 2.150674560546875, 2.15045947265625, 2.15073974609375, 2.150287109375, 2.150994140625, 2.151113037109375, 2.15100830078125, 2.15099365234375, 2.150598876953125, 2.151919677734375, 2.151110595703125, 2.150681884765625, 2.15150634765625, 2.15130908203125, 2.1512744140625, 2.152123046875, 2.151403564453125, 2.15124609375, 2.15190087890625, 2.15092626953125, 2.151630126953125, 2.15122412109375, 2.151163818359375, 2.150731201171875, 2.151129150390625, 2.151237548828125, 2.15116259765625, 2.151464111328125, 2.150979248046875, 2.1516767578125, 2.15100830078125, 2.151354248046875, 2.151202880859375, 2.15159716796875, 2.152220703125, 2.151686279296875, 2.1517109375, 2.151391845703125, 2.151088134765625, 2.151636962890625, 2.15196875, 2.151333984375, 2.151794677734375, 2.151206787109375, 2.151443359375, 2.151432373046875, 2.151592529296875, 2.151311279296875, 2.1513984375, 2.150262939453125, 2.1506396484375, 2.149322021484375, 2.15015283203125, 2.150506103515625, 2.149909912109375, 2.15073681640625, 2.150552734375, 2.150275146484375, 2.15019189453125, 2.150582275390625, 2.1506826171875, 2.15127978515625, 2.150322998046875, 2.150883544921875, 2.150467529296875, 2.151138916015625, 2.15074658203125, 2.150599853515625, 2.149941650390625, 2.151123291015625, 2.151288818359375, 2.151434326171875, 2.1514072265625, 2.15140380859375, 2.15039794921875, 2.1509931640625, 2.150472412109375, 2.15136669921875, 2.150916015625, 2.151755859375, 2.15052685546875, 2.150698974609375, 2.151047119140625, 2.150866943359375, 2.15036474609375, 2.150756591796875, 2.1507783203125, 2.1514296875, 2.151457275390625, 2.15137451171875, 2.1515224609375, 2.151106201171875, 2.151357421875, 2.15172509765625, 2.150919189453125, 2.151572509765625, 2.151522216796875, 2.152494140625, 2.15128125, 2.1516044921875, 2.151712646484375, 2.151427978515625, 2.151321044921875, 2.151582275390625, 2.1514423828125, 2.15172412109375, 2.15140625, 2.152030517578125, 2.1506416015625, 2.1513359375, 2.151771484375, 2.150775390625, 2.149087158203125, 2.149661865234375, 2.149651123046875, 2.14935888671875, 2.149644287109375, 2.1498623046875, 2.15006103515625, 2.14955615234375, 2.150403076171875, 2.1497666015625, 2.149818359375, 2.14995556640625, 2.150998291015625, 2.150135986328125, 2.150268310546875, 2.150007080078125, 2.149869873046875, 2.151012451171875, 2.150572021484375, 2.149473876953125, 2.150074462890625, 2.15087353515625, 2.15081884765625, 2.149972900390625, 2.1505576171875, 2.150240234375, 2.15158984375, 2.150319580078125, 2.150472412109375, 2.1510146484375, 2.1510009765625, 2.151279541015625, 2.15128857421875, 2.150300048828125, 2.15108203125, 2.150709228515625, 2.15055322265625, 2.151600341796875, 2.1515654296875, 2.151026611328125, 2.151, 2.151370849609375, 2.151364501953125, 2.1511015625, 2.15116259765625, 2.1517353515625, 2.1517578125, 
2.1517373046875, 2.15187451171875, 2.15133349609375, 2.151178466796875, 2.151184326171875, 2.1522880859375, 2.152058349609375, 2.151327880859375, 2.1508779296875, 2.151151611328125, 2.151152587890625, 2.15120166015625, 2.151810791015625, 2.15129736328125, 2.15137255859375]",tokens/s,0.464766064948425,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3553.804288,4495.179776,0.0,4116.709376,3980.386816,s,1,10.698505859375,10.698505859375,0.0,10.698505859375,10.698505859375,10.698505859375,10.698505859375,[10.698505859375],,kWh,9.446468462082861e-05,1.0412533916205323e-05,3.08761358120066e-05,0.00013575335434904052,,MB,3569.58208,4826.529792,0.0,4418.699264,4245.764608,s,10,6.540985412597656,0.6540985412597655,0.0014622330901031934,0.6539962463378907,0.6554122009277343,0.6563269500732422,0.6570587493896484,"[0.65141357421875, 0.6533139038085938, 0.6548538208007812, 0.6540926513671875, 0.652806884765625, 0.6543641967773437, 0.65724169921875, 0.6552089233398437, 0.6538998413085938, 0.6537899169921875]",tokens/s,391.37833805125916,kWh,1.9062787973176872e-05,2.1022905774729127e-06,1.263961080612535e-05,3.380468935677513e-05,tokens/kWh,7572913.843347965,MB,3576.631296,4837.015552,0.0,4429.185024,4245.767168,s,10,385.14350781249993,38.514350781249995,0.015261880598067006,38.517564453125004,38.528111328125,38.5308974609375,38.5331263671875,"[38.48181640625, 38.49543359375, 38.50627734375, 38.51169140625, 38.51723046875, 38.5178984375, 38.52492578125, 38.52705859375, 38.53368359375, 38.5274921875]",tokens/s,1.6357539130757046,kWh,0.0011235635662026559,0.0001239374439319332,0.0007470703580724743,0.0019945713682070635,tokens/kWh,31585.73365897216,,s,630,385.13932885742145,0.6113322680276537,0.0005062537275485813,0.6113420104980469,0.6119678344726562,0.6121722381591798,0.6125492669677735,"[0.6109921264648438, 0.6100316162109375, 0.611051513671875, 0.6100374145507812, 0.6104026489257812, 0.61068017578125, 0.6101295776367187, 0.6103638305664062, 0.61080810546875, 0.6105416870117187, 0.6103138427734375, 0.6109783325195313, 0.6102640991210937, 0.61072216796875, 0.610486572265625, 0.6104078979492188, 0.6116115112304688, 0.6101176147460937, 0.6113382568359375, 0.6105477294921875, 0.6103302001953125, 0.6110244140625, 0.6103285522460937, 0.6104708251953125, 0.61085693359375, 0.6103161010742187, 0.6109573364257812, 0.6105985107421875, 0.610902587890625, 0.6105989379882812, 0.6109214477539062, 0.61062451171875, 0.610783203125, 0.6112945556640625, 0.6108182983398438, 0.6106873779296875, 0.6106516723632812, 0.6108145141601562, 0.6111149291992187, 0.6111642456054688, 0.6110392456054687, 0.6110167236328125, 0.6107685546875, 0.6111071166992188, 0.6108098754882813, 0.6105782470703125, 0.611965087890625, 0.6104343872070312, 0.6109550170898438, 0.6109105224609375, 0.6109763793945312, 0.6111295776367187, 0.611034912109375, 0.6109859619140625, 0.6112991943359375, 0.6115060424804688, 
0.6106196899414063, 0.611409423828125, 0.6109086303710938, 0.6111492309570312, 0.6111985473632813, 0.611876953125, 0.6106612548828125, 0.6112717895507812, 0.6104872436523437, 0.610522705078125, 0.610563720703125, 0.6107140502929688, 0.6106519775390625, 0.6105010375976563, 0.6112579956054688, 0.6113301391601562, 0.6104927368164063, 0.6113546142578125, 0.6107176513671875, 0.6111146240234375, 0.6111682739257812, 0.6106771240234375, 0.61136279296875, 0.610326416015625, 0.6114895629882813, 0.6102262573242188, 0.6108101196289063, 0.6111925048828125, 0.6111926879882813, 0.6110531005859375, 0.610546630859375, 0.6112174072265625, 0.6107294921875, 0.6116746826171875, 0.6105430297851563, 0.6113797607421875, 0.6106009521484375, 0.611178466796875, 0.6106419067382812, 0.6110119018554687, 0.6121970825195312, 0.6102078247070313, 0.6113463745117188, 0.611293212890625, 0.6107523193359375, 0.611599609375, 0.6104749755859376, 0.611045166015625, 0.610963623046875, 0.610951171875, 0.61168212890625, 0.6109185791015626, 0.6111758422851562, 0.6113695068359375, 0.6109900512695312, 0.6115401611328125, 0.610949951171875, 0.6108753662109375, 0.6111744384765625, 0.6108746948242187, 0.6114636840820312, 0.611250244140625, 0.610959228515625, 0.6115820922851563, 0.61092041015625, 0.6112987060546875, 0.6110767822265625, 0.611411865234375, 0.6119174194335938, 0.610774658203125, 0.611037353515625, 0.610566162109375, 0.611219482421875, 0.6104780883789063, 0.6116825561523438, 0.6103765869140625, 0.6105445556640625, 0.6110844116210937, 0.6112127075195313, 0.6108016357421875, 0.6114279174804688, 0.610241455078125, 0.6113211669921875, 0.6111033935546875, 0.6113541870117187, 0.611105224609375, 0.6106083374023438, 0.611092529296875, 0.611721435546875, 0.6106875610351562, 0.6114488525390624, 0.6113054809570313, 0.6107791137695312, 0.6112071533203125, 0.6111641845703125, 0.6112214965820313, 0.6112781372070313, 0.6113916015625, 0.610779296875, 0.6114451904296875, 0.6112849731445312, 0.6113416748046875, 0.6112028198242188, 0.6110396728515625, 0.6113751220703125, 0.6110286254882813, 0.6112623291015625, 0.6121746826171875, 0.610402587890625, 0.6124005737304687, 0.6108922119140625, 0.611768310546875, 0.6109204711914062, 0.6111437377929687, 0.6113484497070313, 0.6110239868164062, 0.6111959838867187, 0.6112222900390625, 0.6117939453125, 0.611330078125, 0.611583984375, 0.6110422973632812, 0.6113079223632812, 0.6110150756835937, 0.6115774536132812, 0.6112814331054688, 0.61088525390625, 0.610998779296875, 0.6116351928710938, 0.6121427001953125, 0.6112935791015625, 0.611651611328125, 0.6115852661132812, 0.61158984375, 0.6109527587890625, 0.610888427734375, 0.6115921630859374, 0.6106536254882813, 0.6108473510742187, 0.6118167114257812, 0.6111113891601563, 0.6113118896484375, 0.6108585205078125, 0.6109044189453126, 0.6109605712890624, 0.6112481689453125, 0.6114147338867187, 0.610752685546875, 0.6111846313476562, 0.6110863647460938, 0.6110222778320312, 0.6114710083007813, 0.6109985961914063, 0.6114678344726563, 0.6113565673828125, 0.6111417846679688, 0.6116593627929687, 0.6111826782226563, 0.6116703491210937, 0.6109490966796876, 0.6115655517578125, 0.610693115234375, 0.6110773315429687, 0.6111281127929687, 0.6118500366210937, 0.6110473022460937, 0.6114962768554687, 0.6114857177734375, 0.6112329711914063, 0.61167822265625, 0.6107901611328125, 0.6116265869140625, 0.6112479248046875, 0.61144873046875, 0.61187109375, 0.6102879638671875, 0.6122516479492187, 0.6111025390625, 0.6116984252929687, 0.6117682495117187, 0.610472412109375, 0.6118174438476562, 
0.6105640869140625, 0.611926025390625, 0.611567138671875, 0.6107017822265625, 0.611706298828125, 0.6110521240234374, 0.6117572021484375, 0.612184814453125, 0.6106842041015625, 0.6116370849609375, 0.6110784301757812, 0.6120189208984375, 0.611293212890625, 0.6113382568359375, 0.6117147216796875, 0.6109002075195312, 0.6115035400390625, 0.610646728515625, 0.61136279296875, 0.6111211547851563, 0.6111058959960938, 0.6116148071289063, 0.6106111450195313, 0.611332275390625, 0.6105990600585938, 0.6115392456054688, 0.6115084838867187, 0.6101905517578124, 0.6114943237304687, 0.6112234497070312, 0.611407470703125, 0.6116044921875, 0.61050146484375, 0.6120794067382812, 0.6108378295898438, 0.6114108276367187, 0.611237548828125, 0.6108038330078125, 0.611182861328125, 0.612284423828125, 0.6106126098632813, 0.612206298828125, 0.6105542602539062, 0.6119552612304687, 0.6112861328125, 0.6112113037109375, 0.6112544555664062, 0.6114638061523437, 0.611952392578125, 0.6113582763671875, 0.610810546875, 0.6126079711914062, 0.6107545776367187, 0.6121326904296875, 0.61099365234375, 0.61170556640625, 0.6126713256835937, 0.6104024658203125, 0.6125906982421875, 0.6105870971679688, 0.6122266845703125, 0.6111973876953125, 0.6116929321289063, 0.610832275390625, 0.6116475219726563, 0.6121077880859375, 0.6109210205078125, 0.6118068237304688, 0.6112506103515625, 0.6116597900390625, 0.6113929443359375, 0.61121142578125, 0.6117564697265625, 0.6117857666015625, 0.6112423095703124, 0.6110398559570313, 0.6121041870117188, 0.6117105712890625, 0.6112811279296875, 0.611064208984375, 0.6111959838867187, 0.6108968505859375, 0.6108692626953125, 0.611430419921875, 0.6108098754882813, 0.6111761474609375, 0.6110431518554688, 0.6113449096679687, 0.6107310180664063, 0.6115316772460937, 0.6114692993164063, 0.6104757080078125, 0.6113712158203125, 0.6112704467773438, 0.6113427124023437, 0.6115717163085937, 0.6105310668945313, 0.61163330078125, 0.6108427734375, 0.6118502197265625, 0.6115921630859374, 0.6103900146484375, 0.6117611694335937, 0.6107698974609375, 0.611978271484375, 0.6117611083984374, 0.6107955322265625, 0.6119133911132812, 0.6106209716796875, 0.6123458862304687, 0.611243896484375, 0.611052734375, 0.6119125366210938, 0.6110679931640625, 0.6112162475585937, 0.6125850830078124, 0.6105450439453125, 0.6122443237304688, 0.6114215698242188, 0.6118607788085938, 0.6113755493164063, 0.6115061645507812, 0.611826904296875, 0.6110618286132813, 0.6113306274414062, 0.6119398803710937, 0.6121388549804687, 0.611496826171875, 0.6111968383789063, 0.61158154296875, 0.6117154541015625, 0.611382568359375, 0.611432861328125, 0.6111724243164063, 0.6114041748046875, 0.61209521484375, 0.6118342895507812, 0.6113487548828125, 0.612005859375, 0.6110945434570313, 0.6110287475585937, 0.6114531860351563, 0.6117652587890625, 0.610704345703125, 0.6109407348632813, 0.6114449462890625, 0.6114954223632812, 0.6116541748046875, 0.611900390625, 0.6107484741210938, 0.611822021484375, 0.6110479125976562, 0.611915771484375, 0.61170068359375, 0.6108094482421875, 0.6110839233398437, 0.6109970703125, 0.611715087890625, 0.6115327758789062, 0.61136279296875, 0.6114140014648437, 0.611293212890625, 0.61115185546875, 0.6119301147460937, 0.6108995361328124, 0.6124560546875, 0.6110175170898438, 0.6112965087890625, 0.6114058837890625, 0.6116432495117188, 0.6114476318359375, 0.6111863403320312, 0.6121980590820313, 0.6117874145507812, 0.6110812377929687, 0.611695068359375, 0.6110883178710937, 0.61243603515625, 0.6119996337890625, 0.61095556640625, 0.6116661987304688, 0.610967529296875, 
0.6117539672851563, 0.61153271484375, 0.61158203125, 0.6116188354492188, 0.6116188354492188, 0.6107908935546875, 0.6125614624023438, 0.611567626953125, 0.6112808837890625, 0.6116290283203125, 0.6113873901367187, 0.612421630859375, 0.611565185546875, 0.6117789306640625, 0.6108671875, 0.61222216796875, 0.6117747192382812, 0.6116050415039063, 0.6115977172851562, 0.6112446899414062, 0.6119505615234375, 0.6115143432617187, 0.6112133178710938, 0.6109389038085937, 0.6114498291015625, 0.6111221923828125, 0.6121287841796875, 0.6100809326171875, 0.6120098266601562, 0.6109921264648438, 0.6113211059570313, 0.6113121948242187, 0.6112925415039062, 0.6113984375, 0.611454345703125, 0.6114136352539062, 0.6119331665039063, 0.6110739135742187, 0.6118888549804687, 0.6115637817382813, 0.6115280151367187, 0.6106920166015625, 0.6120549926757812, 0.611082275390625, 0.61172119140625, 0.6113543090820313, 0.611282958984375, 0.61156591796875, 0.61148291015625, 0.6115827026367188, 0.61085888671875, 0.6118744506835937, 0.6116233520507812, 0.6119481201171875, 0.61106005859375, 0.61166796875, 0.6118728637695312, 0.610848388671875, 0.612122802734375, 0.6112965087890625, 0.6121966552734375, 0.6116377563476563, 0.611567626953125, 0.6121145629882813, 0.61161181640625, 0.6117364501953125, 0.6111968383789063, 0.6119666748046875, 0.6112833862304687, 0.6117703857421875, 0.6125808715820312, 0.6109168701171875, 0.6113150024414062, 0.6118038330078125, 0.6116127319335938, 0.6124400024414063, 0.6106331176757812, 0.611764892578125, 0.6115382690429687, 0.6115887451171875, 0.6122815551757812, 0.6113696899414063, 0.61158203125, 0.6125175170898437, 0.6118273315429688, 0.6111178588867188, 0.6110796508789063, 0.6114712524414062, 0.6117977905273437, 0.611784912109375, 0.61105322265625, 0.6115532836914063, 0.6113218383789063, 0.6120200805664062, 0.6107853393554687, 0.6117777709960938, 0.6113821411132813, 0.611683837890625, 0.6118856201171875, 0.61125341796875, 0.6108681640625, 0.611375, 0.6117590942382812, 0.6111150512695313, 0.6115455322265625, 0.6114103393554687, 0.6111492919921875, 0.6117642211914063, 0.6117012939453125, 0.6114487915039063, 0.6117152099609375, 0.6112135620117187, 0.6121029663085937, 0.6118154296875, 0.61160498046875, 0.6118670654296875, 0.6117969970703125, 0.6106843872070312, 0.6123680419921875, 0.6115070190429688, 0.6117271728515625, 0.611373046875, 0.61243408203125, 0.6114918212890625, 0.6121692504882813, 0.61119921875, 0.6118868408203125, 0.6116414794921875, 0.611754150390625, 0.6111735229492188, 0.6117667236328125, 0.6120140991210937, 0.6122023315429688, 0.6115181884765625, 0.6117030639648438, 0.6116904907226562, 0.6116195678710937, 0.611751953125, 0.6125194091796875, 0.6119388427734375, 0.6119915771484375, 0.6117601318359375, 0.612121826171875, 0.6117793579101563, 0.6114078979492188, 0.6126141357421875, 0.6114260864257812, 0.6122276000976562, 0.6115369873046875, 0.6108263549804688, 0.6116002807617188, 0.6113702392578125, 0.6110420532226563, 0.6116039428710938, 0.6109025268554688, 0.611217041015625, 0.6117154541015625, 0.6109921264648438, 0.6114871215820312, 0.6117135620117188, 0.6114755249023438, 0.6108710327148438, 0.611488037109375, 0.6119318237304687, 0.6111788330078125, 0.611694580078125, 0.611493896484375, 0.6110637817382812, 0.6114118041992187, 0.6115015869140625, 0.6112830810546875, 0.6119649047851563, 0.6117708740234375, 0.6107279663085937, 0.6119935913085938, 0.6113423461914063, 0.6116249389648437, 0.611358642578125, 0.6114295654296875, 0.6116730346679687, 0.6117742309570312, 0.6115117797851563, 
0.6119463500976563, 0.611017578125, 0.6122537231445313, 0.611446044921875, 0.6121294555664063, 0.6116015625, 0.6116072998046875, 0.6116763305664062, 0.6117437744140625, 0.6113546142578125, 0.6115429077148438, 0.6107661743164062, 0.6120408935546875, 0.6112135620117187, 0.6119387817382812, 0.6119666137695312, 0.6120185546875, 0.610864990234375, 0.61204833984375, 0.6110808715820313, 0.6115655517578125, 0.6121328735351562, 0.6121287841796875, 0.6120202026367187, 0.6107361450195312, 0.61201611328125, 0.6118806762695312, 0.6115453491210937]",tokens/s,1.6357716618268952,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an 
attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,