{ "config": { "name": "pytorch-llama", "backend": { "name": "pytorch", "version": "2.3.0", "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend", "task": "text-generation", "library": "transformers", "model": "meta-llama/Llama-2-7b-hf", "processor": "meta-llama/Llama-2-7b-hf", "device": "cuda", "device_ids": "0", "seed": 42, "inter_op_num_threads": null, "intra_op_num_threads": null, "model_kwargs": {}, "processor_kwargs": {}, "hub_kwargs": {}, "no_weights": true, "device_map": null, "torch_dtype": "float16", "eval_mode": true, "to_bettertransformer": false, "low_cpu_mem_usage": null, "attn_implementation": null, "cache_implementation": null, "autocast_enabled": false, "autocast_dtype": null, "torch_compile": false, "torch_compile_target": "forward", "torch_compile_config": {}, "quantization_scheme": "gptq", "quantization_config": { "bits": 4, "use_exllama": true, "version": 2, "model_seqlen": 256 }, "deepspeed_inference": false, "deepspeed_inference_config": {}, "tensor_parallel": false, "peft_type": null, "peft_config": {} }, "scenario": { "name": "inference", "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario", "iterations": 10, "duration": 10, "warmup_runs": 10, "input_shapes": { "batch_size": 1, "num_choices": 2, "sequence_length": 128 }, "new_tokens": null, "latency": true, "memory": true, "energy": false, "forward_kwargs": {}, "generate_kwargs": { "max_new_tokens": 32, "min_new_tokens": 32 }, "call_kwargs": {} }, "launcher": { "name": "process", "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher", "device_isolation": true, "device_isolation_action": "warn", "numactl": false, "numactl_kwargs": {}, "start_method": "spawn" }, "environment": { "cpu": " AMD EPYC 7742 64-Core Processor", "cpu_count": 128, "cpu_ram_mb": 540671.627264, "system": "Linux", "machine": "x86_64", "platform": "Linux-5.4.0-166-generic-x86_64-with-glibc2.35", "processor": "x86_64", "python_version": "3.10.12", "gpu": [ 
"NVIDIA A100-SXM4-80GB", "NVIDIA A100-SXM4-80GB", "NVIDIA A100-SXM4-80GB", "NVIDIA DGX Display", "NVIDIA A100-SXM4-80GB" ], "gpu_count": 5, "gpu_vram_mb": 347892350976, "optimum_benchmark_version": "0.2.1", "optimum_benchmark_commit": "347e13ca9f7f904f55669603cfb9f0b6c7e8672c", "transformers_version": "4.41.1", "transformers_commit": null, "accelerate_version": "0.30.1", "accelerate_commit": null, "diffusers_version": null, "diffusers_commit": null, "optimum_version": "1.20.0", "optimum_commit": null, "timm_version": null, "timm_commit": null, "peft_version": "0.11.1", "peft_commit": null } }, "report": { "prefill": { "memory": { "unit": "MB", "max_ram": 4115.685376, "max_global_vram": 6031.343616, "max_process_vram": 5104.467968, "max_reserved": 4571.79136, "max_allocated": 4252.77184 }, "latency": { "unit": "s", "count": 10, "total": 1.5582011566162108, "mean": 0.15582011566162107, "stdev": 0.0004235381281571237, "p50": 0.15581967926025392, "p90": 0.1563481979370117, "p95": 0.15637425613403322, "p99": 0.1563951026916504, "values": [ 0.1550247039794922, 0.155944580078125, 0.15537123107910156, 0.15548223876953124, 0.15573526000976562, 0.15573081970214844, 0.1562655029296875, 0.1564003143310547, 0.1563424072265625, 0.1559040985107422 ] }, "throughput": { "unit": "tokens/s", "value": 821.460049984591 }, "energy": null, "efficiency": null }, "decode": { "memory": { "unit": "MB", "max_ram": 4115.685376, "max_global_vram": 6031.343616, "max_process_vram": 5104.467968, "max_reserved": 4571.79136, "max_allocated": 4359.322112 }, "latency": { "unit": "s", "count": 10, "total": 46.20984765625, "mean": 4.620984765625, "stdev": 0.0032065285351623603, "p50": 4.623131591796875, "p90": 4.623380273437499, "p95": 4.623573193359375, "p99": 4.623727529296875, "values": [ 4.61422802734375, 4.61675830078125, 4.61875732421875, 4.6200751953125, 4.62333740234375, 4.6232158203125, 4.62304736328125, 4.6233251953125, 4.62376611328125, 4.6233369140625 ] }, "throughput": { "unit": "tokens/s", 
"value": 6.7085267691435835 }, "energy": null, "efficiency": null }, "per_token": { "memory": null, "latency": { "unit": "s", "count": 309, "total": 47.46416026306151, "mean": 0.15360569664421206, "stdev": 0.02631289684865597, "p50": 0.14913433837890624, "p90": 0.14920335083007813, "p95": 0.1492768768310547, "p99": 0.30549913330078127, "values": [ 0.14821478271484376, 0.14794239807128906, 0.14795059204101563, 0.1487615966796875, 0.15093350219726562, 0.14969754028320312, 0.14820556640625, 0.14847386169433593, 0.14911180114746095, 0.14855372619628907, 0.14842367553710936, 0.14902989196777344, 0.14976409912109376, 0.14818202209472656, 0.14904934692382812, 0.14912307739257813, 0.1481134033203125, 0.14855270385742186, 0.14921011352539063, 0.14918759155273437, 0.1486571502685547, 0.14919270324707032, 0.14846975708007812, 0.14917631530761719, 0.14829466247558593, 0.14875648498535157, 0.149180419921875, 0.14884249877929687, 0.14918348693847655, 0.148822021484375, 0.3053148193359375, 0.14864076232910156, 0.14915890502929688, 0.14831411743164064, 0.14853427124023438, 0.14888755798339845, 0.14919270324707032, 0.14915481567382813, 0.14921932983398437, 0.14869094848632813, 0.14916096496582032, 0.14906675720214843, 0.14883737182617188, 0.1489817657470703, 0.14899813842773438, 0.14900837707519532, 0.14853939819335937, 0.14870527648925783, 0.14893157958984374, 0.1488353271484375, 0.14920191955566406, 0.14870118713378908, 0.14916607666015624, 0.1491210174560547, 0.14840422058105468, 0.14914047241210937, 0.1491619873046875, 0.14917631530761719, 0.14846258544921875, 0.149032958984375, 0.14897970581054687, 0.3050260620117187, 0.1486376953125, 0.14920294189453126, 0.1486929931640625, 0.14914047241210937, 0.14911692810058594, 0.14829977416992188, 0.14916915893554689, 0.14849331665039062, 0.1490688018798828, 0.14862130737304688, 0.14913740539550782, 0.149138427734375, 0.14916915893554689, 0.14920909118652342, 0.1487237091064453, 0.14913638305664062, 0.14863871765136719, 
0.14916812133789062, 0.1488732147216797, 0.14920806884765625, 0.14851890563964842, 0.14915072631835938, 0.14923161315917968, 0.14900428771972657, 0.14920704650878908, 0.14898687744140626, 0.14916709899902345, 0.14883226013183593, 0.14916812133789062, 0.14903602600097657, 0.30511309814453125, 0.14909849548339843, 0.1488107452392578, 0.1491558380126953, 0.14880767822265625, 0.14836531066894532, 0.1491179504394531, 0.14913433837890624, 0.14903500366210937, 0.1491445770263672, 0.14917018127441406, 0.14883123779296875, 0.14916403198242187, 0.14882815551757814, 0.14917529296875, 0.1488404541015625, 0.1491793975830078, 0.14881484985351562, 0.14919168090820312, 0.1488035888671875, 0.1491619873046875, 0.1491568603515625, 0.14915788269042968, 0.14899813842773438, 0.14884556579589844, 0.14927154541015625, 0.14869810485839843, 0.14915277099609375, 0.14899609375, 0.1491558380126953, 0.1491988525390625, 0.3054049377441406, 0.14867149353027342, 0.14917324829101564, 0.1491988525390625, 0.1491179504394531, 0.14917222595214844, 0.14916607666015624, 0.14896946716308593, 0.14921318054199217, 0.1496258544921875, 0.14931149291992188, 0.1491630096435547, 0.14886093139648438, 0.1491988525390625, 0.14869708251953126, 0.1491773376464844, 0.14902886962890624, 0.14915174865722655, 0.14920498657226564, 0.14920909118652342, 0.1491445770263672, 0.14901657104492189, 0.1492367401123047, 0.1491988525390625, 0.14896127319335936, 0.14915379333496093, 0.1489827880859375, 0.14927769470214844, 0.14909645080566405, 0.14920191955566406, 0.1491435546875, 0.3053475952148437, 0.14914662170410156, 0.14883634948730468, 0.14913638305664062, 0.149106689453125, 0.14874520874023436, 0.1491015625, 0.14910258483886718, 0.14913740539550782, 0.14919679260253907, 0.14903910827636718, 0.14875340270996093, 0.14911077880859375, 0.1491312713623047, 0.14914151000976564, 0.14912307739257813, 0.14916709899902345, 0.14917837524414063, 0.14920089721679688, 0.14921011352539063, 0.14930227661132814, 0.14927565002441406, 
0.14916915893554689, 0.14918655395507813, 0.14915481567382813, 0.14918246459960938, 0.14913536071777345, 0.14914866638183594, 0.14916709899902345, 0.14917837524414063, 0.14916915893554689, 0.30592410278320314, 0.14912921142578126, 0.1491568603515625, 0.14913023376464843, 0.1491005401611328, 0.14912205505371093, 0.14910258483886718, 0.1491077117919922, 0.14917018127441406, 0.1486182403564453, 0.1489469451904297, 0.14910566711425782, 0.14913638305664062, 0.14912307739257813, 0.14915379333496093, 0.1491619873046875, 0.14915481567382813, 0.14914047241210937, 0.14914764404296876, 0.149138427734375, 0.1491558380126953, 0.14914866638183594, 0.14916096496582032, 0.1491210174560547, 0.1491568603515625, 0.1491179504394531, 0.14915072631835938, 0.14913740539550782, 0.1491312713623047, 0.14913536071777345, 0.14915890502929688, 0.3060193176269531, 0.1491619873046875, 0.1491435546875, 0.14900633239746094, 0.14910975646972657, 0.14915174865722655, 0.14911488342285156, 0.14913433837890624, 0.14911386108398436, 0.1491210174560547, 0.14919577026367187, 0.14918963623046874, 0.14917837524414063, 0.14899404907226563, 0.14920704650878908, 0.14899302673339843, 0.1489950714111328, 0.14918348693847655, 0.14921420288085938, 0.14920294189453126, 0.14913023376464843, 0.14899302673339843, 0.14913023376464843, 0.14910975646972657, 0.14911488342285156, 0.14919065856933594, 0.14913536071777345, 0.14912818908691405, 0.14911077880859375, 0.14911180114746095, 0.14914151000976564, 0.30597940063476564, 0.14910975646972657, 0.14916709899902345, 0.1491425323486328, 0.1491394500732422, 0.14912307739257813, 0.1491558380126953, 0.14909336853027344, 0.14914866638183594, 0.14913433837890624, 0.149106689453125, 0.14912818908691405, 0.14910258483886718, 0.14913638305664062, 0.14915379333496093, 0.1491240997314453, 0.14916709899902345, 0.149180419921875, 0.14919270324707032, 0.14918861389160157, 0.14910362243652345, 0.14912205505371093, 0.14911590576171874, 0.1491619873046875, 0.14914764404296876, 
0.14917324829101564, 0.1491445770263672, 0.14913331604003907, 0.1491251220703125, 0.14911077880859375, 0.1491630096435547, 0.30550732421875, 0.14909542846679688, 0.14910362243652345, 0.14910362243652345, 0.14909440612792968, 0.14910258483886718, 0.14910873413085937, 0.14908108520507812, 0.14913740539550782, 0.14913638305664062, 0.14913638305664062, 0.14911692810058594, 0.14913433837890624, 0.14913229370117187, 0.14912921142578126, 0.14916403198242187, 0.14916709899902345, 0.14909747314453126, 0.14915994262695312, 0.14911590576171874, 0.14913638305664062, 0.149106689453125, 0.14913536071777345, 0.14914968872070314, 0.14912614440917968, 0.14910464477539062, 0.14912205505371093, 0.14912614440917968, 0.14914968872070314, 0.14911488342285156, 0.1491742706298828 ] }, "throughput": { "unit": "tokens/s", "value": 6.5101752203646575 }, "energy": null, "efficiency": null } } }