{
"config": {
"name": "cuda_inference_transformers_text-generation_openai-community/gpt2",
"backend": {
"name": "pytorch",
"version": "2.3.1+cu121",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "text-generation",
"library": "transformers",
"model": "openai-community/gpt2",
"processor": "openai-community/gpt2",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"hub_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": null,
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": null,
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 1,
"duration": 1,
"warmup_runs": 1,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 2
},
"new_tokens": null,
"latency": true,
"memory": true,
"energy": true,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 2,
"min_new_tokens": 2
},
"call_kwargs": {
"num_inference_steps": 2
}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "error",
"numactl": false,
"numactl_kwargs": {},
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7R32",
"cpu_count": 16,
"cpu_ram_mb": 66697.293824,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35",
"processor": "x86_64",
"python_version": "3.10.12",
"gpu": [
"NVIDIA A10G"
],
"gpu_count": 1,
"gpu_vram_mb": 24146608128,
"optimum_benchmark_version": "0.2.1",
"optimum_benchmark_commit": null,
"transformers_version": "4.42.3",
"transformers_commit": null,
"accelerate_version": "0.31.0",
"accelerate_commit": null,
"diffusers_version": "0.29.2",
"diffusers_commit": null,
"optimum_version": null,
"optimum_commit": null,
"timm_version": "1.0.7",
"timm_commit": null,
"peft_version": null,
"peft_commit": null
}
},
"report": {
"prefill": {
"memory": {
"unit": "MB",
"max_ram": 1026.445312,
"max_global_vram": 1365.77024,
"max_process_vram": 0.0,
"max_reserved": 725.614592,
"max_allocated": 522.210816
},
"latency": {
"unit": "s",
"count": 73,
"total": 0.490487072467804,
"mean": 0.006719000992709644,
"stdev": 0.00035388240329148145,
"p50": 0.006752960205078125,
"p90": 0.007144352149963378,
"p95": 0.007237023830413818,
"p99": 0.00748655366897583,
"values": [
0.007704512119293213,
0.007366687774658203,
0.007263423919677735,
0.007401792049407959,
0.007152224063873291,
0.0068960638046264644,
0.00700867223739624,
0.007116576194763183,
0.007039904117584228,
0.006909599781036377,
0.007077919960021973,
0.007068992137908936,
0.006998496055603027,
0.0071512961387634275,
0.00703385591506958,
0.007035552024841308,
0.006958784103393555,
0.007179135799407959,
0.006958623886108398,
0.007023039817810058,
0.0069565439224243165,
0.006872511863708496,
0.0069491519927978515,
0.006842559814453125,
0.0069734082221984866,
0.006893760204315186,
0.007109951972961426,
0.007096352100372315,
0.006941247940063477,
0.006928351879119873,
0.006823103904724121,
0.006659296035766602,
0.006848703861236572,
0.006492159843444824,
0.006541759967803955,
0.00697327995300293,
0.007037312030792236,
0.007219423770904541,
0.006811776161193848,
0.006752960205078125,
0.006694975852966308,
0.006557216167449951,
0.006535903930664062,
0.006492735862731934,
0.0063496317863464356,
0.006371776103973389,
0.006358784198760986,
0.00636297607421875,
0.006360064029693604,
0.006328479766845703,
0.006314976215362549,
0.006344064235687256,
0.006352128028869629,
0.0063415999412536625,
0.006304768085479737,
0.006363584041595459,
0.006368480205535889,
0.006328671932220459,
0.006340672016143798,
0.006376512050628662,
0.006356128215789795,
0.006400608062744141,
0.006356607913970947,
0.0063359041213989255,
0.00632803201675415,
0.006322591781616211,
0.006347328186035156,
0.006365312099456787,
0.006333183765411377,
0.006375967979431152,
0.0063578557968139646,
0.006354944229125976,
0.006335840225219726
]
},
"throughput": {
"unit": "tokens/s",
"value": 297.66329878058826
},
"energy": {
"unit": "kWh",
"cpu": 7.918028257511282e-08,
"ram": 4.329235377442412e-08,
"gpu": 1.5617790272000794e-07,
"total": 2.7865053906954487e-07
},
"efficiency": {
"unit": "tokens/kWh",
"value": 7177448.881593031
}
},
"decode": {
"memory": {
"unit": "MB",
"max_ram": 1026.445312,
"max_global_vram": 1365.77024,
"max_process_vram": 0.0,
"max_reserved": 725.614592,
"max_allocated": 522.43456
},
"latency": {
"unit": "s",
"count": 73,
"total": 0.5170226554870605,
"mean": 0.007082502129959733,
"stdev": 0.0003597104042888968,
"p50": 0.007062367916107178,
"p90": 0.0074906113624572754,
"p95": 0.007536230278015137,
"p99": 0.007759183235168457,
"values": [
0.007666079998016357,
0.007704383850097656,
0.007900095939636231,
0.0075414719581604,
0.007397151947021484,
0.007262623786926269,
0.007381343841552734,
0.0074691839218139645,
0.007346271991729736,
0.007439616203308105,
0.007455103874206543,
0.007493535995483399,
0.007474400043487549,
0.007427296161651611,
0.007484096050262451,
0.007425663948059082,
0.007488768100738525,
0.007494976043701172,
0.007348991870880127,
0.007409503936767578,
0.007383296012878418,
0.007337664127349854,
0.007439199924468994,
0.007332831859588623,
0.007424928188323974,
0.007332223892211914,
0.007491072177886963,
0.00746665620803833,
0.007402719974517823,
0.007462175846099853,
0.0071636161804199214,
0.007093088150024414,
0.006930784225463867,
0.006762656211853027,
0.006867392063140869,
0.007349535942077636,
0.007532735824584961,
0.007402207851409912,
0.007062367916107178,
0.007123424053192139,
0.006920576095581055,
0.006858143806457519,
0.006901088237762451,
0.006782944202423096,
0.006692192077636719,
0.006723072052001953,
0.006711552143096924,
0.006696320056915283,
0.006690656185150147,
0.006713696002960205,
0.006814047813415527,
0.006681663990020752,
0.006779967784881592,
0.00669708776473999,
0.006699647903442383,
0.0066685757637023925,
0.006705984115600586,
0.006695551872253418,
0.0066795840263366695,
0.006672383785247803,
0.006726016044616699,
0.006727744102478028,
0.006696191787719727,
0.006784800052642822,
0.006721407890319824,
0.00671011209487915,
0.006702527999877929,
0.006715839862823487,
0.006738912105560302,
0.006681151866912842,
0.0066744318008422855,
0.006683680057525634,
0.006703968048095703
]
},
"throughput": {
"unit": "tokens/s",
"value": 141.19303907723435
},
"energy": {
"unit": "kWh",
"cpu": 7.653913935955035e-08,
"ram": 4.184949717320094e-08,
"gpu": 1.4918052870101789e-07,
"total": 2.6756916523376944e-07
},
"efficiency": {
"unit": "tokens/kWh",
"value": 3737351.421365468
}
},
"per_token": {
"memory": null,
"latency": {
"unit": "s",
"count": 72,
"total": 0.9944811506271363,
"mean": 0.013812238203154671,
"stdev": 0.0006911200575579257,
"p50": 0.01382041597366333,
"p90": 0.014617190170288086,
"p95": 0.014701055908203125,
"p99": 0.015139122943878175,
"values": [
0.015118335723876953,
0.01519001579284668,
0.015003647804260254,
0.01460428810119629,
0.014211071968078613,
0.014368767738342286,
0.014649375915527344,
0.014426079750061035,
0.01437388801574707,
0.014575615882873535,
0.01458892822265625,
0.014493696212768555,
0.014650367736816406,
0.014519295692443847,
0.014517248153686524,
0.014442496299743653,
0.01468723201751709,
0.01439948844909668,
0.014460927963256836,
0.014389280319213868,
0.014238688468933105,
0.01439129638671875,
0.014241791725158692,
0.014428159713745118,
0.01425100803375244,
0.014618623733520507,
0.014573568344116212,
0.014424063682556153,
0.014424063682556153,
0.01406771183013916,
0.01375641632080078,
0.01389571189880371,
0.013287391662597656,
0.013415424346923829,
0.014295040130615234,
0.01456332778930664,
0.014717951774597168,
0.013940735816955567,
0.013884415626525879,
0.013701120376586913,
0.013446144104003906,
0.013450240135192871,
0.013321215629577637,
0.01306214427947998,
0.013120512008666992,
0.013101056098937988,
0.013073408126831054,
0.013071359634399414,
0.013079551696777344,
0.013148159980773925,
0.01305395221710205,
0.013072383880615235,
0.013153280258178711,
0.01303756809234619,
0.013064191818237305,
0.013094911575317383,
0.01305292797088623,
0.01304576015472412,
0.013073408126831054,
0.013097984313964844,
0.013163519859313964,
0.013080575942993163,
0.013124608039855956,
0.013094911575317383,
0.01306009578704834,
0.01307750415802002,
0.013103103637695313,
0.013097984313964844,
0.013083647727966309,
0.013057024002075195,
0.013065216064453124,
0.013066240310668945
]
},
"throughput": {
"unit": "tokens/s",
"value": 72.39956227887839
},
"energy": null,
"efficiency": null
}
}
}