{
    "config": {
        "name": "pytorch-llama",
        "backend": {
            "name": "pytorch",
            "version": "2.3.0",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-generation",
            "library": "transformers",
            "model": "meta-llama/Llama-2-7b-hf",
            "processor": "meta-llama/Llama-2-7b-hf",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": "float16",
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "tensor_parallel": false,
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 10,
            "duration": 10,
            "warmup_runs": 10,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 128
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 32,
                "min_new_tokens": 32
            },
            "call_kwargs": {}
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7742 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 540671.627264,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.4.0-166-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "NVIDIA A100-SXM4-80GB",
                "NVIDIA A100-SXM4-80GB",
                "NVIDIA A100-SXM4-80GB",
                "NVIDIA DGX Display",
                "NVIDIA A100-SXM4-80GB"
            ],
            "gpu_count": 5,
            "gpu_vram_mb": 347892350976,
            "optimum_benchmark_version": "0.2.1",
            "optimum_benchmark_commit": "347e13ca9f7f904f55669603cfb9f0b6c7e8672c",
            "transformers_version": "4.41.1",
            "transformers_commit": null,
            "accelerate_version": "0.30.1",
            "accelerate_commit": null,
            "diffusers_version": null,
            "diffusers_commit": null,
            "optimum_version": "1.20.0",
            "optimum_commit": null,
            "timm_version": null,
            "timm_commit": null,
            "peft_version": "0.11.1",
            "peft_commit": null
        }
    },
"report": { | |
"prefill": { | |
"memory": { | |
"unit": "MB", | |
"max_ram": 1227.485184, | |
"max_global_vram": 15218.966528, | |
"max_process_vram": 14292.09088, | |
"max_reserved": 13759.414272, | |
"max_allocated": 13578.50368 | |
}, | |
"latency": { | |
"unit": "s", | |
"count": 11, | |
"total": 0.33348883247375494, | |
"mean": 0.030317166588523178, | |
"stdev": 0.0006808782450806106, | |
"p50": 0.030200544357299804, | |
"p90": 0.030986015319824218, | |
"p95": 0.03135895919799805, | |
"p99": 0.03165731430053711, | |
"values": [ | |
0.03173190307617187, | |
0.030948896408081055, | |
0.03068956756591797, | |
0.030453184127807616, | |
0.029873664855957032, | |
0.030986015319824218, | |
0.030200544357299804, | |
0.029642112731933595, | |
0.029924224853515625, | |
0.029542335510253905, | |
0.029496383666992188 | |
] | |
}, | |
"throughput": { | |
"unit": "tokens/s", | |
"value": 4222.030433690182 | |
}, | |
"energy": null, | |
"efficiency": null | |
}, | |
"decode": { | |
"memory": { | |
"unit": "MB", | |
"max_ram": 1227.485184, | |
"max_global_vram": 15218.966528, | |
"max_process_vram": 14292.09088, | |
"max_reserved": 13759.414272, | |
"max_allocated": 13657.83552 | |
}, | |
"latency": { | |
"unit": "s", | |
"count": 11, | |
"total": 10.365610290527345, | |
"mean": 0.9423282082297586, | |
"stdev": 0.011083219201782932, | |
"p50": 0.9396575927734375, | |
"p90": 0.9554216918945313, | |
"p95": 0.9603456420898437, | |
"p99": 0.9642848022460938, | |
"values": [ | |
0.9652695922851563, | |
0.9370484619140625, | |
0.9452354736328125, | |
0.9422606811523437, | |
0.9396575927734375, | |
0.9554216918945313, | |
0.9331040649414063, | |
0.932161865234375, | |
0.9298218383789062, | |
0.931310546875, | |
0.9543184814453125 | |
] | |
}, | |
"throughput": { | |
"unit": "tokens/s", | |
"value": 32.89724294493536 | |
}, | |
"energy": null, | |
"efficiency": null | |
}, | |
"per_token": { | |
"memory": null, | |
"latency": { | |
"unit": "s", | |
"count": 340, | |
"total": 10.636115966796876, | |
"mean": 0.03128269401999081, | |
"stdev": 0.005345889653283576, | |
"p50": 0.030132224082946777, | |
"p90": 0.03119851493835449, | |
"p95": 0.03183129644393921, | |
"p99": 0.060970833816528326, | |
"values": [ | |
                    0.031073280334472656,
                    0.03139891242980957,
                    0.031197183609008788,
                    0.03141119956970215,
                    0.031120384216308594,
                    0.031143936157226562,
                    0.03181977653503418,
                    0.03158118438720703,
                    0.031438848495483396,
                    0.031048704147338867,
                    0.03205017471313477,
                    0.03077836799621582,
                    0.029928447723388672,
                    0.0299683837890625,
                    0.029938688278198244,
                    0.030219263076782226,
                    0.03061452865600586,
                    0.031148031234741212,
                    0.031123455047607423,
                    0.031510528564453126,
                    0.03125657653808594,
                    0.031118335723876952,
                    0.03124019241333008,
                    0.031086591720581053,
                    0.031032320022583007,
                    0.03147776031494141,
                    0.03156377601623535,
                    0.03160678482055664,
                    0.03129958343505859,
                    0.03143475151062012,
                    0.06258687973022461,
                    0.030999551773071288,
                    0.03094118309020996,
                    0.030883840560913086,
                    0.030267391204833984,
                    0.030338048934936523,
                    0.030442495346069336,
                    0.030108671188354492,
                    0.029937664031982423,
                    0.030014463424682617,
                    0.029875200271606447,
                    0.03015782356262207,
                    0.030728191375732423,
                    0.030130176544189452,
                    0.030060543060302734,
                    0.02998374366760254,
                    0.029997055053710937,
                    0.029501440048217774,
                    0.029851648330688478,
                    0.02997452735900879,
                    0.029884416580200194,
                    0.030724096298217773,
                    0.03058995246887207,
                    0.03017318344116211,
                    0.03034419250488281,
                    0.02992639923095703,
                    0.02996121597290039,
                    0.029831167221069335,
                    0.030067712783813476,
                    0.029856767654418945,
                    0.02995609664916992,
                    0.061211647033691405,
                    0.03102822494506836,
                    0.030716928482055664,
                    0.030514175415039063,
                    0.030458879470825196,
                    0.0307589111328125,
                    0.030826496124267577,
                    0.03038924789428711,
                    0.030666751861572264,
                    0.030298112869262695,
                    0.030434303283691407,
                    0.03059404754638672,
                    0.03057459259033203,
                    0.030996480941772462,
                    0.031040512084960937,
                    0.03078451156616211,
                    0.030276607513427735,
                    0.03031449508666992,
                    0.030518272399902343,
                    0.030305280685424804,
                    0.0301977596282959,
                    0.030195711135864257,
                    0.030451711654663087,
                    0.030279680252075194,
                    0.02997657585144043,
                    0.03019161605834961,
                    0.03021004867553711,
                    0.030331903457641602,
                    0.030256128311157225,
                    0.030503936767578125,
                    0.030621696472167968,
                    0.06128844833374023,
                    0.02996019172668457,
                    0.030069759368896484,
                    0.02996326446533203,
                    0.0301711368560791,
                    0.029783039093017577,
                    0.03156172752380371,
                    0.033949695587158206,
                    0.03012505531311035,
                    0.02991001510620117,
                    0.030093311309814453,
                    0.030284799575805665,
                    0.03017932891845703,
                    0.029879295349121093,
                    0.030256128311157225,
                    0.0304005126953125,
                    0.030342144012451173,
                    0.03034316825866699,
                    0.030423040390014647,
                    0.030135295867919923,
                    0.030357503890991212,
                    0.03017932891845703,
                    0.030473215103149414,
                    0.030493696212768553,
                    0.03035238456726074,
                    0.029899776458740233,
                    0.03038515281677246,
                    0.030277631759643556,
                    0.030272512435913085,
                    0.030552064895629883,
                    0.03040358352661133,
                    0.06059417724609375,
                    0.030332927703857423,
                    0.03003392028808594,
                    0.030119935989379884,
                    0.030078975677490235,
                    0.030011392593383788,
                    0.030120960235595705,
                    0.03017728042602539,
                    0.030134271621704102,
                    0.029839359283447265,
                    0.030063615798950196,
                    0.02993971252441406,
                    0.030225408554077147,
                    0.030040063858032227,
                    0.029928447723388672,
                    0.030212095260620117,
                    0.030067712783813476,
                    0.030315519332885742,
                    0.02980147171020508,
                    0.030082048416137694,
                    0.0299683837890625,
                    0.02997555160522461,
                    0.030086143493652344,
                    0.03022643280029297,
                    0.03098931121826172,
                    0.031122432708740235,
                    0.031247360229492187,
                    0.030840831756591795,
                    0.030888959884643553,
                    0.030846975326538087,
                    0.03121049690246582,
                    0.06288896179199219,
                    0.030744575500488282,
                    0.030449663162231445,
                    0.030307327270507813,
                    0.030925823211669923,
                    0.0303687686920166,
                    0.03053670310974121,
                    0.03036672019958496,
                    0.030291967391967774,
                    0.031016960144042968,
                    0.03396710586547851,
                    0.03447091293334961,
                    0.036863998413085936,
                    0.03136716842651367,
                    0.030459903717041017,
                    0.030306304931640625,
                    0.03062579154968262,
                    0.03041279983520508,
                    0.030220287322998047,
                    0.03019161605834961,
                    0.029807615280151366,
                    0.029896703720092774,
                    0.029883392333984377,
                    0.029868032455444334,
                    0.030216192245483397,
                    0.03000115203857422,
                    0.030040063858032227,
                    0.029864959716796875,
                    0.03014963150024414,
                    0.030023679733276368,
                    0.029954048156738283,
                    0.06057574462890625,
                    0.030092287063598632,
                    0.030130176544189452,
                    0.029865983963012696,
                    0.02995199966430664,
                    0.029914112091064454,
                    0.030035968780517577,
                    0.02997964859008789,
                    0.029929471969604493,
                    0.02998681640625,
                    0.030136320114135744,
                    0.0299683837890625,
                    0.030291967391967774,
                    0.030308351516723633,
                    0.030095359802246095,
                    0.03037798309326172,
                    0.03021107292175293,
                    0.030259199142456054,
                    0.030108671188354492,
                    0.030082048416137694,
                    0.029981695175170898,
                    0.029929471969604493,
                    0.03001753616333008,
                    0.030266368865966797,
                    0.030030847549438477,
                    0.030266368865966797,
                    0.030010368347167967,
                    0.030010368347167967,
                    0.030051328659057616,
                    0.030102527618408204,
                    0.03037900733947754,
                    0.06019379043579102,
                    0.0299233283996582,
                    0.029826047897338868,
                    0.030074880599975585,
                    0.02995814323425293,
                    0.030268415451049805,
                    0.030028799057006835,
                    0.029857791900634766,
                    0.02976563262939453,
                    0.03001651191711426,
                    0.030003200531005858,
                    0.029874176025390626,
                    0.029899776458740233,
                    0.029820928573608397,
                    0.02999091148376465,
                    0.02996121597290039,
                    0.03000831985473633,
                    0.030050304412841795,
                    0.03018137550354004,
                    0.030297088623046874,
                    0.030274560928344726,
                    0.029936639785766602,
                    0.030139392852783203,
                    0.030418943405151368,
                    0.030254079818725587,
                    0.030128128051757814,
                    0.030045183181762695,
                    0.03017215919494629,
                    0.03002572822570801,
                    0.030071807861328126,
                    0.03037286376953125,
                    0.06049075317382813,
                    0.030277631759643556,
                    0.03034419250488281,
                    0.0304005126953125,
                    0.030069759368896484,
                    0.029916160583496092,
                    0.030072832107543947,
                    0.030270463943481447,
                    0.03060326385498047,
                    0.03036262321472168,
                    0.029912063598632813,
                    0.03002060890197754,
                    0.029829120635986327,
                    0.029920255661010742,
                    0.030039039611816407,
                    0.029895679473876953,
                    0.02982707214355469,
                    0.02999398422241211,
                    0.029929471969604493,
                    0.02956287956237793,
                    0.029930496215820314,
                    0.02997145652770996,
                    0.02993561553955078,
                    0.02977177619934082,
                    0.02993971252441406,
                    0.029663232803344725,
                    0.029641727447509765,
                    0.02961408042907715,
                    0.02999398422241211,
                    0.02982809638977051,
                    0.02976870346069336,
                    0.05970022583007813,
                    0.02977689552307129,
                    0.02978713607788086,
                    0.02999295997619629,
                    0.02973593521118164,
                    0.02979327964782715,
                    0.030005247116088866,
                    0.029825023651123047,
                    0.02998067283630371,
                    0.030058496475219725,
                    0.02972876739501953,
                    0.029978624343872072,
                    0.03019161605834961,
                    0.029885440826416015,
                    0.03017932891845703,
                    0.030027776718139648,
                    0.030219263076782226,
                    0.030219263076782226,
                    0.030051328659057616,
                    0.030044160842895507,
                    0.030073856353759764,
                    0.030535680770874023,
                    0.030113792419433592,
                    0.030079999923706056,
                    0.030076927185058593,
                    0.030318592071533205,
                    0.03016703987121582,
                    0.03002572822570801,
                    0.030102527618408204,
                    0.030123008728027343,
                    0.030062591552734375,
                    0.059947006225585936,
                    0.03017728042602539,
                    0.029688831329345702,
                    0.02992742347717285,
                    0.030006271362304687,
                    0.02955571174621582,
                    0.029641727447509765,
                    0.029493247985839844,
                    0.029463552474975587,
                    0.02935398483276367,
                    0.029277183532714843,
                    0.029447168350219727,
                    0.029500415802001953,
                    0.02940928077697754,
                    0.029488128662109377,
                    0.029711360931396483,
                    0.029430784225463868,
                    0.029405183792114258,
                    0.029677568435668947,
                    0.02954035186767578,
                    0.04206284713745117,
                    0.04883148956298828,
                    0.031104000091552734,
                    0.0303503360748291,
                    0.029662208557128908,
                    0.029731840133666993,
                    0.029820928573608397,
                    0.03002470397949219,
                    0.029973503112792968,
                    0.030058496475219725,
                    0.030078975677490235
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 31.966556312604112
            },
            "energy": null,
            "efficiency": null
        }
    }
} |
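
For reference, the three throughput figures in the report are derivable from the config and the latency totals: prefill processes batch_size × sequence_length tokens per measured forward pass, decode produces max_new_tokens − 1 tokens per generate call (the first generated token is attributed to prefill), and per_token is simply the count of recorded token latencies over their summed duration. A minimal sketch, assuming only the Python standard library and a hypothetical local copy of this report saved as pytorch-llama-2.3.0-float16.json:

```python
import json

# Hypothetical path; point this at wherever the report JSON is saved.
with open("pytorch-llama-2.3.0-float16.json") as f:
    report = json.load(f)

scenario = report["config"]["scenario"]
shapes = scenario["input_shapes"]
new_tokens = scenario["generate_kwargs"]["max_new_tokens"]

prefill = report["report"]["prefill"]["latency"]
decode = report["report"]["decode"]["latency"]
per_token = report["report"]["per_token"]["latency"]

# Prefill: each forward pass ingests batch_size * sequence_length tokens.
prefill_tps = (shapes["batch_size"] * shapes["sequence_length"]
               * prefill["count"] / prefill["total"])

# Decode: each generate call emits max_new_tokens - 1 tokens after the first.
decode_tps = (new_tokens - 1) * decode["count"] / decode["total"]

# Per-token: recorded token latencies over their summed duration.
per_token_tps = per_token["count"] / per_token["total"]

print(f"prefill:   {prefill_tps:9.2f} tokens/s")    # ~4222.03, matching the report
print(f"decode:    {decode_tps:9.2f} tokens/s")     # ~32.90
print(f"per_token: {per_token_tps:9.2f} tokens/s")  # ~31.97
```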