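This is the benchmark.json artifact written by optimum-benchmark (v0.3.1, per the "environment" section below) for CUDA inference of FacebookAI/roberta-base on text-classification. For orientation, a configuration equivalent to the one serialized below could be assembled with the library's 0.3.x Python API roughly as follows. This is a minimal sketch based on the entry points documented in the project README (Benchmark, BenchmarkConfig, ProcessConfig, InferenceConfig, PyTorchConfig), not the exact script that produced this file; the field values are copied from the config section below.

```python
# Minimal sketch, assuming the optimum-benchmark 0.3.x Python API as documented
# in the project README; not the exact script that generated this artifact.
from optimum_benchmark import (
    Benchmark,
    BenchmarkConfig,
    InferenceConfig,
    ProcessConfig,
    PyTorchConfig,
)

if __name__ == "__main__":
    # Launcher: isolated spawned process, matching the "launcher" section below.
    launcher_config = ProcessConfig(
        device_isolation=True,
        device_isolation_action="error",
        start_method="spawn",
    )
    # Scenario: track latency, memory, and energy with the recorded input shapes.
    scenario_config = InferenceConfig(
        latency=True,
        memory=True,
        energy=True,
        input_shapes={"batch_size": 1, "num_choices": 2, "sequence_length": 2},
    )
    # Backend: PyTorch on CUDA device 0; no_weights=True benchmarks a randomly
    # initialized copy of the model instead of downloading its weights.
    backend_config = PyTorchConfig(
        model="FacebookAI/roberta-base",
        device="cuda",
        device_ids="0",
        no_weights=True,
    )
    benchmark_config = BenchmarkConfig(
        name="cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
        launcher=launcher_config,
        scenario=scenario_config,
        backend=backend_config,
    )
    # Launch the run and print the resulting report (config + report structure
    # mirrors the JSON below).
    benchmark_report = Benchmark.launch(benchmark_config)
    benchmark_report.log()
```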
{
    "config": {
        "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
        "backend": {
            "name": "pytorch",
            "version": "2.3.1+cu121",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-classification",
            "library": "transformers",
            "model": "FacebookAI/roberta-base",
            "processor": "FacebookAI/roberta-base",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": true,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7R32",
            "cpu_count": 16,
            "cpu_ram_mb": 66697.293824,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "NVIDIA A10G"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 24146608128,
            "optimum_benchmark_version": "0.3.1",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.42.3",
            "transformers_commit": null,
            "accelerate_version": "0.31.0",
            "accelerate_commit": null,
            "diffusers_version": "0.29.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.7",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 903.036928,
                "max_global_vram": 1195.900928,
                "max_process_vram": 0.0,
                "max_reserved": 555.74528,
                "max_allocated": 508.993536
            },
            "latency": {
                "unit": "s",
                "count": 153,
                "total": 1.0004660811424257,
                "mean": 0.006538993994394938,
                "stdev": 0.00021833955407656367,
                "p50": 0.006576128005981445,
                "p90": 0.006752876949310303,
                "p95": 0.006824550533294678,
                "p99": 0.0071314227867126465,
                "values": [
                    0.007101439952850342,
                    0.006807551860809326,
                    0.006912000179290771,
                    0.006856704235076904,
                    0.006842368125915528,
                    0.006780928134918213,
                    0.007520256042480469,
                    0.006909952163696289,
                    0.006703104019165039,
                    0.0067040958404541015,
                    0.006684671878814697,
                    0.006751264095306396,
                    0.0066938881874084475,
                    0.00667852783203125,
                    0.00679423999786377,
                    0.007163904190063477,
                    0.006796288013458252,
                    0.006668288230895996,
                    0.006723584175109864,
                    0.006812672138214112,
                    0.00675328016281128,
                    0.006688767910003662,
                    0.006672383785247803,
                    0.006743040084838867,
                    0.0070215358734130855,
                    0.006772736072540283,
                    0.006700032234191895,
                    0.006681600093841553,
                    0.006709184169769287,
                    0.006694911956787109,
                    0.0066620798110961915,
                    0.006652927875518798,
                    0.006721536159515381,
                    0.006718463897705078,
                    0.006755328178405762,
                    0.0066826238632202144,
                    0.006638591766357422,
                    0.0066406397819519045,
                    0.0066744318008422855,
                    0.006633471965789795,
                    0.0067010560035705566,
                    0.006680575847625733,
                    0.006651904106140137,
                    0.006711296081542969,
                    0.006691840171813965,
                    0.006673408031463623,
                    0.006680575847625733,
                    0.006666240215301514,
                    0.006686751842498779,
                    0.00673689603805542,
                    0.006680511951446534,
                    0.006625279903411865,
                    0.0067348480224609375,
                    0.006700032234191895,
                    0.006667263984680176,
                    0.006665215969085693,
                    0.006690815925598144,
                    0.006651904106140137,
                    0.006662144184112549,
                    0.006676479816436768,
                    0.006690815925598144,
                    0.0066344962120056155,
                    0.0066938881874084475,
                    0.006714367866516113,
                    0.006612959861755371,
                    0.0067041277885437015,
                    0.006723584175109864,
                    0.006334464073181153,
                    0.006363135814666748,
                    0.006331391811370849,
                    0.00637440013885498,
                    0.006376448154449463,
                    0.006388735771179199,
                    0.006358016014099121,
                    0.006323232173919678,
                    0.00637440013885498,
                    0.006358016014099121,
                    0.006340608119964599,
                    0.006377471923828125,
                    0.006398975849151611,
                    0.006426623821258545,
                    0.006415359973907471,
                    0.006351871967315674,
                    0.006598656177520752,
                    0.006586368083953857,
                    0.0065812478065490725,
                    0.006621183872222901,
                    0.0066109437942504885,
                    0.006576128005981445,
                    0.0066447358131408694,
                    0.006617087841033936,
                    0.006536191940307618,
                    0.006583295822143555,
                    0.006534143924713135,
                    0.006582272052764892,
                    0.006574079990386963,
                    0.006519807815551758,
                    0.0065484800338745115,
                    0.00652288007736206,
                    0.006508543968200684,
                    0.006544320106506347,
                    0.00653107213973999,
                    0.00648905611038208,
                    0.006523903846740723,
                    0.006502399921417237,
                    0.006524928092956543,
                    0.006518784046173095,
                    0.006491136074066162,
                    0.006452191829681397,
                    0.006331391811370849,
                    0.006275072097778321,
                    0.0063211522102355954,
                    0.006288383960723877,
                    0.006296576023101807,
                    0.006281216144561768,
                    0.006281216144561768,
                    0.006302720069885254,
                    0.0062740478515625,
                    0.006284287929534912,
                    0.006259712219238281,
                    0.00628326416015625,
                    0.00628326416015625,
                    0.006296607971191406,
                    0.006299647808074951,
                    0.006285312175750732,
                    0.006322175979614258,
                    0.006268928050994873,
                    0.006303743839263916,
                    0.006259712219238281,
                    0.006247424125671387,
                    0.006328288078308105,
                    0.006278143882751465,
                    0.006293504238128662,
                    0.006269951820373535,
                    0.006304768085479737,
                    0.0062873601913452145,
                    0.0062638077735900875,
                    0.006268928050994873,
                    0.006275072097778321,
                    0.006280191898345947,
                    0.0063211522102355954,
                    0.006270976066589356,
                    0.006307839870452881,
                    0.006289408206939697,
                    0.006293504238128662,
                    0.006292479991912842,
                    0.006322175979614258,
                    0.0062893757820129395,
                    0.0062863359451293946,
                    0.0062904319763183595,
                    0.006278143882751465,
                    0.006302720069885254,
                    0.006276095867156982
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 152.92872280616484
            },
            "energy": {
                "unit": "kWh",
                "cpu": 7.487462376648526e-08,
                "ram": 4.093680077032314e-08,
                "gpu": 1.3797530255345898e-07,
                "total": 2.537867270902674e-07
            },
            "efficiency": {
                "unit": "samples/kWh",
                "value": 3940316.388746043
            }
        }
    }
}
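The headline statistics in the report section can be recomputed directly from the raw latency and energy figures, which is a quick sanity check when comparing runs. A minimal sketch, assuming the file is saved locally as benchmark.json (an illustrative path, not part of the artifact):

```python
# Minimal sketch: recompute the headline report numbers from the raw data.
# "benchmark.json" is an assumed local filename for the artifact above.
import json

with open("benchmark.json") as f:
    data = json.load(f)

latency = data["report"]["forward"]["latency"]
energy = data["report"]["forward"]["energy"]

# Mean latency is total measured time divided by the number of forward passes.
mean_s = latency["total"] / latency["count"]      # ~0.006539 s, matches "mean"

# With batch_size=1, throughput is forward passes per second of measured time.
throughput = latency["count"] / latency["total"]  # ~152.93, matches "throughput"

# In this file the reported efficiency works out to 1 / total energy, which
# suggests the energy figures are normalized per forward pass (an observation
# from the numbers here, not a documented guarantee of the library).
efficiency = 1.0 / energy["total"]                # ~3.94e6 samples/kWh

print(f"mean latency : {mean_s * 1e3:.3f} ms")
print(f"throughput   : {throughput:.2f} samples/s")
print(f"efficiency   : {efficiency:.0f} samples/kWh")
```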