
IlyasMoutawwakil
HF staff
Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub
31906a6
verified
{
    "config": {
        "name": "cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base",
        "backend": {
            "name": "pytorch",
            "version": "2.3.1+cu121",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "multiple-choice",
            "library": "transformers",
            "model_type": "roberta",
            "model": "FacebookAI/roberta-base",
            "processor": "FacebookAI/roberta-base",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": true,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7R32",
            "cpu_count": 16,
            "cpu_ram_mb": 66697.293824,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "NVIDIA A10G"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 24146608128,
            "optimum_benchmark_version": "0.3.1",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.42.4",
            "transformers_commit": null,
            "accelerate_version": "0.32.1",
            "accelerate_commit": null,
            "diffusers_version": "0.29.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.7",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": {
                "unit": "MB",
                "max_ram": 718.565376,
                "max_global_vram": 1185.415168,
                "max_process_vram": 0.0,
                "max_reserved": 555.74528,
                "max_allocated": 499.37152
            },
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 7.14132763671875,
                "mean": 7.14132763671875,
                "stdev": 0.0,
                "p50": 7.14132763671875,
                "p90": 7.14132763671875,
                "p95": 7.14132763671875,
                "p99": 7.14132763671875,
                "values": [
                    7.14132763671875
                ]
            },
            "throughput": null,
            "energy": {
                "unit": "kWh",
                "cpu": 9.394897354973688e-07,
                "ram": 4.7088757128610346e-07,
                "gpu": 1.604445728000184e-06,
                "total": 3.014823034783656e-06
            },
            "efficiency": null
        },
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 918.130688,
                "max_global_vram": 1195.900928,
                "max_process_vram": 0.0,
                "max_reserved": 555.74528,
                "max_allocated": 509.038592
            },
            "latency": {
                "unit": "s",
                "count": 133,
                "total": 1.0013403520584103,
                "mean": 0.0075288748275068465,
                "stdev": 0.0002552160246561254,
                "p50": 0.007582719802856445,
                "p90": 0.0077996031761169435,
                "p95": 0.007904256153106689,
                "p99": 0.008112527313232422,
                "values": [
                    0.007799808025360107,
                    0.007512063980102539,
                    0.007536640167236328,
                    0.007502848148345947,
                    0.007472127914428711,
                    0.007408736228942871,
                    0.007293951988220215,
                    0.007592959880828858,
                    0.00765235185623169,
                    0.0077578239440917966,
                    0.007665664196014404,
                    0.007600128173828125,
                    0.007544864177703858,
                    0.007612415790557861,
                    0.007662591934204102,
                    0.007713823795318603,
                    0.007740447998046875,
                    0.00773635196685791,
                    0.00805679988861084,
                    0.007655392169952393,
                    0.007823359966278077,
                    0.008736767768859864,
                    0.00793497610092163,
                    0.007804992198944091,
                    0.007817215919494629,
                    0.007670783996582031,
                    0.007675903797149658,
                    0.007698368072509765,
                    0.007883776187896728,
                    0.0076605439186096195,
                    0.007601151943206787,
                    0.007661568164825439,
                    0.007566336154937744,
                    0.007610367774963379,
                    0.0076871681213378906,
                    0.007790592193603516,
                    0.007659520149230957,
                    0.007798783779144287,
                    0.00774451208114624,
                    0.007745535850524903,
                    0.0077844481468200685,
                    0.007619584083557129,
                    0.007641088008880615,
                    0.007771135807037354,
                    0.007655424118041992,
                    0.007742464065551758,
                    0.007678976058959961,
                    0.007635968208312988,
                    0.007849984169006348,
                    0.007614463806152344,
                    0.007685120105743408,
                    0.007733248233795166,
                    0.007664639949798584,
                    0.00759500789642334,
                    0.007628799915313721,
                    0.007666687965393066,
                    0.0076267518997192385,
                    0.0077199358940124516,
                    0.007990272045135497,
                    0.00783564805984497,
                    0.00794316816329956,
                    0.008045568466186523,
                    0.007733248233795166,
                    0.007678976058959961,
                    0.0077506561279296875,
                    0.007582719802856445,
                    0.0076308479309082035,
                    0.007709695816040039,
                    0.0077578239440917966,
                    0.007504896163940429,
                    0.0074527359008789065,
                    0.007585792064666748,
                    0.007489535808563232,
                    0.007265279769897461,
                    0.007422976016998291,
                    0.00738099193572998,
                    0.007621632099151611,
                    0.007620607852935791,
                    0.007651328086853027,
                    0.007716864109039307,
                    0.008138751983642578,
                    0.007580671787261963,
                    0.0073062400817871095,
                    0.007334911823272705,
                    0.00728166389465332,
                    0.007234560012817383,
                    0.0072765440940856935,
                    0.007362559795379638,
                    0.007256063938140869,
                    0.007354368209838867,
                    0.007291903972625732,
                    0.007292928218841553,
                    0.0072325119972229,
                    0.007274496078491211,
                    0.007255040168762207,
                    0.007269375801086426,
                    0.007200767993927002,
                    0.007260159969329834,
                    0.00723967981338501,
                    0.007176191806793213,
                    0.007228415966033935,
                    0.007347296237945557,
                    0.007241727828979493,
                    0.007214079856872559,
                    0.00723967981338501,
                    0.007247871875762939,
                    0.007204864025115967,
                    0.007243775844573975,
                    0.007234560012817383,
                    0.007269408226013184,
                    0.007153664112091064,
                    0.0073697280883789065,
                    0.007368703842163086,
                    0.007324672222137451,
                    0.0073359360694885255,
                    0.007328767776489258,
                    0.007214079856872559,
                    0.007225344181060791,
                    0.00722431993484497,
                    0.007287807941436767,
                    0.007241727828979493,
                    0.0071874880790710445,
                    0.0072325119972229,
                    0.007245823860168457,
                    0.007265279769897461,
                    0.0074711041450500485,
                    0.007259136199951172,
                    0.00727347183227539,
                    0.007278592109680176,
                    0.0073062400817871095,
                    0.007265279769897461,
                    0.0072213120460510255,
                    0.007393280029296875
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 132.82197179670013
            },
            "energy": {
                "unit": "kWh",
                "cpu": 8.206821949545909e-08,
                "ram": 4.4868679406439095e-08,
                "gpu": 1.5297904958620099e-07,
                "total": 2.7991594848809914e-07
            },
            "efficiency": {
                "unit": "samples/kWh",
                "value": 3572500.978959103
            }
        }
    }
}