{
"config": {
"name": "pytorch_generate",
"backend": {
"name": "pytorch",
"version": "2.3.0+cu121",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "text-generation",
"library": "transformers",
"model": "google/gemma-2b",
"processor": "google/gemma-2b",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"hub_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": "float16",
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": "static",
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {
"backend": "inductor",
"mode": "reduce-overhead",
"fullgraph": true
},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 2,
"duration": 0,
"warmup_runs": 10,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 7
},
"new_tokens": null,
"latency": true,
"memory": true,
"energy": false,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 128,
"min_new_tokens": 128,
"do_sample": false
},
"call_kwargs": {}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "warn",
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7R32",
"cpu_count": 16,
"cpu_ram_mb": 66697.29792,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.10.217-205.860.amzn2.x86_64-x86_64-with-glibc2.29",
"processor": "x86_64",
"python_version": "3.8.10",
"gpu": [
"NVIDIA A10G"
],
"gpu_count": 1,
"gpu_vram_mb": 24146608128,
"optimum_benchmark_version": "0.2.1",
"optimum_benchmark_commit": null,
"transformers_version": "4.42.0.dev0",
"transformers_commit": "b7672826cad31e30319487af876e608d8af7d37b",
"accelerate_version": "0.32.0.dev0",
"accelerate_commit": null,
"diffusers_version": null,
"diffusers_commit": null,
"optimum_version": "1.21.0.dev0",
"optimum_commit": null,
"timm_version": "0.9.16",
"timm_commit": null,
"peft_version": "0.11.2.dev0",
"peft_commit": null
}
},
"report": {
"prefill": {
"memory": {
"unit": "MB",
"max_ram": 1769.92256,
"max_global_vram": 6793.199616,
"max_process_vram": 0.0,
"max_reserved": 6146.752512,
"max_allocated": 5036.936192
},
"latency": {
"unit": "s",
"count": 2,
"total": 0.04534239959716797,
"mean": 0.022671199798583987,
"stdev": 4.268836975097716e-05,
"p50": 0.022671199798583987,
"p90": 0.02270535049438477,
"p95": 0.022709619331359863,
"p99": 0.02271303440093994,
"values": [
0.022628511428833008,
0.022713888168334962
]
},
"throughput": {
"unit": "tokens/s",
"value": 308.76177979946215
},
"energy": null,
"efficiency": null
},
"decode": {
"memory": {
"unit": "MB",
"max_ram": 1769.947136,
"max_global_vram": 6793.199616,
"max_process_vram": 0.0,
"max_reserved": 6146.752512,
"max_allocated": 5036.936704
},
"latency": {
"unit": "s",
"count": 2,
"total": 5.27013916015625,
"mean": 2.635069580078125,
"stdev": 0.0036293945312499964,
"p50": 2.635069580078125,
"p90": 2.637973095703125,
"p95": 2.63833603515625,
"p99": 2.6386263867187503,
"values": [
2.631440185546875,
2.638698974609375
]
},
"throughput": {
"unit": "tokens/s",
"value": 48.19607078316113
},
"energy": null,
"efficiency": null
},
"per_token": {
"memory": null,
"latency": {
"unit": "s",
"count": 253,
"total": 5.271720960617061,
"mean": 0.02083684174156943,
"stdev": 0.0014756745103518068,
"p50": 0.020813823699951172,
"p90": 0.020935475540161133,
"p95": 0.02108149757385254,
"p99": 0.022195609741210934,
"values": [
0.022304767608642577,
0.022768640518188478,
0.02065510368347168,
0.020341760635375978,
0.020304895401000975,
0.02025164794921875,
0.02030284881591797,
0.020298751831054687,
0.020296703338623046,
0.02026700782775879,
0.020299776077270508,
0.020487167358398437,
0.020591615676879883,
0.02031001663208008,
0.02042470359802246,
0.02036735916137695,
0.020342784881591795,
0.020291584014892578,
0.020342784881591795,
0.020315135955810547,
0.020299776077270508,
0.020273151397705077,
0.02040729522705078,
0.020195327758789062,
0.020588544845581053,
0.021157888412475585,
0.02091929626464844,
0.020958208084106447,
0.020883455276489257,
0.020806655883789063,
0.020246528625488282,
0.020576255798339844,
0.020924415588378906,
0.020755456924438476,
0.020192256927490236,
0.02006118392944336,
0.02007756805419922,
0.020578304290771485,
0.020880384445190428,
0.020271104812622072,
0.02030080032348633,
0.020273151397705077,
0.02027827262878418,
0.02030080032348633,
0.020303871154785155,
0.02028339195251465,
0.02061311912536621,
0.020890623092651366,
0.020593664169311524,
0.02146816062927246,
0.020969472885131835,
0.02087424087524414,
0.020855808258056642,
0.020815872192382814,
0.02085273551940918,
0.02087321662902832,
0.020783103942871094,
0.020831232070922853,
0.02082099151611328,
0.02082815933227539,
0.020785152435302736,
0.020805631637573242,
0.020570112228393556,
0.02081996726989746,
0.020769792556762694,
0.020847616195678712,
0.020726783752441406,
0.020725759506225586,
0.020624383926391602,
0.020876287460327148,
0.02079641532897949,
0.02088755226135254,
0.020752384185791017,
0.02079539108276367,
0.02085478401184082,
0.020916223526000977,
0.02088652801513672,
0.020929536819458007,
0.02086502456665039,
0.020896768569946288,
0.020888576507568358,
0.020912128448486327,
0.02087321662902832,
0.020632575988769532,
0.020973567962646485,
0.021380096435546874,
0.020876287460327148,
0.020718591690063477,
0.020840448379516603,
0.020898815155029296,
0.020813823699951172,
0.02066739273071289,
0.02084556770324707,
0.02086297607421875,
0.02084351921081543,
0.02068889617919922,
0.020896768569946288,
0.020746240615844725,
0.020641792297363282,
0.020853759765625,
0.02084454345703125,
0.02082611274719238,
0.020661247253417968,
0.02082713508605957,
0.02084454345703125,
0.020723712921142577,
0.02063974380493164,
0.020797439575195312,
0.02083020782470703,
0.02069606399536133,
0.02084966468811035,
0.02082099151611328,
0.02084147262573242,
0.020813823699951172,
0.020969472885131835,
0.02084556770324707,
0.02089369583129883,
0.020815872192382814,
0.020939775466918945,
0.02066431999206543,
0.020891647338867187,
0.020876287460327148,
0.020912128448486327,
0.020634624481201173,
0.020610048294067384,
0.020791296005249024,
0.0437125129699707,
0.020891647338867187,
0.020890623092651366,
0.02086809539794922,
0.02086809539794922,
0.02084454345703125,
0.021421056747436523,
0.021015552520751952,
0.020883455276489257,
0.02081279945373535,
0.02063564872741699,
0.020876287460327148,
0.020884479522705078,
0.020935680389404295,
0.020853759765625,
0.020649984359741212,
0.02083328056335449,
0.020944896697998046,
0.02066227149963379,
0.02085171127319336,
0.02110361671447754,
0.0220948486328125,
0.02106572723388672,
0.021007360458374022,
0.020961280822753905,
0.020817920684814452,
0.02090291213989258,
0.02087833595275879,
0.02089472007751465,
0.02086502456665039,
0.020853759765625,
0.02086092758178711,
0.02081996726989746,
0.020822015762329102,
0.020793344497680662,
0.02088960075378418,
0.0208035831451416,
0.02088243293762207,
0.020815872192382814,
0.02089369583129883,
0.02085785675048828,
0.020814847946166993,
0.020915199279785156,
0.020799488067626954,
0.020884479522705078,
0.020761600494384767,
0.020723712921142577,
0.02083737564086914,
0.021391359329223633,
0.020934656143188478,
0.02082918357849121,
0.020888576507568358,
0.020809728622436522,
0.02083737564086914,
0.02146406364440918,
0.02084556770324707,
0.02090598487854004,
0.020839424133300782,
0.020840448379516603,
0.02084454345703125,
0.02081279945373535,
0.020891647338867187,
0.020918272018432618,
0.02085478401184082,
0.020883455276489257,
0.020748287200927733,
0.020831232070922853,
0.020864000320434572,
0.020759552001953126,
0.020583423614501953,
0.020754432678222655,
0.02091007995605469,
0.02067353630065918,
0.0208855037689209,
0.02069606399536133,
0.02043391990661621,
0.020378623962402344,
0.020402175903320312,
0.02086911964416504,
0.020566015243530272,
0.02067046356201172,
0.02045952033996582,
0.020890623092651366,
0.020822015762329102,
0.020666368484497072,
0.020763647079467772,
0.02063667106628418,
0.020749311447143554,
0.02067865562438965,
0.020739072799682616,
0.020588544845581053,
0.020774911880493165,
0.02052403259277344,
0.02065203285217285,
0.020800512313842775,
0.020556800842285155,
0.020762624740600585,
0.020784128189086915,
0.020577280044555665,
0.02063564872741699,
0.020744192123413087,
0.0208035831451416,
0.020556800842285155,
0.020864000320434572,
0.020264959335327147,
0.020346879959106445,
0.02030080032348633,
0.02030080032348633,
0.02025881576538086,
0.020313087463378905,
0.02027519989013672,
0.020315135955810547,
0.02027724838256836,
0.02027622413635254,
0.02021171188354492,
0.020312063217163084,
0.020323328018188477,
0.02047385597229004,
0.020353023529052734,
0.02029363250732422,
0.021699583053588867,
0.021465087890625,
0.020989952087402345,
0.02088140869140625,
0.02089472007751465,
0.020742143630981445,
0.02106675148010254
]
},
"throughput": {
"unit": "tokens/s",
"value": 47.99191798846383
},
"energy": null,
"efficiency": null
}
}
}