IlyasMoutawwakil (HF staff) committed
Commit e47fb56
1 Parent(s): 01f842c

Upload cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json with huggingface_hub
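The commit message indicates the file was pushed with huggingface_hub. A minimal sketch of such an upload via HfApi.upload_file follows; the repo id, repo type, and local path are placeholders and assumptions, not values taken from this commit:

# Hypothetical upload sketch using huggingface_hub's HfApi.upload_file.
# repo_id, repo_type and the local path are placeholders (assumptions).
from huggingface_hub import HfApi

api = HfApi()  # uses the locally stored Hugging Face token by default
api.upload_file(
    path_or_fileobj="benchmark.json",  # local file to push (placeholder path)
    path_in_repo="cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json",
    repo_id="<namespace>/<repo-name>",  # placeholder repo id
    repo_type="dataset",                # assumption: results stored in a dataset repo
    commit_message="Upload cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json with huggingface_hub",
)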

cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json CHANGED
@@ -3,7 +3,7 @@
 "name": "cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased",
 "backend": {
 "name": "pytorch",
-"version": "2.4.1+cpu",
+"version": "2.5.1+cpu",
 "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
 "task": "fill-mask",
 "library": "transformers",
@@ -45,7 +45,6 @@
 "warmup_runs": 1,
 "input_shapes": {
 "batch_size": 1,
-"num_choices": 2,
 "sequence_length": 2
 },
 "new_tokens": null,
@@ -73,23 +72,23 @@
 "environment": {
 "cpu": " AMD EPYC 7763 64-Core Processor",
 "cpu_count": 4,
-"cpu_ram_mb": 16766.783488,
+"cpu_ram_mb": 16757.342208,
 "system": "Linux",
 "machine": "x86_64",
-"platform": "Linux-6.8.0-1015-azure-x86_64-with-glibc2.39",
+"platform": "Linux-6.5.0-1025-azure-x86_64-with-glibc2.35",
 "processor": "x86_64",
 "python_version": "3.10.15",
 "optimum_benchmark_version": "0.5.0.dev0",
-"optimum_benchmark_commit": "08c9f59440cf4e5a5d6711ec19e8329ab2de652d",
-"transformers_version": "4.45.2",
+"optimum_benchmark_commit": "6807ba28334c8c98abf72a03d78f86133328d180",
+"transformers_version": "4.46.3",
 "transformers_commit": null,
-"accelerate_version": "1.0.1",
+"accelerate_version": "1.1.1",
 "accelerate_commit": null,
-"diffusers_version": "0.30.3",
+"diffusers_version": "0.31.0",
 "diffusers_commit": null,
 "optimum_version": null,
 "optimum_commit": null,
-"timm_version": "1.0.9",
+"timm_version": "1.0.11",
 "timm_commit": null,
 "peft_version": null,
 "peft_commit": null
@@ -101,7 +100,7 @@
 "load": {
 "memory": {
 "unit": "MB",
-"max_ram": 971.833344,
+"max_ram": 988.368896,
 "max_global_vram": null,
 "max_process_vram": null,
 "max_reserved": null,
@@ -110,15 +109,15 @@
 "latency": {
 "unit": "s",
 "values": [
-1.2947268169999973
+1.320219662999989
 ],
 "count": 1,
-"total": 1.2947268169999973,
-"mean": 1.2947268169999973,
-"p50": 1.2947268169999973,
-"p90": 1.2947268169999973,
-"p95": 1.2947268169999973,
-"p99": 1.2947268169999973,
+"total": 1.320219662999989,
+"mean": 1.320219662999989,
+"p50": 1.320219662999989,
+"p90": 1.320219662999989,
+"p95": 1.320219662999989,
+"p99": 1.320219662999989,
 "stdev": 0,
 "stdev_": 0
 },
@@ -129,7 +128,7 @@
 "forward": {
 "memory": {
 "unit": "MB",
-"max_ram": 885.587968,
+"max_ram": 902.414336,
 "max_global_vram": null,
 "max_process_vram": null,
 "max_reserved": null,
@@ -138,54 +137,53 @@
 "latency": {
 "unit": "s",
 "values": [
-0.04652446100001839,
-0.04549899299999538,
-0.04631391899999926,
-0.046339426999992384,
-0.04690633300000968,
-0.045781308999977455,
-0.04620207000002097,
-0.045252376000007644,
-0.045695099999989,
-0.0457592489999854,
-0.045759870000011915,
-0.04659333099999685,
-0.04498026999999638,
-0.04538643600000114,
-0.04654000099998257,
-0.04513919600000804,
-0.045177897000002076,
-0.04675323799997955,
-0.04150667700000099,
-0.03996228399998358,
-0.040088518999993994,
-0.03983912500001452,
-0.039345217000004595
+0.046674376999988,
+0.04629105300000447,
+0.04908905299998878,
+0.04822053199995935,
+0.04760002500000837,
+0.048074410000026546,
+0.047451387000023715,
+0.04733164399999623,
+0.04762481100004834,
+0.04887401099995259,
+0.046376983000016025,
+0.047196352000014485,
+0.04715971199999558,
+0.047721571000010954,
+0.04844241600000032,
+0.04697265300001163,
+0.04687613400000146,
+0.046902041999999255,
+0.047909687000014856,
+0.04163711900002909,
+0.04078571100001227,
+0.0435484540000175
 ],
-"count": 23,
-"total": 1.0273452979999718,
-"mean": 0.04466718686956399,
-"p50": 0.045695099999989,
-"p90": 0.04658266499999399,
-"p95": 0.04673724729998128,
-"p99": 0.046872652100003055,
-"stdev": 0.002462207465802744,
-"stdev_": 5.512340575628417
+"count": 22,
+"total": 1.0287601370001198,
+"mean": 0.046761824409096354,
+"p50": 0.04726399800000536,
+"p90": 0.04842022759999622,
+"p95": 0.048852431249954975,
+"p99": 0.04904389417998118,
+"stdev": 0.002068625017921856,
+"stdev_": 4.423747456524508
 },
 "throughput": {
 "unit": "samples/s",
-"value": 22.387798965718957
+"value": 21.384965463526157
 },
 "energy": {
 "unit": "kWh",
-"cpu": 1.545123317948727e-06,
-"ram": 6.460925190255893e-08,
+"cpu": 1.5602357586666486e-06,
+"ram": 6.52026019961935e-08,
 "gpu": 0.0,
-"total": 1.6097325698512858e-06
+"total": 1.625438360662842e-06
 },
 "efficiency": {
 "unit": "samples/kWh",
-"value": 621221.2007938589
+"value": 615218.6537496306
 }
 }
 }
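The derived fields in the forward section follow directly from the raw "values" list, so diffs like this one can be sanity-checked locally. Below is a minimal Python sketch (not part of optimum-benchmark) that recomputes them; the local path, the "report"/"forward" nesting, and numpy-style linear percentile interpolation are assumptions, not spelled out in this commit.

# Minimal sketch: recompute the derived latency/throughput/efficiency fields
# of a benchmark.json from its raw latency values.
# Assumptions: local file path, "report"/"forward" nesting, and numpy's
# default linear interpolation for percentiles.
import json
import statistics

import numpy as np

with open("benchmark.json") as f:       # path is a placeholder
    data = json.load(f)

forward = data["report"]["forward"]     # nesting assumed; adjust if needed
latency = forward["latency"]
values = latency["values"]

count = len(values)
total = sum(values)
print("count :", count, "vs", latency["count"])
print("total :", total, "vs", latency["total"])
print("mean  :", total / count, "vs", latency["mean"])
print("p50   :", statistics.median(values), "vs", latency["p50"])
for p in (90, 95, 99):
    print(f"p{p}   :", np.percentile(values, p), "vs", latency[f"p{p}"])

# With batch_size 1, samples/s is the number of forward passes over the
# summed latency, and samples/kWh reproduces as the inverse of the
# reported total energy for this file.
print("throughput:", count / total, "vs", forward["throughput"]["value"])
print("efficiency:", 1 / forward["energy"]["total"], "vs", forward["efficiency"]["value"])

On the numbers above, 22 / 1.0287601370001198 ≈ 21.385 samples/s and 1 / 1.625438360662842e-06 ≈ 615218.65 samples/kWh, matching the reported throughput and efficiency values.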