IlyasMoutawwakil committed verified commit a2a9486 (parent: 1f821f9)

Upload cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json with huggingface_hub
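For context, a result file like this is typically pushed to the Hub with huggingface_hub's HfApi.upload_file. A minimal sketch follows; the repo id, local path, and commit message below are illustrative placeholders, not values taken from this commit:

    from huggingface_hub import HfApi

    api = HfApi()
    # Illustrative upload of a benchmark result produced by optimum-benchmark;
    # repo_id and the local file path are placeholders.
    api.upload_file(
        path_or_fileobj="benchmark.json",
        path_in_repo="cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json",
        repo_id="<namespace>/<dataset-name>",
        repo_type="dataset",
        commit_message="Upload benchmark.json with huggingface_hub",
    )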
cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json CHANGED
@@ -3,7 +3,7 @@
  "name": "cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased",
  "backend": {
  "name": "pytorch",
- "version": "2.4.0+cu121",
+ "version": "2.4.0+cu124",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "fill-mask",
  "library": "transformers",
@@ -104,7 +104,7 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 757.579776,
+ "max_ram": 783.966208,
  "max_global_vram": 1218.9696,
  "max_process_vram": 0.0,
  "max_reserved": 589.299712,
@@ -113,31 +113,31 @@
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 7.45509765625,
- "mean": 7.45509765625,
+ "total": 7.44635498046875,
+ "mean": 7.44635498046875,
  "stdev": 0.0,
- "p50": 7.45509765625,
- "p90": 7.45509765625,
- "p95": 7.45509765625,
- "p99": 7.45509765625,
+ "p50": 7.44635498046875,
+ "p90": 7.44635498046875,
+ "p95": 7.44635498046875,
+ "p99": 7.44635498046875,
  "values": [
- 7.45509765625
+ 7.44635498046875
  ]
  },
  "throughput": null,
  "energy": {
  "unit": "kWh",
- "cpu": 9.307679090277867e-07,
- "ram": 4.943789285316454e-07,
- "gpu": 0.0,
- "total": 1.4251468375594321e-06
+ "cpu": 9.293292131941103e-07,
+ "ram": 4.937943525166576e-07,
+ "gpu": 1.7491680660000272e-06,
+ "total": 3.1722916317107952e-06
  },
  "efficiency": null
  },
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 924.839936,
+ "max_ram": 1039.44192,
  "max_global_vram": 1229.45536,
  "max_process_vram": 0.0,
  "max_reserved": 589.299712,
@@ -145,219 +145,207 @@
  },
  "latency": {
  "unit": "s",
- "count": 188,
- "total": 1.0048574457168578,
- "mean": 0.005344986413387542,
- "stdev": 0.00024542633124477865,
- "p50": 0.005389312028884888,
- "p90": 0.005617971229553223,
- "p95": 0.005688473677635192,
- "p99": 0.006041344132423401,
+ "count": 176,
+ "total": 0.9997998409271238,
+ "mean": 0.005680680914358659,
+ "stdev": 0.0003507425247852144,
+ "p50": 0.005774847984313965,
+ "p90": 0.00597760009765625,
+ "p95": 0.006251263976097107,
+ "p99": 0.006653696179389954,
  "values": [
- 0.005710847854614258,
- 0.005793791770935058,
- 0.005662720203399658,
- 0.005445631980895996,
- 0.0054568958282470706,
- 0.005518335819244385,
- 0.005756927967071533,
- 0.005600255966186523,
- 0.005566463947296142,
+ 0.006077439785003662,
+ 0.006248447895050049,
+ 0.006082560062408447,
+ 0.006051839828491211,
+ 0.005952511787414551,
+ 0.005946368217468262,
+ 0.005929984092712402,
+ 0.005921792030334472,
+ 0.00592793607711792,
+ 0.005917695999145508,
+ 0.005887008190155029,
+ 0.00587059211730957,
+ 0.005897215843200684,
+ 0.005865471839904785,
+ 0.0058787841796875,
+ 0.005772287845611572,
+ 0.005753856182098388,
+ 0.005689343929290771,
+ 0.005810175895690918,
+ 0.005796864032745362,
+ 0.005850111961364746,
+ 0.005854207992553711,
+ 0.005857279777526855,
+ 0.00587775993347168,
+ 0.005910528182983398,
+ 0.005823488235473633,
+ 0.005772287845611572,
+ 0.005835775852203369,
+ 0.005855231761932373,
+ 0.0059054079055786135,
+ 0.005900288105010986,
+ 0.005925888061523437,
+ 0.005860352039337159,
+ 0.005921792030334472,
+ 0.0058726401329040525,
+ 0.005851136207580566,
+ 0.005858304023742676,
+ 0.005913599967956543,
+ 0.005864448070526123,
+ 0.0058757119178771975,
+ 0.005847040176391601,
+ 0.005699584007263184,
+ 0.005803008079528809,
+ 0.005705728054046631,
+ 0.005786623954772949,
+ 0.0057794561386108395,
+ 0.0058009600639343266,
+ 0.005804031848907471,
+ 0.00582860803604126,
+ 0.005822463989257813,
+ 0.0059361281394958495,
+ 0.0059023361206054685,
+ 0.00586240005493164,
+ 0.005929984092712402,
+ 0.005851136207580566,
+ 0.005889023780822754,
+ 0.005940224170684814,
+ 0.005863423824310303,
+ 0.005850111961364746,
+ 0.005886975765228271,
+ 0.007069695949554444,
+ 0.006604800224304199,
+ 0.006191103935241699,
+ 0.005886975765228271,
+ 0.0059064321517944334,
+ 0.00576204776763916,
+ 0.00577023983001709,
+ 0.005777408123016357,
+ 0.005786623954772949,
+ 0.005827583789825439,
+ 0.005858304023742676,
+ 0.005985280036926269,
+ 0.005740543842315674,
+ 0.005789696216583252,
+ 0.005764095783233642,
+ 0.005823488235473633,
+ 0.0058009600639343266,
+ 0.005835775852203369,
+ 0.005850111961364746,
+ 0.005835775852203369,
+ 0.00587775993347168,
+ 0.0058716158866882326,
+ 0.0059699201583862304,
+ 0.00587775993347168,
+ 0.006245376110076905,
+ 0.0059054079055786135,
+ 0.006493184089660644,
+ 0.00588595199584961,
+ 0.006012928009033203,
+ 0.005874688148498535,
+ 0.006078464031219482,
+ 0.005743616104125977,
+ 0.0064245758056640625,
+ 0.005777408123016357,
+ 0.0057149438858032225,
+ 0.0056780800819396975,
  0.00566476821899414,
- 0.006031360149383545,
- 0.005504000186920166,
- 0.005600255966186523,
- 0.005513216018676758,
- 0.0056258559226989744,
- 0.005536767959594726,
- 0.005462016105651855,
- 0.005434368133544922,
- 0.00540880012512207,
- 0.005414912223815918,
- 0.005565440177917481,
- 0.005426176071166992,
- 0.005442560195922852,
- 0.005469183921813964,
- 0.005425151824951172,
- 0.005493760108947754,
- 0.00546611213684082,
- 0.005436416149139404,
- 0.005547008037567139,
- 0.0053309440612792965,
- 0.0054241280555725096,
- 0.005499904155731201,
- 0.0054282240867614745,
- 0.005447679996490478,
- 0.005485568046569824,
- 0.005402624130249023,
- 0.005485568046569824,
- 0.0053821439743042,
- 0.0053821439743042,
- 0.005434368133544922,
- 0.005384191989898681,
- 0.005377024173736572,
- 0.005509119987487793,
- 0.005351424217224121,
- 0.005241856098175048,
- 0.005440512180328369,
- 0.005451776027679443,
- 0.005337088108062744,
- 0.005434368133544922,
- 0.005315584182739258,
- 0.005369855880737305,
- 0.005392384052276611,
- 0.005345280170440674,
- 0.005414912223815918,
- 0.0054579200744628905,
- 0.005437439918518067,
- 0.00557260799407959,
- 0.005513216018676758,
- 0.0055316481590271,
- 0.005595136165618897,
- 0.005475327968597412,
- 0.005468160152435303,
- 0.005499904155731201,
- 0.005536767959594726,
- 0.005666816234588623,
+ 0.005518335819244385,
+ 0.005656576156616211,
+ 0.00576204776763916,
+ 0.005640192031860352,
+ 0.005488639831542969,
  0.005478400230407715,
- 0.005386240005493164,
- 0.005588992118835449,
- 0.005903359889984131,
- 0.00566374397277832,
- 0.00572211217880249,
- 0.006278143882751465,
- 0.005787648200988769,
- 0.005692416191101074,
- 0.005484543800354004,
- 0.005530623912811279,
- 0.0056145920753479005,
- 0.005483520030975342,
- 0.005438464164733887,
- 0.005501952171325684,
- 0.005469183921813964,
- 0.005494783878326416,
- 0.005501952171325684,
- 0.0054876160621643065,
- 0.005588992118835449,
- 0.0054609918594360355,
- 0.005550079822540284,
- 0.005633024215698243,
- 0.0056483840942382815,
- 0.005681151866912842,
- 0.005560319900512695,
- 0.0056258559226989744,
- 0.006108160018920898,
- 0.005476352214813233,
- 0.005590015888214111,
- 0.005512191772460938,
  0.005495808124542236,
- 0.005323775768280029,
- 0.0054568958282470706,
- 0.005585919857025146,
- 0.005400576114654541,
- 0.0054609918594360355,
- 0.005438464164733887,
- 0.005295104026794434,
- 0.005189631938934326,
- 0.005342207908630371,
- 0.005413887977600097,
+ 0.0054876160621643065,
+ 0.005469183921813964,
+ 0.0055797758102417,
+ 0.005602303981781006,
+ 0.005620736122131348,
+ 0.005635072231292724,
+ 0.005653503894805908,
+ 0.00573747205734253,
+ 0.006259712219238281,
+ 0.006494207859039307,
+ 0.00638156795501709,
+ 0.006400000095367431,
+ 0.006800384044647217,
+ 0.0053606400489807126,
+ 0.0053350400924682614,
+ 0.005416959762573242,
+ 0.0053350400924682614,
+ 0.00536575984954834,
+ 0.00530841588973999,
+ 0.005287936210632324,
  0.005346303939819336,
- 0.005224448204040527,
- 0.005127168178558349,
- 0.005175295829772949,
- 0.005227519989013672,
- 0.005213183879852295,
- 0.005224448204040527,
- 0.005355519771575928,
- 0.005377024173736572,
- 0.005269504070281982,
- 0.0053012480735778805,
- 0.005403647899627686,
- 0.005610496044158936,
- 0.005371903896331787,
- 0.005173247814178467,
- 0.005192704200744629,
- 0.005185535907745361,
- 0.005243904113769531,
- 0.005153791904449463,
- 0.005167103767395019,
+ 0.005306367874145508,
+ 0.005295104026794434,
+ 0.0052899842262268066,
+ 0.005351424217224121,
+ 0.005262335777282715,
  0.005254144191741943,
- 0.005194752216339111,
- 0.005156864166259765,
- 0.005212160110473632,
- 0.005148672103881836,
- 0.005141503810882568,
- 0.005150720119476319,
- 0.005105663776397705,
- 0.005037055969238281,
- 0.005040128231048584,
- 0.00506879997253418,
- 0.005148672103881836,
- 0.005129216194152832,
- 0.005092351913452148,
- 0.005165056228637695,
- 0.0050022401809692385,
- 0.004983808040618896,
- 0.005004288196563721,
- 0.005015552043914795,
- 0.0050032639503479,
- 0.004989952087402344,
- 0.00501145601272583,
- 0.005074944019317627,
- 0.00501043176651001,
- 0.005000192165374756,
- 0.0050063362121582035,
- 0.005025792121887207,
- 0.005022719860076904,
- 0.005015552043914795,
- 0.005078015804290771,
- 0.00506060791015625,
- 0.005128191947937012,
- 0.005071872234344482,
- 0.0050432000160217285,
- 0.004999167919158935,
- 0.005021696090698242,
- 0.005004288196563721,
- 0.0050462718009948735,
- 0.0050094079971313476,
- 0.005015552043914795,
- 0.005039103984832764,
- 0.005041215896606445,
- 0.004994048118591309,
- 0.004992000102996826,
- 0.005001215934753418,
- 0.005059584140777588,
- 0.0050022401809692385,
- 0.005153791904449463,
- 0.005054463863372802,
- 0.005033984184265137,
- 0.0049797120094299315,
- 0.005001215934753418,
- 0.0050657281875610355,
- 0.005332992076873779,
- 0.005051392078399658,
- 0.00501145601272583,
- 0.005035007953643799,
- 0.0050421757698059086,
- 0.004993023872375488,
- 0.005549056053161621,
- 0.005238783836364746
+ 0.005278719902038574,
+ 0.005249023914337158,
+ 0.0052295680046081545,
+ 0.0052971520423889164,
+ 0.005253119945526123,
+ 0.005245952129364013,
+ 0.005286911964416504,
+ 0.005282815933227539,
+ 0.005239808082580567,
+ 0.005264383792877197,
+ 0.005279744148254394,
+ 0.005261312007904053,
+ 0.005264383792877197,
+ 0.005285888195037842,
+ 0.005244927883148193,
+ 0.005281792163848877,
+ 0.005258240222930908,
+ 0.005277696132659912,
+ 0.005239808082580567,
+ 0.005265408039093018,
+ 0.0052674560546875,
+ 0.005248000144958496,
+ 0.0053678078651428224,
+ 0.0052715520858764645,
+ 0.005250048160552978,
+ 0.00526643180847168,
+ 0.005268479824066162,
+ 0.005223423957824707,
+ 0.005244927883148193,
+ 0.005255167961120606,
+ 0.005250048160552978,
+ 0.005264383792877197,
+ 0.0052899842262268066,
+ 0.005234687805175781,
+ 0.005249023914337158,
+ 0.005241856098175048,
+ 0.005239808082580567,
+ 0.005252096176147461,
+ 0.005281792163848877,
+ 0.0052336640357971195,
+ 0.005215231895446777,
+ 0.0052633600234985355,
+ 0.005248000144958496,
+ 0.0052295680046081545,
+ 0.005262335777282715
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 187.09121458107145
+ "value": 176.035235049441
  },
  "energy": {
  "unit": "kWh",
- "cpu": 5.998896607744065e-08,
- "ram": 3.27961887022411e-08,
- "gpu": 1.2353545236363426e-07,
- "total": 2.1632060714331604e-07
+ "cpu": 6.14790637593567e-08,
+ "ram": 3.3614266225425634e-08,
+ "gpu": 1.2718634239378388e-07,
+ "total": 2.222796723785662e-07
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 4622768.090408895
+ "value": 4498836.935016227
  }
  }
  }