Commit b71ab43 (verified) · Parent(s): 8075b56
Author: IlyasMoutawwakil

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
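The commit message above is the default one that huggingface_hub generates for programmatic uploads. A minimal sketch of how such a result file is typically pushed; the local path, repo_id, and repo_type here are illustrative assumptions, not taken from this commit:

# Hypothetical upload of an optimum-benchmark result file.
# repo_id, repo_type, and the local path are assumptions for illustration.
from huggingface_hub import HfApi

api = HfApi()  # authenticates via HF_TOKEN or the cached login
api.upload_file(
    path_or_fileobj="benchmark.json",  # assumed local result file
    path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="IlyasMoutawwakil/benchmarks",  # hypothetical repo name
    repo_type="dataset",  # assumed; benchmark results usually live in dataset repos
)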
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
     "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
     "backend": {
         "name": "pytorch",
-        "version": "2.4.0+cu124",
+        "version": "2.4.1+cu124",
         "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
         "task": "text-classification",
         "library": "transformers",
@@ -104,7 +104,7 @@
     "load": {
         "memory": {
             "unit": "MB",
-            "max_ram": 791.334912,
+            "max_ram": 788.897792,
             "max_global_vram": 1185.415168,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -113,31 +113,31 @@
         "latency": {
             "unit": "s",
             "count": 1,
-            "total": 7.3526640625,
-            "mean": 7.3526640625,
+            "total": 7.38494970703125,
+            "mean": 7.38494970703125,
             "stdev": 0.0,
-            "p50": 7.3526640625,
-            "p90": 7.3526640625,
-            "p95": 7.3526640625,
-            "p99": 7.3526640625,
+            "p50": 7.38494970703125,
+            "p90": 7.38494970703125,
+            "p95": 7.38494970703125,
+            "p99": 7.38494970703125,
             "values": [
-                7.3526640625
+                7.38494970703125
             ]
         },
         "throughput": null,
         "energy": {
             "unit": "kWh",
-            "cpu": 1.0392684020830798e-06,
-            "ram": 5.537592941036581e-07,
-            "gpu": 1.690834685999943e-06,
-            "total": 3.283862382186681e-06
+            "cpu": 1.0599018097219454e-06,
+            "ram": 5.649998351948104e-07,
+            "gpu": 0.0,
+            "total": 1.6249016449167558e-06
         },
         "efficiency": null
     },
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 1080.754176,
+            "max_ram": 1079.435264,
             "max_global_vram": 1195.900928,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -145,181 +145,178 @@
         },
         "latency": {
             "unit": "s",
-            "count": 150,
-            "total": 0.9972951388359071,
-            "mean": 0.006648634258906046,
-            "stdev": 0.00018516933050798928,
-            "p50": 0.00670361590385437,
-            "p90": 0.0068649982452392575,
-            "p95": 0.0069198337078094484,
-            "p99": 0.007006791744232178,
+            "count": 147,
+            "total": 0.9994813418388366,
+            "mean": 0.006799192801624739,
+            "stdev": 0.00026584770300099806,
+            "p50": 0.00672051191329956,
+            "p90": 0.007036287879943847,
+            "p95": 0.0072573951244354245,
+            "p99": 0.007814840421676635,
             "values": [
+                0.007408639907836914,
+                0.007305215835571289,
+                0.007269375801086426,
+                0.007229440212249756,
+                0.0072837119102478025,
+                0.006821887969970703,
+                0.0068986878395080565,
+                0.007171072006225586,
+                0.007008255958557129,
+                0.006897664070129395,
+                0.007023615837097168,
+                0.007021599769592285,
+                0.006971392154693603,
+                0.006565887928009034,
+                0.006532095909118653,
+                0.006660096168518067,
+                0.006592512130737305,
+                0.006708223819732666,
+                0.006730751991271973,
+                0.006683648109436035,
+                0.0067010560035705566,
+                0.006710271835327148,
+                0.006599679946899414,
+                0.006615039825439453,
+                0.006725632190704346,
+                0.006681600093841553,
+                0.006692863941192627,
+                0.006699007987976074,
+                0.0066375679969787596,
+                0.006675456047058105,
+                0.0067358717918396,
+                0.007104512214660645,
+                0.007093247890472412,
+                0.007027711868286133,
                 0.007035903930664063,
-                0.00698367977142334,
-                0.007003136157989502,
-                0.006945792198181152,
-                0.006918144226074219,
-                0.006874112129211426,
+                0.00694374418258667,
+                0.006875135898590088,
+                0.006909952163696289,
+                0.006956031799316406,
+                0.007036863803863525,
                 0.0069212160110473635,
-                0.006930431842803955,
-                0.006918144226074219,
-                0.0068249602317810056,
-                0.006793216228485107,
-                0.006792191982269287,
-                0.006783999919891357,
-                0.006714303970336914,
-                0.006646783828735352,
-                0.0066826238632202144,
-                0.006746079921722412,
+                0.006848512172698974,
+                0.006692863941192627,
+                0.006643712043762207,
+                0.006782976150512696,
+                0.006773759841918945,
+                0.006683648109436035,
+                0.0067338237762451176,
+                0.006717440128326416,
+                0.006675456047058105,
+                0.006576128005981445,
+                0.006710271835327148,
                 0.0067358717918396,
-                0.0067758078575134275,
-                0.0067645440101623535,
-                0.006778880119323731,
-                0.006740960121154785,
+                0.006680575847625733,
                 0.006703104019165039,
-                0.006776832103729248,
-                0.006811679840087891,
-                0.0067573761940002445,
-                0.006829055786132812,
-                0.00682092809677124,
-                0.006789120197296142,
-                0.0067276802062988285,
-                0.006772736072540283,
-                0.00678604793548584,
-                0.006764480113983154,
-                0.006810624122619629,
-                0.00679423999786377,
-                0.006681600093841553,
-                0.006673408031463623,
-                0.00669593620300293,
-                0.006739967823028564,
-                0.006711296081542969,
-                0.006748127937316895,
-                0.006726655960083008,
-                0.006731776237487793,
+                0.007074816226959229,
+                0.006969344139099121,
+                0.006944767951965332,
+                0.006880256175994873,
+                0.00694271993637085,
+                0.006951935768127441,
+                0.007003136157989502,
+                0.006949888229370117,
+                0.006956031799316406,
+                0.006929408073425293,
+                0.0069550080299377445,
+                0.006933504104614257,
+                0.00695091199874878,
+                0.006896639823913574,
+                0.0071157760620117185,
+                0.006986720085144043,
+                0.007029759883880615,
+                0.006994944095611572,
+                0.0069918718338012695,
+                0.007023615837097168,
+                0.006960095882415772,
+                0.006887423992156983,
+                0.006959104061126709,
+                0.00694374418258667,
+                0.006961152076721191,
+                0.006919167995452881,
+                0.006896639823913574,
+                0.006867968082427979,
+                0.0069283838272094726,
+                0.0068055038452148435,
+                0.006807551860809326,
+                0.006986752033233643,
+                0.0068055038452148435,
+                0.006633471965789795,
+                0.006586368083953857,
+                0.006915071964263916,
+                0.006668288230895996,
+                0.006816768169403077,
+                0.00683622407913208,
+                0.0067123198509216305,
                 0.006864895820617676,
-                0.006819839954376221,
-                0.006765567779541016,
-                0.00679423999786377,
-                0.006789120197296142,
-                0.006762495994567871,
-                0.006714367866516113,
-                0.006792191982269287,
-                0.006797311782836914,
-                0.0067686400413513184,
-                0.006781951904296875,
-                0.006758399963378906,
-                0.006785024166107178,
-                0.006694943904876709,
-                0.0067573761940002445,
-                0.0067041277885437015,
+                0.007021567821502686,
+                0.007905280113220215,
+                0.00773632001876831,
+                0.007881728172302246,
+                0.007699456214904785,
+                0.006708223819732666,
+                0.006696959972381592,
+                0.006676479816436768,
                 0.00672051191329956,
-                0.0068577280044555666,
-                0.00669593620300293,
-                0.006677504062652588,
-                0.00672870397567749,
-                0.00673689603805542,
-                0.0067010560035705566,
-                0.006755392074584961,
-                0.006738944053649902,
-                0.006729728221893311,
-                0.006865920066833496,
-                0.006863872051239014,
-                0.006918144226074219,
-                0.0068689918518066405,
-                0.00689356803894043,
-                0.006792191982269287,
-                0.006739967823028564,
-                0.006752255916595459,
-                0.006715392112731934,
-                0.0066979842185974124,
-                0.006717440128326416,
-                0.00667955207824707,
-                0.006662144184112549,
-                0.006574079990386963,
-                0.006502399921417237,
-                0.006654975891113281,
-                0.006620160102844238,
-                0.006457344055175781,
-                0.006351871967315674,
-                0.0064143362045288085,
-                0.006464511871337891,
-                0.00672870397567749,
-                0.006782976150512696,
-                0.006781951904296875,
-                0.006692895889282227,
-                0.006964223861694336,
-                0.007010303974151612,
-                0.0066979842185974124,
-                0.006535168170928955,
-                0.006584320068359375,
-                0.006493184089660644,
-                0.006451200008392334,
-                0.006499328136444092,
-                0.0064860482215881345,
-                0.006618112087249756,
+                0.006593535900115967,
+                0.006627327919006347,
+                0.0065177597999572755,
+                0.00658128023147583,
+                0.006582272052764892,
+                0.00653107213973999,
+                0.0065669121742248536,
                 0.0065771517753601075,
+                0.006520832061767578,
+                0.006553599834442139,
+                0.006565887928009034,
+                0.006532095909118653,
+                0.006557695865631104,
+                0.00652185583114624,
+                0.006559743881225586,
+                0.006541279792785645,
+                0.006546432018280029,
+                0.006554624080657959,
+                0.006561791896820069,
+                0.006498303890228272,
+                0.006562816143035889,
+                0.006557695865631104,
+                0.006516736030578613,
+                0.0065823040008544925,
+                0.006552639961242676,
+                0.006515711784362793,
+                0.006551551818847656,
+                0.006550528049468994,
+                0.006582272052764892,
+                0.006550528049468994,
+                0.006533120155334473,
+                0.006621183872222901,
+                0.00659660816192627,
+                0.006516736030578613,
+                0.006532095909118653,
+                0.006535168170928955,
+                0.006479872226715088,
+                0.00652288007736206,
+                0.006535168170928955,
+                0.0065064959526062015,
                 0.006556672096252441,
-                0.006583295822143555,
-                0.006592512130737305,
-                0.006536191940307618,
-                0.006558720111846924,
-                0.006486015796661377,
-                0.006404096126556396,
-                0.006549503803253174,
-                0.006346752166748047,
-                0.006387712001800537,
-                0.00638156795501709,
-                0.006368288040161133,
-                0.006372352123260498,
-                0.006348800182342529,
-                0.006397952079772949,
-                0.006379519939422608,
-                0.00637337589263916,
-                0.0063907837867736815,
-                0.0063539199829101565,
-                0.006395904064178467,
-                0.006367263793945313,
-                0.006360064029693604,
-                0.006364160060882569,
-                0.0063836159706115725,
-                0.006387712001800537,
-                0.006397952079772949,
-                0.00643891191482544,
-                0.0063907837867736815,
-                0.006368256092071533,
-                0.006364160060882569,
-                0.0063937921524047855,
-                0.0063836159706115725,
-                0.006416384220123291,
-                0.0063805441856384275,
-                0.006354944229125976,
-                0.006385663986206055,
-                0.006364160060882569,
-                0.0064767999649047855,
-                0.006751232147216797,
-                0.006411263942718506,
-                0.006396927833557129,
-                0.006492159843444824,
-                0.006598656177520752,
-                0.00657203197479248,
-                0.006363135814666748
+                0.006520832061767578
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 150.40682959217824
+            "value": 147.0762823141358
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 7.563939582890824e-08,
-            "ram": 4.135528669307882e-08,
-            "gpu": 1.4184370866241985e-07,
-            "total": 2.588383911844069e-07
+            "cpu": 7.771506574074031e-08,
+            "ram": 4.249170526421879e-08,
+            "gpu": 1.3959343048366145e-07,
+            "total": 2.598002014886205e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 3863414.524499805
+            "value": 3849111.7184287515
         }
     }
 }
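The aggregate latency fields in this file are derivable from the raw "values" array. A minimal sketch that recomputes them; the "report" key path and numpy's default linear percentile interpolation are assumptions (the diff only shows the "forward"/"latency" fragment, not the full file layout):

# Recompute the aggregate latency statistics from the raw per-iteration values.
import json
import numpy as np

with open("benchmark.json") as f:
    data = json.load(f)

latency = data["report"]["forward"]["latency"]  # assumed path, not shown in the diff
values = np.array(latency["values"])

print("count :", values.size)    # 147 in the new revision
print("total :", values.sum())   # ~0.99948 s
print("mean  :", values.mean())  # ~0.0067992 s
print("stdev :", values.std())   # ~0.00026585 s (population std assumed)
for q in (50, 90, 95, 99):
    print(f"p{q}  :", np.percentile(values, q))

# Throughput is the iteration count divided by the total measured latency:
print("throughput:", values.size / values.sum())  # ~147.076 samples/s

As a cross-check, 147 / 0.9994813418388366 reproduces the stored throughput of 147.0762823141358 samples/s. Notably, the reported efficiency (3849111.7184287515 samples/kWh) is, to the shown precision, the reciprocal of the forward energy total (2.598002014886205e-07 kWh), which suggests the stored energy figures are normalized per processed sample.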