IlyasMoutawwakil committed (verified)
Commit 417d828 · Parent: db48b71

Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

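The commit message indicates the file was pushed with the huggingface_hub client. A minimal sketch of such an upload (the repo ID, repo type, and token handling below are illustrative assumptions, not taken from this commit):

# Hypothetical sketch of an upload like the one described in the commit message.
# repo_id and repo_type are assumptions for illustration only.
from huggingface_hub import HfApi

api = HfApi()  # uses the cached login or the HF_TOKEN environment variable by default
api.upload_file(
    path_or_fileobj="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    path_in_repo="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="some-user/some-benchmark-repo",  # hypothetical repo ID
    repo_type="dataset",                      # assumption; could also target a model repo
    commit_message=(
        "Upload cuda_inference_transformers_multiple-choice_FacebookAI/"
        "roberta-base/benchmark.json with huggingface_hub"
    ),
)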
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -84,7 +84,7 @@
  ],
  "gpu_count": 1,
  "gpu_vram_mb": 24146608128,
- "optimum_benchmark_version": "0.2.1",
+ "optimum_benchmark_version": "0.3.0",
  "optimum_benchmark_commit": null,
  "transformers_version": "4.42.3",
  "transformers_commit": null,
@@ -104,7 +104,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 908.996608,
+ "max_ram": 908.619776,
  "max_global_vram": 1195.900928,
  "max_process_vram": 0.0,
  "max_reserved": 555.74528,
@@ -112,162 +112,170 @@
  },
  "latency": {
  "unit": "s",
- "count": 131,
- "total": 0.9963868465423579,
- "mean": 0.007606006462155408,
- "stdev": 0.00022405574825812592,
- "p50": 0.007632895946502686,
- "p90": 0.007879680156707763,
- "p95": 0.00793343997001648,
- "p99": 0.008100556659698487,
+ "count": 139,
+ "total": 0.999167808532715,
+ "mean": 0.007188257615343272,
+ "stdev": 0.0003418419015973708,
+ "p50": 0.007041024208068848,
+ "p90": 0.007641907119750977,
+ "p95": 0.008080998802185059,
+ "p99": 0.008395510120391846,
  "values": [
- 0.008248319625854492,
- 0.007875584125518798,
- 0.00810700798034668,
- 0.00796569585800171,
- 0.007873536109924317,
- 0.007924736022949219,
- 0.007879680156707763,
- 0.007797760009765625,
- 0.007830527782440186,
- 0.007783423900604248,
- 0.007731200218200684,
- 0.007632895946502686,
- 0.007740416049957275,
- 0.007763936042785645,
- 0.007766016006469726,
- 0.007881728172302246,
- 0.007758848190307617,
- 0.007887872219085693,
- 0.007802879810333252,
- 0.007822336196899414,
- 0.007747583866119385,
- 0.007773183822631836,
- 0.007724031925201416,
- 0.007781375885009765,
- 0.007816192150115966,
- 0.007675903797149658,
- 0.007667712211608887,
- 0.007672832012176513,
- 0.007714816093444824,
- 0.008085503578186035,
- 0.007956480026245117,
- 0.00785920000076294,
- 0.007845888137817383,
- 0.007792640209197998,
- 0.007811039924621582,
- 0.0077619199752807615,
- 0.007772160053253174,
- 0.007830527782440186,
- 0.007703551769256592,
- 0.007774208068847656,
- 0.007685120105743408,
- 0.007730175971984863,
- 0.00790015983581543,
- 0.007798816204071045,
- 0.007727104187011719,
- 0.0076902399063110355,
- 0.0077916159629821775,
- 0.0076871681213378906,
- 0.0077292160987854006,
- 0.007778304100036621,
- 0.007847936153411865,
- 0.0077619199752807615,
- 0.007832575798034667,
- 0.00775270414352417,
- 0.007925759792327881,
- 0.007747583866119385,
- 0.007697408199310303,
- 0.007863296031951903,
- 0.0074997758865356446,
- 0.007527423858642578,
- 0.007685120105743408,
- 0.007542784214019775,
- 0.007388160228729248,
- 0.007357439994812012,
- 0.007400447845458984,
- 0.007559167861938477,
- 0.007713791847229004,
- 0.007635968208312988,
- 0.007776288032531739,
- 0.007898111820220948,
- 0.007941120147705078,
- 0.007562240123748779,
- 0.00801587200164795,
- 0.007296000003814697,
- 0.007334911823272705,
- 0.007137279987335205,
- 0.007408639907836914,
- 0.007387135982513428,
- 0.007398399829864502,
- 0.007398399829864502,
- 0.007390207767486572,
- 0.007420928001403809,
- 0.007387135982513428,
- 0.00744652795791626,
- 0.007408639907836914,
- 0.007390207767486572,
- 0.007396351814270019,
- 0.007502848148345947,
- 0.00753766393661499,
- 0.00740556812286377,
- 0.007396351814270019,
- 0.007384064197540283,
- 0.007354368209838867,
- 0.007423999786376953,
- 0.007395328044891358,
- 0.007457791805267334,
- 0.007398399829864502,
- 0.007756800174713135,
- 0.007539711952209473,
- 0.007415808200836181,
- 0.007325695991516113,
- 0.007402495861053467,
- 0.0073994240760803225,
- 0.007362559795379638,
- 0.007228415966033935,
- 0.007431136131286621,
- 0.007209951877593994,
- 0.007252992153167725,
- 0.007351295948028564,
- 0.007469056129455566,
- 0.007349247932434082,
- 0.007366655826568603,
- 0.007379968166351319,
- 0.007379968166351319,
+ 0.00820736026763916,
+ 0.008043519973754883,
+ 0.008146944046020508,
+ 0.008501312255859376,
+ 0.008173567771911621,
+ 0.008077312469482421,
+ 0.007948287963867188,
+ 0.008114175796508789,
+ 0.008378368377685547,
+ 0.007952383995056152,
+ 0.007854080200195313,
+ 0.007856128215789794,
+ 0.007877632141113282,
+ 0.00840601634979248,
+ 0.007550975799560547,
+ 0.007588863849639893,
+ 0.007315455913543701,
+ 0.007321599960327148,
+ 0.0073134078979492185,
+ 0.007316480159759522,
+ 0.007245823860168457,
+ 0.007277535915374756,
+ 0.0072765440940856935,
+ 0.007278592109680176,
+ 0.00724889612197876,
+ 0.007222271919250488,
+ 0.007300096035003662,
+ 0.007207935810089112,
+ 0.007266304016113281,
+ 0.007241727828979493,
+ 0.007201791763305664,
+ 0.0071823358535766605,
+ 0.007189504146575928,
+ 0.007262207984924316,
+ 0.00722431993484497,
+ 0.007238656044006348,
+ 0.007221248149871826,
+ 0.0072427520751953125,
+ 0.007202816009521484,
+ 0.007269375801086426,
+ 0.007058432102203369,
+ 0.006980607986450196,
+ 0.007008255958557129,
+ 0.007049215793609619,
+ 0.007234560012817383,
+ 0.007267327785491944,
+ 0.0071823358535766605,
+ 0.007211008071899414,
+ 0.007226367950439453,
+ 0.0072325119972229,
+ 0.0072540159225463864,
+ 0.0072130560874938965,
+ 0.007230463981628418,
+ 0.007027711868286133,
+ 0.007027711868286133,
+ 0.007031807899475098,
+ 0.006998015880584717,
+ 0.007018496036529541,
+ 0.00703276777267456,
+ 0.007049215793609619,
+ 0.00704307222366333,
+ 0.007007232189178467,
+ 0.007090112209320068,
+ 0.007036928176879883,
+ 0.007003136157989502,
+ 0.007008255958557129,
+ 0.0070522880554199216,
+ 0.007053311824798584,
+ 0.007041024208068848,
+ 0.0070225920677185055,
+ 0.007017471790313721,
+ 0.0070348801612854,
+ 0.007027711868286133,
+ 0.006985727787017822,
+ 0.007003136157989502,
+ 0.007016448020935059,
  0.007386112213134765,
- 0.007379968166351319,
- 0.007456768035888672,
- 0.007408671855926514,
- 0.007555071830749512,
- 0.007526400089263916,
- 0.007373824119567871,
- 0.007390207767486572,
- 0.007483391761779785,
- 0.007374847888946533,
- 0.007387135982513428,
- 0.007359488010406494,
- 0.007390207767486572,
- 0.007345151901245117,
- 0.007366655826568603,
- 0.007367680072784424,
- 0.007464960098266602
+ 0.007251967906951904,
+ 0.007214079856872559,
+ 0.007209983825683594,
+ 0.007245823860168457,
+ 0.007252992153167725,
+ 0.007228415966033935,
+ 0.007127039909362793,
+ 0.007111711978912354,
+ 0.007110655784606934,
+ 0.007096320152282715,
+ 0.007068672180175781,
+ 0.007088064193725586,
+ 0.006906879901885986,
+ 0.006907904148101806,
+ 0.006969344139099121,
+ 0.007007232189178467,
+ 0.006987775802612305,
+ 0.006985727787017822,
+ 0.0069723520278930666,
+ 0.006938560009002686,
+ 0.006956031799316406,
+ 0.006960127830505371,
+ 0.006936575889587402,
+ 0.006979584217071533,
+ 0.006979584217071533,
+ 0.006986752033233643,
+ 0.006952960014343262,
+ 0.006952960014343262,
+ 0.006971392154693603,
+ 0.0069959678649902345,
+ 0.006948895931243896,
+ 0.006976511955261231,
+ 0.006958079814910889,
+ 0.0069632000923156735,
+ 0.006960095882415772,
+ 0.006964159965515137,
+ 0.006991903781890869,
+ 0.006975520133972168,
+ 0.006939648151397705,
+ 0.006980607986450196,
+ 0.0069847040176391605,
+ 0.006977536201477051,
+ 0.006936575889587402,
+ 0.006967296123504638,
+ 0.006966271877288818,
+ 0.006965248107910156,
+ 0.006947840213775635,
+ 0.006958144187927246,
+ 0.0069847040176391605,
+ 0.007014400005340577,
+ 0.006956992149353028,
+ 0.0069621758460998535,
+ 0.006965248107910156,
+ 0.006953983783721924,
+ 0.006934559822082519,
+ 0.006937600135803222,
+ 0.0069550080299377445,
+ 0.0069632320404052735,
+ 0.006959104061126709,
+ 0.006998015880584717,
+ 0.006986752033233643,
+ 0.006972415924072266
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 131.47503949353964
+ "value": 139.11577095755567
  },
  "energy": {
  "unit": "kWh",
- "cpu": 8.68918143049644e-08,
- "ram": 4.750641595922445e-08,
- "gpu": 1.591951800729893e-07,
- "total": 2.9359341033717815e-07
+ "cpu": 8.57844660823472e-08,
+ "ram": 4.689846807414724e-08,
+ "gpu": 1.5517725779709873e-07,
+ "total": 2.878601919535932e-07
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 3406070.997477591
+ "value": 3473908.612418396
  }
  }
  }
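As a quick, illustrative sanity check on the updated numbers (not part of the benchmark file), the new "throughput" value equals the latency "count" divided by the latency "total":

# Values copied from the updated benchmark.json above.
count = 139
total_latency_s = 0.999167808532715
print(count / total_latency_s)  # ~139.11577095755567, matching the reported throughput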