Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub
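For context, a result file like this is normally pushed to the Hub with the `huggingface_hub` client, as the commit title says. Below is a minimal sketch of such an upload using `HfApi.upload_file`; the `repo_id` and `repo_type` are hypothetical placeholders, since the target repository is not named in this commit view.

# Minimal sketch (assumptions noted inline): upload the local benchmark.json
# to the same path used in this commit. repo_id and repo_type are placeholders.
from huggingface_hub import HfApi

api = HfApi()  # authenticates via the token stored by `huggingface-cli login`
api.upload_file(
    path_or_fileobj="benchmark.json",  # local file produced by the benchmark run
    path_in_repo="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="<user-or-org>/<benchmark-repo>",  # hypothetical placeholder
    repo_type="dataset",  # assumption; benchmark results are often stored in a dataset repo
    commit_message="Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)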
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json
CHANGED
@@ -3,7 +3,7 @@
     "name": "cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base",
     "backend": {
         "name": "pytorch",
-        "version": "2.4.0+
+        "version": "2.4.0+cu124",
         "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
         "task": "multiple-choice",
         "library": "transformers",
@@ -104,7 +104,7 @@
     "load": {
         "memory": {
             "unit": "MB",
-            "max_ram":
+            "max_ram": 789.131264,
             "max_global_vram": 1185.415168,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -113,31 +113,31 @@
         "latency": {
             "unit": "s",
             "count": 1,
-            "total": 7.
-            "mean": 7.
+            "total": 7.38857666015625,
+            "mean": 7.38857666015625,
             "stdev": 0.0,
-            "p50": 7.
-            "p90": 7.
-            "p95": 7.
-            "p99": 7.
+            "p50": 7.38857666015625,
+            "p90": 7.38857666015625,
+            "p95": 7.38857666015625,
+            "p99": 7.38857666015625,
             "values": [
-                7.
+                7.38857666015625
             ]
         },
         "throughput": null,
         "energy": {
             "unit": "kWh",
-            "cpu": 8.
-            "ram": 4.
-            "gpu": 1.
-            "total": 3.
+            "cpu": 8.771265812504858e-07,
+            "ram": 4.6516059702451484e-07,
+            "gpu": 1.7094458119996415e-06,
+            "total": 3.051732990274642e-06
         },
         "efficiency": null
     },
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram":
+            "max_ram": 1085.034496,
             "max_global_vram": 1195.900928,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -145,167 +145,160 @@
         },
         "latency": {
             "unit": "s",
-            "count":
-            "total":
-            "mean": 0.
-            "stdev": 0.
-            "p50": 0.
-            "p90": 0.
-            "p95": 0.
-            "p99": 0.
+            "count": 129,
+            "total": 0.9988249263763427,
+            "mean": 0.007742828886638315,
+            "stdev": 0.00022354460679941142,
+            "p50": 0.007705599784851074,
+            "p90": 0.00802263011932373,
+            "p95": 0.00808263645172119,
+            "p99": 0.008394888916015624,
             "values": [
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.007280704021453858,
-                0.00727347183227539,
-                0.007217152118682861,
-                0.007275519847869873,
-                0.007264256000518799,
-                0.007198719978332519,
-                0.007229440212249756,
-                0.008026111602783203,
-                0.0075970559120178225,
+                0.007838719844818116,
+                0.007737343788146973,
+                0.007865344047546387,
+                0.00774348783493042,
+                0.007674880027770996,
+                0.00765235185623169,
+                0.007705599784851074,
                 0.007654399871826172,
-                0.007600096225738526,
-                0.007580671787261963,
-                0.007554048061370849,
-                0.007528448104858398,
-                0.007572480201721191,
-                0.007725056171417236,
-                0.00760319995880127,
-                0.007607359886169434,
-                0.007584767818450928,
-                0.007574528217315674,
-                0.007592959880828858,
-                0.007526400089263916,
-                0.0075939841270446775,
-                0.00758681583404541,
-                0.007515135765075683,
-                0.007476223945617676,
-                0.007488512039184571,
-                0.0075038719177246095,
                 0.00765337610244751,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.007824384212493896,
+                0.007625728130340576,
+                0.007633920192718506,
+                0.007541759967803955,
+                0.007579648017883301,
+                0.00765337610244751,
+                0.007615488052368164,
+                0.007688223838806152,
+                0.007676928043365478,
+                0.0076277761459350585,
+                0.007897119998931884,
+                0.008144927978515624,
+                0.007935999870300293,
+                0.008492095947265625,
+                0.007994368076324462,
+                0.007948287963867188,
+                0.007933951854705811,
+                0.0080250883102417,
+                0.007887872219085693,
+                0.007988224029541016,
+                0.008049599647521973,
+                0.008094719886779785,
+                0.008071167945861817,
+                0.007924736022949219,
+                0.007864319801330566,
+                0.007932928085327149,
+                0.007938047885894776,
+                0.007890944004058837,
+                0.00799129581451416,
+                0.008078335762023926,
+                0.007994431972503662,
+                0.008085503578186035,
+                0.007960576057434082,
+                0.007886847972869874,
+                0.007928832054138184,
+                0.008032256126403809,
+                0.007935999870300293,
+                0.007877632141113282,
+                0.0079267840385437,
+                0.007907264232635498,
+                0.007874559879302979,
+                0.007841792106628418,
+                0.007898143768310547,
+                0.007824384212493896,
+                0.007709695816040039,
+                0.007518208026885987,
                 0.007615488052368164,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.0075939841270446775,
+                0.007569407939910889,
+                0.007536640167236328,
+                0.007572480201721191,
+                0.007601183891296387,
+                0.008136704444885253,
+                0.00809779167175293,
+                0.007982079982757568,
+                0.007993375778198242,
+                0.008068096160888672,
+                0.007887807846069336,
+                0.007912447929382324,
+                0.007837696075439453,
+                0.00790015983581543,
+                0.007837696075439453,
+                0.007828479766845703,
+                0.007714816093444824,
+                0.007806975841522217,
+                0.007840767860412597,
+                0.007741439819335938,
+                0.007506944179534912,
+                0.007679999828338623,
+                0.007572480201721191,
+                0.007623680114746094,
+                0.007684095859527588,
+                0.007802879810333252,
+                0.0076277761459350585,
+                0.007780352115631104,
+                0.007969791889190675,
+                0.00774348783493042,
+                0.007890944004058837,
+                0.007608320236206055,
                 0.007605247974395752,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.007591936111450195,
+                0.00758070421218872,
+                0.007553023815155029,
+                0.007699456214904785,
+                0.007513023853302002,
+                0.007507967948913574,
+                0.00744755220413208,
+                0.007498752117156982,
+                0.0074711041450500485,
+                0.007508992195129394,
+                0.007505919933319092,
+                0.0074670081138610836,
+                0.007763967990875244,
+                0.008022015571594238,
+                0.008565759658813477,
+                0.00757862377166748,
+                0.007725056171417236,
+                0.007525375843048096,
+                0.007485375881195068,
+                0.007432191848754883,
+                0.007469056129455566,
+                0.007461887836456299,
+                0.00765235185623169,
+                0.007456768035888672,
+                0.007465983867645264,
+                0.007466944217681884,
+                0.007465983867645264,
+                0.007451648235321045,
+                0.007437312126159668,
+                0.007468031883239746,
+                0.007452672004699707,
+                0.007451648235321045,
+                0.007469056129455566,
+                0.007456768035888672,
+                0.00744755220413208,
                 0.0076308479309082035,
-                0.
-                0.
-                0.
-                0.
-                0.007101439952850342,
-                0.007165952205657959,
-                0.0071495680809020995,
-                0.007163904190063477,
-                0.007175168037414551,
-                0.007117824077606201,
-                0.007134208202362061,
-                0.0071495680809020995,
-                0.0071495680809020995,
-                0.007117824077606201,
-                0.00714137601852417,
-                0.007130112171173096,
-                0.007137279987335205,
-                0.0071198720932006835,
-                0.007088128089904785,
-                0.007124991893768311,
-                0.007156735897064209,
-                0.007150591850280762,
-                0.007113664150238037,
-                0.007097343921661377,
-                0.007130112171173096,
-                0.007131135940551757,
-                0.007123968124389648,
-                0.007108607769012451,
-                0.007109632015228271,
-                0.007136256217956543,
-                0.007136256217956543,
-                0.007136256217956543,
-                0.007105535984039306,
-                0.007164927959442138,
-                0.007105535984039306,
-                0.007214079856872559,
-                0.0072765440940856935,
-                0.007457759857177735,
-                0.007256063938140869,
-                0.00714137601852417,
-                0.007154687881469727,
-                0.007134208202362061,
-                0.007129087924957276,
-                0.007095295906066895,
-                0.007101439952850342,
-                0.007114751815795899,
-                0.00710969591140747,
-                0.007135231971740722,
-                0.007130112171173096
+                0.007679999828338623,
+                0.007525375843048096,
+                0.007505919933319092,
+                0.007514111995697022
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value":
+            "value": 129.15176282995031
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 8.
-            "ram": 4.
-            "gpu": 1.
-            "total": 2.
+            "cpu": 8.60660248138085e-08,
+            "ram": 4.7057267963438394e-08,
+            "gpu": 1.585609158985516e-07,
+            "total": 2.9168420867579845e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value":
+            "value": 3428365.2328655245
         }
     }
 }