Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub
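The commit title is the default message `huggingface_hub` generates for programmatic uploads. For context, a minimal sketch of how a result file like this one is typically pushed; the repo id, local path, and `repo_type` below are placeholder assumptions, not taken from this page:

```python
from huggingface_hub import HfApi

api = HfApi()  # authentication comes from the cached login or the HF_TOKEN env var

# Placeholder repo id and local path; the actual target repo is not shown on this page.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="<user>/<benchmark-results-repo>",
    repo_type="dataset",
    commit_message="Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)
```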
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json
CHANGED
@@ -6,17 +6,19 @@
     "version": "2.2.2",
     "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
     "task": "multiple-choice",
-    "library": "transformers",
     "model": "FacebookAI/roberta-base",
-    "
+    "library": "transformers",
     "device": "cuda",
     "device_ids": "0",
     "seed": 42,
     "inter_op_num_threads": null,
     "intra_op_num_threads": null,
-    "
-
-
+    "hub_kwargs": {
+        "revision": "main",
+        "force_download": false,
+        "local_files_only": false,
+        "trust_remote_code": false
+    },
     "no_weights": true,
     "device_map": null,
     "torch_dtype": null,
@@ -71,7 +73,7 @@
     "environment": {
         "cpu": " AMD EPYC 7R32",
         "cpu_count": 16,
-        "cpu_ram_mb": 66697.
+        "cpu_ram_mb": 66697.289728,
         "system": "Linux",
         "machine": "x86_64",
         "platform": "Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35",
@@ -86,7 +88,7 @@
     "optimum_benchmark_commit": null,
     "transformers_version": "4.40.2",
     "transformers_commit": null,
-    "accelerate_version": "0.30.
+    "accelerate_version": "0.30.1",
     "accelerate_commit": null,
     "diffusers_version": "0.27.2",
     "diffusers_commit": null,
@@ -102,7 +104,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 891.
+            "max_ram": 891.00288,
             "max_global_vram": 1195.900928,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -110,177 +112,167 @@
         },
         "latency": {
             "unit": "s",
-            "count":
-            "total": 0.
-            "mean": 0.
-            "stdev": 0.
-            "p50": 0.
-            "p90": 0.
-            "p95": 0.
-            "p99": 0.
+            "count": 136,
+            "total": 0.9980350713729856,
+            "mean": 0.007338493171860189,
+            "stdev": 0.00022279241863946134,
+            "p50": 0.007393791913986206,
+            "p90": 0.007586303949356079,
+            "p95": 0.007633935928344727,
+            "p99": 0.007742544031143188,
             "values": [
-                0.
-                0.
+                0.0077619199752807615,
+                0.007643136024475097,
+                0.007706560134887695,
+                0.007701504230499268,
+                0.007520256042480469,
+                0.007599103927612305,
+                0.007539711952209473,
+                0.0075346240997314455,
+                0.007480319976806641,
+                0.007457791805267334,
+                0.007255040168762207,
+                0.007449600219726562,
+                0.007479296207427978,
                 0.0074291200637817386,
-                [122 removed values, truncated to "0." in this view]
-                0.006740992069244385,
-                0.006745120048522949,
-                0.006756351947784424,
-                0.006747136116027832,
-                0.006763519763946534,
-                0.006793216228485107,
-                0.006718463897705078,
-                0.006751232147216797,
-                0.0067276802062988285,
-                0.006703104019165039,
-                0.006717472076416015,
-                0.007165952205657959,
-                0.006743040084838867,
-                0.006721536159515381,
-                0.006823935985565186,
-                0.006763519763946534,
-                0.006749184131622315,
-                0.006754335880279541,
-                0.006740992069244385,
-                0.006762495994567871,
-                0.006765567779541016
+                0.007436287879943848,
+                0.007308288097381592,
+                0.007264256000518799,
+                0.00733900785446167,
+                0.007513088226318359,
+                0.007528448104858398,
+                0.007599103927612305,
+                0.007576576232910156,
+                0.007632895946502686,
+                0.0075345921516418455,
+                0.007529471874237061,
+                0.007567296028137207,
+                0.0075673599243164065,
+                0.007555071830749512,
+                0.00744755220413208,
+                0.007550975799560547,
+                0.007477248191833496,
+                0.007477248191833496,
+                0.00738918399810791,
+                0.007475168228149414,
+                0.00744652795791626,
+                0.007451648235321045,
+                0.0073994240760803225,
+                0.007502848148345947,
+                0.007490560054779053,
+                0.007546879768371582,
+                0.007637055873870849,
+                0.0075345921516418455,
+                0.007603104114532471,
+                0.007620607852935791,
+                0.0074700798988342285,
+                0.007417856216430664,
+                0.007547904014587403,
+                0.007455743789672851,
+                0.007592959880828858,
+                0.007436287879943848,
+                0.007459839820861816,
+                0.007412735939025879,
+                0.007352255821228028,
+                0.007461887836456299,
+                0.007579648017883301,
+                0.007544832229614258,
+                0.007398399829864502,
+                0.007478271961212158,
+                0.007431168079376221,
+                0.007482367992401123,
+                0.007477248191833496,
+                0.007559167861938477,
+                0.007491583824157715,
+                0.007704576015472412,
+                0.007565311908721924,
+                0.00820633602142334,
+                0.007569407939910889,
+                0.007479296207427978,
+                0.007508992195129394,
+                0.00738099193572998,
+                0.007427072048187256,
+                0.007171072006225586,
+                0.007388160228729248,
+                0.0073134078979492185,
+                0.007341055870056152,
+                0.007418879985809326,
+                0.007269375801086426,
+                0.007488512039184571,
+                0.007251904010772705,
+                0.007277567863464355,
+                0.007612415790557861,
+                0.007341055870056152,
+                0.007350272178649903,
+                0.00748748779296875,
+                0.007237631797790528,
+                0.007096320152282715,
+                0.007063551902770996,
+                0.007126016139984131,
+                0.007218175888061523,
+                0.0070553598403930665,
+                0.007085055828094483,
+                0.007071743965148926,
+                0.007302144050598145,
+                0.007079936027526855,
+                0.007074816226959229,
+                0.007081984043121338,
+                0.007116799831390381,
+                0.007029759883880615,
+                0.007046144008636474,
+                0.007093247890472412,
+                0.007060480117797851,
+                0.0071188478469848635,
+                0.007062528133392334,
+                0.007081984043121338,
+                0.007044095993041993,
+                0.007096320152282715,
+                0.0071792640686035155,
+                0.007088128089904785,
+                0.007048192024230957,
+                0.007065599918365479,
+                0.007117824077606201,
+                0.007157760143280029,
+                0.00703385591506958,
+                0.007079936027526855,
+                0.007080959796905518,
+                0.007085055828094483,
+                0.007066559791564941,
+                0.007065599918365479,
+                0.00709939193725586,
+                0.0071157760620117185,
+                0.007061503887176514,
+                0.007087103843688965,
+                0.007127007961273193,
+                0.007060480117797851,
+                0.007090176105499267,
+                0.007067647933959961,
+                0.007061503887176514,
+                0.007065599918365479,
+                0.007090176105499267,
+                0.007095295906066895,
+                0.007104512214660645,
+                0.0070594558715820314,
+                0.007085055828094483,
+                0.007069695949554444,
+                0.00703385591506958,
+                0.007185408115386963
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value":
+            "value": 136.26775641551987
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 8.
-            "ram": 4.
-            "gpu": 1.
-            "total": 2.
+            "cpu": 8.46603555036775e-08,
+            "ram": 4.617796958749095e-08,
+            "gpu": 1.6257105204255317e-07,
+            "total": 2.934093771337216e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value":
+            "value": 3408207.364634597
         }
     }
 }
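The aggregate fields in the updated report follow directly from the raw samples. A minimal sketch of the arithmetic; the list below is an abbreviated stand-in for the full 136-entry `values` array above, and reading `efficiency` as the reciprocal of the per-forward-pass energy is an inference from the numbers, not something documented on this page:

```python
import statistics

# Abbreviated stand-in for the 136 per-forward-pass latencies (seconds)
# listed in the diff above.
values = [0.0077619199752807615, 0.007643136024475097, 0.007185408115386963]

count = len(values)                 # "count": 136 in the full report
total = sum(values)                 # "total": 0.9980350713729856
mean = total / count                # "mean": 0.007338493171860189
stdev = statistics.pstdev(values)   # matches "stdev" if the population std-dev is used
throughput = count / total          # "throughput": 136 / 0.998035... = 136.26775641551987

# "efficiency" (samples/kWh) is numerically consistent with 1 / "total" energy,
# i.e. the energy figures appear to be accounted per forward pass:
total_energy_kwh = 2.934093771337216e-07
efficiency = 1.0 / total_energy_kwh  # ~3408207 samples/kWh, as reported
```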