Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json
CHANGED
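The commit message above is the default one that huggingface_hub generates when a file is pushed programmatically (HfApi.upload_file produces "Upload <path_in_repo> with huggingface_hub" when no commit_message is given). As a rough illustration only, a benchmark result like this one is typically uploaded with a call of the following shape; the repo_id and repo_type below are placeholders, not values taken from this page.

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local benchmark output
    path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="<namespace>/<repo-name>",  # placeholder
    repo_type="dataset",  # assumption: benchmark dumps like this are usually stored in a dataset repo
)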
Before (the previous version of the file; removed lines are prefixed with "-", several removed values are truncated in the page extraction):

@@ -104,7 +104,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 904.
             "max_global_vram": 1195.900928,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -112,183 +112,179 @@
         },
         "latency": {
             "unit": "s",
-            "count":
-            "total":
-            "mean": 0.
-            "stdev": 0.
-            "p50": 0.
-            "p90": 0.
-            "p95": 0.
-            "p99": 0.
             "values": [
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
                 0.0069928960800170895,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.006567935943603515,
-                0.006482944011688232,
-                0.006605823993682861,
-                0.006620128154754639,
-                0.006559743881225586,
-                0.006606847763061524,
-                0.006643712043762207,
                 0.00662937593460083,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
                 0.00652288007736206,
-                0.
                 0.00658739185333252,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.006562816143035889,
-                0.0065413122177124024,
-                0.006512639999389648,
-                0.0065177597999572755,
-                0.006527999877929688,
-                0.006575104236602783,
-                0.006680575847625733,
-                0.006617087841033936,
-                0.0065812478065490725,
-                0.006606847763061524,
-                0.006584320068359375,
-                0.006552576065063476,
                 0.006557695865631104,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.006523903846740723,
-                0.006333439826965332,
-                0.006213632106781006,
-                0.006138815879821778,
-                0.006141024112701416,
-                0.00621670389175415,
-                0.006605823993682861,
-                0.00675328016281128,
-                0.00670201587677002,
-                0.006743040084838867,
-                0.006850560188293457,
-                0.006923264026641846,
-                0.0067717118263244626,
-                0.006553599834442139,
-                0.0065177597999572755,
-                0.006534143924713135,
                 0.006550528049468994,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.006341631889343262,
-                0.006402048110961914,
-                0.006453248023986816,
-                0.006335487842559814,
-                0.006440959930419922,
-                0.006432703971862793,
-                0.006396959781646729,
-                0.006385663986206055,
-                0.006396927833557129,
-                0.006500351905822754,
-                0.0064204797744750975,
-                0.0064143362045288085,
-                0.006425727844238281,
-                0.006370304107666015,
-                0.006341728210449218,
-                0.006460415840148926,
-                0.006402048110961914,
-                0.006435840129852295,
-                0.006440959930419922,
-                0.006434815883636475,
-                0.0063907837867736815,
-                0.006352896213531494,
-                0.006429696083068847,
-                0.006417535781860352,
-                0.006370431900024414,
-                0.006360064029693604,
-                0.006333439826965332,
-                0.006414207935333252,
-                0.0063610877990722655
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value":
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 7.
-            "ram": 4.
-            "gpu": 1.
-            "total": 2.
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value":
         }
     }
 }
After (the new version of the file; added lines are prefixed with "+"):

@@ -104,7 +104,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
+            "max_ram": 904.265728,
             "max_global_vram": 1195.900928,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -112,183 +112,179 @@
         },
         "latency": {
             "unit": "s",
+            "count": 148,
+            "total": 1.00163472032547,
+            "mean": 0.006767802164361284,
+            "stdev": 0.00020090693793547749,
+            "p50": 0.006718976020812988,
+            "p90": 0.0070174720287323,
+            "p95": 0.007048192024230957,
+            "p99": 0.0071990167999267584,
             "values": [
+                0.0071485438346862796,
+                0.006850560188293457,
+                0.006949888229370117,
+                0.006866879940032959,
+                0.006973440170288086,
+                0.0068986878395080565,
+                0.006841343879699707,
+                0.00687718391418457,
+                0.00682700777053833,
+                0.006870016098022461,
+                0.0067041277885437015,
+                0.0066938881874084475,
+                0.006721536159515381,
+                0.006681600093841553,
+                0.006651904106140137,
+                0.006628352165222168,
+                0.00698367977142334,
+                0.006998015880584717,
+                0.007011328220367432,
+                0.006800384044647217,
+                0.006726655960083008,
+                0.006755328178405762,
+                0.006723584175109864,
+                0.006675392150878906,
+                0.006692863941192627,
+                0.0067348480224609375,
+                0.006760447978973389,
+                0.006972415924072266,
+                0.007065599918365479,
+                0.007037951946258545,
+                0.007006207942962647,
+                0.00698367977142334,
+                0.007024640083312988,
+                0.007038976192474365,
+                0.00704307222366333,
+                0.0068925762176513675,
+                0.006884352207183838,
+                0.0068689918518066405,
+                0.00693452787399292,
+                0.006913023948669434,
+                0.006967296123504638,
+                0.006980607986450196,
+                0.00698367977142334,
+                0.006966271877288818,
+                0.007061503887176514,
+                0.0069959678649902345,
+                0.0070011520385742185,
+                0.00694163179397583,
+                0.0069304962158203125,
+                0.006906879901885986,
+                0.007048192024230957,
+                0.006974463939666748,
+                0.006965248107910156,
+                0.006966271877288818,
+                0.00694374418258667,
+                0.006900735855102539,
+                0.00694374418258667,
+                0.006918144226074219,
+                0.006906879901885986,
+                0.006953983783721924,
+                0.007024640083312988,
+                0.006649856090545654,
+                0.0066375679969787596,
+                0.006665215969085693,
+                0.006749184131622315,
                 0.0069928960800170895,
+                0.007054336071014404,
+                0.007006207942962647,
+                0.00694271993637085,
+                0.007014400005340577,
+                0.007048192024230957,
+                0.007058432102203369,
+                0.006776832103729248,
+                0.006774784088134766,
+                0.006690815925598144,
+                0.006716415882110595,
+                0.006686719894409179,
                 0.00662937593460083,
+                0.00672160005569458,
+                0.006626304149627686,
+                0.006568960189819336,
+                0.006445055961608887,
+                0.0065146880149841305,
+                0.006666240215301514,
+                0.006501376152038574,
+                0.006302720069885254,
+                0.00638976001739502,
+                0.007243775844573975,
+                0.0075980801582336424,
+                0.006823935985565186,
+                0.006876160144805908,
+                0.006866943836212158,
+                0.006952960014343262,
+                0.007032832145690918,
+                0.006964223861694336,
+                0.006755328178405762,
+                0.006619135856628418,
+                0.006627327919006347,
+                0.006661119937896728,
+                0.00667852783203125,
+                0.006615039825439453,
+                0.006616064071655273,
                 0.00652288007736206,
+                0.006546432018280029,
+                0.006688767910003662,
+                0.006536191940307618,
+                0.006590464115142822,
+                0.006582272052764892,
+                0.006554624080657959,
+                0.00657203197479248,
+                0.006546432018280029,
+                0.006626304149627686,
                 0.00658739185333252,
+                0.006594560146331787,
+                0.006586368083953857,
+                0.006567840099334717,
+                0.006593535900115967,
+                0.006560768127441406,
+                0.006560768127441406,
+                0.00657919979095459,
                 0.006557695865631104,
+                0.0065669121742248536,
+                0.006597631931304931,
+                0.006546432018280029,
+                0.006582272052764892,
+                0.006561791896820069,
+                0.006584320068359375,
+                0.0065669121742248536,
                 0.006550528049468994,
+                0.0066109437942504885,
+                0.006598656177520752,
+                0.006666240215301514,
+                0.006642687797546387,
+                0.006612991809844971,
+                0.006627295970916748,
+                0.006642687797546387,
+                0.0066416640281677245,
+                0.00658022403717041,
+                0.006563839912414551,
+                0.006591487884521485,
+                0.006532095909118653,
+                0.00658739185333252,
+                0.00658841609954834,
+                0.006551551818847656,
+                0.006583295822143555,
+                0.006591487884521485,
+                0.006576128005981445,
+                0.0065771517753601075
             ]
         },
         "throughput": {
             "unit": "samples/s",
+            "value": 147.7584562483108
         },
         "energy": {
             "unit": "kWh",
+            "cpu": 7.761637737721574e-08,
+            "ram": 4.2429894939214493e-08,
+            "gpu": 1.4114571930719248e-07,
+            "total": 2.611919916236227e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
+            "value": 3828601.305054554
         }
     }
 }
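As a quick consistency check, the aggregate fields in the new version follow directly from the raw measurements: mean is total / count, throughput (samples/s) is count / total, and the efficiency value is approximately the reciprocal of the total energy. A minimal verification sketch, assuming a local copy of the uploaded file; the path and the top-level lookup are assumptions, since only part of the JSON is visible in this diff.

import json
import math

with open("benchmark.json") as f:   # placeholder path for a local copy of the uploaded file
    report = json.load(f)

forward = report["forward"]         # adjust if "forward" is nested under another key

lat = forward["latency"]
assert lat["count"] == len(lat["values"])                           # 148 measurements
assert math.isclose(lat["total"], sum(lat["values"]), rel_tol=1e-9)
assert math.isclose(lat["mean"], lat["total"] / lat["count"])       # 1.00163... / 148 ≈ 0.0067678

# throughput: 148 / 1.00163... ≈ 147.758 samples/s
assert math.isclose(forward["throughput"]["value"], lat["count"] / lat["total"], rel_tol=1e-6)

# efficiency: roughly 1 / 2.6119e-07 kWh ≈ 3.8286e6 samples/kWh
assert math.isclose(forward["efficiency"]["value"], 1 / forward["energy"]["total"], rel_tol=1e-6)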