IlyasMoutawwakil HF staff committed on
Commit
3d19845
·
verified ·
1 Parent(s): 94b755e

Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

Browse files
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -73,7 +73,7 @@
73
  "environment": {
74
  "cpu": " AMD EPYC 7R32",
75
  "cpu_count": 16,
76
- "cpu_ram_mb": 66697.285632,
77
  "system": "Linux",
78
  "machine": "x86_64",
79
  "platform": "Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35",
@@ -104,7 +104,7 @@
104
  "forward": {
105
  "memory": {
106
  "unit": "MB",
107
- "max_ram": 908.505088,
108
  "max_global_vram": 1195.900928,
109
  "max_process_vram": 0.0,
110
  "max_reserved": 555.74528,
@@ -113,165 +113,165 @@
113
  "latency": {
114
  "unit": "s",
115
  "count": 135,
116
- "total": 1.002766082286835,
117
- "mean": 0.007427896905828405,
118
- "stdev": 0.0001725960668012388,
119
- "p50": 0.00742195177078247,
120
- "p90": 0.007624499225616455,
121
- "p95": 0.007669862556457519,
122
- "p99": 0.007903109216690064,
123
  "values": [
124
- 0.008250368118286134,
125
- 0.007636991977691651,
126
- 0.007659520149230957,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
127
  0.007620607852935791,
128
- 0.007599071979522705,
129
- 0.007602176189422607,
130
- 0.007712768077850342,
131
- 0.007488512039184571,
132
- 0.007473152160644531,
133
- 0.007488512039184571,
134
- 0.007527423858642578,
135
- 0.00742195177078247,
136
- 0.00760319995880127,
137
- 0.0074332160949707035,
138
- 0.007497727870941162,
139
- 0.007513088226318359,
140
- 0.007458816051483155,
141
- 0.007442431926727295,
142
- 0.007514111995697022,
143
- 0.007491583824157715,
144
- 0.00744755220413208,
145
- 0.0074618558883666995,
146
- 0.007477248191833496,
147
- 0.0074967041015625,
148
- 0.007476191997528076,
149
- 0.007459839820861816,
150
- 0.007552000045776367,
151
- 0.007588863849639893,
152
- 0.0073994240760803225,
153
- 0.007364607810974121,
154
- 0.0074208641052246095,
155
- 0.007467967987060547,
156
- 0.007531519889831543,
157
- 0.00749567985534668,
158
- 0.007452672004699707,
159
- 0.0074301438331604,
160
- 0.00744755220413208,
161
- 0.007414783954620361,
 
162
  0.007450623989105225,
163
- 0.007463935852050781,
164
- 0.007523327827453613,
165
- 0.007674880027770996,
166
- 0.007444479942321777,
167
- 0.007491583824157715,
168
- 0.007395328044891358,
169
- 0.007368703842163086,
170
- 0.007413760185241699,
171
- 0.007465983867645264,
172
- 0.007423999786376953,
173
- 0.0075038719177246095,
174
- 0.007527423858642578,
175
- 0.007510015964508057,
176
- 0.007667712211608887,
177
  0.00744755220413208,
178
- 0.007427072048187256,
179
- 0.007554048061370849,
180
- 0.007560192108154297,
181
- 0.007622655868530274,
182
- 0.007589888095855713,
183
- 0.007667712211608887,
184
- 0.007651328086853027,
185
- 0.00762883186340332,
186
- 0.007427072048187256,
187
- 0.007266304016113281,
188
- 0.007350272178649903,
189
- 0.007363584041595459,
190
- 0.007502848148345947,
191
- 0.0075838079452514645,
192
- 0.007589888095855713,
193
- 0.00790937614440918,
194
- 0.007792640209197998,
195
- 0.007805952072143555,
196
- 0.007890944004058837,
197
- 0.0074301438331604,
198
- 0.007356416225433349,
199
- 0.007392255783081054,
200
- 0.007411712169647216,
201
- 0.007407616138458252,
202
- 0.007312384128570557,
203
- 0.007316480159759522,
204
- 0.007294976234436036,
205
- 0.007358463764190673,
206
- 0.007413760185241699,
207
- 0.00738099193572998,
208
  0.007327744007110596,
209
- 0.007450623989105225,
210
- 0.007296000003814697,
211
- 0.007415808200836181,
212
- 0.007320576190948487,
213
- 0.0073266558647155765,
214
- 0.007362559795379638,
215
- 0.0073994240760803225,
216
- 0.007314432144165039,
217
- 0.0073820161819458,
218
- 0.007360511779785156,
219
- 0.007319551944732666,
220
- 0.00733081579208374,
221
- 0.007392255783081054,
222
- 0.007223328113555908,
223
- 0.00719974422454834,
224
- 0.007189504146575928,
225
- 0.0073359360694885255,
226
- 0.007200767993927002,
227
- 0.007301119804382325,
228
- 0.007350207805633545,
229
- 0.007322624206542969,
230
- 0.007246816158294677,
231
- 0.007299071788787842,
232
- 0.007319551944732666,
233
- 0.007278592109680176,
234
- 0.007287807941436767,
235
- 0.007308288097381592,
236
- 0.007074816226959229,
237
- 0.007019519805908203,
238
- 0.007041024208068848,
239
- 0.007035903930664063,
240
- 0.007007232189178467,
241
  0.00719974422454834,
242
- 0.007047167778015137,
243
- 0.007359488010406494,
244
- 0.007435264110565186,
245
- 0.007318528175354004,
246
- 0.00733081579208374,
247
- 0.007286784172058106,
248
- 0.00729804801940918,
249
- 0.007311359882354736,
250
- 0.007287807941436767,
251
- 0.007625728130340576,
252
- 0.007318528175354004,
253
- 0.007337984085083008,
254
- 0.007332863807678222,
255
- 0.007288832187652588,
256
- 0.007318528175354004,
257
- 0.007464960098266602,
258
- 0.007309311866760254
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
259
  ]
260
  },
261
  "throughput": {
262
  "unit": "samples/s",
263
- "value": 134.62760895554914
264
  },
265
  "energy": {
266
  "unit": "kWh",
267
- "cpu": 8.645235694527915e-08,
268
- "ram": 4.7264715358803115e-08,
269
- "gpu": 1.65352930335746e-07,
270
- "total": 2.9907000263982825e-07
271
  },
272
  "efficiency": {
273
  "unit": "samples/kWh",
274
- "value": 3343698.770098002
275
  }
276
  }
277
  }
 
73
  "environment": {
74
  "cpu": " AMD EPYC 7R32",
75
  "cpu_count": 16,
76
+ "cpu_ram_mb": 66697.293824,
77
  "system": "Linux",
78
  "machine": "x86_64",
79
  "platform": "Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35",
 
104
  "forward": {
105
  "memory": {
106
  "unit": "MB",
107
+ "max_ram": 909.160448,
108
  "max_global_vram": 1195.900928,
109
  "max_process_vram": 0.0,
110
  "max_reserved": 555.74528,
 
113
  "latency": {
114
  "unit": "s",
115
  "count": 135,
116
+ "total": 1.0018786244392397,
117
+ "mean": 0.007421323143994366,
118
+ "stdev": 0.00024644641538249917,
119
+ "p50": 0.0073768959045410155,
120
+ "p90": 0.007706252861022949,
121
+ "p95": 0.007784163236618041,
122
+ "p99": 0.008095638465881348,
123
  "values": [
124
+ 0.008121343612670898,
125
+ 0.007740543842315674,
126
+ 0.007848959922790527,
127
+ 0.007797760009765625,
128
+ 0.007814144134521485,
129
+ 0.007755775928497314,
130
+ 0.007774208068847656,
131
+ 0.007677951812744141,
132
+ 0.00724886417388916,
133
+ 0.0072622718811035155,
134
+ 0.007229440212249756,
135
+ 0.0072499198913574215,
136
+ 0.0076871681213378906,
137
+ 0.007672832012176513,
138
+ 0.007699552059173584,
139
+ 0.0076912641525268555,
140
+ 0.007693439960479736,
141
+ 0.00764518404006958,
142
+ 0.007634943962097168,
143
+ 0.0076492481231689455,
144
+ 0.007612415790557861,
145
+ 0.007452703952789306,
146
+ 0.007641088008880615,
147
  0.007620607852935791,
148
+ 0.007623712062835694,
149
+ 0.007631872177124023,
150
+ 0.007916672229766846,
151
+ 0.0075939841270446775,
152
+ 0.0076943359375,
153
+ 0.007507967948913574,
154
+ 0.007713791847229004,
155
+ 0.008061951637268066,
156
+ 0.007646207809448242,
157
+ 0.007649280071258545,
158
+ 0.007683072090148926,
159
+ 0.007556096076965332,
160
+ 0.007617536067962646,
161
+ 0.0076390719413757325,
162
+ 0.007600128173828125,
163
+ 0.007597152233123779,
164
+ 0.007569407939910889,
165
+ 0.007555071830749512,
166
+ 0.007482367992401123,
167
+ 0.007710720062255859,
168
+ 0.007549952030181885,
169
+ 0.007194623947143554,
170
+ 0.0073768959045410155,
171
+ 0.0075632638931274416,
172
+ 0.007622655868530274,
173
+ 0.007577600002288819,
174
+ 0.007675903797149658,
175
+ 0.008112992286682129,
176
+ 0.007670783996582031,
177
+ 0.0076165437698364254,
178
+ 0.007623680114746094,
179
+ 0.007559167861938477,
180
+ 0.007617504119873047,
181
+ 0.007516160011291504,
182
+ 0.0074917120933532715,
183
  0.007450623989105225,
184
+ 0.00740556812286377,
185
+ 0.007631743907928467,
186
+ 0.007472127914428711,
187
+ 0.007450592041015625,
188
+ 0.007235583782196045,
189
+ 0.007237631797790528,
 
 
 
 
 
 
 
 
190
  0.00744755220413208,
191
+ 0.007484416007995606,
192
+ 0.007724031925201416,
193
+ 0.007521279811859131,
194
+ 0.007577600002288819,
195
+ 0.0076984319686889645,
196
+ 0.00777833604812622,
197
+ 0.007620607852935791,
198
+ 0.007292928218841553,
199
+ 0.007207935810089112,
200
+ 0.007208960056304932,
201
+ 0.0071535038948059085,
202
+ 0.007223296165466309,
203
+ 0.007250944137573242,
204
+ 0.007227392196655274,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
205
  0.007327744007110596,
206
+ 0.007203904151916504,
207
+ 0.007158783912658692,
208
+ 0.007294976234436036,
209
+ 0.0071495680809020995,
210
+ 0.007177216053009033,
211
+ 0.00719155216217041,
212
+ 0.007161952018737793,
213
+ 0.007171072006225586,
214
+ 0.007164927959442138,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
215
  0.00719974422454834,
216
+ 0.007376863956451416,
217
+ 0.007168000221252442,
218
+ 0.007132160186767578,
219
+ 0.007197696208953858,
220
+ 0.007146399974822998,
221
+ 0.007194623947143554,
222
+ 0.00728985595703125,
223
+ 0.007156735897064209,
224
+ 0.007156735897064209,
225
+ 0.007204864025115967,
226
+ 0.007181312084197998,
227
+ 0.00735430383682251,
228
+ 0.007164927959442138,
229
+ 0.007168000221252442,
230
+ 0.007136288166046142,
231
+ 0.007243840217590332,
232
+ 0.007192575931549072,
233
+ 0.007205887794494629,
234
+ 0.007109632015228271,
235
+ 0.007202816009521484,
236
+ 0.007144447803497315,
237
+ 0.0071526398658752445,
238
+ 0.007163904190063477,
239
+ 0.0071495361328125,
240
+ 0.007203839778900147,
241
+ 0.007353343963623047,
242
+ 0.0071485438346862796,
243
+ 0.007145472049713135,
244
+ 0.007155712127685547,
245
+ 0.007164927959442138,
246
+ 0.007152448177337647,
247
+ 0.007206816196441651,
248
+ 0.007154687881469727,
249
+ 0.007156735897064209,
250
+ 0.007385087966918945,
251
+ 0.00717523193359375,
252
+ 0.007242591857910157,
253
+ 0.00713318395614624,
254
+ 0.007171072006225586,
255
+ 0.007146495819091797,
256
+ 0.0072130560874938965,
257
+ 0.007239776134490967,
258
+ 0.00719155216217041
259
  ]
260
  },
261
  "throughput": {
262
  "unit": "samples/s",
263
+ "value": 134.74686125333866
264
  },
265
  "energy": {
266
  "unit": "kWh",
267
+ "cpu": 8.491470700218565e-08,
268
+ "ram": 4.64189233804575e-08,
269
+ "gpu": 1.5764695151428502e-07,
270
+ "total": 2.8898058189692814e-07
271
  },
272
  "efficiency": {
273
  "unit": "samples/kWh",
274
+ "value": 3460440.121740339
275
  }
276
  }
277
  }