IlyasMoutawwakil (HF staff) committed
Commit 5331800 · verified · 1 Parent(s): 52c2cdb

Upload cuda_inference_timm_image-classification_timm/resnet50.a1_in1k/benchmark.json with huggingface_hub

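The commit message notes that the file was uploaded with huggingface_hub. As a point of reference, a minimal sketch of how such an upload is typically done with `HfApi.upload_file` follows; the repo ID, repo type, and local path are illustrative placeholders and are not taken from this commit.

```python
# Sketch only: pushing a benchmark JSON to the Hub with huggingface_hub.
# repo_id, repo_type, and the local path are assumed placeholders.
from huggingface_hub import HfApi

api = HfApi()  # authenticates via HF_TOKEN or a cached login
api.upload_file(
    path_or_fileobj="benchmark.json",  # local file produced by the benchmark run
    path_in_repo="cuda_inference_timm_image-classification_timm/resnet50.a1_in1k/benchmark.json",
    repo_id="<namespace>/<benchmark-repo>",  # placeholder target repository
    repo_type="dataset",                     # assumption: results stored in a dataset repo
    commit_message="Upload benchmark.json with huggingface_hub",
)
```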
cuda_inference_timm_image-classification_timm/resnet50.a1_in1k/benchmark.json CHANGED
@@ -3,7 +3,7 @@
   "name": "cuda_inference_timm_image-classification_timm/resnet50.a1_in1k",
   "backend": {
   "name": "pytorch",
- "version": "2.4.0+cu121",
+ "version": "2.4.0+cu124",
   "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
   "task": "image-classification",
   "library": "timm",
@@ -104,7 +104,7 @@
   "load": {
   "memory": {
   "unit": "MB",
- "max_ram": 832.34816,
+ "max_ram": 854.171648,
   "max_global_vram": 709.361664,
   "max_process_vram": 0.0,
   "max_reserved": 123.731968,
@@ -113,31 +113,31 @@
   "latency": {
   "unit": "s",
   "count": 1,
- "total": 7.70229736328125,
- "mean": 7.70229736328125,
+ "total": 7.67897314453125,
+ "mean": 7.67897314453125,
   "stdev": 0.0,
- "p50": 7.70229736328125,
- "p90": 7.70229736328125,
- "p95": 7.70229736328125,
- "p99": 7.70229736328125,
+ "p50": 7.67897314453125,
+ "p90": 7.67897314453125,
+ "p95": 7.67897314453125,
+ "p99": 7.67897314453125,
   "values": [
- 7.70229736328125
+ 7.67897314453125
   ]
   },
   "throughput": null,
   "energy": {
   "unit": "kWh",
- "cpu": 4.3296000680550705e-06,
- "ram": 2.3569493401435366e-06,
- "gpu": 4.743614905999903e-06,
- "total": 1.1430164314198509e-05
+ "cpu": 4.411601574999935e-06,
+ "ram": 2.390782810158575e-06,
+ "gpu": 6.4663940619999385e-06,
+ "total": 1.326877844715845e-05
   },
   "efficiency": null
   },
   "forward": {
   "memory": {
   "unit": "MB",
- "max_ram": 1022.365696,
+ "max_ram": 1092.743168,
   "max_global_vram": 791.150592,
   "max_process_vram": 0.0,
   "max_reserved": 148.897792,
@@ -145,174 +145,166 @@
   },
   "latency": {
   "unit": "s",
- "count": 143,
- "total": 0.9986373128890994,
- "mean": 0.006983477712511182,
- "stdev": 0.00031380831557064205,
- "p50": 0.0068618240356445315,
- "p90": 0.007499571323394775,
- "p95": 0.007550156831741333,
- "p99": 0.00797843451499939,
+ "count": 135,
+ "total": 0.999097406387329,
+ "mean": 0.007400721528795031,
+ "stdev": 0.0002838832395345611,
+ "p50": 0.00725708818435669,
+ "p90": 0.007823769569396974,
+ "p95": 0.007904870271682738,
+ "p99": 0.008295587596893311,
   "values": [
- 0.007591936111450195,
- 0.0075162239074707034,
- 0.007501823902130127,
- 0.007494656085968018,
- 0.007528448104858398,
- 0.007552000045776367,
- 0.007517183780670166,
- 0.0074670081138610836,
- 0.007726079940795898,
- 0.007472127914428711,
- 0.0075008001327514645,
- 0.007478208065032959,
- 0.0074065918922424315,
- 0.007365632057189942,
- 0.007385087966918945,
- 0.0074618239402770995,
- 0.0074291200637817386,
- 0.007515135765075683,
- 0.007613440036773681,
- 0.00744652795791626,
- 0.007494656085968018,
- 0.007356416225433349,
- 0.007453695774078369,
- 0.0075335679054260255,
- 0.0074332160949707035,
- 0.0074700798988342285,
- 0.007585792064666748,
- 0.006681600093841553,
- 0.006594560146331787,
- 0.0066078720092773435,
- 0.006626304149627686,
- 0.006590464115142822,
- 0.006597631931304931,
- 0.006608895778656006,
- 0.00658739185333252,
- 0.006625279903411865,
- 0.006651904106140137,
- 0.006562816143035889,
- 0.006661119937896728,
- 0.0071198720932006835,
- 0.007061503887176514,
- 0.006846464157104492,
- 0.006920191764831543,
- 0.00693452787399292,
- 0.00690176010131836,
- 0.006848512172698974,
- 0.006909952163696289,
- 0.00687718391418457,
- 0.0068280320167541505,
- 0.006879263877868652,
- 0.006875135898590088,
- 0.006856704235076904,
- 0.006766592025756836,
- 0.006793216228485107,
- 0.006985727787017822,
- 0.007006207942962647,
- 0.007068672180175781,
- 0.006958079814910889,
- 0.006896639823913574,
- 0.006867968082427979,
- 0.006833151817321777,
- 0.006856704235076904,
- 0.006872064113616944,
- 0.006831103801727295,
- 0.006859776020050049,
- 0.006869952201843262,
- 0.00683622407913208,
- 0.006748159885406494,
- 0.00684441614151001,
- 0.006853631973266602,
- 0.006829055786132812,
- 0.006850560188293457,
- 0.006816768169403077,
- 0.006770688056945801,
- 0.006744063854217529,
- 0.006784992218017578,
- 0.006779839992523193,
- 0.006728640079498291,
- 0.006776832103729248,
- 0.006767615795135498,
- 0.006719488143920899,
- 0.006780928134918213,
- 0.0067717118263244626,
- 0.006806528091430664,
- 0.006807551860809326,
- 0.006853631973266602,
- 0.006849535942077637,
- 0.00673689603805542,
- 0.006763519763946534,
- 0.006782976150512696,
- 0.006752255916595459,
- 0.0067686400413513184,
- 0.006762495994567871,
- 0.006802432060241699,
- 0.006808576107025147,
- 0.00686790418624878,
- 0.0068618240356445315,
- 0.006892543792724609,
- 0.007967743873596191,
- 0.007986176013946533,
- 0.008019968032836914,
- 0.007094272136688232,
- 0.007027711868286133,
- 0.007001088142395019,
- 0.0070266880989074704,
- 0.007038976192474365,
- 0.006937600135803222,
- 0.00693555212020874,
- 0.0067983360290527345,
- 0.006847487926483154,
- 0.007124991893768311,
- 0.006929408073425293,
- 0.006923264026641846,
- 0.006913023948669434,
- 0.006837247848510742,
- 0.0068618240356445315,
- 0.006813727855682373,
- 0.006874112129211426,
- 0.0068689918518066405,
- 0.006814720153808594,
- 0.006811647891998291,
- 0.0068280320167541505,
- 0.006812672138214112,
- 0.006776832103729248,
- 0.006834176063537598,
- 0.006864895820617676,
- 0.006780928134918213,
- 0.006892543792724609,
- 0.006897664070129395,
- 0.006875135898590088,
- 0.0067870721817016606,
- 0.006814720153808594,
- 0.006832159996032715,
- 0.006846464157104492,
- 0.006875135898590088,
- 0.0068986878395080565,
- 0.0068853759765625,
- 0.006859776020050049,
- 0.006907904148101806,
- 0.00692633581161499,
- 0.0068351998329162595,
- 0.00679423999786377,
- 0.006817791938781738
+ 0.008054783821105957,
+ 0.0078438401222229,
+ 0.007836671829223632,
+ 0.007786496162414551,
+ 0.007877632141113282,
+ 0.007773248195648194,
+ 0.007872511863708496,
+ 0.007829504013061523,
+ 0.007906303882598878,
+ 0.007904255867004394,
+ 0.007786496162414551,
+ 0.0077281279563903805,
+ 0.007629824161529541,
+ 0.007712768077850342,
+ 0.007815167903900147,
+ 0.007934944152832031,
+ 0.00789299201965332,
+ 0.008138784408569336,
+ 0.008210432052612305,
+ 0.007731200218200684,
+ 0.007708672046661377,
+ 0.007813119888305664,
+ 0.007803872108459473,
+ 0.007772160053253174,
+ 0.00781004810333252,
+ 0.007774208068847656,
+ 0.007705599784851074,
+ 0.007408639907836914,
+ 0.007279615879058838,
+ 0.007275519847869873,
+ 0.007255040168762207,
+ 0.007203839778900147,
+ 0.00719155216217041,
+ 0.007211008071899414,
+ 0.007175168037414551,
+ 0.007186431884765625,
+ 0.0085350399017334,
+ 0.008339455604553223,
+ 0.007288832187652588,
+ 0.007363584041595459,
+ 0.007270400047302246,
+ 0.007257120132446289,
+ 0.0072130560874938965,
+ 0.007184383869171143,
+ 0.0072499198913574215,
+ 0.0072468481063842774,
+ 0.007373824119567871,
+ 0.007241727828979493,
+ 0.007201791763305664,
+ 0.007222271919250488,
+ 0.007262207984924316,
+ 0.007222271919250488,
+ 0.007189504146575928,
+ 0.007243711948394776,
+ 0.0071792640686035155,
+ 0.007157760143280029,
+ 0.007209983825683594,
+ 0.0072837119102478025,
+ 0.007194623947143554,
+ 0.00719046401977539,
+ 0.007168000221252442,
+ 0.007235583782196045,
+ 0.007169023990631103,
+ 0.0071792640686035155,
+ 0.007184383869171143,
+ 0.007186431884765625,
+ 0.007203839778900147,
+ 0.007215104103088379,
+ 0.00724070405960083,
+ 0.007172160148620606,
+ 0.007219200134277344,
+ 0.007197696208953858,
+ 0.007197696208953858,
+ 0.00719046401977539,
+ 0.007205952167510987,
+ 0.007245823860168457,
+ 0.007279615879058838,
+ 0.0071905279159545895,
+ 0.007226431846618652,
+ 0.007196671962738037,
+ 0.007189504146575928,
+ 0.007163871765136718,
+ 0.007210015773773193,
+ 0.007234560012817383,
+ 0.007358463764190673,
+ 0.00733900785446167,
+ 0.007796735763549805,
+ 0.007671807765960693,
+ 0.007378943920135498,
+ 0.007372799873352051,
+ 0.007404543876647949,
+ 0.007287807941436767,
+ 0.0073471999168395995,
+ 0.007394303798675537,
+ 0.007633920192718506,
+ 0.007411712169647216,
+ 0.007445504188537597,
+ 0.0074967360496521,
+ 0.007529471874237061,
+ 0.007478271961212158,
+ 0.0073062400817871095,
+ 0.007262207984924316,
+ 0.007244800090789795,
+ 0.007238656044006348,
+ 0.00725708818435669,
+ 0.007237631797790528,
+ 0.00724889612197876,
+ 0.007309311866760254,
+ 0.007243775844573975,
+ 0.0072325119972229,
+ 0.007243775844573975,
+ 0.007250944137573242,
+ 0.007228415966033935,
+ 0.007189504146575928,
+ 0.0072837119102478025,
+ 0.0072468481063842774,
+ 0.007203839778900147,
+ 0.0071823358535766605,
+ 0.007196671962738037,
+ 0.007265279769897461,
+ 0.007225344181060791,
+ 0.007269375801086426,
+ 0.00719974422454834,
+ 0.007237631797790528,
+ 0.007175168037414551,
+ 0.007219200134277344,
+ 0.007203839778900147,
+ 0.0073697280883789065,
+ 0.007237631797790528,
+ 0.007476223945617676,
+ 0.007269343852996826,
+ 0.007417856216430664,
+ 0.007299071788787842,
+ 0.007231488227844239,
+ 0.007181312084197998
   ]
   },
   "throughput": {
   "unit": "samples/s",
- "value": 143.1951301582104
+ "value": 135.12196021822453
   },
   "energy": {
   "unit": "kWh",
- "cpu": 8.065982723922962e-08,
- "ram": 4.409877346695881e-08,
- "gpu": 1.6462409239455742e-07,
- "total": 2.893826931007458e-07
+ "cpu": 8.229956506226121e-08,
+ "ram": 4.493005148389291e-08,
+ "gpu": 1.7017638135172345e-07,
+ "total": 2.974059978978776e-07
   },
   "efficiency": {
   "unit": "samples/kWh",
- "value": 3455631.6733560134
+ "value": 3362406.969153921
   }
   }
   }