IlyasMoutawwakil committed
Commit 7af033b
1 Parent(s): ab2bd5e

Upload cpu_inference_transformers_text-generation_openai-community/gpt2/benchmark.json with huggingface_hub
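For context, the commit message describes an upload made with the huggingface_hub client. A minimal sketch of that kind of call is shown below; the repo_id, repo_type, and local file path are illustrative assumptions, not values taken from this commit, while path_in_repo and the commit message mirror the file path above.

# Sketch only: upload a benchmark result file with huggingface_hub.
# repo_id, repo_type, and the local path are placeholders (assumptions).
from huggingface_hub import HfApi

api = HfApi()  # authenticates via `huggingface-cli login` or the HF_TOKEN env var
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file (placeholder path)
    path_in_repo="cpu_inference_transformers_text-generation_openai-community/gpt2/benchmark.json",
    repo_id="<namespace>/<repo-name>",  # placeholder; the actual target repo is not shown here
    repo_type="dataset",                # assumption: benchmark results stored in a dataset repo
    commit_message="Upload cpu_inference_transformers_text-generation_openai-community/gpt2/benchmark.json with huggingface_hub",
)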

cpu_inference_transformers_text-generation_openai-community/gpt2/benchmark.json CHANGED
@@ -3,7 +3,7 @@
  "name": "cpu_inference_transformers_text-generation_openai-community/gpt2",
  "backend": {
  "name": "pytorch",
- "version": "2.4.0+cpu",
+ "version": "2.4.1+cpu",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "text-generation",
  "library": "transformers",
@@ -80,7 +80,7 @@
  "processor": "x86_64",
  "python_version": "3.10.14",
  "optimum_benchmark_version": "0.4.0",
- "optimum_benchmark_commit": "505086556c6e125f92759cd19b806135534e5ab3",
+ "optimum_benchmark_commit": "ea76e356b5c355783ee27d2d429a010ded791f8b",
  "transformers_version": "4.44.2",
  "transformers_commit": null,
  "accelerate_version": "0.34.0",
@@ -99,7 +99,7 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 1120.878592,
+ "max_ram": 1120.366592,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -108,31 +108,31 @@
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 4.883996857999989,
- "mean": 4.883996857999989,
+ "total": 4.918687814999998,
+ "mean": 4.918687814999998,
  "stdev": 0.0,
- "p50": 4.883996857999989,
- "p90": 4.883996857999989,
- "p95": 4.883996857999989,
- "p99": 4.883996857999989,
+ "p50": 4.918687814999998,
+ "p90": 4.918687814999998,
+ "p95": 4.918687814999998,
+ "p99": 4.918687814999998,
  "values": [
- 4.883996857999989
+ 4.918687814999998
  ]
  },
  "throughput": null,
  "energy": {
  "unit": "kWh",
- "cpu": 6.890721187777728e-05,
- "ram": 2.880146288289732e-06,
+ "cpu": 6.851395802222209e-05,
+ "ram": 2.8637100374601876e-06,
  "gpu": 0,
- "total": 7.178735816606702e-05
+ "total": 7.137766805968227e-05
  },
  "efficiency": null
  },
  "prefill": {
  "memory": {
  "unit": "MB",
- "max_ram": 974.229504,
+ "max_ram": 974.381056,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -141,51 +141,51 @@
  "latency": {
  "unit": "s",
  "count": 15,
- "total": 0.629977077999996,
- "mean": 0.0419984718666664,
- "stdev": 0.001218402626521423,
- "p50": 0.04163306300000613,
- "p90": 0.043583808799996855,
- "p95": 0.04479018890001214,
- "p99": 0.04512662738001779,
+ "total": 0.6524460319998866,
+ "mean": 0.04349640213332577,
+ "stdev": 0.0008420335523800986,
+ "p50": 0.043470724000030714,
+ "p90": 0.04431366519997937,
+ "p95": 0.044482252299962964,
+ "p99": 0.044766217659976064,
  "values": [
- 0.04199414699999693,
- 0.041580654999989974,
- 0.0419893780000109,
- 0.04194373200002133,
- 0.04189386000001605,
- 0.041213037999995095,
- 0.04163306300000613,
- 0.041117950999989716,
- 0.04460995400000911,
- 0.04204459099997848,
- 0.04145615199999497,
- 0.04521073700001921,
- 0.040447095999979865,
- 0.04127034499998672,
- 0.04157237900000155
+ 0.04369766999997182,
+ 0.043470724000030714,
+ 0.044266618999984075,
+ 0.043419276999998146,
+ 0.04433012799995595,
+ 0.04330064499998798,
+ 0.04483720899997934,
+ 0.04321312100000796,
+ 0.043963650000023335,
+ 0.0431257770000002,
+ 0.0442889710000145,
+ 0.04284532099995886,
+ 0.04404234800000495,
+ 0.041632043999982216,
+ 0.04201252799998656
  ]
  },
  "throughput": {
  "unit": "tokens/s",
- "value": 47.620780259563965
+ "value": 45.98081454805325
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.6280356150462874e-06,
- "ram": 6.803809038718375e-08,
+ "cpu": 1.6407745002315539e-06,
+ "ram": 6.85707082273642e-08,
  "gpu": 0.0,
- "total": 1.6960737054334712e-06
+ "total": 1.709345208458918e-06
  },
  "efficiency": {
  "unit": "tokens/kWh",
- "value": 1179194.0371417133
+ "value": 1170038.6733485658
  }
  },
  "decode": {
  "memory": {
  "unit": "MB",
- "max_ram": 975.015936,
+ "max_ram": 974.381056,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -194,45 +194,45 @@
  "latency": {
  "unit": "s",
  "count": 15,
- "total": 0.39445368399998415,
- "mean": 0.02629691226666561,
- "stdev": 0.0006256829530236599,
- "p50": 0.026397308000014164,
- "p90": 0.026994013600005928,
- "p95": 0.0272245181000045,
- "p99": 0.027514792420002435,
+ "total": 0.39831485600007,
+ "mean": 0.026554323733338,
+ "stdev": 0.0006085477518003957,
+ "p50": 0.026404254999988552,
+ "p90": 0.027247266800009128,
+ "p95": 0.02754670319999377,
+ "p99": 0.028037500640000417,
  "values": [
- 0.026040761999979622,
- 0.026291319999984353,
- 0.026881513000006407,
- 0.02662697699997807,
- 0.026439295999978185,
- 0.02706901400000561,
- 0.026443833999991284,
- 0.02758736100000192,
- 0.026030862999988358,
- 0.026397308000014164,
- 0.026681409000019585,
- 0.025361021999998457,
- 0.02584686900001998,
- 0.025546748000010666,
- 0.025209388000007493
+ 0.026663681999991695,
+ 0.026277726999978768,
+ 0.026314916999979232,
+ 0.026237342000001718,
+ 0.027192503000037505,
+ 0.027283775999990212,
+ 0.026454509000018334,
+ 0.026404254999988552,
+ 0.026972782000029838,
+ 0.02643056400000887,
+ 0.026195232000020496,
+ 0.028160200000002078,
+ 0.026207855999984986,
+ 0.02592695900000308,
+ 0.02559255200003463
  ]
  },
  "throughput": {
  "unit": "tokens/s",
- "value": 38.02727825455067
+ "value": 37.65865062285642
  },
  "energy": {
  "unit": "kWh",
- "cpu": 9.820147493981547e-07,
- "ram": 4.104005464582367e-08,
+ "cpu": 9.79518341990726e-07,
+ "ram": 4.093627066724052e-08,
  "gpu": 0.0,
- "total": 1.0230548040439783e-06
+ "total": 1.0204546126579664e-06
  },
  "efficiency": {
  "unit": "tokens/kWh",
- "value": 977464.7419152461
+ "value": 979955.3920338617
  }
  },
  "per_token": {
@@ -240,34 +240,34 @@
  "latency": {
  "unit": "s",
  "count": 15,
- "total": 0.3888606839999511,
- "mean": 0.02592404559999674,
- "stdev": 0.0005828214847466364,
- "p50": 0.02605945700000234,
- "p90": 0.026641749399999526,
- "p95": 0.026804358100005744,
- "p99": 0.02698054361999823,
+ "total": 0.3926160600001367,
+ "mean": 0.026174404000009113,
+ "stdev": 0.0005707680242470367,
+ "p50": 0.0260541279999984,
+ "p90": 0.026847772800033455,
+ "p95": 0.027104114400037814,
+ "p99": 0.027536192480040425,
  "values": [
- 0.02566586099999313,
- 0.025917421000002605,
- 0.026539413999984163,
- 0.026284987999986242,
- 0.026094270999976743,
- 0.026709973000009768,
- 0.02610173499999746,
- 0.02702458999999635,
- 0.02568422499999201,
- 0.02605945700000234,
- 0.026112075000014556,
- 0.025035683999988123,
- 0.025518817000005356,
- 0.025227572000005694,
- 0.024884600999996564
+ 0.026269812999998976,
+ 0.0259137639999949,
+ 0.02594395099998792,
+ 0.02585419299998648,
+ 0.02681046600002901,
+ 0.026872644000036416,
+ 0.026088843000025008,
+ 0.0260541279999984,
+ 0.026571117999992566,
+ 0.026072902999999314,
+ 0.025813575000029232,
+ 0.02764421200004108,
+ 0.025855784999976095,
+ 0.025595787000042947,
+ 0.02525487799999837
  ]
  },
  "throughput": {
  "unit": "tokens/s",
- "value": 38.57422623882924
+ "value": 38.205263432155014
  },
  "energy": null,
  "efficiency": null