Commit 24a188e (verified) by IlyasMoutawwakil · 1 parent: 92de66a

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

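The commit message names huggingface_hub as the upload path. As a rough illustration only (the repository id, repo type, and local path below are placeholders, not taken from this commit), such a benchmark file can be pushed with HfApi.upload_file:

# Hedged sketch: repo_id, repo_type, and the local path are hypothetical placeholders;
# only path_in_repo and the commit message mirror this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file (placeholder path)
    path_in_repo=(
        "cuda_inference_transformers_token-classification_microsoft/"
        "deberta-v3-base/benchmark.json"
    ),
    repo_id="<namespace>/<benchmarks-repo>",  # placeholder, not from this commit
    repo_type="dataset",                      # assumption; adjust to the actual repo type
    commit_message=(
        "Upload cuda_inference_transformers_token-classification_microsoft/"
        "deberta-v3-base/benchmark.json with huggingface_hub"
    ),
)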
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -104,7 +104,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 975.54432,
+            "max_ram": 975.36,
             "max_global_vram": 1434.976256,
             "max_process_vram": 0.0,
             "max_reserved": 794.820608,
@@ -112,101 +112,103 @@
         },
         "latency": {
             "unit": "s",
-            "count": 70,
-            "total": 1.002221823692322,
-            "mean": 0.014317454624176024,
-            "stdev": 0.0005179854292391007,
-            "p50": 0.014136191844940187,
-            "p90": 0.014872780895233155,
-            "p95": 0.015156429052352906,
-            "p99": 0.015988807916641236,
+            "count": 72,
+            "total": 1.0089079332351683,
+            "mean": 0.014012610183821784,
+            "stdev": 0.0002860015472250758,
+            "p50": 0.013942319869995118,
+            "p90": 0.014350130844116212,
+            "p95": 0.014402969598770141,
+            "p99": 0.015094637765884404,
             "values": [
-                0.015131648063659669,
-                0.016269311904907227,
-                0.01586278438568115,
-                0.014862336158752442,
-                0.014535552024841308,
-                0.014609312057495117,
-                0.01455513572692871,
-                0.014568448066711426,
-                0.014729215621948242,
-                0.01496678352355957,
-                0.014606335639953612,
-                0.014484479904174804,
-                0.014614527702331542,
-                0.013914079666137695,
-                0.01385971164703369,
-                0.013853695869445801,
-                0.013822976112365723,
-                0.0141146879196167,
-                0.013762432098388672,
-                0.013876223564147949,
-                0.013703264236450196,
-                0.013898752212524413,
-                0.013859840393066406,
-                0.014038016319274902,
-                0.013897727966308594,
-                0.013850624084472657,
-                0.014021632194519042,
-                0.013942784309387207,
-                0.013697024345397948,
-                0.013749247550964355,
-                0.013845503807067871,
-                0.013839424133300781,
-                0.014382080078125,
-                0.014501888275146485,
-                0.014530559539794922,
-                0.01475164794921875,
-                0.015004672050476075,
-                0.014751744270324708,
-                0.014507007598876954,
-                0.01459712028503418,
-                0.014449664115905762,
-                0.0144650239944458,
-                0.014771200180053711,
-                0.015176704406738281,
-                0.014560256004333496,
-                0.014387200355529785,
-                0.01437484836578369,
-                0.014741503715515136,
-                0.014317567825317384,
-                0.015812607765197755,
-                0.014359552383422852,
-                0.014031744003295899,
-                0.014008319854736329,
-                0.014081024169921874,
-                0.013979647636413574,
-                0.01409552001953125,
-                0.014419903755187988,
-                0.0140697603225708,
-                0.014157695770263672,
-                0.014027775764465332,
-                0.014071807861328126,
-                0.013937664031982423,
-                0.014016511917114258,
-                0.013731840133666993,
-                0.01387827205657959,
-                0.013939840316772461,
-                0.014280735969543457,
-                0.013897600173950196,
-                0.013896703720092773,
-                0.013913087844848633
+                0.014397439956665039,
+                0.01554758358001709,
+                0.014909631729125976,
+                0.014367744445800782,
+                0.013990912437438965,
+                0.013880319595336914,
+                0.013815808296203613,
+                0.013830143928527832,
+                0.013735936164855958,
+                0.014005248069763183,
+                0.014096384048461913,
+                0.013790207862854004,
+                0.013981696128845214,
+                0.014025792121887207,
+                0.014141440391540527,
+                0.01396019172668457,
+                0.013847552299499511,
+                0.01385267162322998,
+                0.013780991554260253,
+                0.013757439613342285,
+                0.014121983528137207,
+                0.013889535903930664,
+                0.014091391563415528,
+                0.014026752471923828,
+                0.013969408035278321,
+                0.014095359802246094,
+                0.014002176284790039,
+                0.01406873607635498,
+                0.013717503547668456,
+                0.014034943580627441,
+                0.014368767738342286,
+                0.013854720115661622,
+                0.013891551971435547,
+                0.01398681640625,
+                0.01378713607788086,
+                0.014253055572509766,
+                0.014125056266784668,
+                0.014178112030029296,
+                0.013849599838256836,
+                0.013847552299499511,
+                0.013737983703613281,
+                0.01378713607788086,
+                0.013812735557556152,
+                0.013742079734802246,
+                0.014217215538024902,
+                0.014356479644775391,
+                0.014439328193664551,
+                0.014409728050231933,
+                0.014169088363647461,
+                0.014233599662780762,
+                0.014256128311157227,
+                0.014195712089538574,
+                0.01386780834197998,
+                0.01390988826751709,
+                0.013906847953796387,
+                0.013820927619934082,
+                0.014292991638183594,
+                0.013759488105773926,
+                0.013771776199340821,
+                0.013819775581359862,
+                0.013884415626525879,
+                0.0138504638671875,
+                0.013733887672424316,
+                0.013883392333984374,
+                0.01391312026977539,
+                0.01399500846862793,
+                0.01377184009552002,
+                0.013825984001159667,
+                0.013757439613342285,
+                0.013996031761169434,
+                0.013989888191223144,
+                0.013924448013305665
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 69.84481713051355
+            "value": 71.36429165456603
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.6464468324202565e-07,
-            "ram": 8.960731809889843e-08,
-            "gpu": 3.7328416468492946e-07,
-            "total": 6.275361660258535e-07
+            "cpu": 1.6502592666649527e-07,
+            "ram": 9.013577984357349e-08,
+            "gpu": 3.5299410955555003e-07,
+            "total": 6.081558160656188e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 1593533.6545349031
+            "value": 1644315.4428241167
         }
     }
 }
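As a quick sanity check on the updated numbers, the derived fields are consistent with the raw measurements: throughput matches count / total latency, mean matches total / count, and efficiency matches 1 / total energy. These relationships are inferred from the values in this diff, not taken from the benchmark's source code.

# Consistency check on the new "forward" metrics above (formulas inferred
# from the diff values themselves, not from the benchmark implementation).
count = 72
total_latency_s = 1.0089079332351683
total_energy_kwh = 6.081558160656188e-07

print(count / total_latency_s)   # ~71.3643 samples/s    -> "throughput.value"
print(total_latency_s / count)   # ~0.0140126 s          -> "latency.mean"
print(1.0 / total_energy_kwh)    # ~1644315.4 samples/kWh -> "efficiency.value"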