IlyasMoutawwakil (HF staff) committed · Commit f67d14d · verified · 1 Parent(s): 1eb5bdc

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

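The commit message notes the file was uploaded "with huggingface_hub". For context, below is a minimal sketch of how such an upload is typically done via HfApi.upload_file; the local path, target repo_id, and repo_type are assumptions for illustration, not values taken from this commit.

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local report file (assumed path)
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="<namespace>/<repo>",      # placeholder; the target repo is not shown on this page
    repo_type="dataset",               # assumption: benchmark dumps are usually pushed to dataset repos
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub",
)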
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -104,7 +104,7 @@
         "forward": {
             "memory": {
                 "unit": "MB",
-                "max_ram": 975.36,
+                "max_ram": 975.2576,
                 "max_global_vram": 1434.976256,
                 "max_process_vram": 0.0,
                 "max_reserved": 794.820608,
@@ -112,103 +112,101 @@
             },
             "latency": {
                 "unit": "s",
-                "count": 72,
-                "total": 1.0089079332351683,
-                "mean": 0.014012610183821784,
-                "stdev": 0.0002860015472250758,
-                "p50": 0.013942319869995118,
-                "p90": 0.014350130844116212,
-                "p95": 0.014402969598770141,
-                "p99": 0.015094637765884404,
+                "count": 70,
+                "total": 1.0083922910690306,
+                "mean": 0.014405604158129012,
+                "stdev": 0.0004794734939388193,
+                "p50": 0.014235648155212402,
+                "p90": 0.014927871894836426,
+                "p95": 0.015397478246688842,
+                "p99": 0.015928831815719607,
                 "values": [
-                    0.014397439956665039,
-                    0.01554758358001709,
-                    0.014909631729125976,
-                    0.014367744445800782,
-                    0.013990912437438965,
-                    0.013880319595336914,
-                    0.013815808296203613,
-                    0.013830143928527832,
-                    0.013735936164855958,
-                    0.014005248069763183,
-                    0.014096384048461913,
-                    0.013790207862854004,
-                    0.013981696128845214,
-                    0.014025792121887207,
-                    0.014141440391540527,
-                    0.01396019172668457,
-                    0.013847552299499511,
-                    0.01385267162322998,
-                    0.013780991554260253,
-                    0.013757439613342285,
-                    0.014121983528137207,
-                    0.013889535903930664,
-                    0.014091391563415528,
-                    0.014026752471923828,
-                    0.013969408035278321,
-                    0.014095359802246094,
-                    0.014002176284790039,
-                    0.01406873607635498,
-                    0.013717503547668456,
-                    0.014034943580627441,
-                    0.014368767738342286,
-                    0.013854720115661622,
-                    0.013891551971435547,
+                    0.016570432662963867,
+                    0.015566847801208495,
+                    0.01549516773223877,
+                    0.015640576362609862,
+                    0.014824447631835937,
+                    0.014640128135681153,
+                    0.014393407821655274,
+                    0.014565376281738282,
+                    0.014671872138977051,
+                    0.014258175849914552,
+                    0.014014464378356933,
+                    0.014052351951599122,
+                    0.014145536422729492,
+                    0.014072832107543945,
+                    0.014042112350463867,
+                    0.014030816078186036,
+                    0.014020607948303223,
+                    0.013926400184631347,
+                    0.013937664031982423,
                     0.01398681640625,
-                    0.01378713607788086,
-                    0.014253055572509766,
-                    0.014125056266784668,
-                    0.014178112030029296,
-                    0.013849599838256836,
-                    0.013847552299499511,
-                    0.013737983703613281,
-                    0.01378713607788086,
-                    0.013812735557556152,
-                    0.013742079734802246,
-                    0.014217215538024902,
-                    0.014356479644775391,
-                    0.014439328193664551,
-                    0.014409728050231933,
+                    0.01417523193359375,
+                    0.014321663856506347,
+                    0.014213120460510254,
+                    0.013982720375061035,
+                    0.01415167999267578,
+                    0.01397657585144043,
+                    0.013947903633117676,
+                    0.01387929630279541,
+                    0.01417625617980957,
+                    0.01388748836517334,
+                    0.013949952125549316,
+                    0.013922304153442382,
+                    0.01470464038848877,
+                    0.015278079986572265,
+                    0.015205375671386719,
+                    0.014920703887939453,
+                    0.014812128067016601,
+                    0.01499238395690918,
+                    0.014680064201354981,
+                    0.014793760299682617,
+                    0.014520319938659668,
+                    0.014552063941955566,
+                    0.014633983612060546,
+                    0.014803999900817872,
+                    0.014805983543395997,
+                    0.01435750389099121,
+                    0.014278656005859374,
+                    0.014236672401428223,
+                    0.014279680252075195,
+                    0.014299136161804199,
+                    0.014248959541320801,
+                    0.014244864463806153,
+                    0.014178303718566895,
+                    0.014216192245483398,
+                    0.014160863876342774,
+                    0.014211071968078613,
+                    0.01466163158416748,
+                    0.014203904151916504,
+                    0.014222335815429688,
+                    0.014229536056518555,
+                    0.014234623908996581,
+                    0.014224384307861328,
+                    0.014241791725158692,
+                    0.014213120460510254,
+                    0.014206975936889648,
+                    0.014255104064941406,
+                    0.014222335815429688,
+                    0.014262271881103515,
                     0.014169088363647461,
-                    0.014233599662780762,
-                    0.014256128311157227,
-                    0.014195712089538574,
-                    0.01386780834197998,
-                    0.01390988826751709,
-                    0.013906847953796387,
-                    0.013820927619934082,
-                    0.014292991638183594,
-                    0.013759488105773926,
-                    0.013771776199340821,
-                    0.013819775581359862,
-                    0.013884415626525879,
-                    0.0138504638671875,
-                    0.013733887672424316,
-                    0.013883392333984374,
-                    0.01391312026977539,
-                    0.01399500846862793,
-                    0.01377184009552002,
-                    0.013825984001159667,
-                    0.013757439613342285,
-                    0.013996031761169434,
-                    0.013989888191223144,
-                    0.013924448013305665
+                    0.014189567565917969
                 ]
             },
             "throughput": {
                 "unit": "samples/s",
-                "value": 71.36429165456603
+                "value": 69.41742873281055
             },
             "energy": {
                 "unit": "kWh",
-                "cpu": 1.6502592666649527e-07,
-                "ram": 9.013577984357349e-08,
-                "gpu": 3.5299410955555003e-07,
-                "total": 6.081558160656188e-07
+                "cpu": 1.699695440511855e-07,
+                "ram": 9.288018814394101e-08,
+                "gpu": 3.4962726382857373e-07,
+                "total": 6.124769960237002e-07
             },
             "efficiency": {
                 "unit": "samples/kWh",
-                "value": 1644315.4428241167
+                "value": 1632714.3819150135
             }
         }
     }
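The derived metrics in the new report are consistent with the raw latency and energy figures: mean = total / count, throughput ≈ 1 / mean, and efficiency ≈ 1 / total energy per sample. A minimal sketch to re-check them from a downloaded copy of the file; the local path and the exact nesting of the "forward" section in the JSON are assumptions.

import json

with open("benchmark.json") as f:            # assumed local path to the downloaded report
    report = json.load(f)

forward = report["report"]["forward"]         # assumed nesting; adjust if "forward" sits elsewhere
latency = forward["latency"]
energy = forward["energy"]

mean = latency["total"] / latency["count"]    # 1.0083922910690306 / 70 ≈ 0.0144056 s
throughput = 1.0 / mean                       # ≈ 69.42 samples/s, matching "throughput.value"
efficiency = 1.0 / energy["total"]            # ≈ 1.63e6 samples/kWh, matching "efficiency.value"
print(mean, throughput, efficiency)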