IlyasMoutawwakil (HF staff) committed (verified)
Commit: b37ab57 · Parent(s): 26af5c1

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -104,7 +104,7 @@
 "forward": {
 "memory": {
 "unit": "MB",
-"max_ram": 975.536128,
+"max_ram": 975.118336,
 "max_global_vram": 1434.976256,
 "max_process_vram": 0.0,
 "max_reserved": 794.820608,
@@ -112,105 +112,102 @@
 },
 "latency": {
 "unit": "s",
-"count": 74,
-"total": 1.006688448905945,
-"mean": 0.013603897958188444,
-"stdev": 0.0005611513237784378,
-"p50": 0.013390847682952881,
-"p90": 0.014036150264739991,
-"p95": 0.014441523170471187,
-"p99": 0.015961723136901855,
+"count": 71,
+"total": 0.9987029781341552,
+"mean": 0.014066239128650074,
+"stdev": 0.00043416708567492734,
+"p50": 0.01406873607635498,
+"p90": 0.014366720199584961,
+"p95": 0.015098880290985106,
+"p99": 0.015519538974761963,
 "values": [
-0.015607808113098144,
-0.016265216827392577,
-0.015849472045898438,
-0.014922752380371093,
-0.0135600004196167,
-0.013382592201232911,
-0.013385727882385253,
-0.013381631851196289,
-0.013368320465087891,
-0.013326335906982421,
-0.013149184226989746,
-0.013271039962768554,
-0.013327360153198242,
-0.013348863601684571,
-0.013282303810119628,
-0.01336627197265625,
-0.013387840270996093,
-0.013327360153198242,
-0.01336627197265625,
-0.013379584312438965,
-0.013418496131896973,
-0.013467647552490235,
-0.013283328056335449,
-0.01337548828125,
-0.013392895698547362,
-0.0133887996673584,
-0.013334527969360351,
-0.013218815803527833,
-0.013347840309143067,
-0.013365247726440429,
-0.013511679649353027,
-0.013481951713562012,
-0.013541376113891602,
-0.013452287673950195,
+0.014836735725402832,
+0.01559552001953125,
+0.015284223556518555,
+0.014993408203125,
+0.013825023651123047,
+0.013669376373291015,
+0.013617152214050294,
+0.013675519943237305,
+0.013592576026916504,
+0.013620223999023438,
+0.013559807777404785,
 0.013584383964538574,
-0.01418239974975586,
+0.013623295783996582,
+0.01347481632232666,
+0.01347481632232666,
+0.01346457576751709,
+0.013469696044921875,
+0.0135731201171875,
+0.013609984397888183,
+0.013609984397888183,
+0.013613056182861329,
+0.013643775939941406,
+0.01406771183013916,
+0.014104576110839843,
+0.014062591552734375,
+0.014073856353759765,
+0.014053376197814941,
+0.013615103721618652,
+0.014120960235595703,
+0.014071807861328126,
+0.014063615798950196,
+0.014047231674194336,
+0.014060544013977052,
+0.014063615798950196,
+0.014045184135437011,
+0.01548697566986084,
+0.015204352378845215,
+0.014366720199584961,
+0.014131199836730958,
+0.014138367652893067,
 0.014164992332458496,
-0.01397862434387207,
-0.01358028793334961,
-0.013502464294433594,
-0.01349120044708252,
-0.01346560001373291,
-0.013541376113891602,
-0.013716416358947754,
-0.013805567741394043,
-0.013428735733032226,
-0.01334169578552246,
-0.013479935646057128,
-0.01338265609741211,
-0.014037983894348145,
-0.013996031761169434,
-0.014031871795654297,
-0.013830143928527832,
-0.013839360237121581,
-0.013851648330688476,
-0.01383340835571289,
-0.014142463684082032,
-0.01369491195678711,
-0.013360128402709961,
-0.013262847900390624,
-0.013398015975952148,
-0.013355008125305176,
-0.01335807991027832,
-0.013196288108825683,
-0.013299712181091309,
-0.013404159545898438,
-0.01335091209411621,
-0.013461503982543945,
-0.013149184226989746,
-0.013232128143310547,
-0.013344767570495606,
-0.013198335647583008,
-0.013325311660766602,
-0.01325158405303955
+0.014070783615112305,
+0.014120960235595703,
+0.014060544013977052,
+0.014064640045166015,
+0.014106623649597168,
+0.014082048416137695,
+0.01409331226348877,
+0.014247936248779297,
+0.01417728042602539,
+0.014087167739868164,
+0.014135295867919923,
+0.014106623649597168,
+0.014108672142028808,
+0.014096384048461913,
+0.014058496475219727,
+0.014575615882873535,
+0.014111743927001954,
+0.01405951976776123,
+0.014119935989379882,
+0.01406873607635498,
+0.014033920288085937,
+0.014130175590515137,
+0.0140697603225708,
+0.014119872093200684,
+0.014065664291381836,
+0.014052351951599122,
+0.014108672142028808,
+0.014097408294677734,
+0.014042048454284669,
+0.01407692813873291
 ]
 },
 "throughput": {
 "unit": "samples/s",
-"value": 73.50834320233055
+"value": 71.09220814845973
 },
 "energy": {
 "unit": "kWh",
-"cpu": 1.659098874639582e-07,
-"ram": 9.065002957413526e-08,
-"gpu": 3.7648949872222046e-07,
-"total": 6.330494157603139e-07
+"cpu": 1.647783535314195e-07,
+"ram": 9.005688865120949e-08,
+"gpu": 3.438351516111135e-07,
+"total": 5.986703937937426e-07
 },
 "efficiency": {
 "unit": "samples/kWh",
-"value": 1579655.5136204744
+"value": 1670368.219919901
 }
 }
 }
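For reference, the summary fields in this benchmark file can be recomputed from the raw per-iteration measurements: "mean" is "total" / "count" and "throughput" is "count" / "total" in samples/s. The snippet below is a minimal sketch of that check in plain Python (not the optimum-benchmark implementation); the truncated values list and the sample-vs-population stdev convention are assumptions for illustration.

import statistics

# Per-iteration forward latencies (seconds) from the new revision of benchmark.json.
# Only the first few values are repeated here for brevity; in practice load the full list, e.g.:
#   values = json.load(open("benchmark.json"))["forward"]["latency"]["values"]
values = [0.014836735725402832, 0.01559552001953125, 0.015284223556518555]  # ... 71 entries in total

count = len(values)               # "count"
total = sum(values)               # "total" (seconds)
mean = total / count              # "mean"
stdev = statistics.stdev(values)  # "stdev" (sample stdev; the file's convention may differ)
throughput = count / total        # "throughput.value" in samples/s

print(count, total, mean, stdev, throughput)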