IlyasMoutawwakil (HF staff) committed (verified)
Commit 770abba · 1 Parent(s): 5125741

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

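The commit message states the file was pushed with huggingface_hub. Below is a minimal sketch of what such an upload typically looks like via HfApi.upload_file; the local file path, repo_id, and repo_type are placeholders/assumptions, since only the path_in_repo and the library itself are given by the commit message.

from huggingface_hub import HfApi

api = HfApi()

# Upload the benchmark report to the Hub in a single commit.
# NOTE: local path, repo_id and repo_type are assumed for illustration;
# only path_in_repo and the use of huggingface_hub come from the commit message.
api.upload_file(
    path_or_fileobj="benchmark.json",  # assumed local copy of the report
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="<namespace>/<repo>",      # placeholder, not shown in this view
    repo_type="dataset",               # assumption; adjust to the actual repo type
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub",
)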
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -104,7 +104,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 975.58528,
+ "max_ram": 975.450112,
  "max_global_vram": 1434.976256,
  "max_process_vram": 0.0,
  "max_reserved": 794.820608,
@@ -113,100 +113,100 @@
  "latency": {
  "unit": "s",
  "count": 70,
- "total": 1.0000887060165404,
- "mean": 0.014286981514522006,
- "stdev": 0.0004046909014813206,
- "p50": 0.014171648025512695,
- "p90": 0.014653542804718018,
- "p95": 0.01497089729309082,
- "p99": 0.01573415967941284,
+ "total": 1.010673343658447,
+ "mean": 0.014438190623692103,
+ "stdev": 0.0004596305556127926,
+ "p50": 0.01428121566772461,
+ "p90": 0.014581551837921142,
+ "p95": 0.015744717168807983,
+ "p99": 0.016232212028503418,
  "values": [
- 0.01568051242828369,
- 0.0158535680770874,
- 0.015467519760131837,
- 0.014728192329406739,
- 0.014363648414611817,
- 0.014127103805541993,
- 0.014821375846862793,
- 0.014625791549682618,
- 0.014441472053527832,
- 0.014082048416137695,
- 0.014205951690673829,
- 0.014115839958190919,
- 0.014645248413085938,
- 0.014323712348937988,
- 0.013938688278198242,
- 0.01415782356262207,
- 0.014559231758117675,
+ 0.01617919921875,
+ 0.016350208282470705,
+ 0.016087039947509766,
+ 0.01584332847595215,
+ 0.014504960060119629,
+ 0.014267328262329102,
+ 0.014301183700561524,
+ 0.014335935592651367,
+ 0.014304256439208985,
+ 0.01440665626525879,
  0.014302207946777343,
- 0.014554112434387208,
- 0.014363648414611817,
- 0.014498815536499024,
- 0.013945856094360352,
- 0.014183423995971679,
- 0.014105600357055664,
- 0.014496767997741699,
- 0.014410719871520997,
- 0.014520319938659668,
- 0.014880767822265625,
- 0.014486528396606446,
- 0.014500864028930664,
- 0.01439027214050293,
- 0.014449664115905762,
- 0.013932543754577637,
- 0.013668352127075196,
- 0.014228480339050293,
- 0.014503935813903808,
- 0.014595071792602539,
- 0.01437183952331543,
- 0.014118911743164063,
- 0.014112768173217773,
- 0.014063615798950196,
- 0.014227456092834472,
- 0.014148608207702636,
- 0.014132224082946777,
- 0.014072832107543945,
- 0.013849599838256836,
- 0.013817855834960938,
- 0.013840383529663085,
- 0.013836288452148437,
- 0.014104576110839843,
- 0.01415987205505371,
- 0.014047231674194336,
- 0.014064640045166015,
- 0.014057472229003906,
- 0.014026752471923828,
- 0.013997056007385255,
- 0.013810688018798829,
- 0.013881343841552735,
- 0.013784064292907714,
- 0.01386393642425537,
- 0.014017536163330077,
- 0.014008319854736329,
- 0.014107711791992187,
- 0.01409337615966797,
- 0.014482432365417481,
- 0.015044639587402344,
- 0.014239744186401367,
+ 0.014356479644775391,
+ 0.014193663597106934,
+ 0.01419878387451172,
+ 0.014258175849914552,
  0.014234623908996581,
- 0.014191616058349609,
- 0.014123007774353028
+ 0.014260224342346191,
+ 0.014235679626464843,
+ 0.014938112258911132,
+ 0.014262271881103515,
+ 0.01457151985168457,
+ 0.015624192237854004,
+ 0.014420991897583007,
+ 0.014270463943481445,
+ 0.014329792022705078,
+ 0.014392319679260255,
+ 0.01428377628326416,
+ 0.014258175849914552,
+ 0.014237695693969727,
+ 0.014242752075195313,
+ 0.014264320373535156,
+ 0.01432476806640625,
+ 0.014309375762939454,
+ 0.014258175849914552,
+ 0.0142807035446167,
+ 0.014276608467102051,
+ 0.014271488189697265,
+ 0.01427353572845459,
+ 0.01451417636871338,
+ 0.014671839714050294,
+ 0.014445568084716797,
+ 0.01457151985168457,
+ 0.014520319938659668,
+ 0.014309375762939454,
+ 0.014326720237731934,
+ 0.01431651210784912,
+ 0.014254079818725587,
+ 0.014393343925476074,
+ 0.01426636791229248,
+ 0.014320639610290528,
+ 0.014224384307861328,
+ 0.014231552124023437,
+ 0.01427558422088623,
+ 0.01427353572845459,
+ 0.014351360321044922,
+ 0.014208000183105468,
+ 0.014368767738342286,
+ 0.014229503631591797,
+ 0.014313471794128419,
+ 0.014235648155212402,
+ 0.014244864463806153,
+ 0.014195712089538574,
+ 0.01420902442932129,
+ 0.01428172779083252,
+ 0.014173184394836426,
+ 0.014341119766235352,
+ 0.014246912002563476,
+ 0.01426534366607666,
+ 0.01420902442932129,
+ 0.014173184394836426
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 69.99379112960634
+ "value": 69.26075614759283
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.5911024146609836e-07,
- "ram": 8.659979004005435e-08,
- "gpu": 3.336780447200059e-07,
- "total": 5.793880762261586e-07
+ "cpu": 1.6798623057784796e-07,
+ "ram": 9.178775336026071e-08,
+ "gpu": 3.7903003718308967e-07,
+ "total": 6.388040211211983e-07
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 1725958.8884077407
+ "value": 1565425.3369364326
  }
  }
  }
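The derived fields in the updated report are internally consistent: mean = total / count (1.010673... / 70 ≈ 0.0144382 s), throughput = count / total (70 / 1.010673... ≈ 69.26 samples/s), the energy total is the sum of the cpu, ram and gpu terms, and the reported efficiency equals 1 / total energy (1 / 6.38804e-07 ≈ 1.57e6 samples/kWh). Below is a small sketch that re-derives these numbers from a local copy of the report; the "benchmark.json" filename and the find_forward helper are illustrative assumptions, since the nesting above the "forward" section is not visible in this diff.

import json

# Re-derive the reported statistics from a local copy of the report.
# The exact nesting above "forward" is not shown in this hunk, so we search
# for the section recursively instead of hard-coding a path (an assumption).
def find_forward(node):
    if isinstance(node, dict):
        if isinstance(node.get("forward"), dict):
            return node["forward"]
        for child in node.values():
            found = find_forward(child)
            if found is not None:
                return found
    return None

with open("benchmark.json") as f:
    report = json.load(f)

forward = find_forward(report)
assert forward is not None, "no 'forward' section found"

latency = forward["latency"]
energy = forward["energy"]

total = sum(latency["values"])                 # should match latency["total"] (~1.0107 s)
mean = total / latency["count"]                # ~0.0144382 s, matches "mean"
throughput = latency["count"] / total          # ~69.26 samples/s, matches "throughput"
total_energy = energy["cpu"] + energy["ram"] + energy["gpu"]   # ~6.388e-07 kWh
efficiency = 1 / total_energy                  # ~1.565e+06 samples/kWh, matches "efficiency"

print(f"mean={mean:.6f} s  throughput={throughput:.2f} samples/s  "
      f"efficiency={efficiency:.0f} samples/kWh")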