Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json
CHANGED
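The commit title indicates the file was pushed with the huggingface_hub client. A minimal sketch of such an upload is shown below, assuming a dataset repo as the target; the repo_id is a placeholder, not the actual repository behind this commit.

    from huggingface_hub import HfApi

    api = HfApi()  # picks up the token from HF_TOKEN or a prior `huggingface-cli login`
    api.upload_file(
        path_or_fileobj="benchmark.json",  # local result file produced by the benchmark run
        path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
        repo_id="<user>/<benchmark-results>",  # placeholder repo_id
        repo_type="dataset",                   # assumption: results are stored in a dataset repo
        commit_message="Upload benchmark.json with huggingface_hub",
    )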
@@ -104,7 +104,7 @@
         "forward": {
             "memory": {
                 "unit": "MB",
-                "max_ram": 976.
+                "max_ram": 976.048128,
                 "max_global_vram": 1434.976256,
                 "max_process_vram": 0.0,
                 "max_reserved": 794.820608,
@@ -112,105 +112,102 @@
             },
             "latency": {
                 "unit": "s",
-                "count":
-                "total":
-                "mean": 0.
-                "stdev": 0.
-                "p50": 0.
-                "p90": 0.
-                "p95": 0.
-                "p99": 0.
+                "count": 71,
+                "total": 1.0009896955490114,
+                "mean": 0.014098446416183259,
+                "stdev": 0.0006384603254537008,
+                "p50": 0.014302207946777343,
+                "p90": 0.014630911827087402,
+                "p95": 0.015238143920898437,
+                "p99": 0.01611059169769287,
                 "values": [
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.013073408126831054,
-                    0.01303756809234619,
-                    0.013206527709960938,
-                    0.013184000015258789,
-                    0.013143039703369141,
-                    0.013220895767211914,
-                    0.01307750415802002,
-                    0.013042688369750977,
-                    0.014244864463806153,
-                    0.013533184051513672,
-                    0.013530112266540528,
-                    0.013503487586975099,
-                    0.013495295524597169,
-                    0.013514752388000489,
-                    0.013501440048217773,
-                    0.013487104415893555,
+                    0.01595084762573242,
+                    0.016483327865600587,
+                    0.015924223899841307,
+                    0.01489510440826416,
+                    0.015516672134399414,
+                    0.014889984130859376,
+                    0.01495961570739746,
+                    0.01375334358215332,
+                    0.013577216148376465,
+                    0.013564000129699707,
+                    0.013637632369995116,
+                    0.013596672058105469,
+                    0.013575167655944824,
+                    0.013502464294433594,
                     0.013526016235351563,
-                    0.
-                    0.
-                    0.
-                    0.013591551780700683,
-                    0.013510656356811524,
+                    0.0134901762008667,
+                    0.013545472145080567,
+                    0.013562879562377929,
                     0.013607935905456543,
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.013910016059875489,
-                    0.013990912437438965,
-                    0.013985792160034179,
-                    0.013623295783996582,
-                    0.013625344276428223,
-                    0.01354751968383789,
-                    0.013574144363403321,
-                    0.01354751968383789,
-                    0.013515775680541992,
-                    0.013561856269836426,
-                    0.013571071624755859,
-                    0.013580224037170411,
-                    0.01355059242248535,
+                    0.01357209587097168,
+                    0.013522944450378417,
+                    0.013522944450378417,
+                    0.013513728141784668,
+                    0.013554688453674316,
                     0.01354854393005371,
-                    0.
+                    0.013495295524597169,
+                    0.013554688453674316,
+                    0.013536255836486816,
+                    0.013537280082702637,
                     0.013533184051513672,
+                    0.013478912353515626,
+                    0.013537280082702637,
+                    0.013517824172973633,
+                    0.013504511833190918,
+                    0.013527039527893067,
+                    0.0135731201171875,
+                    0.013526016235351563,
                     0.013515775680541992,
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
+                    0.013527039527893067,
+                    0.01358847999572754,
+                    0.01413430404663086,
+                    0.014583744049072266,
+                    0.014547967910766601,
+                    0.014630911827087402,
+                    0.014499903678894043,
+                    0.014415871620178223,
+                    0.014400511741638184,
+                    0.014389216423034668,
+                    0.01438003158569336,
+                    0.01439641571044922,
+                    0.014323712348937988,
+                    0.014331904411315918,
+                    0.014354432106018066,
+                    0.014361599922180175,
+                    0.014352383613586426,
+                    0.014313471794128419,
+                    0.014323648452758788,
+                    0.014313471794128419,
+                    0.014252032279968262,
+                    0.014321663856506347,
+                    0.014317567825317384,
+                    0.014403583526611329,
+                    0.014334976196289062,
+                    0.014325728416442871,
+                    0.014330880165100097,
+                    0.014306303977966308,
+                    0.014324735641479493,
+                    0.014302207946777343,
+                    0.014302207946777343,
+                    0.014326784133911133,
+                    0.014341119766235352
                 ]
             },
             "throughput": {
                 "unit": "samples/s",
-                "value":
+                "value": 70.92980109156741
             },
             "energy": {
                 "unit": "kWh",
-                "cpu": 1.
-                "ram":
-                "gpu": 3.
-                "total":
+                "cpu": 1.653465246896685e-07,
+                "ram": 9.038774616184786e-08,
+                "gpu": 3.504362371388536e-07,
+                "total": 6.061705079903701e-07
             },
             "efficiency": {
                 "unit": "samples/kWh",
-                "value":
+                "value": 1649700.8462442164
             }
         }
     }
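The latency block stores the raw per-forward latencies (seconds) under "values" alongside derived summary fields. The sketch below shows how those summaries relate to the raw list; the percentile and standard-deviation conventions are assumptions made for illustration, but the basic identities match the new file (count = 71, total ≈ 1.00099 s, throughput ≈ 71 / 1.00099 ≈ 70.93 samples/s).

    import statistics

    def summarize_latencies(values):
        """Summarize per-forward latencies (in seconds) into report-style fields."""
        ordered = sorted(values)
        count = len(ordered)
        total = sum(ordered)

        def percentile(p):
            # nearest-rank style percentile; the benchmarking tool may interpolate differently
            idx = min(count - 1, round(p / 100 * (count - 1)))
            return ordered[idx]

        return {
            "count": count,
            "total": total,
            "mean": total / count,
            "stdev": statistics.pstdev(ordered),  # assumption: population standard deviation
            "p50": percentile(50),
            "p90": percentile(90),
            "p95": percentile(95),
            "p99": percentile(99),
            "throughput_samples_per_s": count / total,  # consistent with "throughput.value" here
        }

The reported efficiency (about 1.65e6 samples/kWh) is numerically the reciprocal of the reported total energy (6.0617e-07 kWh), which suggests the energy figures are normalized per forward pass; that reading is inferred from the numbers rather than stated in the file.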