IlyasMoutawwakil (HF staff) committed on
Commit d95d1b5 · verified · 1 Parent(s): 6344efd

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

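The commit message says the file was pushed with the huggingface_hub client. A minimal sketch of such an upload follows; the repo id, local path, and repo type are placeholders for illustration, not details taken from this commit:

from huggingface_hub import HfApi

api = HfApi()
# Push a local benchmark.json to the same nested path in the target repo.
# repo_id and repo_type are assumptions, not read from this commit.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="<namespace>/<repo>",
    repo_type="dataset",
    commit_message="Upload benchmark.json with huggingface_hub",
)
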
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -104,7 +104,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 959.778816,
+            "max_ram": 959.647744,
             "max_global_vram": 1434.976256,
             "max_process_vram": 0.0,
             "max_reserved": 794.820608,
@@ -112,107 +112,106 @@
         },
         "latency": {
             "unit": "s",
-            "count": 76,
-            "total": 0.9975810575485233,
-            "mean": 0.01312606654669109,
-            "stdev": 0.00043091791528167067,
-            "p50": 0.013164031982421874,
-            "p90": 0.013576191902160644,
-            "p95": 0.013826303958892823,
-            "p99": 0.014409216403961183,
+            "count": 75,
+            "total": 1.002491840362549,
+            "mean": 0.013366557871500649,
+            "stdev": 0.0008227278964229968,
+            "p50": 0.0132741117477417,
+            "p90": 0.013433651351928711,
+            "p95": 0.01408081922531128,
+            "p99": 0.017026191139221207,
             "values": [
-                0.014332927703857423,
-                0.014395392417907715,
-                0.014450688362121582,
-                0.014042112350463867,
-                0.01369200038909912,
-                0.01320246410369873,
-                0.013319104194641114,
-                0.013288448333740235,
-                0.013194304466247558,
-                0.013354975700378418,
-                0.013536255836486816,
-                0.013290495872497558,
-                0.013406208038330078,
-                0.0133887996673584,
-                0.01337446403503418,
-                0.013280256271362305,
-                0.013294591903686523,
-                0.013290495872497558,
-                0.01314406394958496,
-                0.013189120292663574,
-                0.013195263862609862,
-                0.013461503982543945,
-                0.01345132827758789,
-                0.01334169578552246,
+                0.019366912841796875,
+                0.01620377540588379,
+                0.014156800270080566,
+                0.014048255920410157,
+                0.01457151985168457,
+                0.013172736167907715,
+                0.013194239616394043,
+                0.013156352043151855,
+                0.013207551956176757,
+                0.013339679718017578,
+                0.013401087760925292,
+                0.013302783966064453,
+                0.013305855751037597,
+                0.013332480430603028,
+                0.013360128402709961,
+                0.013196288108825683,
+                0.013156352043151855,
+                0.013200384140014648,
+                0.013269023895263672,
+                0.013164544105529785,
+                0.013138943672180176,
+                0.013159423828125,
                 0.01323519992828369,
-                0.013509632110595703,
-                0.013277183532714844,
-                0.013616127967834473,
-                0.01375436782836914,
-                0.013532159805297851,
-                0.013134847640991211,
-                0.01315225601196289,
+                0.013295616149902344,
+                0.0132741117477417,
+                0.013332480430603028,
+                0.013293567657470704,
+                0.013475839614868163,
+                0.013637632369995116,
+                0.013455360412597657,
+                0.01334169578552246,
+                0.013234175682067872,
+                0.01316659164428711,
+                0.013387776374816895,
+                0.013316096305847168,
+                0.013296640396118165,
+                0.013282303810119628,
+                0.013297663688659669,
+                0.013254688262939454,
+                0.0132741117477417,
                 0.013175807952880859,
-                0.013184960365295411,
-                0.013420543670654296,
-                0.013626367568969726,
-                0.013373439788818359,
-                0.013298687934875488,
-                0.013232159614562988,
-                0.013196288108825683,
-                0.013194239616394043,
-                0.013136960029602051,
-                0.013023232460021973,
-                0.013038592338562012,
-                0.013101056098937988,
-                0.013151231765747071,
-                0.01303756809234619,
-                0.012831744194030761,
-                0.012933119773864746,
-                0.012923904418945312,
-                0.013042688369750977,
-                0.013015040397644043,
-                0.012633055686950683,
-                0.012601344108581543,
-                0.012706815719604492,
-                0.012614720344543457,
-                0.012650495529174804,
-                0.012620863914489746,
-                0.012638208389282226,
-                0.01257369613647461,
-                0.012583935737609863,
-                0.012638208389282226,
-                0.012675071716308594,
-                0.012632063865661621,
-                0.012646400451660156,
-                0.012599295616149902,
-                0.012630016326904296,
-                0.012617728233337403,
-                0.012597248077392579,
-                0.012613663673400879,
-                0.012630016326904296,
-                0.012601344108581543,
-                0.012734463691711426,
-                0.012740544319152832,
-                0.012723199844360352,
-                0.012812288284301757
+                0.013119487762451172,
+                0.013099007606506348,
+                0.013097984313964844,
+                0.0132259521484375,
+                0.013283328056335449,
+                0.013299712181091309,
+                0.013360128402709961,
+                0.013326335906982421,
+                0.013277183532714844,
+                0.013288448333740235,
+                0.013318143844604492,
+                0.013007871627807617,
+                0.012816384315490722,
+                0.01286348819732666,
+                0.012795904159545898,
+                0.012805120468139648,
+                0.012816384315490722,
+                0.012896256446838379,
+                0.012797951698303223,
+                0.012823552131652831,
+                0.012819392204284668,
+                0.012859392166137695,
+                0.012997632026672363,
+                0.013349823951721192,
+                0.013234175682067872,
+                0.013318143844604492,
+                0.013297663688659669,
+                0.013244416236877441,
+                0.013275135993957519,
+                0.013277183532714844,
+                0.013283328056335449,
+                0.013263872146606445,
+                0.013285375595092774,
+                0.01323519992828369
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 76.18428540209457
+            "value": 74.8135765103848
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.5285558510370066e-07,
-            "ram": 8.355080621745751e-08,
-            "gpu": 3.3213701784615525e-07,
-            "total": 5.685434091673134e-07
+            "cpu": 1.5209295556076575e-07,
+            "ram": 8.311950711616262e-08,
+            "gpu": 3.271655038977323e-07,
+            "total": 5.623779665746606e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 1758880.6481190175
+            "value": 1778163.5473573292
         }
     }
 }
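As a rough consistency check, the derived metrics in the updated report line up with the raw measurements: throughput equals count divided by total latency, the mean equals total divided by count, and the reported efficiency equals the reciprocal of the total energy, which suggests the energy figures are accounted per forward pass. A minimal sketch using the new values from the diff above (the numbers are copied from the file; the relations are inferred from the data, not from documentation):

# Values from the "+" side of the diff above.
count = 75
total_latency_s = 1.002491840362549
total_energy_kwh = 5.623779665746606e-07

throughput = count / total_latency_s    # ~74.8135765103848, matches throughput "value" (samples/s)
mean_latency = total_latency_s / count  # ~0.013366557871500649, matches latency "mean" (s)
efficiency = 1.0 / total_energy_kwh     # ~1778163.5473573292, matches efficiency "value" (samples/kWh)

print(throughput, mean_latency, efficiency)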