IlyasMoutawwakil committed (verified)
Commit 72dae14 · Parent(s): fecfa85

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub
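The file was pushed programmatically with the huggingface_hub client. Below is a minimal sketch of such an upload; the repo_id, repo_type, and local path are illustrative assumptions, not taken from this commit.

```python
# Minimal sketch of uploading a benchmark result with huggingface_hub.
# repo_id, repo_type, and the local file path are illustrative assumptions.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local file produced by the benchmark run
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="optimum-benchmark/results",  # assumed target repo
    repo_type="dataset",                  # assumed repo type
    commit_message="Upload benchmark.json with huggingface_hub",
)
```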

cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -105,7 +105,7 @@
         "forward": {
             "memory": {
                 "unit": "MB",
-                "max_ram": 982.654976,
+                "max_ram": 982.4256,
                 "max_global_vram": 1434.976256,
                 "max_process_vram": 0.0,
                 "max_reserved": 794.820608,
@@ -114,104 +114,104 @@
             "latency": {
                 "unit": "s",
                 "count": 74,
-                "total": 1.0009302740097046,
-                "mean": 0.013526084783914927,
-                "stdev": 0.0003743763872745608,
-                "p50": 0.013384704113006591,
-                "p90": 0.013966624164581299,
-                "p95": 0.014142054605484008,
-                "p99": 0.015228067684173584,
+                "total": 1.0044415998458864,
+                "mean": 0.013573535133052516,
+                "stdev": 0.0008817712436058069,
+                "p50": 0.013554687976837159,
+                "p90": 0.01381642246246338,
+                "p95": 0.01406679058074951,
+                "p99": 0.0173749963569641,
                 "values": [
-                    0.01407795238494873,
-                    0.015234047889709473,
-                    0.014339072227478027,
-                    0.01408614444732666,
-                    0.013557760238647461,
-                    0.013416447639465333,
-                    0.013621248245239258,
-                    0.013279232025146484,
-                    0.013232128143310547,
-                    0.013379584312438965,
-                    0.013542400360107423,
-                    0.013356032371520997,
-                    0.013336576461791993,
-                    0.013749247550964355,
-                    0.013454336166381836,
-                    0.013443072319030762,
-                    0.013403136253356934,
-                    0.013369343757629394,
-                    0.013338624000549316,
-                    0.013304832458496094,
-                    0.013239295959472656,
-                    0.013606911659240722,
-                    0.013421567916870117,
+                    0.019805183410644533,
+                    0.016476160049438478,
+                    0.014964735984802247,
+                    0.013831168174743653,
+                    0.0136878080368042,
+                    0.013709312438964843,
+                    0.01368166446685791,
+                    0.013447168350219727,
+                    0.013447168350219727,
+                    0.013296607971191407,
+                    0.01339084815979004,
+                    0.01349120044708252,
+                    0.013650943756103515,
+                    0.013579263687133789,
+                    0.013727744102478028,
+                    0.013596672058105469,
                     0.013570048332214356,
-                    0.01357209587097168,
-                    0.01334172821044922,
-                    0.013422592163085938,
-                    0.013462528228759766,
-                    0.013494272232055664,
-                    0.0135895357131958,
-                    0.013312000274658203,
-                    0.013493247985839844,
-                    0.013283328056335449,
-                    0.013298656463623047,
-                    0.013223967552185058,
-                    0.013419487953186034,
-                    0.013751296043395997,
-                    0.014009344100952148,
-                    0.015225855827331543,
-                    0.014016511917114258,
-                    0.014245887756347657,
-                    0.013866944313049316,
-                    0.013857791900634766,
-                    0.013351936340332032,
-                    0.013287424087524414,
-                    0.013218815803527833,
-                    0.013325311660766602,
-                    0.013389823913574218,
-                    0.01376460838317871,
-                    0.013614080429077148,
-                    0.01335091209411621,
-                    0.013424639701843261,
-                    0.01364684772491455,
-                    0.013365247726440429,
-                    0.013379584312438965,
-                    0.013371392250061035,
-                    0.013326335906982421,
-                    0.0132925443649292,
-                    0.013290495872497558,
-                    0.013342720031738281,
-                    0.013608960151672364,
-                    0.013273088455200196,
-                    0.013306879997253418,
-                    0.01333350372314453,
-                    0.013338624000549316,
-                    0.013348863601684571,
-                    0.013348863601684571,
-                    0.013416447639465333,
-                    0.013507583618164062,
-                    0.013308927536010743,
-                    0.013294591903686523,
-                    0.013279232025146484,
-                    0.013305855751037597,
-                    0.013270015716552735
+                    0.013727744102478028,
+                    0.013956095695495606,
+                    0.013771776199340821,
+                    0.013439999580383302,
+                    0.013323264122009277,
+                    0.013598719596862792,
+                    0.013616127967834473,
+                    0.013622271537780761,
+                    0.013600768089294434,
+                    0.01365401554107666,
+                    0.01335807991027832,
+                    0.013145088195800781,
+                    0.013252608299255371,
+                    0.013682687759399414,
+                    0.013584383964538574,
+                    0.01328326416015625,
+                    0.013064191818237305,
+                    0.013393983840942383,
+                    0.013632512092590332,
+                    0.01396735954284668,
+                    0.01397862434387207,
+                    0.014230527877807618,
+                    0.013782015800476074,
+                    0.013634559631347656,
+                    0.013652992248535157,
+                    0.013571071624755859,
+                    0.013560832023620606,
+                    0.01354854393005371,
+                    0.01350864028930664,
+                    0.013595647811889648,
+                    0.013639679908752441,
+                    0.013758463859558106,
+                    0.01365503978729248,
+                    0.013578240394592284,
+                    0.013560832023620606,
+                    0.0133887996673584,
+                    0.013275135993957519,
+                    0.013034496307373047,
+                    0.013025279998779296,
+                    0.012990464210510254,
+                    0.013029376029968261,
+                    0.013082624435424805,
+                    0.013040639877319337,
+                    0.012998720169067383,
+                    0.013036543846130372,
+                    0.013048831939697265,
+                    0.012980223655700684,
+                    0.012993535995483398,
+                    0.013023232460021973,
+                    0.013027327537536621,
+                    0.01296281623840332,
+                    0.013010944366455078,
+                    0.013028287887573242,
+                    0.013110272407531738,
+                    0.01306726360321045,
+                    0.013012991905212403,
+                    0.012989439964294434
                 ]
             },
             "throughput": {
                 "unit": "samples/s",
-                "value": 73.93122370408244
+                "value": 73.67277501385246
             },
             "energy": {
                 "unit": "kWh",
-                "cpu": 1.535737573930681e-07,
-                "ram": 8.395961256333168e-08,
-                "gpu": 3.295301781538397e-07,
-                "total": 5.670635481102394e-07
+                "cpu": 1.603169349936752e-07,
+                "ram": 8.764534477772694e-08,
+                "gpu": 3.399026743242781e-07,
+                "total": 5.878649540956803e-07
             },
             "efficiency": {
                 "unit": "samples/kWh",
-                "value": 1763470.7844165573
+                "value": 1701070.9569144363
             }
         }
     }
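The summary statistics in this report follow directly from the raw per-call latencies. A quick sanity check is sketched below, assuming the "forward" section shown in the diff sits at the top level of the loaded JSON (in the actual benchmark.json it may be nested deeper, so the access path and file path are assumptions):

```python
# Sanity-check the derived latency and throughput figures against the raw samples.
# The file path and the top-level "forward" key are illustrative assumptions.
import json

with open("benchmark.json") as f:
    report = json.load(f)

lat = report["forward"]["latency"]
values = lat["values"]

assert lat["count"] == len(values)                          # 74 forward calls
assert abs(lat["total"] - sum(values)) < 1e-9               # total = sum of per-call latencies
assert abs(lat["mean"] - sum(values) / len(values)) < 1e-9  # mean = total / count

# Throughput is samples per second over the measured window:
throughput = len(values) / sum(values)   # ~73.67 samples/s for the updated run
print(round(throughput, 2))
```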