IlyasMoutawwakil committed (verified)
Commit 587b080 · Parent: 6628f60

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub
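The commit message points to an upload made with huggingface_hub. As a minimal sketch (not the exact command behind this commit), such an upload can be done with HfApi.upload_file; the local path and target repo id below are placeholders for illustration, and only path_in_repo matches the file touched here:

from huggingface_hub import HfApi

api = HfApi()
# Hypothetical local path and repo id; only path_in_repo is taken from this commit.
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file produced by the benchmark run (assumed)
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="<namespace>/<benchmark-dataset>",  # placeholder; the target repo is not shown in this view
    repo_type="dataset",  # assuming a dataset repo
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub",
)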

cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -73,7 +73,7 @@
  "environment": {
  "cpu": " AMD EPYC 7R32",
  "cpu_count": 16,
- "cpu_ram_mb": 66697.293824,
+ "cpu_ram_mb": 66697.285632,
  "system": "Linux",
  "machine": "x86_64",
  "platform": "Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35",
@@ -104,7 +104,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 977.36704,
+ "max_ram": 976.195584,
  "max_global_vram": 1434.976256,
  "max_process_vram": 0.0,
  "max_reserved": 794.820608,
@@ -112,93 +112,102 @@
  },
  "latency": {
  "unit": "s",
- "count": 62,
- "total": 1.0132828149795534,
- "mean": 0.016343271209347632,
- "stdev": 0.0004896172761772674,
- "p50": 0.016124432563781738,
- "p90": 0.017044685554504394,
- "p95": 0.017242060470581057,
- "p99": 0.017744330692291258,
+ "count": 71,
+ "total": 1.001166723251343,
+ "mean": 0.01410093976410342,
+ "stdev": 0.00042881216000027777,
+ "p50": 0.014079999923706055,
+ "p90": 0.014682111740112304,
+ "p95": 0.014846464157104493,
+ "p99": 0.015186943817138672,
  "values": [
- 0.01720217514038086,
- 0.017539072036743163,
- 0.01705881690979004,
- 0.016917503356933594,
- 0.016582656860351562,
- 0.016442367553710938,
- 0.01680691146850586,
- 0.01660927963256836,
- 0.01594777584075928,
- 0.015989760398864745,
- 0.016120832443237306,
- 0.015804415702819825,
- 0.015668224334716797,
- 0.015734784126281737,
- 0.01589452838897705,
- 0.01659596824645996,
- 0.017314815521240236,
- 0.01806537628173828,
- 0.01680281639099121,
- 0.016645120620727538,
- 0.01683967971801758,
- 0.01656729507446289,
- 0.016582687377929686,
- 0.01724415969848633,
- 0.01716223907470703,
- 0.016703487396240235,
- 0.01660927963256836,
- 0.01644339179992676,
- 0.01642086410522461,
- 0.016236543655395508,
- 0.01646080017089844,
- 0.01665126419067383,
- 0.016683008193969725,
- 0.0164270076751709,
- 0.01637990379333496,
- 0.016192512512207033,
- 0.015997952461242675,
- 0.016013311386108398,
- 0.01596723175048828,
- 0.01593855953216553,
- 0.01599078369140625,
- 0.015953920364379884,
- 0.015997952461242675,
- 0.015970335960388184,
- 0.015949824333190917,
- 0.016285696029663087,
- 0.016022527694702148,
- 0.016012287139892577,
- 0.01598361587524414,
- 0.016128032684326173,
- 0.016005056381225586,
- 0.015952896118164063,
- 0.016105472564697267,
- 0.01601740837097168,
- 0.015988736152648925,
- 0.01600921630859375,
- 0.015928319931030274,
- 0.015921152114868165,
- 0.015945728302001954,
- 0.015924223899841307,
- 0.015925248146057128,
- 0.016
+ 0.015134719848632813,
+ 0.015308799743652344,
+ 0.014873600006103516,
+ 0.015029248237609863,
+ 0.014682111740112304,
+ 0.01458892822265625,
+ 0.014532608032226562,
+ 0.014683135986328125,
+ 0.01420083236694336,
+ 0.014112768173217773,
+ 0.014207967758178711,
+ 0.014211008071899414,
+ 0.014196736335754395,
+ 0.014147583961486816,
+ 0.014401535987854003,
+ 0.01439027214050293,
+ 0.014175200462341308,
+ 0.014375935554504395,
+ 0.01406156826019287,
+ 0.014269439697265626,
+ 0.01407795238494873,
+ 0.014187520027160644,
+ 0.014346240043640136,
+ 0.01417728042602539,
+ 0.014036992073059081,
+ 0.014002176284790039,
+ 0.014403583526611329,
+ 0.014429183959960937,
+ 0.014114815711975098,
+ 0.014076864242553711,
+ 0.013934592247009277,
+ 0.014079999923706055,
+ 0.01397760009765625,
+ 0.013896703720092773,
+ 0.013814784049987794,
+ 0.014498815536499024,
+ 0.014468095779418945,
+ 0.014289919853210448,
+ 0.014235648155212402,
+ 0.01425100803375244,
+ 0.01441590404510498,
+ 0.013855744361877441,
+ 0.013503487586975099,
+ 0.01355673599243164,
+ 0.013544447898864746,
+ 0.013552672386169433,
+ 0.013595647811889648,
+ 0.0134901762008667,
+ 0.014034943580627441,
+ 0.014533632278442383,
+ 0.014240768432617188,
+ 0.014819328308105468,
+ 0.013946880340576171,
+ 0.013633536338806153,
+ 0.013541376113891602,
+ 0.013522944450378417,
+ 0.01375641632080078,
+ 0.013640704154968262,
+ 0.013719552040100098,
+ 0.01354751968383789,
+ 0.01345638370513916,
+ 0.014017536163330077,
+ 0.014736384391784667,
+ 0.013836288452148437,
+ 0.013508607864379883,
+ 0.013651968002319336,
+ 0.013501440048217773,
+ 0.013642751693725585,
+ 0.013544447898864746,
+ 0.013963264465332031,
+ 0.013971455574035644
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 61.187260933909236
+ "value": 70.91725918478761
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.8638558427078858e-07,
- "ram": 1.0190462485613505e-07,
- "gpu": 3.768622806562448e-07,
- "total": 6.651524897831685e-07
+ "cpu": 1.618934890350422e-07,
+ "ram": 8.84347716010331e-08,
+ "gpu": 3.504056857296918e-07,
+ "total": 6.007339463657672e-07
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 1503414.653572127
+ "value": 1664630.417591106
  }
  }
  }
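For reference, the derived metrics in the updated run are internally consistent: the throughput value equals the number of latency measurements divided by their total, and the efficiency value is the reciprocal of the total energy per sample. A quick check in Python (these relationships are inferred from the numbers above, not taken from the benchmark source):

# Sanity check of the new (+) values, assuming:
#   throughput [samples/s]   = count / total latency
#   efficiency [samples/kWh] = 1 / total energy per sample
count = 71
total_latency_s = 1.001166723251343
total_energy_kwh = 6.007339463657672e-07

print(count / total_latency_s)   # ~70.917, matches "throughput.value"
print(1.0 / total_energy_kwh)    # ~1664630.4, matches "efficiency.value"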