IlyasMoutawwakil (HF staff) committed on
Commit bc7ad82
1 Parent(s): 5068356

Upload cpu_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub
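As noted in the commit message, the file was pushed with huggingface_hub. A minimal sketch of such an upload is shown below; the repository ID and the local file path are placeholders, not values taken from this commit.

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    # Local benchmark result produced by optimum-benchmark (placeholder path).
    path_or_fileobj="benchmark.json",
    # Destination path inside the repository, matching the file changed in this commit.
    path_in_repo="cpu_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="<user-or-org>/<benchmark-repo>",  # placeholder repository ID
    repo_type="dataset",  # assumption: benchmark results are usually stored in a dataset repo
    commit_message="Upload cpu_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub",
)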

cpu_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
  "name": "cpu_inference_transformers_token-classification_microsoft/deberta-v3-base",
  "backend": {
  "name": "pytorch",
- "version": "2.4.0+cpu",
+ "version": "2.4.1+cpu",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "token-classification",
  "library": "transformers",
@@ -80,7 +80,7 @@
  "processor": "x86_64",
  "python_version": "3.10.14",
  "optimum_benchmark_version": "0.4.0",
- "optimum_benchmark_commit": "505086556c6e125f92759cd19b806135534e5ab3",
+ "optimum_benchmark_commit": "ea76e356b5c355783ee27d2d429a010ded791f8b",
  "transformers_version": "4.44.2",
  "transformers_commit": null,
  "accelerate_version": "0.34.0",
@@ -99,7 +99,7 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 1166.4384,
+ "max_ram": 1165.778944,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -108,31 +108,31 @@
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 4.996377809000023,
- "mean": 4.996377809000023,
+ "total": 4.902655680999999,
+ "mean": 4.902655680999999,
  "stdev": 0.0,
- "p50": 4.996377809000023,
- "p90": 4.996377809000023,
- "p95": 4.996377809000023,
- "p99": 4.996377809000023,
+ "p50": 4.902655680999999,
+ "p90": 4.902655680999999,
+ "p95": 4.902655680999999,
+ "p99": 4.902655680999999,
  "values": [
- 4.996377809000023
+ 4.902655680999999
  ]
  },
  "throughput": null,
  "energy": {
  "unit": "kWh",
- "cpu": 6.96755543555554e-05,
- "ram": 2.912170742977021e-06,
+ "cpu": 6.897299181111028e-05,
+ "ram": 2.8828912554830126e-06,
  "gpu": 0,
- "total": 7.258772509853242e-05
+ "total": 7.185588306659329e-05
  },
  "efficiency": null
  },
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1194.356736,
+ "max_ram": 1193.422848,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -141,37 +141,37 @@
  "latency": {
  "unit": "s",
  "count": 7,
- "total": 1.085963920000097,
- "mean": 0.15513770285715672,
- "stdev": 0.0072467513673282625,
- "p50": 0.15763956400002144,
- "p90": 0.16134641420002255,
- "p95": 0.1630562756000188,
- "p99": 0.1644241647200158,
+ "total": 1.0654867479999552,
+ "mean": 0.1522123925714222,
+ "stdev": 0.008737536553444288,
+ "p50": 0.15577061800001957,
+ "p90": 0.15940905260001728,
+ "p95": 0.1607704073000093,
+ "p99": 0.16185949106000294,
  "values": [
- 0.15763956400002144,
- 0.15703392400001803,
- 0.1590665990000275,
- 0.15842274600004203,
- 0.16476613700001508,
- 0.14709040299999288,
- 0.14194454699998005
+ 0.1575939130000279,
+ 0.15676482999998598,
+ 0.15577061800001957,
+ 0.15568994799997427,
+ 0.16213176200000134,
+ 0.13858611299997392,
+ 0.13894956399997227
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 6.445886342153407
+ "value": 6.569767304135718
  },
  "energy": {
  "unit": "kWh",
- "cpu": 5.510701981944443e-06,
- "ram": 2.3030713001390833e-07,
+ "cpu": 5.117332563194468e-06,
+ "ram": 2.1386336124501847e-07,
  "gpu": 0.0,
- "total": 5.741009111958352e-06
+ "total": 5.331195924439487e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 174185.4054746281
+ "value": 187575.17340823266
  }
  }
  }