IlyasMoutawwakil committed
Commit: de45020
1 Parent(s): 0c25878

Upload cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json with huggingface_hub

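Both revisions of the uploaded file can be pulled back from the Hub and compared locally. The sketch below is a minimal example, not part of the commit: the repo_id is a placeholder for the repository this commit belongs to (assumed here to be a dataset repo), and the short revision hashes are the ones shown above; the Hub may require the full 40-character commit hashes.

import difflib
from huggingface_hub import hf_hub_download

REPO_ID = "your-namespace/your-benchmark-dataset"  # placeholder: not stated on this commit page
FILENAME = "cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json"

# Download the file at the parent commit and at this commit.
old_path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME, repo_type="dataset", revision="0c25878")
new_path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME, repo_type="dataset", revision="de45020")

# Reproduce the diff shown below.
with open(old_path) as f_old, open(new_path) as f_new:
    diff = difflib.unified_diff(
        f_old.readlines(), f_new.readlines(),
        fromfile="benchmark.json@0c25878", tofile="benchmark.json@de45020",
    )
print("".join(diff))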
cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json CHANGED
@@ -3,7 +3,7 @@
  "name": "cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased",
  "backend": {
  "name": "pytorch",
- "version": "2.4.0+cpu",
+ "version": "2.4.1+cpu",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "fill-mask",
  "library": "transformers",
@@ -80,7 +80,7 @@
  "processor": "x86_64",
  "python_version": "3.10.14",
  "optimum_benchmark_version": "0.4.0",
- "optimum_benchmark_commit": "505086556c6e125f92759cd19b806135534e5ab3",
+ "optimum_benchmark_commit": "ea76e356b5c355783ee27d2d429a010ded791f8b",
  "transformers_version": "4.44.2",
  "transformers_commit": null,
  "accelerate_version": "0.34.0",
@@ -99,7 +99,7 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 970.3424,
+ "max_ram": 968.138752,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -108,31 +108,31 @@
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 4.418891790000032,
- "mean": 4.418891790000032,
+ "total": 4.492047818000003,
+ "mean": 4.492047818000003,
  "stdev": 0.0,
- "p50": 4.418891790000032,
- "p90": 4.418891790000032,
- "p95": 4.418891790000032,
- "p99": 4.418891790000032,
+ "p50": 4.492047818000003,
+ "p90": 4.492047818000003,
+ "p95": 4.492047818000003,
+ "p99": 4.492047818000003,
  "values": [
- 4.418891790000032
+ 4.492047818000003
  ]
  },
  "throughput": null,
  "energy": {
  "unit": "kWh",
- "cpu": 5.05576174833332e-05,
- "ram": 2.1130882283032723e-06,
+ "cpu": 5.147852764444312e-05,
+ "ram": 2.151582748910762e-06,
  "gpu": 0,
- "total": 5.267070571163647e-05
+ "total": 5.363011039335388e-05
  },
  "efficiency": null
  },
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 884.0192,
+ "max_ram": 882.044928,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -140,55 +140,54 @@
  },
  "latency": {
  "unit": "s",
- "count": 24,
- "total": 1.0241846999999211,
- "mean": 0.04267436249999671,
- "stdev": 0.0018994348249447436,
- "p50": 0.04339263250000158,
- "p90": 0.04440422620000391,
- "p95": 0.04473461129999805,
- "p99": 0.04509450773000936,
+ "count": 23,
+ "total": 1.0130471230000353,
+ "mean": 0.04404552708695806,
+ "stdev": 0.0019397615563806901,
+ "p50": 0.0444551679999563,
+ "p90": 0.045764630000007855,
+ "p95": 0.04611271569997939,
+ "p99": 0.04676261982000028,
  "values": [
- 0.03923072799994998,
- 0.038828807000015786,
- 0.03815797199996496,
- 0.03954421500003491,
- 0.04350138600000264,
- 0.04355444600003011,
- 0.0433399040000495,
- 0.04206436900000199,
- 0.04141866999998456,
- 0.04367648400000235,
- 0.04312868899995692,
- 0.043445360999953664,
- 0.04312368000000788,
- 0.04150470199999745,
- 0.04413672499998711,
- 0.042941839999969034,
- 0.043516684999985955,
- 0.04431087000000389,
- 0.04386888300001601,
- 0.04254367599997977,
- 0.043929815000012695,
- 0.04518670300001304,
- 0.04444423600000391,
- 0.044785853999997016
+ 0.04478551800002606,
+ 0.04357774099997869,
+ 0.04326259999999138,
+ 0.04427914799998689,
+ 0.044415805000028286,
+ 0.04462528799996335,
+ 0.04332045900002868,
+ 0.04277524600001925,
+ 0.04280934899998101,
+ 0.046935071000007156,
+ 0.04572969100001956,
+ 0.0457663390000107,
+ 0.04471968500001822,
+ 0.0444551679999563,
+ 0.04615120199997591,
+ 0.045757793999996466,
+ 0.04527943499999765,
+ 0.045411573000023964,
+ 0.045333868000000166,
+ 0.044021729999997206,
+ 0.039900159000012536,
+ 0.038950436999982685,
+ 0.040783817000033196
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 23.43327331486386
+ "value": 22.703780977026867
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.4930092765432305e-06,
- "ram": 6.239509902714625e-08,
+ "cpu": 1.532312900641052e-06,
+ "ram": 6.403733274714227e-08,
  "gpu": 0.0,
- "total": 1.5554043755703768e-06
+ "total": 1.5963502333881944e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 642919.6263725911
+ "value": 626428.949665724
  }
  }
  }
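For reference, the derived statistics in the new report are internally consistent with the raw measurements: mean = total / count, throughput = count / total, and the samples/kWh efficiency is the reciprocal of the forward energy total (23 / 1.0130471230000353 s ≈ 22.70 samples/s, 1 / 1.5963502333881944e-06 kWh ≈ 626428.95 samples/kWh). The sketch below is a sanity check built on those inferred relationships, not on the optimum_benchmark source; it assumes the new benchmark.json has been downloaded locally and that the load/forward sections sit under a top-level "report" key.

import json
import math

# Path to the downloaded benchmark.json at revision de45020 (see the download sketch above).
PATH = "benchmark.json"

with open(PATH) as f:
    data = json.load(f)

# Nesting assumed: fall back to the top level if there is no "report" key.
report = data.get("report", data)
forward = report["forward"]

latency = forward["latency"]
count, total = latency["count"], latency["total"]

assert count == len(latency["values"]) == 23
assert math.isclose(total, sum(latency["values"]), rel_tol=1e-6)                   # ~1.0130 s
assert math.isclose(latency["mean"], total / count, rel_tol=1e-6)                  # ~0.04405 s
assert math.isclose(forward["throughput"]["value"], count / total, rel_tol=1e-6)   # ~22.70 samples/s

energy = forward["energy"]
assert math.isclose(energy["total"], energy["cpu"] + energy["ram"] + energy["gpu"], rel_tol=1e-6)
assert math.isclose(forward["efficiency"]["value"], 1.0 / energy["total"], rel_tol=1e-6)  # ~626429 samples/kWh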