IlyasMoutawwakil HF staff committed on
Commit
2589c30
1 Parent(s): ad47ca3

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

Browse files
cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
3
  "name": "cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base",
4
  "backend": {
5
  "name": "pytorch",
6
- "version": "2.4.0+cpu",
7
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
8
  "task": "multiple-choice",
9
  "library": "transformers",
@@ -80,7 +80,7 @@
80
  "processor": "x86_64",
81
  "python_version": "3.10.14",
82
  "optimum_benchmark_version": "0.4.0",
83
- "optimum_benchmark_commit": "505086556c6e125f92759cd19b806135534e5ab3",
84
  "transformers_version": "4.44.2",
85
  "transformers_commit": null,
86
  "accelerate_version": "0.34.0",
@@ -99,7 +99,7 @@
99
  "load": {
100
  "memory": {
101
  "unit": "MB",
102
- "max_ram": 945.074176,
103
  "max_global_vram": null,
104
  "max_process_vram": null,
105
  "max_reserved": null,
@@ -108,31 +108,31 @@
108
  "latency": {
109
  "unit": "s",
110
  "count": 1,
111
- "total": 4.4923935990000246,
112
- "mean": 4.4923935990000246,
113
  "stdev": 0.0,
114
- "p50": 4.4923935990000246,
115
- "p90": 4.4923935990000246,
116
- "p95": 4.4923935990000246,
117
- "p99": 4.4923935990000246,
118
  "values": [
119
- 4.4923935990000246
120
  ]
121
  },
122
  "throughput": null,
123
  "energy": {
124
  "unit": "kWh",
125
- "cpu": 4.9859675444444765e-05,
126
- "ram": 2.08389852040056e-06,
127
  "gpu": 0,
128
- "total": 5.1943573964845325e-05
129
  },
130
  "efficiency": null
131
  },
132
  "forward": {
133
  "memory": {
134
  "unit": "MB",
135
- "max_ram": 955.990016,
136
  "max_global_vram": null,
137
  "max_process_vram": null,
138
  "max_reserved": null,
@@ -140,55 +140,53 @@
140
  },
141
  "latency": {
142
  "unit": "s",
143
- "count": 24,
144
- "total": 1.0124018730000444,
145
- "mean": 0.042183411375001846,
146
- "stdev": 0.0023585229275687927,
147
- "p50": 0.04086939100000109,
148
- "p90": 0.04575586240000007,
149
- "p95": 0.046307956000003966,
150
- "p99": 0.046583093190004664,
151
  "values": [
152
- 0.0453193879999958,
153
- 0.04481910300000891,
154
- 0.043479359999992084,
155
- 0.046641179000005195,
156
- 0.04553435999997646,
157
- 0.04638863200000287,
158
- 0.04508289699998613,
159
- 0.04585079200001019,
160
- 0.04148860800000875,
161
- 0.04051125199998751,
162
- 0.04118137400001842,
163
- 0.04089743299999782,
164
- 0.040531348999991224,
165
- 0.04061770999999226,
166
- 0.04023611700000629,
167
- 0.041013500000019576,
168
- 0.04073581099999046,
169
- 0.04051551899999595,
170
- 0.040696891000038704,
171
- 0.04018028300004062,
172
- 0.04084134900000436,
173
- 0.04023996400002261,
174
- 0.03960027899995566,
175
- 0.03999872299999652
176
  ]
177
  },
178
  "throughput": {
179
  "unit": "samples/s",
180
- "value": 23.706001183977413
181
  },
182
  "energy": {
183
  "unit": "kWh",
184
- "cpu": 1.5389354794872295e-06,
185
- "ram": 6.431499291972038e-08,
186
  "gpu": 0.0,
187
- "total": 1.6032504724069499e-06
188
  },
189
  "efficiency": {
190
  "unit": "samples/kWh",
191
- "value": 623732.8584713943
192
  }
193
  }
194
  }
 
3
  "name": "cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base",
4
  "backend": {
5
  "name": "pytorch",
6
+ "version": "2.4.1+cpu",
7
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
8
  "task": "multiple-choice",
9
  "library": "transformers",
 
80
  "processor": "x86_64",
81
  "python_version": "3.10.14",
82
  "optimum_benchmark_version": "0.4.0",
83
+ "optimum_benchmark_commit": "ea76e356b5c355783ee27d2d429a010ded791f8b",
84
  "transformers_version": "4.44.2",
85
  "transformers_commit": null,
86
  "accelerate_version": "0.34.0",
 
99
  "load": {
100
  "memory": {
101
  "unit": "MB",
102
+ "max_ram": 945.504256,
103
  "max_global_vram": null,
104
  "max_process_vram": null,
105
  "max_reserved": null,
 
108
  "latency": {
109
  "unit": "s",
110
  "count": 1,
111
+ "total": 4.364110363000009,
112
+ "mean": 4.364110363000009,
113
  "stdev": 0.0,
114
+ "p50": 4.364110363000009,
115
+ "p90": 4.364110363000009,
116
+ "p95": 4.364110363000009,
117
+ "p99": 4.364110363000009,
118
  "values": [
119
+ 4.364110363000009
120
  ]
121
  },
122
  "throughput": null,
123
  "energy": {
124
  "unit": "kWh",
125
+ "cpu": 4.7422670750001813e-05,
126
+ "ram": 1.9820457280451876e-06,
127
  "gpu": 0,
128
+ "total": 4.9404716478047e-05
129
  },
130
  "efficiency": null
131
  },
132
  "forward": {
133
  "memory": {
134
  "unit": "MB",
135
+ "max_ram": 956.25216,
136
  "max_global_vram": null,
137
  "max_process_vram": null,
138
  "max_reserved": null,
 
140
  },
141
  "latency": {
142
  "unit": "s",
143
+ "count": 22,
144
+ "total": 1.0132536560000176,
145
+ "mean": 0.046056984363637164,
146
+ "stdev": 0.001276235839567508,
147
+ "p50": 0.045823448500044606,
148
+ "p90": 0.04791894960000605,
149
+ "p95": 0.04796938475001866,
150
+ "p99": 0.04800531215999854,
151
  "values": [
152
+ 0.04801440599999296,
153
+ 0.04736015399998905,
154
+ 0.04465490799998406,
155
+ 0.04797110200001953,
156
+ 0.04775868300004049,
157
+ 0.04793675700000222,
158
+ 0.04530245600000171,
159
+ 0.04398687799999834,
160
+ 0.046565184999963094,
161
+ 0.04734634900000856,
162
+ 0.045917739999993046,
163
+ 0.045704309999962334,
164
+ 0.044798410999987937,
165
+ 0.04712582599995585,
166
+ 0.04577908000004527,
167
+ 0.04607173800002329,
168
+ 0.045700703999955294,
169
+ 0.044098398000016914,
170
+ 0.04586781700004394,
171
+ 0.04573587900000575,
172
+ 0.04553675600004681,
173
+ 0.04402011899998115
 
 
174
  ]
175
  },
176
  "throughput": {
177
  "unit": "samples/s",
178
+ "value": 21.712233525856252
179
  },
180
  "energy": {
181
  "unit": "kWh",
182
+ "cpu": 1.5727707022222026e-06,
183
+ "ram": 6.57289447589296e-08,
184
  "gpu": 0.0,
185
+ "total": 1.638499646981132e-06
186
  },
187
  "efficiency": {
188
  "unit": "samples/kWh",
189
+ "value": 610314.4433643661
190
  }
191
  }
192
  }