IlyasMoutawwakil committed
Commit 2ca82f6 (1 parent: 513388b)

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

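The commit message describes an upload done with huggingface_hub. A minimal sketch of such an upload using HfApi.upload_file follows; the repo_id, repo_type, and token handling are assumptions for illustration, not taken from this commit.

from huggingface_hub import HfApi

api = HfApi()  # by default, uses the token cached by `huggingface-cli login`

# Hypothetical target repository; the actual repo behind this commit is not shown here.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="<user-or-org>/<benchmark-results-repo>",
    repo_type="dataset",  # assumption: benchmark results are commonly stored in a dataset repo
    commit_message="Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)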
cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
  "processor": "x86_64",
  "python_version": "3.10.14",
  "optimum_benchmark_version": "0.4.0",
- "optimum_benchmark_commit": "a38b9ad1f603bed36d9a51f418af821526aa4776",
+ "optimum_benchmark_commit": "712b269a3b6d61a1322007f38975568cf51222c7",
  "transformers_version": "4.44.2",
  "transformers_commit": null,
  "accelerate_version": "0.34.0",
@@ -99,7 +99,7 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 945.774592,
+ "max_ram": 946.42176,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -108,31 +108,31 @@
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 4.479879602999972,
- "mean": 4.479879602999972,
+ "total": 4.364826402999995,
+ "mean": 4.364826402999995,
  "stdev": 0.0,
- "p50": 4.479879602999972,
- "p90": 4.479879602999972,
- "p95": 4.479879602999972,
- "p99": 4.479879602999972,
+ "p50": 4.364826402999995,
+ "p90": 4.364826402999995,
+ "p95": 4.364826402999995,
+ "p99": 4.364826402999995,
  "values": [
- 4.479879602999972
+ 4.364826402999995
  ]
  },
  "throughput": null,
  "energy": {
  "unit": "kWh",
- "cpu": 4.876297828888786e-05,
- "ram": 2.038061600925112e-06,
+ "cpu": 4.777516986666513e-05,
+ "ram": 1.9967696249182595e-06,
  "gpu": 0,
- "total": 5.0801039889812973e-05
+ "total": 4.977193949158339e-05
  },
  "efficiency": null
  },
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 956.653568,
+ "max_ram": 957.693952,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -140,53 +140,55 @@
  },
  "latency": {
  "unit": "s",
- "count": 22,
- "total": 1.0383109839999634,
- "mean": 0.047195953818180154,
- "stdev": 0.0010263162871451432,
- "p50": 0.04748327699999777,
- "p90": 0.04832159070002717,
- "p95": 0.04851235840004335,
- "p99": 0.048771871290017546,
+ "count": 24,
+ "total": 1.0057183729998656,
+ "mean": 0.04190493220832773,
+ "stdev": 0.0015140129595304407,
+ "p50": 0.041683923499988396,
+ "p90": 0.042619340799996054,
+ "p95": 0.04513618750000887,
+ "p99": 0.04705175214998974,
  "values": [
- 0.04640757800001438,
- 0.04528559799996401,
- 0.04616583699998955,
- 0.047686311000006754,
- 0.04656766699997661,
- 0.04724073999994971,
- 0.04585885500000586,
- 0.0445932579999635,
- 0.04834805400003006,
- 0.04769605999996429,
- 0.048838557000010496,
- 0.04795217600002388,
- 0.048521006000044054,
- 0.047648960000003626,
- 0.04794891000000234,
- 0.04756665799999382,
- 0.046830996999972285,
- 0.04705812000003107,
- 0.0474543679999897,
- 0.04808342100000118,
- 0.04751218600000584,
- 0.04704566700002033
+ 0.047505264999983865,
+ 0.0416303750000111,
+ 0.04145329299996092,
+ 0.04077485599998454,
+ 0.0417973960000495,
+ 0.04191049799999291,
+ 0.040663687000005666,
+ 0.041491914999994606,
+ 0.04199965599997313,
+ 0.04197662299998228,
+ 0.04110774800000172,
+ 0.04186960900000258,
+ 0.04087565300000051,
+ 0.04113054999999122,
+ 0.04195694600002753,
+ 0.04288492000000588,
+ 0.041761128999951325,
+ 0.0408576309999944,
+ 0.041095042999984344,
+ 0.040931128000011086,
+ 0.04173747199996569,
+ 0.0455334700000094,
+ 0.0408190789999594,
+ 0.04195443100002194
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 21.188257024160283
+ "value": 23.863539380724042
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.570152960444413e-06,
- "ram": 6.561602659373576e-08,
+ "cpu": 1.5350404715812119e-06,
+ "ram": 6.415279208175929e-08,
  "gpu": 0.0,
- "total": 1.635768987038149e-06
+ "total": 1.599193263662971e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 611333.2676704417
+ "value": 625315.2903542678
  }
  }
  }
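For reference, the derived latency fields in the forward section follow directly from the raw values list: mean is total divided by count, and the samples/s throughput is count divided by total (for the new run, 24 / 1.0057183729998656 ≈ 23.8635). A minimal sketch that recomputes them is given below; the top-level "report" key is an assumption, since only the "load"/"forward" subtrees are visible in this diff.

import json

# Minimal sketch: recompute the derived latency statistics from the raw
# per-iteration values. The "report" nesting is an assumption.
with open("benchmark.json") as f:
    benchmark = json.load(f)

latency = benchmark["report"]["forward"]["latency"]
values = latency["values"]  # per-forward-pass latencies in seconds

total = sum(values)
mean = total / len(values)        # matches the reported "mean"
throughput = len(values) / total  # samples/s, matches the reported throughput value

print(f"count={len(values)}  total={total:.6f} s  mean={mean:.6f} s  throughput={throughput:.2f} samples/s")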