IlyasMoutawwakil committed
Commit 0a6719e
1 Parent(s): 152aa44

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

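The commit message indicates the file was pushed with huggingface_hub. A minimal sketch of how such an upload can be done with `HfApi.upload_file` is shown below; the `repo_id` is a placeholder assumption, not taken from this commit.

```python
# Minimal sketch: pushing a benchmark result file to a Hub repo with
# huggingface_hub. The repo_id below is hypothetical; only the file path
# and the use of huggingface_hub come from the commit itself.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo=(
        "cpu_inference_transformers_text-classification_"
        "FacebookAI/roberta-base/benchmark.json"
    ),
    repo_id="user/benchmark-results",  # hypothetical repo_id
    repo_type="dataset",
)
```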
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
  "processor": "x86_64",
  "python_version": "3.10.15",
  "optimum_benchmark_version": "0.5.0",
- "optimum_benchmark_commit": "0600d2e2ba71f8a4277cfa9b8287625223e3f7a0",
+ "optimum_benchmark_commit": "1d52d2c35074aaaacac2a8342653779fa0d40c86",
  "transformers_version": "4.45.1",
  "transformers_commit": null,
  "accelerate_version": "0.34.2",
@@ -99,7 +99,7 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 949.02272,
+ "max_ram": 946.876416,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -108,15 +108,15 @@
  "latency": {
  "unit": "s",
  "values": [
- 1.4273132989999908
+ 1.234438277000038
  ],
  "count": 1,
- "total": 1.4273132989999908,
- "mean": 1.4273132989999908,
- "p50": 1.4273132989999908,
- "p90": 1.4273132989999908,
- "p95": 1.4273132989999908,
- "p99": 1.4273132989999908,
+ "total": 1.234438277000038,
+ "mean": 1.234438277000038,
+ "p50": 1.234438277000038,
+ "p90": 1.234438277000038,
+ "p95": 1.234438277000038,
+ "p99": 1.234438277000038,
  "stdev": 0,
  "stdev_": 0
  },
@@ -127,7 +127,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 957.280256,
+ "max_ram": 955.133952,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -136,58 +136,60 @@
  "latency": {
  "unit": "s",
  "values": [
- 0.03981268799998361,
- 0.03910152699995706,
- 0.038372281999954794,
- 0.03906676100001505,
- 0.03936000100003412,
- 0.040058408999982476,
- 0.03827129199999035,
- 0.038474592999989454,
- 0.03948708899997655,
- 0.03938883499995427,
- 0.039154467000003024,
- 0.039820372999997744,
- 0.038498207000031925,
- 0.039486547999956656,
- 0.03888950999998997,
- 0.03876054900001691,
- 0.037048053000035,
- 0.03343306599998641,
- 0.0333484469999803,
- 0.03389648299997816,
- 0.03348706700001003,
- 0.03287189600001739,
- 0.03333767699996315,
- 0.036412813999959326,
- 0.03568130400003611,
- 0.033097236999992674,
- 0.03348883000001024
+ 0.03679081899997527,
+ 0.036414769000032265,
+ 0.03584372500000654,
+ 0.036520226000050116,
+ 0.03659185899999784,
+ 0.03676674500002264,
+ 0.036201020999953926,
+ 0.03639364899999009,
+ 0.036961023999992904,
+ 0.03652296800004251,
+ 0.03825314199997365,
+ 0.03612683000000061,
+ 0.03598265100004028,
+ 0.03623148499997342,
+ 0.036643302000015865,
+ 0.036257614000021476,
+ 0.03680604499999163,
+ 0.03642739000002848,
+ 0.03588412800002061,
+ 0.03632753300001923,
+ 0.03597452600001816,
+ 0.03642584699997542,
+ 0.034570350000024064,
+ 0.03182143899999801,
+ 0.031883282999956464,
+ 0.032162614000014855,
+ 0.03186415800001896,
+ 0.03165142199998172,
+ 0.03170217600001024
  ],
- "count": 27,
- "total": 1.0021060049998027,
- "mean": 0.03711503722221492,
- "p50": 0.038474592999989454,
- "p90": 0.03961732859997937,
- "p95": 0.039818067499993504,
- "p99": 0.03999651963998645,
- "stdev": 0.002614446460988622,
- "stdev_": 7.044170386615605
+ "count": 29,
+ "total": 1.0280027400001472,
+ "mean": 0.03544837034483266,
+ "p50": 0.036257614000021476,
+ "p90": 0.03679386419997854,
+ "p95": 0.03689903239999239,
+ "p99": 0.03789134895997904,
+ "stdev": 0.0019210648419562362,
+ "stdev_": 5.419331899516423
  },
  "throughput": {
  "unit": "samples/s",
- "value": 26.943257365277752
+ "value": 28.210041541324927
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.2213696140625076e-06,
- "ram": 5.107078070253942e-08,
+ "cpu": 1.252282740625008e-06,
+ "ram": 5.236378824692169e-08,
  "gpu": 0.0,
- "total": 1.272440394765047e-06
+ "total": 1.3046465288719296e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 785891.4288748649
+ "value": 766491.1360049807
  }
  }
  }
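The aggregate latency fields in this file are consistent with simple statistics over the raw "values" list (for the new run, mean = total / count = 1.0280027 / 29 ≈ 0.035448 s, and throughput ≈ count / total ≈ 28.21 samples/s). A rough sketch of that relationship is below; it is an illustration under those assumptions, not the optimum-benchmark implementation.

```python
# Rough sketch: how the aggregate latency fields relate to the raw "values"
# list stored under "forward" -> "latency" in benchmark.json. Illustrative
# only; this is not taken from the optimum-benchmark source.
import numpy as np

# First few per-iteration forward latencies (seconds) from the new run;
# the full file contains 29 values.
latencies = [
    0.03679081899997527,
    0.036414769000032265,
    0.03584372500000654,
]

count = len(latencies)
total = float(sum(latencies))
summary = {
    "count": count,
    "total": total,
    "mean": total / count,
    "p50": float(np.percentile(latencies, 50)),
    "p90": float(np.percentile(latencies, 90)),
    "stdev": float(np.std(latencies)),
}
# Throughput in samples/s matches count / total latency (~28.21 for the
# full 29-value run shown in the diff above).
throughput = count / total
print(summary, throughput)
```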