IlyasMoutawwakil committed (verified)
Commit 4fa87cc · 1 parent: e1ae0e3

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

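As the commit message says, files like this are pushed with the huggingface_hub client. A minimal sketch of such an upload using HfApi.upload_file; the repo_id and repo_type below are placeholders, since the target repo is not named on this page:

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local benchmark result file
    path_in_repo="cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="<namespace>/<benchmark-repo>",  # placeholder: not taken from this page
    repo_type="dataset",                     # assumption: benchmark dashboards typically live in dataset repos
    commit_message="Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)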
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -81,7 +81,7 @@
  "processor": "x86_64",
  "python_version": "3.10.14",
  "optimum_benchmark_version": "0.3.1",
- "optimum_benchmark_commit": "41836e67badc33c5e7a42b3b53f44695ff51963e",
+ "optimum_benchmark_commit": "bc46022225a5818a4648ee2abeffdd38c4a9b291",
  "transformers_version": "4.42.4",
  "transformers_commit": null,
  "accelerate_version": "0.32.1",
@@ -100,7 +100,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 942.96064,
+ "max_ram": 943.24736,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -108,57 +108,56 @@
  },
  "latency": {
  "unit": "s",
- "count": 26,
- "total": 1.0184859130001769,
- "mean": 0.03917253511539142,
- "stdev": 0.0014329893020697767,
- "p50": 0.03929683799998429,
- "p90": 0.04072265549999088,
- "p95": 0.04214556700001992,
- "p99": 0.042363420249984074,
+ "count": 25,
+ "total": 1.0278415210000276,
+ "mean": 0.0411136608400011,
+ "stdev": 0.0022453338150798816,
+ "p50": 0.04061845299997913,
+ "p90": 0.04424681320000445,
+ "p95": 0.045559793599966265,
+ "p99": 0.04626137820001304,
  "values": [
- 0.03942920000002914,
- 0.03924314300002152,
- 0.03674496700000418,
- 0.03995215399999097,
- 0.0395850209999935,
- 0.042363037000029635,
- 0.03939356400002225,
- 0.0398444740000059,
- 0.039379225999994105,
- 0.037841882000009264,
- 0.03578465799995456,
- 0.04149315699999079,
- 0.04236354799996889,
- 0.03885950900001944,
- 0.03852791000002753,
- 0.03928008200000477,
- 0.03711653000004844,
- 0.039855545000023085,
- 0.0385356050000496,
- 0.038527058999989094,
- 0.03953612000003659,
- 0.03939455500000122,
- 0.039313593999963814,
- 0.03854181700000936,
- 0.038492985000004865,
- 0.03908657099998436
+ 0.04642747500002997,
+ 0.04573540499995943,
+ 0.044857347999993635,
+ 0.04218202000004112,
+ 0.036648029999980736,
+ 0.040137858000036886,
+ 0.04079287900003692,
+ 0.04218278999996983,
+ 0.04089306600002374,
+ 0.04201441600002909,
+ 0.038442585999973744,
+ 0.04141703300001609,
+ 0.04333101100002068,
+ 0.040269792999993115,
+ 0.03965328300000692,
+ 0.040141054000002896,
+ 0.03996799999998757,
+ 0.04271494299996448,
+ 0.03924250799997253,
+ 0.040178663000006054,
+ 0.04222931800001106,
+ 0.03897120099998119,
+ 0.04061845299997913,
+ 0.0383669349999991,
+ 0.04042545300001166
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 25.528089950121366
+ "value": 24.32281581276899
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.343258176270116e-06,
- "ram": 5.613570432258491e-08,
+ "cpu": 1.4239761564466692e-06,
+ "ram": 5.9510160484015676e-08,
  "gpu": 0.0,
- "total": 1.399393880592701e-06
+ "total": 1.4834863169306848e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 714595.0928243725
+ "value": 674087.7813210895
  }
  }
  }
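The aggregate fields on the "+" side are internally consistent; a quick sanity check, assuming the energy figures are per sample (which the efficiency value implies):

# Re-derive the aggregate fields from the "+" side of the diff above.
count = 25
total_latency = 1.0278415210000276        # s, summed forward-pass latency
mean = total_latency / count              # 0.0411136608400011 s   -> matches "mean"
throughput = count / total_latency        # 24.32281581276899 samples/s -> matches throughput "value"

cpu_kwh = 1.4239761564466692e-06
ram_kwh = 5.9510160484015676e-08
gpu_kwh = 0.0
total_kwh = cpu_kwh + ram_kwh + gpu_kwh   # ~1.4834863169306848e-06 kWh -> matches energy "total"
efficiency = 1.0 / total_kwh              # ~674087.78 samples/kWh -> matches efficiency "value"
                                          # (assumes energy totals are per sample; agrees to float rounding)
print(mean, throughput, total_kwh, efficiency)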