IlyasMoutawwakil (HF staff) committed
Commit f6ae0eb
1 Parent(s): 3e0170e

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

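The file was pushed through the huggingface_hub client. Below is a minimal sketch of how such an upload can be reproduced; the repo_id is a placeholder (the target repository is not named in this commit view), and optimum-benchmark may issue the call through its own wrapper rather than directly like this.

# Sketch only: re-uploading the benchmark JSON with huggingface_hub.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="your-namespace/benchmarks",  # placeholder, not the actual target repo
    repo_type="dataset",                  # assumption: results are stored in a dataset repo
    commit_message="Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)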
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
  "processor": "x86_64",
  "python_version": "3.10.14",
  "optimum_benchmark_version": "0.3.1",
- "optimum_benchmark_commit": "8ebe8531a5b12ea7926cab66905381c76b24fcc4",
+ "optimum_benchmark_commit": "f6013cec1a849341c31271831560b1681406c092",
  "transformers_version": "4.42.3",
  "transformers_commit": null,
  "accelerate_version": "0.31.0",
@@ -99,7 +99,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 936.6528,
+ "max_ram": 937.406464,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -107,59 +107,61 @@
  },
  "latency": {
  "unit": "s",
- "count": 28,
- "total": 1.0214843340000357,
- "mean": 0.03648158335714413,
- "stdev": 0.0017589491762905706,
- "p50": 0.036961173000008785,
- "p90": 0.037804026000009115,
- "p95": 0.03792682109997543,
- "p99": 0.038642149019995034,
+ "count": 30,
+ "total": 1.022742634000167,
+ "mean": 0.0340914211333389,
+ "stdev": 0.007073075727751354,
+ "p50": 0.03266044400001533,
+ "p90": 0.03350528840003335,
+ "p95": 0.03494648380001308,
+ "p99": 0.061579902679997106,
  "values": [
- 0.03696238499998117,
- 0.0372744169999919,
- 0.03702061399997092,
- 0.036033294999981536,
- 0.03753845800002864,
- 0.0369599610000364,
- 0.03666300800000499,
- 0.036372016000029816,
- 0.03683306400000674,
- 0.03462882799999534,
- 0.03734442699999363,
- 0.036315961000013885,
- 0.0371922030000178,
- 0.036199645000010605,
- 0.03888908400000446,
- 0.03686366100004079,
- 0.03737322000000631,
- 0.03778935600001887,
- 0.036203371999988576,
- 0.03678186799999139,
- 0.03797450999996954,
- 0.03759816000001592,
- 0.0375417440000092,
- 0.03729151800001773,
- 0.03783825599998636,
- 0.03335671299998921,
- 0.03168444399994996,
- 0.03096014599998398
+ 0.07198379999999815,
+ 0.03352649600003588,
+ 0.03243057399998861,
+ 0.03282238700001017,
+ 0.03265358100003368,
+ 0.03265496400001666,
+ 0.03309623000001238,
+ 0.032600811999998314,
+ 0.03245073099998308,
+ 0.03240072699998109,
+ 0.033028543000000354,
+ 0.032761512999968545,
+ 0.03268492900002684,
+ 0.03278231100000539,
+ 0.032665924000014,
+ 0.032440661999999065,
+ 0.03261039099999152,
+ 0.03295813100004352,
+ 0.03243022300000575,
+ 0.032385208999983206,
+ 0.032680781000010484,
+ 0.03209596699997519,
+ 0.03350293200003307,
+ 0.036108291999994435,
+ 0.03240202000000636,
+ 0.032883442000013474,
+ 0.032650986000021476,
+ 0.033125053999981446,
+ 0.032253982000042924,
+ 0.03167103999999199
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 27.41109096637308
+ "value": 29.332892755886718
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.331015604513663e-06,
- "ram": 5.56245077989388e-08,
+ "cpu": 1.2352704587909912e-06,
+ "ram": 5.1623710245465304e-08,
  "gpu": 0.0,
- "total": 1.3866401123126015e-06
+ "total": 1.2868941690364565e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 721167.6563518897
+ "value": 777064.6756047823
  }
  }
  }
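As a sanity check on the updated figures, the sketch below re-derives the reported aggregates from the new values; it assumes throughput is count divided by total latency and efficiency is the inverse of the summed per-sample energy, which is consistent with the numbers in the diff above.

# Re-derive the aggregates reported in the new benchmark.json; constants copied from the diff above.
count = 30
total_latency_s = 1.022742634000167
cpu_kwh, ram_kwh, gpu_kwh = 1.2352704587909912e-06, 5.1623710245465304e-08, 0.0

throughput = count / total_latency_s     # ~29.33 samples/s   -> "throughput.value"
total_kwh = cpu_kwh + ram_kwh + gpu_kwh  # ~1.2869e-06 kWh    -> "energy.total"
efficiency = 1.0 / total_kwh             # ~777064.7 samples/kWh -> "efficiency.value"
print(f"{throughput:.6f} samples/s, {total_kwh:.6e} kWh, {efficiency:.1f} samples/kWh")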