IlyasMoutawwakil committed (verified) · Commit 0ef5313 · 1 parent: 0989217

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
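The commit message says the file was pushed with huggingface_hub. A minimal sketch of how such an upload is typically done with `HfApi.upload_file` is below; the local path and `REPO_ID` are placeholders, not taken from the commit itself.

```python
from huggingface_hub import HfApi

# Placeholder: the target repo is not named in this commit page.
REPO_ID = "<namespace>/<benchmark-dataset>"

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file (assumed name)
    path_in_repo="cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id=REPO_ID,
    repo_type="dataset",
    commit_message=(
        "Upload cpu_inference_transformers_text-classification_"
        "FacebookAI/roberta-base/benchmark.json with huggingface_hub"
    ),
)
```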
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
  "processor": "x86_64",
  "python_version": "3.10.14",
  "optimum_benchmark_version": "0.3.1",
- "optimum_benchmark_commit": "f6013cec1a849341c31271831560b1681406c092",
+ "optimum_benchmark_commit": "79990507b694d513bac81e140baff3af23a6bff7",
  "transformers_version": "4.42.3",
  "transformers_commit": null,
  "accelerate_version": "0.31.0",
@@ -99,7 +99,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 937.406464,
+ "max_ram": 936.71424,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -107,61 +107,59 @@
  },
  "latency": {
  "unit": "s",
- "count": 30,
- "total": 1.022742634000167,
- "mean": 0.0340914211333389,
- "stdev": 0.007073075727751354,
- "p50": 0.03266044400001533,
- "p90": 0.03350528840003335,
- "p95": 0.03494648380001308,
- "p99": 0.061579902679997106,
+ "count": 28,
+ "total": 1.0240056579999646,
+ "mean": 0.03657163064285588,
+ "stdev": 0.0017464278394276223,
+ "p50": 0.03706138649999957,
+ "p90": 0.03772773680000228,
+ "p95": 0.03787638874999573,
+ "p99": 0.03835098933998211,
  "values": [
- 0.07198379999999815,
- 0.03352649600003588,
- 0.03243057399998861,
- 0.03282238700001017,
- 0.03265358100003368,
- 0.03265496400001666,
- 0.03309623000001238,
- 0.032600811999998314,
- 0.03245073099998308,
- 0.03240072699998109,
- 0.033028543000000354,
- 0.032761512999968545,
- 0.03268492900002684,
- 0.03278231100000539,
- 0.032665924000014,
- 0.032440661999999065,
- 0.03261039099999152,
- 0.03295813100004352,
- 0.03243022300000575,
- 0.032385208999983206,
- 0.032680781000010484,
- 0.03209596699997519,
- 0.03350293200003307,
- 0.036108291999994435,
- 0.03240202000000636,
- 0.032883442000013474,
- 0.032650986000021476,
- 0.033125053999981446,
- 0.032253982000042924,
- 0.03167103999999199
+ 0.03749122800002169,
+ 0.03706104600001936,
+ 0.036987447999990763,
+ 0.036041463999993084,
+ 0.03737990100000843,
+ 0.03744559299997263,
+ 0.03695636999998442,
+ 0.036955529000010756,
+ 0.036253027999975984,
+ 0.03473598900001207,
+ 0.03774897900001406,
+ 0.037718632999997226,
+ 0.03706172699997978,
+ 0.03748448499999313,
+ 0.036989532000006875,
+ 0.03748380399997586,
+ 0.03739596000002621,
+ 0.03850115199998072,
+ 0.037944993999985854,
+ 0.03700147500001094,
+ 0.03601153800002521,
+ 0.03748902300003465,
+ 0.03735119699996403,
+ 0.037039515999993,
+ 0.037391983000020446,
+ 0.03339817400001266,
+ 0.031248012999981256,
+ 0.03143787699997347
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 29.332892755886718
+ "value": 27.34359891593584
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.2352704587909912e-06,
- "ram": 5.1623710245465304e-08,
+ "cpu": 1.2068131556013218e-06,
+ "ram": 5.043462608010817e-08,
  "gpu": 0.0,
- "total": 1.2868941690364565e-06
+ "total": 1.25724778168143e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 777064.6756047823
+ "value": 795388.1602102416
  }
  }
  }
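For readers checking the updated numbers, the derived fields on the new side of the diff are internally consistent. A small sketch below re-derives them; the values are copied from the diff, and the relationships (latency total as the sum of per-call latencies, energy total reported per forward pass) are assumptions about how the report is structured, not statements from the file.

```python
# Re-derive the summary statistics from the new benchmark values.
count = 28
latency_total_s = 1.0240056579999646   # "latency.total"
energy_total_kwh = 1.25724778168143e-06  # "energy.total", assumed to be kWh per forward pass

mean_latency = latency_total_s / count   # ~0.0365716 s, matches "latency.mean"
throughput = count / latency_total_s     # ~27.34 samples/s, matches "throughput.value"
efficiency = 1.0 / energy_total_kwh      # ~795388 samples/kWh, matches "efficiency.value"

print(mean_latency, throughput, efficiency)
```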