IlyasMoutawwakil HF staff committed on
Commit
fd94f27
·
verified ·
1 Parent(s): 5668ab1

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

Browse files
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
80
  "processor": "x86_64",
81
  "python_version": "3.10.14",
82
  "optimum_benchmark_version": "0.2.1",
83
- "optimum_benchmark_commit": "0d561969d04a7091baf8691bc894eb753a189d4f",
84
  "transformers_version": "4.42.3",
85
  "transformers_commit": null,
86
  "accelerate_version": "0.31.0",
@@ -99,7 +99,7 @@
99
  "forward": {
100
  "memory": {
101
  "unit": "MB",
102
- "max_ram": 938.000384,
103
  "max_global_vram": null,
104
  "max_process_vram": null,
105
  "max_reserved": null,
@@ -107,60 +107,59 @@
107
  },
108
  "latency": {
109
  "unit": "s",
110
- "count": 29,
111
- "total": 1.0195579560000851,
112
- "mean": 0.03515717089655466,
113
- "stdev": 0.0016951666582072868,
114
- "p50": 0.03561454399999775,
115
- "p90": 0.036774820600032856,
116
- "p95": 0.0370516898000119,
117
- "p99": 0.037224270839976724,
118
  "values": [
119
- 0.0364518549999957,
120
- 0.03621295700003202,
121
- 0.03694045100002086,
122
- 0.03419317800000954,
123
- 0.03658557600004997,
124
- 0.03564331799998399,
125
- 0.03540404000000308,
126
- 0.03605307699996274,
127
- 0.03397549999999683,
128
- 0.035340880999967794,
129
- 0.036328063000041766,
130
- 0.035238499000001866,
131
- 0.03553931300001523,
132
- 0.03476208500001121,
133
- 0.03673341300003585,
134
- 0.03464397399994823,
135
- 0.03561454399999775,
136
- 0.03604753700000174,
137
- 0.03643175700000256,
138
- 0.035032472000011694,
139
- 0.03712584900000593,
140
- 0.03726254599996537,
141
- 0.03651208799999495,
142
- 0.03606907699997919,
143
- 0.031929061000028014,
144
- 0.031537547000027644,
145
- 0.03180835500000967,
146
- 0.032183928999984346,
147
- 0.03195701399999962
148
  ]
149
  },
150
  "throughput": {
151
  "unit": "samples/s",
152
- "value": 28.443699379064608
153
  },
154
  "energy": {
155
  "unit": "kWh",
156
- "cpu": 1.2466133882602057e-06,
157
- "ram": 5.2097879873187484e-08,
158
  "gpu": 0.0,
159
- "total": 1.2987112681333932e-06
160
  },
161
  "efficiency": {
162
  "unit": "samples/kWh",
163
- "value": 769994.0891690854
164
  }
165
  }
166
  }
 
80
  "processor": "x86_64",
81
  "python_version": "3.10.14",
82
  "optimum_benchmark_version": "0.2.1",
83
+ "optimum_benchmark_commit": "c594845efb520077430f4fe3f536bc1756e2b290",
84
  "transformers_version": "4.42.3",
85
  "transformers_commit": null,
86
  "accelerate_version": "0.31.0",
 
99
  "forward": {
100
  "memory": {
101
  "unit": "MB",
102
+ "max_ram": 937.058304,
103
  "max_global_vram": null,
104
  "max_process_vram": null,
105
  "max_reserved": null,
 
107
  },
108
  "latency": {
109
  "unit": "s",
110
+ "count": 28,
111
+ "total": 1.0062479960001838,
112
+ "mean": 0.03593742842857799,
113
+ "stdev": 0.0019255354945074497,
114
+ "p50": 0.036259797000042227,
115
+ "p90": 0.03768781330002184,
116
+ "p95": 0.037845974850011996,
117
+ "p99": 0.03978163751000409,
118
  "values": [
119
+ 0.03592640199997277,
120
+ 0.03592838500003381,
121
+ 0.036318371000049865,
122
+ 0.040478801000006115,
123
+ 0.03775174500003686,
124
+ 0.03789671399999861,
125
+ 0.03681972200001837,
126
+ 0.036570729000004576,
127
+ 0.03595944299996745,
128
+ 0.03766041400001541,
129
+ 0.036705138999991505,
130
+ 0.036600965000047836,
131
+ 0.03639323999999533,
132
+ 0.03605881699996871,
133
+ 0.03610589599998093,
134
+ 0.03551362499996458,
135
+ 0.03620122300003459,
136
+ 0.03643630999999914,
137
+ 0.036194249000004675,
138
+ 0.03526911000000155,
139
+ 0.03674177700003156,
140
+ 0.03676516000001584,
141
+ 0.036388641000030475,
142
+ 0.036157319999972515,
143
+ 0.03231824800002414,
144
+ 0.0319343030000141,
145
+ 0.032182685000009315,
146
+ 0.03097056199999315
 
147
  ]
148
  },
149
  "throughput": {
150
  "unit": "samples/s",
151
+ "value": 27.826142373748276
152
  },
153
  "energy": {
154
  "unit": "kWh",
155
+ "cpu": 1.2533890704313914e-06,
156
+ "ram": 5.238118496137644e-08,
157
  "gpu": 0.0,
158
+ "total": 1.305770255392768e-06
159
  },
160
  "efficiency": {
161
  "unit": "samples/kWh",
162
+ "value": 765831.5050982732
163
  }
164
  }
165
  }