IlyasMoutawwakil HF staff committed on
Commit
fb55601
1 Parent(s): 34ad7b7

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

Browse files
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
80
  "processor": "x86_64",
81
  "python_version": "3.10.14",
82
  "optimum_benchmark_version": "0.3.1",
83
- "optimum_benchmark_commit": "2c8ab57de1af767ec2e6a2cf774f52cea6a0db26",
84
  "transformers_version": "4.42.3",
85
  "transformers_commit": null,
86
  "accelerate_version": "0.31.0",
@@ -99,7 +99,7 @@
99
  "forward": {
100
  "memory": {
101
  "unit": "MB",
102
- "max_ram": 936.833024,
103
  "max_global_vram": null,
104
  "max_process_vram": null,
105
  "max_reserved": null,
@@ -107,61 +107,59 @@
107
  },
108
  "latency": {
109
  "unit": "s",
110
- "count": 30,
111
- "total": 1.0281147619998592,
112
- "mean": 0.034270492066661974,
113
- "stdev": 0.0019223535907324756,
114
- "p50": 0.035063915999984374,
115
- "p90": 0.035854227900040316,
116
- "p95": 0.03594990539996275,
117
- "p99": 0.036019693059960216,
118
  "values": [
119
- 0.03602076199996418,
120
- 0.03503670999998576,
121
- 0.0351899249999974,
122
- 0.03414096400001654,
123
- 0.03554006699999945,
124
- 0.03513388199996825,
125
- 0.03485442100003411,
126
- 0.035160080999958154,
127
- 0.03442233700002362,
128
- 0.034280443000000105,
129
- 0.035867807999977686,
130
- 0.03473856599998726,
131
- 0.03509112199998299,
132
- 0.03495292399998107,
133
- 0.035379578999993555,
134
- 0.03501384800000551,
135
- 0.03534937200004151,
136
- 0.03548676799999839,
137
- 0.03546419600002082,
138
- 0.034524105999992116,
139
- 0.03548876100001053,
140
- 0.03585271900004727,
141
- 0.03601707599995052,
142
- 0.03530410700000175,
143
- 0.031821875999980875,
144
- 0.032093072000009215,
145
- 0.030286681000006865,
146
- 0.029568312999970203,
147
- 0.029960982999966745,
148
- 0.030073292999986734
149
  ]
150
  },
151
  "throughput": {
152
  "unit": "samples/s",
153
- "value": 29.179621875718293
154
  },
155
  "energy": {
156
  "unit": "kWh",
157
- "cpu": 1.1783257029414956e-06,
158
- "ram": 4.924426242268964e-08,
159
  "gpu": 0.0,
160
- "total": 1.2275699653641853e-06
161
  },
162
  "efficiency": {
163
  "unit": "samples/kWh",
164
- "value": 814617.5193389717
165
  }
166
  }
167
  }
 
80
  "processor": "x86_64",
81
  "python_version": "3.10.14",
82
  "optimum_benchmark_version": "0.3.1",
83
+ "optimum_benchmark_commit": "8ebe8531a5b12ea7926cab66905381c76b24fcc4",
84
  "transformers_version": "4.42.3",
85
  "transformers_commit": null,
86
  "accelerate_version": "0.31.0",
 
99
  "forward": {
100
  "memory": {
101
  "unit": "MB",
102
+ "max_ram": 936.6528,
103
  "max_global_vram": null,
104
  "max_process_vram": null,
105
  "max_reserved": null,
 
107
  },
108
  "latency": {
109
  "unit": "s",
110
+ "count": 28,
111
+ "total": 1.0214843340000357,
112
+ "mean": 0.03648158335714413,
113
+ "stdev": 0.0017589491762905706,
114
+ "p50": 0.036961173000008785,
115
+ "p90": 0.037804026000009115,
116
+ "p95": 0.03792682109997543,
117
+ "p99": 0.038642149019995034,
118
  "values": [
119
+ 0.03696238499998117,
120
+ 0.0372744169999919,
121
+ 0.03702061399997092,
122
+ 0.036033294999981536,
123
+ 0.03753845800002864,
124
+ 0.0369599610000364,
125
+ 0.03666300800000499,
126
+ 0.036372016000029816,
127
+ 0.03683306400000674,
128
+ 0.03462882799999534,
129
+ 0.03734442699999363,
130
+ 0.036315961000013885,
131
+ 0.0371922030000178,
132
+ 0.036199645000010605,
133
+ 0.03888908400000446,
134
+ 0.03686366100004079,
135
+ 0.03737322000000631,
136
+ 0.03778935600001887,
137
+ 0.036203371999988576,
138
+ 0.03678186799999139,
139
+ 0.03797450999996954,
140
+ 0.03759816000001592,
141
+ 0.0375417440000092,
142
+ 0.03729151800001773,
143
+ 0.03783825599998636,
144
+ 0.03335671299998921,
145
+ 0.03168444399994996,
146
+ 0.03096014599998398
 
 
147
  ]
148
  },
149
  "throughput": {
150
  "unit": "samples/s",
151
+ "value": 27.41109096637308
152
  },
153
  "energy": {
154
  "unit": "kWh",
155
+ "cpu": 1.331015604513663e-06,
156
+ "ram": 5.56245077989388e-08,
157
  "gpu": 0.0,
158
+ "total": 1.3866401123126015e-06
159
  },
160
  "efficiency": {
161
  "unit": "samples/kWh",
162
+ "value": 721167.6563518897
163
  }
164
  }
165
  }