IlyasMoutawwakil (HF staff) committed on
Commit 2a9fbb0
1 Parent(s): d55b0bb

Upload cpu_training_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

cpu_training_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
     "name": "cpu_training_transformers_text-classification_FacebookAI/roberta-base",
     "backend": {
         "name": "pytorch",
-        "version": "2.3.0+cpu",
+        "version": "2.3.1+cpu",
         "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
         "task": "text-classification",
         "library": "transformers",
@@ -76,23 +76,23 @@
     "environment": {
         "cpu": " AMD EPYC 7763 64-Core Processor",
         "cpu_count": 4,
-        "cpu_ram_mb": 16757.354496,
+        "cpu_ram_mb": 16757.338112,
         "system": "Linux",
         "machine": "x86_64",
-        "platform": "Linux-6.5.0-1021-azure-x86_64-with-glibc2.35",
+        "platform": "Linux-6.5.0-1022-azure-x86_64-with-glibc2.35",
         "processor": "x86_64",
         "python_version": "3.10.14",
         "optimum_benchmark_version": "0.2.1",
-        "optimum_benchmark_commit": "347e13ca9f7f904f55669603cfb9f0b6c7e8672c",
-        "transformers_version": "4.41.1",
+        "optimum_benchmark_commit": "d920fe9626db1e7915f6d3574b5b54b0159cd100",
+        "transformers_version": "4.42.3",
         "transformers_commit": null,
-        "accelerate_version": "0.30.1",
+        "accelerate_version": "0.31.0",
         "accelerate_commit": null,
-        "diffusers_version": "0.27.2",
+        "diffusers_version": "0.29.2",
         "diffusers_commit": null,
         "optimum_version": null,
         "optimum_commit": null,
-        "timm_version": "1.0.3",
+        "timm_version": "1.0.7",
         "timm_commit": null,
         "peft_version": null,
         "peft_commit": null
@@ -102,7 +102,7 @@
     "overall": {
         "memory": {
             "unit": "MB",
-            "max_ram": 2832.50688,
+            "max_ram": 2848.260096,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -111,24 +111,24 @@
         "latency": {
             "unit": "s",
             "count": 5,
-            "total": 3.11779525299994,
-            "mean": 0.6235590505999881,
-            "stdev": 0.049603432971682564,
-            "p50": 0.5995712669999875,
-            "p90": 0.6744295051999984,
-            "p95": 0.698530285600009,
-            "p99": 0.7178109099200174,
+            "total": 3.0871379149999143,
+            "mean": 0.6174275829999829,
+            "stdev": 0.04857123315806308,
+            "p50": 0.5943080519999739,
+            "p90": 0.6667415143999961,
+            "p95": 0.6906321171999934,
+            "p99": 0.7097445994399914,
             "values": [
-                0.7226310660000195,
-                0.5995712669999875,
-                0.5993030899999781,
-                0.5941626659999883,
-                0.6021271639999668
+                0.7145227199999908,
+                0.5950697060000039,
+                0.5906765209999776,
+                0.5943080519999739,
+                0.5925609159999681
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 16.03697354785887
+            "value": 16.196231388645746
         },
         "energy": null,
         "efficiency": null
@@ -136,7 +136,7 @@
     "warmup": {
         "memory": {
             "unit": "MB",
-            "max_ram": 2832.50688,
+            "max_ram": 2848.260096,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -145,21 +145,21 @@
         "latency": {
             "unit": "s",
             "count": 2,
-            "total": 1.322202333000007,
-            "mean": 0.6611011665000035,
-            "stdev": 0.06152989950001597,
-            "p50": 0.6611011665000035,
-            "p90": 0.7103250861000163,
-            "p95": 0.7164780760500179,
-            "p99": 0.7214004680100191,
+            "total": 1.3095924259999947,
+            "mean": 0.6547962129999974,
+            "stdev": 0.05972650699999349,
+            "p50": 0.6547962129999974,
+            "p90": 0.7025774185999921,
+            "p95": 0.7085500692999915,
+            "p99": 0.713328189859991,
             "values": [
-                0.7226310660000195,
-                0.5995712669999875
+                0.7145227199999908,
+                0.5950697060000039
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 6.050511181483415
+            "value": 6.108770821495292
         },
         "energy": null,
         "efficiency": null
@@ -167,7 +167,7 @@
     "train": {
         "memory": {
             "unit": "MB",
-            "max_ram": 2832.50688,
+            "max_ram": 2848.260096,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -176,22 +176,22 @@
         "latency": {
            "unit": "s",
            "count": 3,
-            "total": 1.7955929199999332,
-            "mean": 0.5985309733333111,
-            "stdev": 0.0032970117994050186,
-            "p50": 0.5993030899999781,
-            "p90": 0.6015623491999691,
-            "p95": 0.601844756599968,
-            "p99": 0.602070682519967,
+            "total": 1.7775454889999196,
+            "mean": 0.5925151629999732,
+            "stdev": 0.0014829192726375981,
+            "p50": 0.5925609159999681,
+            "p90": 0.5939586247999727,
+            "p95": 0.5941333383999734,
+            "p99": 0.5942731092799738,
             "values": [
-                0.5993030899999781,
-                0.5941626659999883,
-                0.6021271639999668
+                0.5906765209999776,
+                0.5943080519999739,
+                0.5925609159999681
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 10.024543870445129
+            "value": 10.126323130063545
         },
         "energy": null,
         "efficiency": null