IlyasMoutawwakil committed
Commit ea28247
1 Parent(s): 278f02d

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

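The commit message above is the default one generated when a file is pushed programmatically. A minimal sketch of such an upload with huggingface_hub's HfApi.upload_file; the repo_id and repo_type below are placeholders, since the destination repository is not shown in this view:

from huggingface_hub import HfApi

api = HfApi()  # requires an authenticated token (e.g. via `huggingface-cli login`)
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file produced by optimum-benchmark
    path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="<namespace>/<repo>",      # placeholder: actual destination not shown in this commit view
    repo_type="dataset",               # assumption: benchmark dumps are typically pushed to a dataset repo
)
# When no commit_message is passed, huggingface_hub uses
# "Upload <path_in_repo> with huggingface_hub", matching the message above.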
cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -7,6 +7,7 @@
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "multiple-choice",
  "library": "transformers",
+ "model_type": "roberta",
  "model": "FacebookAI/roberta-base",
  "processor": "FacebookAI/roberta-base",
  "device": "cpu",
@@ -73,14 +74,14 @@
  "environment": {
  "cpu": " AMD EPYC 7763 64-Core Processor",
  "cpu_count": 4,
- "cpu_ram_mb": 16757.338112,
+ "cpu_ram_mb": 16757.342208,
  "system": "Linux",
  "machine": "x86_64",
  "platform": "Linux-6.5.0-1023-azure-x86_64-with-glibc2.35",
  "processor": "x86_64",
  "python_version": "3.10.14",
  "optimum_benchmark_version": "0.3.1",
- "optimum_benchmark_commit": "c31e63ddb94c29460ba90ff599147e94ff80543a",
+ "optimum_benchmark_commit": "402644384abc46743e46dac65be6512bc68c9d92",
  "transformers_version": "4.42.4",
  "transformers_commit": null,
  "accelerate_version": "0.32.1",
@@ -99,7 +100,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 945.463296,
+ "max_ram": 942.24384,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -107,56 +108,55 @@
  },
  "latency": {
  "unit": "s",
- "count": 25,
- "total": 1.000988487999905,
- "mean": 0.0400395395199962,
- "stdev": 0.000869439902902602,
- "p50": 0.03981994600002281,
- "p90": 0.04099346199999445,
- "p95": 0.04208900840000069,
- "p99": 0.04245403487999738,
+ "count": 24,
+ "total": 1.0189366050000785,
+ "mean": 0.042455691875003275,
+ "stdev": 0.0035839844448729606,
+ "p50": 0.043137909000023456,
+ "p90": 0.04663252819999002,
+ "p95": 0.04714009640003951,
+ "p99": 0.047295363899999075,
  "values": [
- 0.04093592799995349,
- 0.040143141999976706,
- 0.03986372899998969,
- 0.039934861000006094,
- 0.03980596000002379,
- 0.03980367599996271,
- 0.04044784300003812,
- 0.03981994600002281,
- 0.04007136799998534,
- 0.040407238000000234,
- 0.04103181800002176,
- 0.0400583329999904,
- 0.039630520999992314,
- 0.04007267999998021,
- 0.042485843999997996,
- 0.03929198699995595,
- 0.039359733000026154,
- 0.03914272599996593,
- 0.03964961700000913,
- 0.039727342999981374,
- 0.03968610499998704,
- 0.04235330599999543,
- 0.03923431900000196,
- 0.03928257900003018,
- 0.03874788600001011
+ 0.04731630999998515,
+ 0.04665761600000451,
+ 0.04639604699997335,
+ 0.04640906100001985,
+ 0.04657398999995621,
+ 0.04515594500003317,
+ 0.04722524000004569,
+ 0.04651740399998516,
+ 0.04602047200000925,
+ 0.04346564299999045,
+ 0.043248105000031956,
+ 0.043027713000014955,
+ 0.04367045600002939,
+ 0.04210949499997696,
+ 0.038652904000002763,
+ 0.038482815999998365,
+ 0.038240813000015805,
+ 0.03894371799998453,
+ 0.03823495199998206,
+ 0.03825739399997019,
+ 0.03864707400003908,
+ 0.03861378099998092,
+ 0.0389012890000231,
+ 0.03816836700002568
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 24.975312203592868
+ "value": 23.553967815297153
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.5327061343396833e-06,
- "ram": 6.405499365491152e-08,
+ "cpu": 1.5004251757238667e-06,
+ "ram": 6.270243458835431e-08,
  "gpu": 0.0,
- "total": 1.5967611279945946e-06
+ "total": 1.563127610312221e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 626267.7506784755
+ "value": 639743.0340317889
  }
  }
  }
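As a quick sanity check, the summary figures in the new report are consistent with the raw values in the hunks above: the throughput equals the sample count divided by the total latency, the energy total is the sum of the cpu, ram and gpu components, and the efficiency value matches the reciprocal of that total. A minimal sketch of that check, with the formulas inferred from the numbers themselves rather than taken from optimum-benchmark's source:

# Cross-check the derived metrics in the updated benchmark.json (values copied from the diff above).
latency_total = 1.0189366050000785        # s, "latency.total"
count = 24                                # "latency.count"
energy_cpu = 1.5004251757238667e-06       # kWh
energy_ram = 6.270243458835431e-08        # kWh
energy_gpu = 0.0                          # kWh

throughput = count / latency_total                    # ~23.5540 samples/s, matches "throughput.value"
energy_total = energy_cpu + energy_ram + energy_gpu   # ~1.5631e-06 kWh, matches "energy.total"
efficiency = 1.0 / energy_total                       # ~639743 samples/kWh, matches "efficiency.value"

print(f"{throughput:.6f} samples/s, {efficiency:.2f} samples/kWh")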