IlyasMoutawwakil committed
Commit 6e88e49
1 parent: 8af2acb

Upload cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json with huggingface_hub

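The commit message notes that the file was pushed with huggingface_hub. Below is a minimal sketch of how such an upload is typically done with the library's `HfApi.upload_file` helper; the repository id and local path are illustrative placeholders, not values taken from this commit.

```python
from huggingface_hub import HfApi

api = HfApi()

# Push a single benchmark result file into a dataset repository.
# repo_id and the local path are hypothetical placeholders.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json",
    repo_id="<namespace>/<benchmark-results-dataset>",
    repo_type="dataset",
    commit_message=(
        "Upload cpu_inference_transformers_fill-mask_google-bert/"
        "bert-base-uncased/benchmark.json with huggingface_hub"
    ),
)
```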
cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json CHANGED
@@ -79,8 +79,8 @@
  "platform": "Linux-6.5.0-1022-azure-x86_64-with-glibc2.35",
  "processor": "x86_64",
  "python_version": "3.10.14",
- "optimum_benchmark_version": "0.3.0",
- "optimum_benchmark_commit": "2a75c0bc0d007cc875fa0f75ca41d02e46f917be",
+ "optimum_benchmark_version": "0.3.1",
+ "optimum_benchmark_commit": "2c8ab57de1af767ec2e6a2cf774f52cea6a0db26",
  "transformers_version": "4.42.3",
  "transformers_commit": null,
  "accelerate_version": "0.31.0",
@@ -99,7 +99,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 865.857536,
+ "max_ram": 865.947648,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -107,54 +107,55 @@
  },
  "latency": {
  "unit": "s",
- "count": 23,
- "total": 1.0210175170000184,
- "mean": 0.04439206595652254,
- "stdev": 0.0017810219066118384,
- "p50": 0.044878564999976334,
- "p90": 0.045958327600010306,
- "p95": 0.04600135670004306,
- "p99": 0.04611832724001829,
+ "count": 24,
+ "total": 0.999454433999972,
+ "mean": 0.041643934749998834,
+ "stdev": 0.0026174987956124125,
+ "p50": 0.043079464500010545,
+ "p90": 0.044571618000043146,
+ "p95": 0.04467869325000322,
+ "p99": 0.045458232809986614,
  "values": [
- 0.04551222399999233,
- 0.04474227099996142,
- 0.04600151600004665,
- 0.04432372200000145,
- 0.04536882700000433,
- 0.04446885200002271,
- 0.04519714799999974,
- 0.04357871399997748,
- 0.045365481000033014,
- 0.04508262499996363,
- 0.044468271000027926,
- 0.04416608799999722,
- 0.04615127400001029,
- 0.04395190000002458,
- 0.045288046999985454,
- 0.044878564999976334,
- 0.04431974400000627,
- 0.045791946000008465,
- 0.04599992300001077,
- 0.04502122999997482,
- 0.0429243670000119,
- 0.039059500000007574,
- 0.03935528199997407
+ 0.04315225999999939,
+ 0.04333076299997174,
+ 0.04345083600003363,
+ 0.0430066690000217,
+ 0.04329281200000423,
+ 0.045689762999984396,
+ 0.0446536650000553,
+ 0.04380489499999385,
+ 0.04294270000002598,
+ 0.04346829799999341,
+ 0.04318877800000109,
+ 0.043153843000027337,
+ 0.04438017500001479,
+ 0.04468310999999403,
+ 0.04122787599999356,
+ 0.03843196499997248,
+ 0.038477398999987145,
+ 0.03831623899998249,
+ 0.039220381999996334,
+ 0.03960326399999303,
+ 0.037818221999998514,
+ 0.037912408000011055,
+ 0.03811272999996618,
+ 0.03813538199995037
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 22.52654789663083
+ "value": 24.013100731314253
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.5213631157182222e-06,
- "ram": 6.357892781505388e-08,
+ "cpu": 1.4775882532567157e-06,
+ "ram": 6.174592250999922e-08,
  "gpu": 0.0,
- "total": 1.584942043533276e-06
+ "total": 1.5393341757667148e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 630937.897117501
+ "value": 649631.5197458134
  }
  }
  }
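The aggregate metrics in the updated report are consistent with simple derivations from the raw per-forward latencies, as the sketch below checks. The key path into the JSON and the derivation rules (throughput as count divided by total latency, efficiency as the inverse of the per-sample energy total) are inferences from the numbers in this diff, not documented behaviour of optimum-benchmark.

```python
import json

with open("benchmark.json") as f:
    report = json.load(f)

# Adjust this key path if "forward" is nested differently in the report.
forward = report["forward"]
values = forward["latency"]["values"]

count = len(values)          # 24 in the updated file
total = sum(values)          # ~0.99945 s
mean = total / count         # ~0.04164 s, matches the reported mean

# Throughput (samples/s) matches count / total latency: ~24.013
throughput = count / total

# Efficiency (samples/kWh) matches 1 / total energy, which suggests the
# reported energy totals are per forward pass: ~649631.5
efficiency = 1.0 / forward["energy"]["total"]

print(count, round(mean, 6), round(throughput, 3), round(efficiency, 1))
```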