IlyasMoutawwakil (HF staff) committed
Commit 159014a · verified · 1 Parent(s): a34d2da

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub
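For context, result files like this one are normally pushed programmatically with huggingface_hub rather than through the web UI. A minimal sketch of such an upload, assuming an HfApi-based flow; the repo_id below is an illustrative placeholder and is not taken from this commit:

# Sketch: uploading an optimum-benchmark result file with huggingface_hub.
# repo_id is an assumed placeholder; path_in_repo mirrors the file changed in this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file produced by the benchmark run
    path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="IlyasMoutawwakil/benchmarks",  # assumed placeholder repository
    repo_type="dataset",
    commit_message="Upload benchmark.json with huggingface_hub",
)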

cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
  "processor": "x86_64",
  "python_version": "3.10.14",
  "optimum_benchmark_version": "0.3.1",
- "optimum_benchmark_commit": "f6013cec1a849341c31271831560b1681406c092",
+ "optimum_benchmark_commit": "79990507b694d513bac81e140baff3af23a6bff7",
  "transformers_version": "4.42.3",
  "transformers_commit": null,
  "accelerate_version": "0.31.0",
@@ -99,7 +99,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 940.158976,
+ "max_ram": 940.224512,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -107,53 +107,55 @@
  },
  "latency": {
  "unit": "s",
- "count": 22,
- "total": 1.0221214099999543,
- "mean": 0.04646006409090701,
- "stdev": 0.00307194651322619,
- "p50": 0.047473979500011865,
- "p90": 0.048771257499993226,
- "p95": 0.0492678234499607,
- "p99": 0.05025819809996733,
+ "count": 24,
+ "total": 1.002178846999982,
+ "mean": 0.04175745195833258,
+ "stdev": 0.0024072203492112466,
+ "p50": 0.040595760999991626,
+ "p90": 0.04619594190000385,
+ "p95": 0.04656686894998643,
+ "p99": 0.04693666874001593,
  "values": [
- 0.05051479499996958,
- 0.048591108999971766,
- 0.04757033499998897,
- 0.049292904999958864,
- 0.04737762400003476,
- 0.04804061699996964,
- 0.04589986400003454,
- 0.04879127399999561,
- 0.048118443000021216,
- 0.048033985000017765,
- 0.04829099599999154,
- 0.047251538000011806,
- 0.04709436299998515,
- 0.04783470099999931,
- 0.047895885000002636,
- 0.047307773,
- 0.046759626000039134,
- 0.04639914099999487,
- 0.04076937600001429,
- 0.04014425499997287,
- 0.04001080400001911,
- 0.04013200099996084
+ 0.040718163999997614,
+ 0.04075360000001638,
+ 0.040562773999994306,
+ 0.040545672999996896,
+ 0.0401632989999996,
+ 0.0400997899999993,
+ 0.03984096899998235,
+ 0.03989334600001371,
+ 0.04009529199998951,
+ 0.040628747999988946,
+ 0.04232462999999598,
+ 0.04016376000001287,
+ 0.04129198400002565,
+ 0.04133614699998134,
+ 0.04056064900001388,
+ 0.04471263399997838,
+ 0.0459473950000131,
+ 0.047033191000025454,
+ 0.046613528999984055,
+ 0.04630246199999988,
+ 0.04331350399999678,
+ 0.03951679400000785,
+ 0.04007705799998007,
+ 0.039683454999988044
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 21.523861827726495
+ "value": 23.94782136127089
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.6129671202765571e-06,
- "ram": 6.740891937956803e-08,
+ "cpu": 1.5722363260057238e-06,
+ "ram": 6.570593579440356e-08,
  "gpu": 0.0,
- "total": 1.680376039656125e-06
+ "total": 1.6379422618001276e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 595104.8910484595
+ "value": 610522.1309211366
  }
  }
  }
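The derived figures in the new run are internally consistent: throughput matches the sample count divided by the total measured latency, and efficiency matches the reciprocal of the per-sample energy. A quick check, assuming those relations (inferred from the numbers in this diff, not read from optimum-benchmark's source):

# Assumed relations, checked against the new (+) values above.
count = 24
total_latency_s = 1.002178846999982
energy_per_sample_kwh = 1.6379422618001276e-06

throughput = count / total_latency_s      # ≈ 23.94782 samples/s
efficiency = 1.0 / energy_per_sample_kwh  # ≈ 610522 samples/kWh
print(throughput, efficiency)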