Commit fd1e78a (verified) by IlyasMoutawwakil (HF staff)
Parent: 684c03d

Upload cuda_training_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json with huggingface_hub

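As the commit message notes, the file was uploaded programmatically with `huggingface_hub` rather than through the web UI. Below is a minimal sketch of such an upload; the `repo_id` and `repo_type` are placeholders/assumptions, not details taken from this commit.

```python
# Minimal sketch of an upload like this one, using huggingface_hub.
# "your-org/benchmark-results" is a placeholder repo_id and "dataset" is an
# assumed repo_type; neither is taken from this commit.
from huggingface_hub import HfApi

api = HfApi()  # authenticates via HF_TOKEN or a cached `huggingface-cli login`
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file
    path_in_repo="cuda_training_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json",
    repo_id="your-org/benchmark-results",  # placeholder
    repo_type="dataset",                   # assumption
    commit_message="Upload cuda_training_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json with huggingface_hub",
)
```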
cuda_training_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json CHANGED
@@ -110,7 +110,7 @@
     "overall": {
         "memory": {
             "unit": "MB",
-            "max_ram": 1103.310848,
+            "max_ram": 1103.81056,
             "max_global_vram": 3169.32096,
             "max_process_vram": 0.0,
             "max_reserved": 2520.776704,
@@ -119,24 +119,24 @@
         "latency": {
             "unit": "s",
             "count": 5,
-            "total": 0.8952390060424805,
-            "mean": 0.17904780120849612,
-            "stdev": 0.2739236153506071,
-            "p50": 0.042137504577636715,
-            "p90": 0.45317533111572267,
-            "p95": 0.5900350227355955,
-            "p99": 0.699522776031494,
+            "total": 0.8396492996215821,
+            "mean": 0.16792985992431642,
+            "stdev": 0.2508389356576258,
+            "p50": 0.04234239959716797,
+            "p90": 0.4191111297607423,
+            "p95": 0.544359031677246,
+            "p99": 0.6445573532104492,
             "values": [
-                0.7268947143554687,
-                0.04259625625610351,
-                0.04188979339599609,
-                0.04172073745727539,
-                0.042137504577636715
+                0.66960693359375,
+                0.04336742401123047,
+                0.04234239959716797,
+                0.04218675231933594,
+                0.042145790100097655
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 55.85100700764977
+            "value": 59.548671120829
         },
         "energy": null,
         "efficiency": null
@@ -144,7 +144,7 @@
     "warmup": {
         "memory": {
             "unit": "MB",
-            "max_ram": 1103.310848,
+            "max_ram": 1103.81056,
             "max_global_vram": 3169.32096,
             "max_process_vram": 0.0,
             "max_reserved": 2520.776704,
@@ -153,21 +153,21 @@
         "latency": {
             "unit": "s",
             "count": 2,
-            "total": 0.7694909706115722,
-            "mean": 0.3847454853057861,
-            "stdev": 0.3421492290496826,
-            "p50": 0.3847454853057861,
-            "p90": 0.6584648685455322,
-            "p95": 0.6926797914505004,
-            "p99": 0.7200517297744751,
+            "total": 0.7129743576049805,
+            "mean": 0.35648717880249026,
+            "stdev": 0.3131197547912598,
+            "p50": 0.35648717880249026,
+            "p90": 0.6069829826354981,
+            "p95": 0.638294958114624,
+            "p99": 0.6633445384979249,
             "values": [
-                0.7268947143554687,
-                0.04259625625610351
+                0.66960693359375,
+                0.04336742401123047
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 10.396483266907993
+            "value": 11.220599892082452
         },
         "energy": null,
         "efficiency": null
@@ -175,7 +175,7 @@
     "train": {
         "memory": {
             "unit": "MB",
-            "max_ram": 1103.310848,
+            "max_ram": 1103.81056,
             "max_global_vram": 3169.32096,
             "max_process_vram": 0.0,
             "max_reserved": 2520.776704,
@@ -184,22 +184,22 @@
         "latency": {
             "unit": "s",
             "count": 3,
-            "total": 0.1257480354309082,
-            "mean": 0.04191601181030274,
-            "stdev": 0.0001711515158957768,
-            "p50": 0.04188979339599609,
-            "p90": 0.04208796234130859,
-            "p95": 0.042112733459472654,
-            "p99": 0.0421325503540039,
+            "total": 0.12667494201660157,
+            "mean": 0.04222498067220052,
+            "stdev": 8.469505615880557e-05,
+            "p50": 0.04218675231933594,
+            "p90": 0.04231127014160156,
+            "p95": 0.042326834869384766,
+            "p99": 0.04233928665161133,
             "values": [
-                0.04188979339599609,
-                0.04172073745727539,
-                0.042137504577636715
+                0.04234239959716797,
+                0.04218675231933594,
+                0.042145790100097655
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 143.1433893843219
+            "value": 142.09597978454954
         },
         "energy": null,
         "efficiency": null