IlyasMoutawwakil committed (verified)
Commit 16c6102 · Parent(s): ec8adae

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

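The commit message indicates the file was pushed programmatically via huggingface_hub. A minimal sketch of how such an upload is typically done with HfApi.upload_file (the repo id and local path below are placeholders for illustration, not taken from this commit):

    from huggingface_hub import HfApi

    api = HfApi()
    # Hypothetical local path and repo id, for illustration only.
    api.upload_file(
        path_or_fileobj="benchmark.json",
        path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
        repo_id="<namespace>/<repo-name>",
        repo_type="dataset",
        commit_message="Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
    )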
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json ADDED
@@ -0,0 +1,304 @@
{
    "config": {
        "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-classification",
            "model": "FacebookAI/roberta-base",
            "library": "transformers",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "hub_kwargs": {
                "revision": "main",
                "force_download": false,
                "local_files_only": false,
                "trust_remote_code": false
            },
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": true,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7R32",
            "cpu_count": 16,
            "cpu_ram_mb": 66697.29792,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.14",
            "gpu": [
                "NVIDIA A10G"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 24146608128,
            "optimum_benchmark_version": "0.2.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.40.2",
            "transformers_commit": null,
            "accelerate_version": "0.30.0",
            "accelerate_commit": null,
            "diffusers_version": "0.27.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "0.9.16",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 890.114048,
                "max_global_vram": 1195.900928,
                "max_process_vram": 0.0,
                "max_reserved": 555.74528,
                "max_allocated": 508.993536
            },
            "latency": {
                "unit": "s",
                "count": 161,
                "total": 0.996849409103394,
                "mean": 0.006191611236667041,
                "stdev": 0.0002436492177730374,
                "p50": 0.006165503978729248,
                "p90": 0.006308864116668702,
                "p95": 0.006900735855102539,
                "p99": 0.0070588417053222665,
                "values": [
                    0.007296000003814697,
                    0.006905856132507324,
                    0.007013376235961914,
                    0.0069959678649902345,
                    0.0069847040176391605,
                    0.006951935768127441,
                    0.006890495777130127,
                    0.006927360057830811,
                    0.006900735855102539,
                    0.007127039909362793,
                    0.006194176197052002,
                    0.006061056137084961,
                    0.006073311805725097,
                    0.006281216144561768,
                    0.0062740478515625,
                    0.006308864116668702,
                    0.006292479991912842,
                    0.0062904319763183595,
                    0.006284287929534912,
                    0.006266880035400391,
                    0.006223872184753418,
                    0.0062197761535644535,
                    0.006240255832672119,
                    0.006207488059997559,
                    0.0061521921157836916,
                    0.006211616039276123,
                    0.0061859841346740725,
                    0.006235136032104492,
                    0.006254591941833496,
                    0.006281184196472168,
                    0.006270976066589356,
                    0.006248447895050049,
                    0.006266880035400391,
                    0.006253568172454834,
                    0.006244383811950684,
                    0.006255616188049316,
                    0.006270976066589356,
                    0.006342656135559082,
                    0.0064471039772033695,
                    0.0062791681289672855,
                    0.006164480209350586,
                    0.00623199987411499,
                    0.006181888103485108,
                    0.00617574405670166,
                    0.006145023822784424,
                    0.006105055809020996,
                    0.006182911872863769,
                    0.006113279819488526,
                    0.006157311916351318,
                    0.006195168018341065,
                    0.006160384178161621,
                    0.006149055957794189,
                    0.006148096084594727,
                    0.006151167869567871,
                    0.0061634559631347655,
                    0.006190080165863037,
                    0.0061521921157836916,
                    0.00621670389175415,
                    0.006144991874694824,
                    0.006194176197052002,
                    0.006158336162567139,
                    0.00622489595413208,
                    0.006109151840209961,
                    0.006139904022216797,
                    0.006120448112487793,
                    0.006195199966430664,
                    0.005987328052520752,
                    0.005975039958953857,
                    0.005929984092712402,
                    0.005948416233062744,
                    0.00592793607711792,
                    0.005955584049224853,
                    0.005934080123901367,
                    0.0060631041526794435,
                    0.005945343971252442,
                    0.005989376068115234,
                    0.0060405759811401364,
                    0.006080512046813965,
                    0.00606822395324707,
                    0.005970943927764893,
                    0.005984255790710449,
                    0.005982207775115967,
                    0.005967872142791748,
                    0.005981184005737304,
                    0.005991424083709717,
                    0.005967872142791748,
                    0.006119423866271972,
                    0.006124544143676758,
                    0.006145023822784424,
                    0.006147071838378906,
                    0.006169600009918213,
                    0.006165503978729248,
                    0.0061562881469726565,
                    0.006266880035400391,
                    0.00628223991394043,
                    0.006301695823669433,
                    0.006244351863861084,
                    0.0063170561790466305,
                    0.0062638077735900875,
                    0.006285312175750732,
                    0.0062566399574279785,
                    0.006362112045288086,
                    0.006311935901641846,
                    0.006303743839263916,
                    0.006308864116668702,
                    0.006304768085479737,
                    0.006292479991912842,
                    0.006245376110076905,
                    0.006260735988616943,
                    0.0063211522102355954,
                    0.0062863359451293946,
                    0.006195199966430664,
                    0.006146048069000244,
                    0.006182911872863769,
                    0.00612556791305542,
                    0.006165503978729248,
                    0.0061859841346740725,
                    0.006145023822784424,
                    0.006196224212646485,
                    0.006305791854858398,
                    0.006276095867156982,
                    0.006253568172454834,
                    0.006236159801483154,
                    0.006252543926239014,
                    0.006250495910644531,
                    0.006243328094482422,
                    0.0061224961280822755,
                    0.006173696041107178,
                    0.006157311916351318,
                    0.006203392028808594,
                    0.006169568061828614,
                    0.006020095825195313,
                    0.005963776111602783,
                    0.005960703849792481,
                    0.005955584049224853,
                    0.005962751865386963,
                    0.005940224170684814,
                    0.005963776111602783,
                    0.005921792030334472,
                    0.005955584049224853,
                    0.005957632064819336,
                    0.005970943927764893,
                    0.005970943927764893,
                    0.0060026879310607914,
                    0.005943295955657959,
                    0.005985280036926269,
                    0.005966847896575928,
                    0.005989376068115234,
                    0.005985280036926269,
                    0.005982207775115967,
                    0.005974016189575195,
                    0.006013951778411865,
                    0.005957632064819336,
                    0.0059770879745483394,
                    0.005948416233062744,
                    0.005991424083709717,
                    0.005936192035675049,
                    0.006103040218353272,
                    0.005943295955657959,
                    0.006046688079833985,
                    0.005954559803009033
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 161.50884830719806
            },
            "energy": {
                "unit": "kWh",
                "cpu": 7.143940532223449e-08,
                "ram": 3.904175865138686e-08,
                "gpu": 1.322156345542155e-07,
                "total": 2.4269679852783684e-07
            },
            "efficiency": {
                "unit": "samples/kWh",
                "value": 4120367.4958460648
            }
        }
    }
}
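The aggregate latency and throughput figures in the report follow directly from the raw per-forward-pass values. A minimal sketch of how one might re-derive them from this file, assuming it has been downloaded locally as benchmark.json:

    import json

    # Load the benchmark report produced by optimum-benchmark.
    with open("benchmark.json") as f:
        benchmark = json.load(f)

    latency = benchmark["report"]["forward"]["latency"]
    values = latency["values"]   # per-forward-pass latencies in seconds

    count = len(values)          # 161 measured forward passes
    total = sum(values)          # ~0.9968 s of measured time
    mean = total / count         # ~6.19 ms per forward pass

    # With batch_size 1, throughput is forward passes per second.
    throughput = count / total   # ~161.5 samples/s

    print(f"count={count}, mean={mean:.6f} s, throughput={throughput:.2f} samples/s")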