Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub
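The commit title indicates the file was pushed with the huggingface_hub client. Below is a minimal, hypothetical sketch of such an upload using HfApi.upload_file; the repo_id, repo_type and token handling are assumptions for illustration and are not taken from this page.

from huggingface_hub import HfApi

api = HfApi()  # authentication assumed via `huggingface-cli login` or the HF_TOKEN environment variable

api.upload_file(
    path_or_fileobj="benchmark_report.json",  # local copy of the report
    path_in_repo="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json",
    repo_id="<namespace>/<benchmark-results-repo>",  # placeholder: the target repo is not shown on this page
    repo_type="dataset",  # assumption: benchmark result dumps are commonly stored in dataset repos
    commit_message="Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub",
)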
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json
CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram":
+            "max_ram": 908.353536,
             "max_global_vram": 1195.900928,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -10,163 +10,167 @@
         },
         "latency": {
             "unit": "s",
-            "count":
-            "total":
-            "mean": 0.
-            "stdev": 0.
-            "p50": 0.
-            "p90": 0.
-            "p95": 0.
-            "p99": 0.
+            "count": 136,
+            "total": 1.0017927684783936,
+            "mean": 0.007366123297635247,
+            "stdev": 0.00026709037432654973,
+            "p50": 0.007221776008605957,
+            "p90": 0.007710720062255859,
+            "p95": 0.007831039905548096,
+            "p99": 0.008297676992416384,
             "values": [
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.00810086441040039,
+                0.007712768077850342,
+                0.00786636781692505,
+                0.007795711994171142,
+                0.007781343936920166,
+                0.007796735763549805,
+                0.008393728256225585,
+                0.0074332160949707035,
+                0.007196671962738037,
+                0.007253024101257324,
+                0.007223296165466309,
+                0.007193600177764893,
+                0.007186431884765625,
+                0.007166975975036621,
+                0.007197696208953858,
+                0.007193600177764893,
+                0.007178239822387696,
+                0.007180287837982178,
+                0.007170048236846924,
+                0.007194623947143554,
+                0.00719974422454834,
+                0.007192575931549072,
+                0.0071198720932006835,
+                0.007150591850280762,
+                0.007117824077606201,
+                0.007177216053009033,
+                0.007202816009521484,
+                0.0072202558517456055,
+                0.007186431884765625,
+                0.007174143791198731,
+                0.007186431884765625,
+                0.0071833600997924804,
+                0.0074301438331604,
+                0.007168000221252442,
+                0.0071905279159545895,
+                0.007158783912658692,
+                0.007172095775604248,
+                0.007164927959442138,
+                0.007111680030822754,
+                0.007130112171173096,
+                0.007081984043121338,
+                0.007110655784606934,
+                0.007358463764190673,
+                0.007428160190582275,
+                0.007437312126159668,
+                0.007485439777374267,
+                0.007528448104858398,
+                0.0074997758865356446,
+                0.0074629120826721195,
+                0.007408639907836914,
+                0.007394303798675537,
+                0.00739737606048584,
+                0.007410751819610596,
+                0.007384064197540283,
+                0.007397439956665039,
+                0.007708672046661377,
+                0.007819263935089112,
+                0.008058879852294922,
+                0.008474623680114746,
+                0.0077199358940124516,
+                0.007493631839752197,
+                0.0076912641525268555,
+                0.007520256042480469,
                 0.007482367992401123,
-                0.
-                0.
-                0.
-                0.007582719802856445,
-                0.0075632638931274416,
-                0.007606272220611572,
-                0.007525375843048096,
+                0.007379968166351319,
+                0.007412767887115478,
+                0.008119296073913575,
                 0.007524352073669433,
-                0.00760422420501709,
-                0.007590911865234375,
-                0.00759603214263916,
-                0.007565248012542725,
-                0.0075673599243164065,
-                0.007497695922851563,
-                0.007548927783966064,
-                0.0075560321807861324,
-                0.0075642881393432615,
-                0.0074670081138610836,
-                0.00742195177078247,
-                0.007434239864349365,
-                0.007481344223022461,
-                0.007458816051483155,
-                0.007413792133331299,
-                0.007539711952209473,
-                0.007570432186126709,
-                0.0076995201110839845,
-                0.00758787202835083,
-                0.007576576232910156,
-                0.007557119846343994,
-                0.007481344223022461,
-                0.0074670081138610836,
-                0.007519231796264648,
-                0.007585792064666748,
                 0.0075345921516418455,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.0075038719177246095,
-                0.007574528217315674,
-                0.007444479942321777,
-                0.0074106879234313965,
-                0.007468031883239746,
-                0.007447487831115722,
-                0.007460864067077637,
-                0.00745472002029419,
-                0.00754585599899292,
-                0.007478240013122558,
-                0.007583744049072265,
-                0.007634943962097168,
-                0.00757862377166748,
-                0.008241151809692383,
-                0.007532576084136963,
-                0.007526400089263916,
-                0.007493631839752197,
-                0.007549952030181885,
-                0.007684095859527588,
-                0.007513023853302002,
-                0.007294976234436036,
-                0.007320576190948487,
-                0.007574528217315674,
-                0.007635968208312988,
-                0.007527423858642578,
-                0.0075008001327514645,
-                0.007717855930328369,
-                0.007701504230499268,
-                0.0076943359375,
-                0.007763967990875244,
-                0.008019968032836914,
-                0.007824384212493896,
-                0.008060928344726562,
-                0.007709695816040039,
-                0.008787967681884766,
-                0.00790835189819336,
-                0.007599103927612305,
-                0.007510015964508057,
-                0.0074107198715209965,
-                0.007468031883239746,
-                0.007484416007995606,
-                0.007451648235321045,
-                0.0074741759300231934,
-                0.007448575973510742,
-                0.007489535808563232,
-                0.007450623989105225,
-                0.007435264110565186,
-                0.00744652795791626,
-                0.007468031883239746,
-                0.0074741759300231934,
-                0.007513088226318359,
-                0.0074670081138610836,
-                0.0074670081138610836,
-                0.007461887836456299,
-                0.0074301438331604,
-                0.007435264110565186,
-                0.007457791805267334,
-                0.00744758415222168,
-                0.007456768035888672,
-                0.007460864067077637,
-                0.00745468807220459,
-                0.007475200176239013,
-                0.007442431926727295,
-                0.007501823902130127,
-                0.007491583824157715,
-                0.007455743789672851,
-                0.00748748779296875,
-                0.007434239864349365,
-                0.0074301438331604,
+                0.007544832229614258,
+                0.007529471874237061,
+                0.007528448104858398,
+                0.007520224094390869,
+                0.007518208026885987,
                 0.007436287879943848,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.007408671855926514,
+                0.007414783954620361,
+                0.007412735939025879,
+                0.007400447845458984,
+                0.007388160228729248,
+                0.007402495861053467,
+                0.007485504150390625,
+                0.007651328086853027,
+                0.007632895946502686,
+                0.007481344223022461,
+                0.007505951881408691,
+                0.007230463981628418,
+                0.007155712127685547,
+                0.007102464199066162,
+                0.007205887794494629,
+                0.007241727828979493,
+                0.0076277761459350585,
+                0.007525375843048096,
+                0.007490560054779053,
+                0.0074997758865356446,
+                0.007452640056610107,
+                0.007490560054779053,
+                0.00780291223526001,
+                0.00765337610244751,
+                0.008041472434997558,
+                0.007614463806152344,
+                0.007328767776489258,
+                0.007151616096496582,
+                0.007155712127685547,
+                0.007138239860534668,
+                0.007159840106964111,
+                0.007178239822387696,
+                0.007154687881469727,
+                0.007155712127685547,
+                0.007116799831390381,
+                0.007176191806793213,
+                0.007169023990631103,
+                0.007169023990631103,
+                0.007200767993927002,
+                0.007146495819091797,
+                0.007198719978332519,
+                0.0071792640686035155,
+                0.007175168037414551,
+                0.007194591999053955,
+                0.007166975975036621,
+                0.007168000221252442,
+                0.0071495680809020995,
+                0.007163904190063477,
+                0.007172095775604248,
+                0.007143424034118652,
+                0.007187456130981445,
+                0.007178239822387696,
+                0.007176224231719971,
+                0.007143424034118652,
+                0.007135231971740722,
+                0.007154623985290528,
+                0.0071485438346862796,
+                0.007131135940551757,
+                0.007165952205657959,
+                0.0071198720932006835,
+                0.007163904190063477
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value":
+            "value": 135.7566198112691
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 8.
-            "ram": 4.
-            "gpu": 1.
-            "total": 2.
+            "cpu": 8.701522806651788e-08,
+            "ram": 4.755494025836267e-08,
+            "gpu": 1.5051512446715494e-07,
+            "total": 2.850852927920355e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value":
+            "value": 3507722.163449104
         }
     }
 }
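The aggregate fields in the new version line up with the raw latency list: mean equals total / count, the reported throughput equals count / total, and the energy total equals the sum of the cpu, ram and gpu components. A small sketch for checking this from a local copy of the report; only the committed file path is taken from this page, the rest is illustrative.

import json
from statistics import mean, stdev

# Local copy of the file committed above.
path = "cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json"
with open(path) as f:
    forward = json.load(f)["forward"]

values = forward["latency"]["values"]  # per-iteration forward latencies, in seconds

count = len(values)   # 136
total = sum(values)   # ~1.0018 s
print("count:", count)
print("total:", total)
print("mean:", mean(values))         # ~0.007366 s, matching latency.mean
print("stdev:", stdev(values))       # ~0.000267 s, close to latency.stdev (sample vs. population stdev is not specified here)
print("throughput:", count / total)  # ~135.76 samples/s, matching throughput.value

energy = forward["energy"]
print("cpu + ram + gpu:", energy["cpu"] + energy["ram"] + energy["gpu"])  # matches energy.total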