IlyasMoutawwakil (HF staff) committed
Commit 95111b6 · verified · 1 parent: d30c13a

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub
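Since the report was pushed with huggingface_hub, it can be fetched back the same way. A minimal sketch, assuming the file lives in a Hub repo you have access to; the repo_id and repo_type below are placeholders, since the owning repository is not shown on this page:

import json

from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="<namespace>/<benchmark-repo>",  # placeholder: actual repo not shown on this page
    repo_type="dataset",                     # assumption: benchmark reports are typically dataset repos
    filename="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json",
    revision="95111b6",                      # pin to the commit above
)
with open(path) as f:
    report = json.load(f)

print(report["forward"]["latency"]["mean"])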

cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json ADDED
@@ -0,0 +1,194 @@
{
    "forward": {
        "memory": {
            "unit": "MB",
            "max_ram": 890.138624,
            "max_global_vram": 1195.900928,
            "max_process_vram": 0.0,
            "max_reserved": 555.74528,
            "max_allocated": 508.993536
        },
        "latency": {
            "unit": "s",
            "count": 154,
            "total": 1.0014327363967899,
            "mean": 0.00650280997660253,
            "stdev": 0.00021388913393038193,
            "p50": 0.006459903955459595,
            "p90": 0.006831820774078369,
            "p95": 0.006857471919059753,
            "p99": 0.0069436825037002565,
            "values": [
                0.007262207984924316,
                0.0067983360290527345,
                0.006879231929779053,
                0.006853631973266602,
                0.006860799789428711,
                0.006831103801727295,
                0.00682700777053833,
                0.00685152006149292,
                0.00694271993637085,
                0.006814720153808594,
                0.006944767951965332,
                0.006825984001159668,
                0.006788095951080322,
                0.006792191982269287,
                0.0067041277885437015,
                0.006729728221893311,
                0.006793216228485107,
                0.006850560188293457,
                0.006896639823913574,
                0.006820864200592041,
                0.006834176063537598,
                0.006816768169403077,
                0.006791168212890625,
                0.0068321280479431154,
                0.006855679988861084,
                0.006842368125915528,
                0.006790143966674805,
                0.00682700777053833,
                0.006816768169403077,
                0.006427648067474365,
                0.006453248023986816,
                0.006552576065063476,
                0.006458367824554443,
                0.006496223926544189,
                0.006458367824554443,
                0.00657203197479248,
                0.006557695865631104,
                0.006412288188934326,
                0.0064204797744750975,
                0.0063907837867736815,
                0.006481919765472412,
                0.006502431869506836,
                0.006575104236602783,
                0.006882304191589355,
                0.0068618240356445315,
                0.006852608203887939,
                0.006523903846740723,
                0.0064767999649047855,
                0.006418432235717773,
                0.0064440321922302245,
                0.006808576107025147,
                0.006523903846740723,
                0.006467584133148193,
                0.006665184020996094,
                0.006419456005096436,
                0.006407167911529541,
                0.006486015796661377,
                0.006436863899230957,
                0.006506559848785401,
                0.0065781760215759275,
                0.006404096126556396,
                0.006455296039581298,
                0.006433792114257812,
                0.006498303890228272,
                0.006457344055175781,
                0.006512639999389648,
                0.006477759838104248,
                0.006490111827850342,
                0.006460415840148926,
                0.00658739185333252,
                0.006543360233306885,
                0.006508480072021485,
                0.006599679946899414,
                0.006478847980499268,
                0.006461440086364746,
                0.006533120155334473,
                0.006404096126556396,
                0.006509568214416504,
                0.006450175762176514,
                0.006375423908233643,
                0.00636518383026123,
                0.006194176197052002,
                0.006404096126556396,
                0.006467584133148193,
                0.006201312065124511,
                0.006201344013214111,
                0.006171648025512695,
                0.006261760234832763,
                0.006228991985321045,
                0.006301695823669433,
                0.006254591941833496,
                0.0063303041458129886,
                0.006481919765472412,
                0.006253568172454834,
                0.00618393611907959,
                0.006127615928649902,
                0.006198272228240966,
                0.006131711959838867,
                0.006128640174865723,
                0.006179840087890625,
                0.006153215885162353,
                0.00618393611907959,
                0.006137792110443115,
                0.006278143882751465,
                0.00611737585067749,
                0.006278143882751465,
                0.006145023822784424,
                0.00614192008972168,
                0.0062494721412658695,
                0.006147039890289307,
                0.0066826238632202144,
                0.006419456005096436,
                0.006433792114257812,
                0.006445055961608887,
                0.006451200008392334,
                0.00652288007736206,
                0.006462463855743408,
                0.006412288188934326,
                0.00643071985244751,
                0.006379519939422608,
                0.00643174409866333,
                0.0064174079895019534,
                0.00638976001739502,
                0.006446080207824707,
                0.00637440013885498,
                0.006553599834442139,
                0.006446080207824707,
                0.006403071880340576,
                0.006481919765472412,
                0.00643174409866333,
                0.006437888145446777,
                0.006496255874633789,
                0.006459392070770263,
                0.00648089599609375,
                0.006452223777770996,
                0.006419424057006836,
                0.006425663948059082,
                0.006435840129852295,
                0.006437888145446777,
                0.006432767868041992,
                0.006435840129852295,
                0.006478847980499268,
                0.0064287037849426265,
                0.0064430079460144046,
                0.0064778242111206055,
                0.006423552036285401,
                0.006436863899230957,
                0.006426623821258545,
                0.0064471039772033695,
                0.006494175910949707,
                0.00648089599609375,
                0.006475776195526123,
                0.006486015796661377,
                0.006469632148742676
            ]
        },
        "throughput": {
            "unit": "samples/s",
            "value": 153.7796742635961
        },
        "energy": {
            "unit": "kWh",
            "cpu": 7.514877954783488e-08,
            "ram": 4.1082792785910214e-08,
            "gpu": 1.3599166997468262e-07,
            "total": 2.5222324230842775e-07
        },
        "efficiency": {
            "unit": "samples/kWh",
            "value": 3964741.674271095
        }
    }
}
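The summary statistics are derivable from the raw samples, which makes the report easy to sanity-check. A small sketch, assuming `report` is the dict loaded above; in this file the throughput equals count / total, and the efficiency value matches 1 / energy.total, which suggests the energy figures are per-sample:

import numpy as np

lat = report["forward"]["latency"]
values = np.asarray(lat["values"])

print(len(values), lat["count"])                   # 154 forward passes
print(values.sum(), lat["total"])                  # ~1.0014 s of measured latency
print(values.mean(), lat["mean"])                  # ~6.50 ms per forward pass
print(np.percentile(values, 99), lat["p99"])       # tail latency (interpolation method assumed)

print(lat["count"] / lat["total"])                 # ~153.78, matches "throughput" in samples/s
print(1.0 / report["forward"]["energy"]["total"])  # ~3.96e6, matches "efficiency" in samples/kWh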