IlyasMoutawwakil (HF staff) committed
Commit a89c7e2 · verified · 1 parent: 2d6b80f

Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub

cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json ADDED
@@ -0,0 +1,178 @@
+ {
+     "forward": {
+         "memory": {
+             "unit": "MB",
+             "max_ram": 891.981824,
+             "max_global_vram": 1195.900928,
+             "max_process_vram": 0.0,
+             "max_reserved": 555.74528,
+             "max_allocated": 509.039104
+         },
+         "latency": {
+             "unit": "s",
+             "count": 138,
+             "total": 0.9976554260253904,
+             "mean": 0.007229387145111526,
+             "stdev": 0.00011782743380629321,
+             "p50": 0.007202816009521484,
+             "p90": 0.007294464159011841,
+             "p95": 0.0074711038351058965,
+             "p99": 0.007776327791213989,
+             "values": [
+                 0.00781004810333252,
+                 0.007461887836456299,
+                 0.007544832229614258,
+                 0.007541759967803955,
+                 0.007544896125793457,
+                 0.007523327827453613,
+                 0.007885824203491211,
+                 0.007292928218841553,
+                 0.006912000179290771,
+                 0.006923264026641846,
+                 0.007062528133392334,
+                 0.00724889612197876,
+                 0.007193600177764893,
+                 0.0072499198913574215,
+                 0.007225344181060791,
+                 0.007227392196655274,
+                 0.007223296165466309,
+                 0.007214079856872559,
+                 0.007178239822387696,
+                 0.007196703910827637,
+                 0.007222271919250488,
+                 0.007188479900360107,
+                 0.007204864025115967,
+                 0.00719974422454834,
+                 0.007227392196655274,
+                 0.007209983825683594,
+                 0.007247871875762939,
+                 0.00719155216217041,
+                 0.0071905279159545895,
+                 0.007193600177764893,
+                 0.007208960056304932,
+                 0.007201791763305664,
+                 0.007208992004394532,
+                 0.007165952205657959,
+                 0.007192575931549072,
+                 0.007193632125854492,
+                 0.007188479900360107,
+                 0.007345151901245117,
+                 0.007188479900360107,
+                 0.007183328151702881,
+                 0.00724070405960083,
+                 0.007258111953735351,
+                 0.007174143791198731,
+                 0.007177216053009033,
+                 0.007159808158874512,
+                 0.007165952205657959,
+                 0.00719974422454834,
+                 0.0071823358535766605,
+                 0.007204864025115967,
+                 0.0072130560874938965,
+                 0.007398399829864502,
+                 0.007267327785491944,
+                 0.007221248149871826,
+                 0.007196671962738037,
+                 0.007146495819091797,
+                 0.00724070405960083,
+                 0.007217152118682861,
+                 0.007243775844573975,
+                 0.007202816009521484,
+                 0.007193600177764893,
+                 0.007220191955566406,
+                 0.007234560012817383,
+                 0.007173120021820068,
+                 0.0071833600997924804,
+                 0.007195648193359375,
+                 0.007718912124633789,
+                 0.00723967981338501,
+                 0.007220191955566406,
+                 0.007217152118682861,
+                 0.007207935810089112,
+                 0.007186431884765625,
+                 0.007185408115386963,
+                 0.00719974422454834,
+                 0.007198719978332519,
+                 0.007185408115386963,
+                 0.007206912040710449,
+                 0.007164927959442138,
+                 0.007198719978332519,
+                 0.0071905279159545895,
+                 0.007187456130981445,
+                 0.007196671962738037,
+                 0.007202816009521484,
+                 0.007234560012817383,
+                 0.007231488227844239,
+                 0.007214079856872559,
+                 0.007196671962738037,
+                 0.007261184215545655,
+                 0.007218175888061523,
+                 0.00733900785446167,
+                 0.0072130560874938965,
+                 0.007185408115386963,
+                 0.007226367950439453,
+                 0.007177184104919434,
+                 0.007197696208953858,
+                 0.007225344181060791,
+                 0.00729804801940918,
+                 0.007229440212249756,
+                 0.007208960056304932,
+                 0.007218175888061523,
+                 0.007218175888061523,
+                 0.007206912040710449,
+                 0.007188416004180908,
+                 0.007251967906951904,
+                 0.007228415966033935,
+                 0.007227392196655274,
+                 0.007260159969329834,
+                 0.007193600177764893,
+                 0.007200767993927002,
+                 0.007153664112091064,
+                 0.007180287837982178,
+                 0.007217152118682861,
+                 0.007177216053009033,
+                 0.007180287837982178,
+                 0.0071495680809020995,
+                 0.00719155216217041,
+                 0.007200767993927002,
+                 0.007168000221252442,
+                 0.007303167819976806,
+                 0.007139328002929687,
+                 0.0071833600997924804,
+                 0.007184351921081543,
+                 0.007201791763305664,
+                 0.0072540159225463864,
+                 0.007172095775604248,
+                 0.007184383869171143,
+                 0.007332863807678222,
+                 0.007204864025115967,
+                 0.007203839778900147,
+                 0.007265279769897461,
+                 0.007225344181060791,
+                 0.007264256000518799,
+                 0.007197696208953858,
+                 0.007194623947143554,
+                 0.007179200172424316,
+                 0.007143424034118652,
+                 0.007169023990631103,
+                 0.007176191806793213,
+                 0.007174143791198731
+             ]
+         },
+         "throughput": {
+             "unit": "samples/s",
+             "value": 138.32431158098854
+         },
+         "energy": {
+             "unit": "kWh",
+             "cpu": 8.240090453146417e-08,
+             "ram": 4.504373190457045e-08,
+             "gpu": 1.5436354941666363e-07,
+             "total": 2.818081858526982e-07
+         },
+         "efficiency": {
+             "unit": "samples/kWh",
+             "value": 3548512.960949624
+         }
+     }
+ }
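The derived fields of the report are internally consistent: throughput is the sample count divided by the total latency, and the stated efficiency equals the reciprocal of the total energy, which suggests the energy entries are normalized per forward pass. A minimal sketch that loads the file and checks both relations, assuming it is run from the dataset root so the committed path resolves; only the Python standard library is used:

```python
import json
import math

# Path of the report added in this commit (assumes the current working
# directory is the dataset root).
path = "cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json"

with open(path) as f:
    forward = json.load(f)["forward"]

latency = forward["latency"]

# Throughput [samples/s] = number of measured forward passes / total latency [s].
throughput = latency["count"] / latency["total"]
assert math.isclose(throughput, forward["throughput"]["value"], rel_tol=1e-9)

# Efficiency [samples/kWh] = 1 / total energy [kWh]; this only holds if the
# energy figures are per forward pass, which the report's numbers imply.
efficiency = 1.0 / forward["energy"]["total"]
assert math.isclose(efficiency, forward["efficiency"]["value"], rel_tol=1e-9)

print(f"{throughput:.2f} samples/s, {efficiency:.0f} samples/kWh")
```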