judge02 committed on
Commit
39c1c9b
1 Parent(s): 45e8e9b

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,330 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ onnx/up_blocks.0/onnx__MatMul_7288 filter=lfs diff=lfs merge=lfs -text
37
+ onnx/up_blocks.0/onnx__MatMul_7227 filter=lfs diff=lfs merge=lfs -text
38
+ onnx/up_blocks.0/onnx__MatMul_7265 filter=lfs diff=lfs merge=lfs -text
39
+ onnx/up_blocks.0/onnx__MatMul_7632 filter=lfs diff=lfs merge=lfs -text
40
+ onnx/up_blocks.0/onnx__MatMul_7280 filter=lfs diff=lfs merge=lfs -text
41
+ onnx/up_blocks.0/onnx__MatMul_7206 filter=lfs diff=lfs merge=lfs -text
42
+ onnx/up_blocks.0/onnx__MatMul_7103 filter=lfs diff=lfs merge=lfs -text
43
+ onnx/up_blocks.0/onnx__MatMul_7207 filter=lfs diff=lfs merge=lfs -text
44
+ onnx/up_blocks.0/onnx__MatMul_7056 filter=lfs diff=lfs merge=lfs -text
45
+ onnx/up_blocks.0/onnx__MatMul_7240 filter=lfs diff=lfs merge=lfs -text
46
+ onnx/up_blocks.0/onnx__MatMul_7599 filter=lfs diff=lfs merge=lfs -text
47
+ onnx/up_blocks.0/onnx__MatMul_7219 filter=lfs diff=lfs merge=lfs -text
48
+ onnx/up_blocks.0/onnx__MatMul_7232 filter=lfs diff=lfs merge=lfs -text
49
+ onnx/up_blocks.0/onnx__MatMul_7184 filter=lfs diff=lfs merge=lfs -text
50
+ onnx/up_blocks.0/onnx__MatMul_7251 filter=lfs diff=lfs merge=lfs -text
51
+ onnx/up_blocks.0/onnx__MatMul_6983 filter=lfs diff=lfs merge=lfs -text
52
+ onnx/up_blocks.0/onnx__MatMul_7099 filter=lfs diff=lfs merge=lfs -text
53
+ onnx/up_blocks.0/onnx__MatMul_7194 filter=lfs diff=lfs merge=lfs -text
54
+ onnx/up_blocks.0/onnx__MatMul_7091 filter=lfs diff=lfs merge=lfs -text
55
+ onnx/up_blocks.0/onnx__MatMul_7243 filter=lfs diff=lfs merge=lfs -text
56
+ onnx/up_blocks.0/onnx__MatMul_7600 filter=lfs diff=lfs merge=lfs -text
57
+ onnx/up_blocks.0/onnx__MatMul_7017 filter=lfs diff=lfs merge=lfs -text
58
+ onnx/up_blocks.0/onnx__MatMul_7336 filter=lfs diff=lfs merge=lfs -text
59
+ onnx/up_blocks.0/onnx__MatMul_6944 filter=lfs diff=lfs merge=lfs -text
60
+ onnx/up_blocks.0/onnx__MatMul_7027 filter=lfs diff=lfs merge=lfs -text
61
+ onnx/up_blocks.0/onnx__MatMul_7159 filter=lfs diff=lfs merge=lfs -text
62
+ onnx/up_blocks.0/onnx__MatMul_7303 filter=lfs diff=lfs merge=lfs -text
63
+ onnx/up_blocks.0/onnx__MatMul_7051 filter=lfs diff=lfs merge=lfs -text
64
+ onnx/up_blocks.0/onnx__MatMul_7102 filter=lfs diff=lfs merge=lfs -text
65
+ onnx/up_blocks.0/onnx__MatMul_7137 filter=lfs diff=lfs merge=lfs -text
66
+ onnx/up_blocks.0/onnx__MatMul_7032 filter=lfs diff=lfs merge=lfs -text
67
+ onnx/up_blocks.0/onnx__MatMul_7139 filter=lfs diff=lfs merge=lfs -text
68
+ onnx/up_blocks.0/onnx__MatMul_7138 filter=lfs diff=lfs merge=lfs -text
69
+ onnx/up_blocks.0/onnx__MatMul_7113 filter=lfs diff=lfs merge=lfs -text
70
+ onnx/up_blocks.0/onnx__MatMul_6935 filter=lfs diff=lfs merge=lfs -text
71
+ onnx/up_blocks.0/onnx__MatMul_7352 filter=lfs diff=lfs merge=lfs -text
72
+ onnx/up_blocks.0/onnx__MatMul_6931 filter=lfs diff=lfs merge=lfs -text
73
+ onnx/up_blocks.0/resnets.1.time_emb_proj.weight filter=lfs diff=lfs merge=lfs -text
74
+ onnx/up_blocks.0/onnx__MatMul_7218 filter=lfs diff=lfs merge=lfs -text
75
+ onnx/up_blocks.0/onnx__MatMul_7019 filter=lfs diff=lfs merge=lfs -text
76
+ onnx/up_blocks.0/onnx__MatMul_6958 filter=lfs diff=lfs merge=lfs -text
77
+ onnx/up_blocks.0/onnx__MatMul_7408 filter=lfs diff=lfs merge=lfs -text
78
+ onnx/up_blocks.0/onnx__MatMul_7374 filter=lfs diff=lfs merge=lfs -text
79
+ onnx/up_blocks.0/onnx__MatMul_7255 filter=lfs diff=lfs merge=lfs -text
80
+ onnx/up_blocks.0/onnx__MatMul_7256 filter=lfs diff=lfs merge=lfs -text
81
+ onnx/up_blocks.0/onnx__MatMul_7622 filter=lfs diff=lfs merge=lfs -text
82
+ onnx/up_blocks.0/onnx__MatMul_7179 filter=lfs diff=lfs merge=lfs -text
83
+ onnx/up_blocks.0/onnx__MatMul_7598 filter=lfs diff=lfs merge=lfs -text
84
+ onnx/up_blocks.0/resnets.1.conv_shortcut.weight filter=lfs diff=lfs merge=lfs -text
85
+ onnx/up_blocks.0/onnx__MatMul_7217 filter=lfs diff=lfs merge=lfs -text
86
+ onnx/up_blocks.0/onnx__MatMul_7315 filter=lfs diff=lfs merge=lfs -text
87
+ onnx/up_blocks.0/onnx__MatMul_7203 filter=lfs diff=lfs merge=lfs -text
88
+ onnx/up_blocks.0/onnx__MatMul_6936 filter=lfs diff=lfs merge=lfs -text
89
+ onnx/up_blocks.0/onnx__MatMul_7480 filter=lfs diff=lfs merge=lfs -text
90
+ onnx/up_blocks.0/onnx__MatMul_7502 filter=lfs diff=lfs merge=lfs -text
91
+ onnx/up_blocks.0/onnx__MatMul_7395 filter=lfs diff=lfs merge=lfs -text
92
+ onnx/up_blocks.0/onnx__MatMul_7231 filter=lfs diff=lfs merge=lfs -text
93
+ onnx/up_blocks.0/onnx__MatMul_7066 filter=lfs diff=lfs merge=lfs -text
94
+ onnx/up_blocks.0/onnx__MatMul_7475 filter=lfs diff=lfs merge=lfs -text
95
+ onnx/up_blocks.0/onnx__MatMul_6993 filter=lfs diff=lfs merge=lfs -text
96
+ onnx/up_blocks.0/onnx__MatMul_7208 filter=lfs diff=lfs merge=lfs -text
97
+ onnx/up_blocks.0/onnx__MatMul_7552 filter=lfs diff=lfs merge=lfs -text
98
+ onnx/up_blocks.0/onnx__MatMul_7114 filter=lfs diff=lfs merge=lfs -text
99
+ onnx/up_blocks.0/onnx__MatMul_7053 filter=lfs diff=lfs merge=lfs -text
100
+ onnx/up_blocks.0/onnx__MatMul_7386 filter=lfs diff=lfs merge=lfs -text
101
+ onnx/up_blocks.0/onnx__MatMul_6923 filter=lfs diff=lfs merge=lfs -text
102
+ onnx/up_blocks.0/onnx__MatMul_7489 filter=lfs diff=lfs merge=lfs -text
103
+ onnx/up_blocks.0/onnx__MatMul_7361 filter=lfs diff=lfs merge=lfs -text
104
+ onnx/up_blocks.0/onnx__MatMul_7528 filter=lfs diff=lfs merge=lfs -text
105
+ onnx/up_blocks.0/onnx__MatMul_7563 filter=lfs diff=lfs merge=lfs -text
106
+ onnx/up_blocks.0/onnx__MatMul_6934 filter=lfs diff=lfs merge=lfs -text
107
+ onnx/up_blocks.0/onnx__MatMul_7441 filter=lfs diff=lfs merge=lfs -text
108
+ onnx/up_blocks.0/onnx__MatMul_7363 filter=lfs diff=lfs merge=lfs -text
109
+ onnx/up_blocks.0/onnx__MatMul_7635 filter=lfs diff=lfs merge=lfs -text
110
+ onnx/up_blocks.0/onnx__MatMul_7499 filter=lfs diff=lfs merge=lfs -text
111
+ onnx/up_blocks.0/onnx__MatMul_6960 filter=lfs diff=lfs merge=lfs -text
112
+ onnx/up_blocks.0/onnx__MatMul_7646 filter=lfs diff=lfs merge=lfs -text
113
+ onnx/up_blocks.0/onnx__MatMul_7326 filter=lfs diff=lfs merge=lfs -text
114
+ onnx/up_blocks.0/onnx__MatMul_7432 filter=lfs diff=lfs merge=lfs -text
115
+ onnx/up_blocks.0/onnx__MatMul_7586 filter=lfs diff=lfs merge=lfs -text
116
+ onnx/up_blocks.0/onnx__MatMul_6912 filter=lfs diff=lfs merge=lfs -text
117
+ onnx/up_blocks.0/onnx__MatMul_7080 filter=lfs diff=lfs merge=lfs -text
118
+ onnx/up_blocks.0/onnx__MatMul_7550 filter=lfs diff=lfs merge=lfs -text
119
+ onnx/up_blocks.0/onnx__MatMul_7634 filter=lfs diff=lfs merge=lfs -text
120
+ onnx/up_blocks.0/onnx__MatMul_7430 filter=lfs diff=lfs merge=lfs -text
121
+ onnx/up_blocks.0/onnx__MatMul_7416 filter=lfs diff=lfs merge=lfs -text
122
+ onnx/up_blocks.0/onnx__MatMul_7417 filter=lfs diff=lfs merge=lfs -text
123
+ onnx/up_blocks.0/onnx__MatMul_7406 filter=lfs diff=lfs merge=lfs -text
124
+ onnx/up_blocks.0/onnx__MatMul_7418 filter=lfs diff=lfs merge=lfs -text
125
+ onnx/up_blocks.0/onnx__MatMul_7327 filter=lfs diff=lfs merge=lfs -text
126
+ onnx/up_blocks.0/onnx__MatMul_7490 filter=lfs diff=lfs merge=lfs -text
127
+ onnx/up_blocks.0/onnx__MatMul_7523 filter=lfs diff=lfs merge=lfs -text
128
+ src/assets/SDXL_Cache_Diffusion_Img.png filter=lfs diff=lfs merge=lfs -text
129
+ onnx/up_blocks.0/onnx__MatMul_6994 filter=lfs diff=lfs merge=lfs -text
130
+ onnx/up_blocks.0/onnx__MatMul_7313 filter=lfs diff=lfs merge=lfs -text
131
+ onnx/up_blocks.0/onnx__MatMul_7478 filter=lfs diff=lfs merge=lfs -text
132
+ onnx/up_blocks.0/onnx__MatMul_7384 filter=lfs diff=lfs merge=lfs -text
133
+ onnx/up_blocks.0/onnx__MatMul_7514 filter=lfs diff=lfs merge=lfs -text
134
+ onnx/up_blocks.0/onnx__MatMul_7347 filter=lfs diff=lfs merge=lfs -text
135
+ onnx/up_blocks.0/onnx__MatMul_7275 filter=lfs diff=lfs merge=lfs -text
136
+ onnx/up_blocks.0/onnx__MatMul_6992 filter=lfs diff=lfs merge=lfs -text
137
+ onnx/up_blocks.0/onnx__MatMul_7291 filter=lfs diff=lfs merge=lfs -text
138
+ onnx/up_blocks.0/onnx__MatMul_7526 filter=lfs diff=lfs merge=lfs -text
139
+ onnx/up_blocks.0/onnx__MatMul_7512 filter=lfs diff=lfs merge=lfs -text
140
+ onnx/up_blocks.0/onnx__MatMul_7407 filter=lfs diff=lfs merge=lfs -text
141
+ onnx/up_blocks.0/onnx__MatMul_7328 filter=lfs diff=lfs merge=lfs -text
142
+ onnx/up_blocks.0/onnx__MatMul_7440 filter=lfs diff=lfs merge=lfs -text
143
+ onnx/up_blocks.0/onnx__MatMul_7360 filter=lfs diff=lfs merge=lfs -text
144
+ onnx/up_blocks.0/onnx__MatMul_7193 filter=lfs diff=lfs merge=lfs -text
145
+ onnx/up_blocks.0/onnx__MatMul_7371 filter=lfs diff=lfs merge=lfs -text
146
+ onnx/up_blocks.0/onnx__MatMul_7299 filter=lfs diff=lfs merge=lfs -text
147
+ onnx/up_blocks.0/onnx__MatMul_7312 filter=lfs diff=lfs merge=lfs -text
148
+ onnx/up_blocks.0/onnx__MatMul_7267 filter=lfs diff=lfs merge=lfs -text
149
+ onnx/up_blocks.0/onnx__MatMul_7290 filter=lfs diff=lfs merge=lfs -text
150
+ onnx/up_blocks.0/onnx__MatMul_7375 filter=lfs diff=lfs merge=lfs -text
151
+ onnx/up_blocks.0/onnx__MatMul_7503 filter=lfs diff=lfs merge=lfs -text
152
+ onnx/up_blocks.0/onnx__MatMul_7539 filter=lfs diff=lfs merge=lfs -text
153
+ onnx/up_blocks.0/onnx__MatMul_6995 filter=lfs diff=lfs merge=lfs -text
154
+ onnx/up_blocks.0/onnx__MatMul_7466 filter=lfs diff=lfs merge=lfs -text
155
+ onnx/up_blocks.0/onnx__MatMul_7337 filter=lfs diff=lfs merge=lfs -text
156
+ onnx/up_blocks.0/onnx__MatMul_7398 filter=lfs diff=lfs merge=lfs -text
157
+ onnx/up_blocks.0/onnx__MatMul_7560 filter=lfs diff=lfs merge=lfs -text
158
+ onnx/up_blocks.0/onnx__MatMul_7643 filter=lfs diff=lfs merge=lfs -text
159
+ onnx/up_blocks.0/onnx__MatMul_7419 filter=lfs diff=lfs merge=lfs -text
160
+ onnx/up_blocks.0/onnx__MatMul_7465 filter=lfs diff=lfs merge=lfs -text
161
+ onnx/up_blocks.0/onnx__MatMul_7504 filter=lfs diff=lfs merge=lfs -text
162
+ onnx/up_blocks.0/onnx__MatMul_7254 filter=lfs diff=lfs merge=lfs -text
163
+ onnx/up_blocks.0/onnx__MatMul_7325 filter=lfs diff=lfs merge=lfs -text
164
+ onnx/up_blocks.0/onnx__MatMul_7464 filter=lfs diff=lfs merge=lfs -text
165
+ onnx/up_blocks.0/onnx__MatMul_7574 filter=lfs diff=lfs merge=lfs -text
166
+ onnx/up_blocks.0/onnx__MatMul_7467 filter=lfs diff=lfs merge=lfs -text
167
+ onnx/up_blocks.0/onnx__MatMul_7451 filter=lfs diff=lfs merge=lfs -text
168
+ onnx/up_blocks.0/onnx__MatMul_7351 filter=lfs diff=lfs merge=lfs -text
169
+ onnx/up_blocks.0/onnx__MatMul_7323 filter=lfs diff=lfs merge=lfs -text
170
+ onnx/up_blocks.0/onnx__MatMul_7339 filter=lfs diff=lfs merge=lfs -text
171
+ onnx/up_blocks.0/onnx__MatMul_7350 filter=lfs diff=lfs merge=lfs -text
172
+ onnx/up_blocks.0/onnx__MatMul_7427 filter=lfs diff=lfs merge=lfs -text
173
+ onnx/up_blocks.0/onnx__MatMul_6946 filter=lfs diff=lfs merge=lfs -text
174
+ onnx/up_blocks.0/onnx__MatMul_7455 filter=lfs diff=lfs merge=lfs -text
175
+ onnx/up_blocks.0/onnx__MatMul_7314 filter=lfs diff=lfs merge=lfs -text
176
+ onnx/up_blocks.0/onnx__MatMul_7454 filter=lfs diff=lfs merge=lfs -text
177
+ onnx/up_blocks.0/onnx__MatMul_7405 filter=lfs diff=lfs merge=lfs -text
178
+ onnx/up_blocks.0/onnx__MatMul_7491 filter=lfs diff=lfs merge=lfs -text
179
+ onnx/up_blocks.0/onnx__MatMul_7443 filter=lfs diff=lfs merge=lfs -text
180
+ onnx/up_blocks.0/onnx__MatMul_7171 filter=lfs diff=lfs merge=lfs -text
181
+ onnx/up_blocks.0/onnx__MatMul_7376 filter=lfs diff=lfs merge=lfs -text
182
+ onnx/up_blocks.0/onnx__MatMul_7456 filter=lfs diff=lfs merge=lfs -text
183
+ onnx/up_blocks.0/onnx__MatMul_7349 filter=lfs diff=lfs merge=lfs -text
184
+ onnx/up_blocks.0/onnx__MatMul_7515 filter=lfs diff=lfs merge=lfs -text
185
+ onnx/up_blocks.0/onnx__MatMul_7160 filter=lfs diff=lfs merge=lfs -text
186
+ onnx/up_blocks.0/onnx__MatMul_7170 filter=lfs diff=lfs merge=lfs -text
187
+ onnx/up_blocks.0/onnx__MatMul_7610 filter=lfs diff=lfs merge=lfs -text
188
+ onnx/up_blocks.0/onnx__MatMul_7338 filter=lfs diff=lfs merge=lfs -text
189
+ onnx/up_blocks.0/onnx__MatMul_7585 filter=lfs diff=lfs merge=lfs -text
190
+ onnx/up_blocks.0/onnx__MatMul_6922 filter=lfs diff=lfs merge=lfs -text
191
+ onnx/up_blocks.0/onnx__MatMul_7645 filter=lfs diff=lfs merge=lfs -text
192
+ onnx/up_blocks.0/onnx__MatMul_7205 filter=lfs diff=lfs merge=lfs -text
193
+ onnx/up_blocks.0/onnx__MatMul_7195 filter=lfs diff=lfs merge=lfs -text
194
+ onnx/up_blocks.0/onnx__MatMul_7043 filter=lfs diff=lfs merge=lfs -text
195
+ onnx/up_blocks.0/onnx__MatMul_7301 filter=lfs diff=lfs merge=lfs -text
196
+ onnx/up_blocks.0/onnx__MatMul_7229 filter=lfs diff=lfs merge=lfs -text
197
+ onnx/up_blocks.0/onnx__MatMul_7289 filter=lfs diff=lfs merge=lfs -text
198
+ onnx/up_blocks.0/onnx__MatMul_7575 filter=lfs diff=lfs merge=lfs -text
199
+ onnx/up_blocks.0/onnx__MatMul_7181 filter=lfs diff=lfs merge=lfs -text
200
+ onnx/up_blocks.0/onnx__MatMul_7126 filter=lfs diff=lfs merge=lfs -text
201
+ onnx/up_blocks.0/onnx__MatMul_7362 filter=lfs diff=lfs merge=lfs -text
202
+ onnx/up_blocks.0/onnx__MatMul_7488 filter=lfs diff=lfs merge=lfs -text
203
+ onnx/up_blocks.0/onnx__MatMul_6911 filter=lfs diff=lfs merge=lfs -text
204
+ onnx/up_blocks.0/onnx__MatMul_7157 filter=lfs diff=lfs merge=lfs -text
205
+ onnx/up_blocks.0/onnx__MatMul_7431 filter=lfs diff=lfs merge=lfs -text
206
+ onnx/up_blocks.0/onnx__MatMul_7089 filter=lfs diff=lfs merge=lfs -text
207
+ onnx/up_blocks.0/onnx__MatMul_6920 filter=lfs diff=lfs merge=lfs -text
208
+ onnx/up_blocks.0/onnx__MatMul_7537 filter=lfs diff=lfs merge=lfs -text
209
+ onnx/up_blocks.0/onnx__MatMul_7104 filter=lfs diff=lfs merge=lfs -text
210
+ onnx/up_blocks.0/onnx__MatMul_6969 filter=lfs diff=lfs merge=lfs -text
211
+ onnx/up_blocks.0/onnx__MatMul_7595 filter=lfs diff=lfs merge=lfs -text
212
+ onnx/up_blocks.0/onnx__MatMul_7147 filter=lfs diff=lfs merge=lfs -text
213
+ onnx/up_blocks.0/onnx__MatMul_7041 filter=lfs diff=lfs merge=lfs -text
214
+ onnx/up_blocks.0/onnx__MatMul_7278 filter=lfs diff=lfs merge=lfs -text
215
+ onnx/up_blocks.0/onnx__MatMul_7561 filter=lfs diff=lfs merge=lfs -text
216
+ onnx/up_blocks.0/onnx__MatMul_7031 filter=lfs diff=lfs merge=lfs -text
217
+ onnx/up_blocks.0/onnx__MatMul_7230 filter=lfs diff=lfs merge=lfs -text
218
+ onnx/up_blocks.0/onnx__MatMul_7079 filter=lfs diff=lfs merge=lfs -text
219
+ onnx/up_blocks.0/onnx__MatMul_7633 filter=lfs diff=lfs merge=lfs -text
220
+ onnx/up_blocks.0/onnx__MatMul_7123 filter=lfs diff=lfs merge=lfs -text
221
+ onnx/up_blocks.0/onnx__MatMul_7065 filter=lfs diff=lfs merge=lfs -text
222
+ onnx/up_blocks.0/onnx__MatMul_6968 filter=lfs diff=lfs merge=lfs -text
223
+ onnx/up_blocks.0/onnx__MatMul_7150 filter=lfs diff=lfs merge=lfs -text
224
+ onnx/up_blocks.0/onnx__MatMul_7609 filter=lfs diff=lfs merge=lfs -text
225
+ onnx/up_blocks.0/onnx__MatMul_7055 filter=lfs diff=lfs merge=lfs -text
226
+ onnx/up_blocks.0/onnx__MatMul_7088 filter=lfs diff=lfs merge=lfs -text
227
+ onnx/up_blocks.0/onnx__MatMul_6970 filter=lfs diff=lfs merge=lfs -text
228
+ onnx/up_blocks.0/onnx__MatMul_7183 filter=lfs diff=lfs merge=lfs -text
229
+ onnx/up_blocks.0/onnx__MatMul_7075 filter=lfs diff=lfs merge=lfs -text
230
+ onnx/up_blocks.0/onnx__MatMul_7571 filter=lfs diff=lfs merge=lfs -text
231
+ onnx/up_blocks.0/onnx__MatMul_7136 filter=lfs diff=lfs merge=lfs -text
232
+ onnx/up_blocks.0/resnets.2.time_emb_proj.weight filter=lfs diff=lfs merge=lfs -text
233
+ onnx/up_blocks.0/onnx__MatMul_6984 filter=lfs diff=lfs merge=lfs -text
234
+ onnx/up_blocks.0/onnx__MatMul_7016 filter=lfs diff=lfs merge=lfs -text
235
+ onnx/up_blocks.0/onnx__MatMul_6955 filter=lfs diff=lfs merge=lfs -text
236
+ onnx/up_blocks.0/onnx__MatMul_7182 filter=lfs diff=lfs merge=lfs -text
237
+ onnx/up_blocks.0/onnx__MatMul_7216 filter=lfs diff=lfs merge=lfs -text
238
+ onnx/up_blocks.0/onnx__MatMul_6909 filter=lfs diff=lfs merge=lfs -text
239
+ onnx/up_blocks.0/resnets.0.time_emb_proj.weight filter=lfs diff=lfs merge=lfs -text
240
+ onnx/up_blocks.0/onnx__MatMul_7373 filter=lfs diff=lfs merge=lfs -text
241
+ onnx/up_blocks.0/onnx__MatMul_7623 filter=lfs diff=lfs merge=lfs -text
242
+ onnx/up_blocks.0/onnx__MatMul_7042 filter=lfs diff=lfs merge=lfs -text
243
+ onnx/up_blocks.0/onnx__MatMul_7479 filter=lfs diff=lfs merge=lfs -text
244
+ onnx/up_blocks.0/onnx__MatMul_7576 filter=lfs diff=lfs merge=lfs -text
245
+ onnx/up_blocks.0/onnx__MatMul_7054 filter=lfs diff=lfs merge=lfs -text
246
+ onnx/up_blocks.0/onnx__MatMul_7030 filter=lfs diff=lfs merge=lfs -text
247
+ onnx/up_blocks.0/onnx__MatMul_7127 filter=lfs diff=lfs merge=lfs -text
248
+ onnx/up_blocks.0/onnx__MatMul_7501 filter=lfs diff=lfs merge=lfs -text
249
+ onnx/up_blocks.0/onnx__MatMul_7192 filter=lfs diff=lfs merge=lfs -text
250
+ onnx/up_blocks.0/onnx__MatMul_7279 filter=lfs diff=lfs merge=lfs -text
251
+ onnx/up_blocks.0/onnx__MatMul_7112 filter=lfs diff=lfs merge=lfs -text
252
+ onnx/up_blocks.0/onnx__MatMul_7624 filter=lfs diff=lfs merge=lfs -text
253
+ onnx/up_blocks.0/onnx__MatMul_6921 filter=lfs diff=lfs merge=lfs -text
254
+ onnx/up_blocks.0/onnx__MatMul_6910 filter=lfs diff=lfs merge=lfs -text
255
+ onnx/up_blocks.0/onnx__MatMul_7264 filter=lfs diff=lfs merge=lfs -text
256
+ onnx/up_blocks.0/onnx__MatMul_7608 filter=lfs diff=lfs merge=lfs -text
257
+ onnx/up_blocks.0/onnx__MatMul_7149 filter=lfs diff=lfs merge=lfs -text
258
+ onnx/up_blocks.0/onnx__MatMul_6947 filter=lfs diff=lfs merge=lfs -text
259
+ onnx/up_blocks.0/onnx__MatMul_7006 filter=lfs diff=lfs merge=lfs -text
260
+ onnx/up_blocks.0/onnx__MatMul_6979 filter=lfs diff=lfs merge=lfs -text
261
+ onnx/up_blocks.0/onnx__MatMul_7078 filter=lfs diff=lfs merge=lfs -text
262
+ onnx/up_blocks.0/onnx__MatMul_6945 filter=lfs diff=lfs merge=lfs -text
263
+ onnx/up_blocks.0/onnx__MatMul_7128 filter=lfs diff=lfs merge=lfs -text
264
+ onnx/up_blocks.0/onnx__MatMul_7397 filter=lfs diff=lfs merge=lfs -text
265
+ onnx/up_blocks.0/onnx__MatMul_7538 filter=lfs diff=lfs merge=lfs -text
266
+ onnx/up_blocks.0/onnx__MatMul_7302 filter=lfs diff=lfs merge=lfs -text
267
+ onnx/up_blocks.0/onnx__MatMul_7169 filter=lfs diff=lfs merge=lfs -text
268
+ onnx/up_blocks.0/onnx__MatMul_6959 filter=lfs diff=lfs merge=lfs -text
269
+ onnx/up_blocks.0/onnx__MatMul_7018 filter=lfs diff=lfs merge=lfs -text
270
+ onnx/up_blocks.0/onnx__MatMul_7158 filter=lfs diff=lfs merge=lfs -text
271
+ onnx/up_blocks.0/onnx__MatMul_7040 filter=lfs diff=lfs merge=lfs -text
272
+ onnx/up_blocks.0/onnx__MatMul_7008 filter=lfs diff=lfs merge=lfs -text
273
+ onnx/up_blocks.0/resnets.2.conv_shortcut.weight filter=lfs diff=lfs merge=lfs -text
274
+ onnx/up_blocks.0/onnx__MatMul_7003 filter=lfs diff=lfs merge=lfs -text
275
+ onnx/up_blocks.0/onnx__MatMul_6982 filter=lfs diff=lfs merge=lfs -text
276
+ onnx/up_blocks.0/onnx__MatMul_7007 filter=lfs diff=lfs merge=lfs -text
277
+ onnx/up_blocks.0/onnx__MatMul_7619 filter=lfs diff=lfs merge=lfs -text
278
+ onnx/up_blocks.0/onnx__MatMul_7304 filter=lfs diff=lfs merge=lfs -text
279
+ onnx/up_blocks.0/onnx__MatMul_7241 filter=lfs diff=lfs merge=lfs -text
280
+ onnx/up_blocks.0/onnx__MatMul_7513 filter=lfs diff=lfs merge=lfs -text
281
+ onnx/up_blocks.0/onnx__MatMul_7536 filter=lfs diff=lfs merge=lfs -text
282
+ onnx/up_blocks.0/onnx__MatMul_7584 filter=lfs diff=lfs merge=lfs -text
283
+ onnx/up_blocks.0/onnx__MatMul_7266 filter=lfs diff=lfs merge=lfs -text
284
+ onnx/up_blocks.0/onnx__MatMul_7551 filter=lfs diff=lfs merge=lfs -text
285
+ onnx/up_blocks.0/onnx__MatMul_7527 filter=lfs diff=lfs merge=lfs -text
286
+ onnx/up_blocks.0/onnx__MatMul_7064 filter=lfs diff=lfs merge=lfs -text
287
+ onnx/up_blocks.0/onnx__MatMul_7090 filter=lfs diff=lfs merge=lfs -text
288
+ onnx/up_blocks.0/onnx__MatMul_7547 filter=lfs diff=lfs merge=lfs -text
289
+ onnx/up_blocks.0/onnx__MatMul_7385 filter=lfs diff=lfs merge=lfs -text
290
+ onnx/up_blocks.0/onnx__MatMul_7115 filter=lfs diff=lfs merge=lfs -text
291
+ onnx/up_blocks.0/onnx__MatMul_7253 filter=lfs diff=lfs merge=lfs -text
292
+ onnx/up_blocks.0/onnx__MatMul_7242 filter=lfs diff=lfs merge=lfs -text
293
+ onnx/up_blocks.0/onnx__MatMul_7562 filter=lfs diff=lfs merge=lfs -text
294
+ onnx/up_blocks.0/resnets.0.conv_shortcut.weight filter=lfs diff=lfs merge=lfs -text
295
+ onnx/up_blocks.0/onnx__MatMul_7168 filter=lfs diff=lfs merge=lfs -text
296
+ onnx/up_blocks.0/onnx__MatMul_7442 filter=lfs diff=lfs merge=lfs -text
297
+ onnx/up_blocks.0/onnx__MatMul_6981 filter=lfs diff=lfs merge=lfs -text
298
+ onnx/up_blocks.0/onnx__MatMul_7067 filter=lfs diff=lfs merge=lfs -text
299
+ onnx/up_blocks.0/onnx__MatMul_7587 filter=lfs diff=lfs merge=lfs -text
300
+ onnx/up_blocks.0/onnx__MatMul_7387 filter=lfs diff=lfs merge=lfs -text
301
+ onnx/up_blocks.0/onnx__MatMul_6971 filter=lfs diff=lfs merge=lfs -text
302
+ onnx/up_blocks.0/onnx__MatMul_7597 filter=lfs diff=lfs merge=lfs -text
303
+ onnx/up_blocks.0/onnx__MatMul_7180 filter=lfs diff=lfs merge=lfs -text
304
+ onnx/up_blocks.0/onnx__MatMul_7549 filter=lfs diff=lfs merge=lfs -text
305
+ engine/down_blocks.0.plan filter=lfs diff=lfs merge=lfs -text
306
+ onnx/up_blocks.0/onnx__MatMul_7611 filter=lfs diff=lfs merge=lfs -text
307
+ onnx/up_blocks.0/onnx__MatMul_7277 filter=lfs diff=lfs merge=lfs -text
308
+ onnx/up_blocks.0/onnx__MatMul_7005 filter=lfs diff=lfs merge=lfs -text
309
+ onnx/up_blocks.0/onnx__MatMul_7029 filter=lfs diff=lfs merge=lfs -text
310
+ onnx/up_blocks.0/onnx__MatMul_7621 filter=lfs diff=lfs merge=lfs -text
311
+ onnx/up_blocks.0/onnx__MatMul_7525 filter=lfs diff=lfs merge=lfs -text
312
+ onnx/up_blocks.0/onnx__MatMul_7453 filter=lfs diff=lfs merge=lfs -text
313
+ onnx/up_blocks.0/onnx__MatMul_7125 filter=lfs diff=lfs merge=lfs -text
314
+ onnx/up_blocks.0/onnx__MatMul_7573 filter=lfs diff=lfs merge=lfs -text
315
+ onnx/up_blocks.0/onnx__MatMul_7429 filter=lfs diff=lfs merge=lfs -text
316
+ onnx/up_blocks.0/onnx__MatMul_6933 filter=lfs diff=lfs merge=lfs -text
317
+ onnx/up_blocks.0/onnx__MatMul_7101 filter=lfs diff=lfs merge=lfs -text
318
+ onnx/up_blocks.0/onnx__MatMul_6957 filter=lfs diff=lfs merge=lfs -text
319
+ onnx/up_blocks.0/onnx__MatMul_7077 filter=lfs diff=lfs merge=lfs -text
320
+ onnx/up_blocks.0/onnx__MatMul_7477 filter=lfs diff=lfs merge=lfs -text
321
+ onnx/up_blocks.0/onnx__MatMul_7372 filter=lfs diff=lfs merge=lfs -text
322
+ onnx/up_blocks.0/onnx__MatMul_7148 filter=lfs diff=lfs merge=lfs -text
323
+ onnx/up_blocks.0/onnx__MatMul_7204 filter=lfs diff=lfs merge=lfs -text
324
+ onnx/up_blocks.0/onnx__MatMul_7028 filter=lfs diff=lfs merge=lfs -text
325
+ onnx/up_blocks.0/onnx__MatMul_7572 filter=lfs diff=lfs merge=lfs -text
326
+ onnx/up_blocks.0/onnx__MatMul_7620 filter=lfs diff=lfs merge=lfs -text
327
+ onnx/up_blocks.0/upsamplers.0.conv.weight filter=lfs diff=lfs merge=lfs -text
328
+ onnx/up_blocks.0/onnx__MatMul_7500 filter=lfs diff=lfs merge=lfs -text
329
+ onnx/up_blocks.0/onnx__MatMul_7596 filter=lfs diff=lfs merge=lfs -text
330
+ onnx/up_blocks.0/onnx__MatMul_7228 filter=lfs diff=lfs merge=lfs -text
331
+ onnx/up_blocks.0/onnx__MatMul_6956 filter=lfs diff=lfs merge=lfs -text
332
+ onnx/up_blocks.0/onnx__MatMul_7276 filter=lfs diff=lfs merge=lfs -text
333
+ onnx/up_blocks.0/onnx__MatMul_7348 filter=lfs diff=lfs merge=lfs -text
334
+ onnx/up_blocks.0/onnx__MatMul_7644 filter=lfs diff=lfs merge=lfs -text
335
+ onnx/up_blocks.0/onnx__MatMul_7548 filter=lfs diff=lfs merge=lfs -text
336
+ onnx/up_blocks.0/onnx__MatMul_7076 filter=lfs diff=lfs merge=lfs -text
337
+ onnx/up_blocks.0/onnx__MatMul_7396 filter=lfs diff=lfs merge=lfs -text
338
+ onnx/up_blocks.0/onnx__MatMul_7524 filter=lfs diff=lfs merge=lfs -text
339
+ onnx/up_blocks.0/resnets.2.conv2.weight filter=lfs diff=lfs merge=lfs -text
340
+ onnx/up_blocks.0/onnx__MatMul_7124 filter=lfs diff=lfs merge=lfs -text
341
+ onnx/up_blocks.0/onnx__MatMul_7300 filter=lfs diff=lfs merge=lfs -text
342
+ onnx/up_blocks.0/resnets.1.conv2.weight filter=lfs diff=lfs merge=lfs -text
343
+ onnx/up_blocks.0/onnx__MatMul_7476 filter=lfs diff=lfs merge=lfs -text
344
+ onnx/up_blocks.0/onnx__MatMul_7452 filter=lfs diff=lfs merge=lfs -text
345
+ onnx/up_blocks.0/onnx__MatMul_7428 filter=lfs diff=lfs merge=lfs -text
346
+ engine/up_blocks.2.plan filter=lfs diff=lfs merge=lfs -text
347
+ onnx/up_blocks.0/onnx__MatMul_7052 filter=lfs diff=lfs merge=lfs -text
348
+ onnx/up_blocks.0/onnx__MatMul_7324 filter=lfs diff=lfs merge=lfs -text
349
+ onnx/up_blocks.0/onnx__MatMul_7100 filter=lfs diff=lfs merge=lfs -text
350
+ onnx/up_blocks.0/onnx__MatMul_6980 filter=lfs diff=lfs merge=lfs -text
351
+ onnx/up_blocks.0/onnx__MatMul_6932 filter=lfs diff=lfs merge=lfs -text
352
+ onnx/up_blocks.0/onnx__MatMul_7252 filter=lfs diff=lfs merge=lfs -text
353
+ onnx/up_blocks.0/onnx__MatMul_7004 filter=lfs diff=lfs merge=lfs -text
354
+ onnx/up_blocks.0/resnets.0.conv2.weight filter=lfs diff=lfs merge=lfs -text
355
+ onnx/up_blocks.0/resnets.2.conv1.weight filter=lfs diff=lfs merge=lfs -text
356
+ onnx/up_blocks.0/resnets.1.conv1.weight filter=lfs diff=lfs merge=lfs -text
357
+ onnx/up_blocks.0/resnets.0.conv1.weight filter=lfs diff=lfs merge=lfs -text
358
+ engine/down_blocks.1.plan filter=lfs diff=lfs merge=lfs -text
359
+ engine/up_blocks.1.plan filter=lfs diff=lfs merge=lfs -text
360
+ engine/mid_block.plan filter=lfs diff=lfs merge=lfs -text
361
+ engine/down_blocks.2.plan filter=lfs diff=lfs merge=lfs -text
362
+ engine/up_blocks.0.plan filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1,2 @@
1
+ **/__pycache__
2
+ **.egg-info
.gitmodules ADDED
@@ -0,0 +1,3 @@
1
+ [submodule "models/newdream-sdxl-20"]
2
+ path = models/newdream-sdxl-20
3
+ url = https://huggingface.co/stablediffusionapi/newdream-sdxl-20
README.md ADDED
@@ -0,0 +1,2 @@
1
+ Heban olla vogola
2
+
loss_params.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bf0b76d04764575883fcd146c7ae8e0edb9e049ad55db61640e37deffa652fdb
3
+ size 3120
pyproject.toml ADDED
@@ -0,0 +1,33 @@
1
+ [build-system]
2
+ requires = ["setuptools >= 61.0"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "edge-maxxing-4090-newdream"
7
+ description = "An edge-maxxing model submission for the 4090 newdream contest"
8
+ requires-python = ">=3.10,<3.11"
9
+ version = "7"
10
+ dependencies = [
11
+ "wheel",
12
+ "diffusers==0.30.2",
13
+ "transformers==4.41.2",
14
+ "accelerate==0.31.0",
15
+ "omegaconf==2.3.0",
16
+ "torch==2.4.1",
17
+ "edge-maxxing-pipelines @ git+https://github.com/womboai/edge-maxxing@8d8ff45863416484b5b4bc547782591bbdfc696a#subdirectory=pipelines",
18
+ "polygraphy",
19
+ "onnx",
20
+ "tensorrt>=10.5.0",
21
+ "tensorrt-cu12-libs>=10.5.0",
22
+ "tensorrt-cu12-bindings>=10.5.0",
23
+ "cuda-python>=12.6.0",
24
+ "setuptools>=75.2.0",
25
+ ]
26
+ [tool.edge-maxxing]
27
+ models = [
28
+ "stablediffusionapi/newdream-sdxl-20",
29
+ "RobertML/edge-onnx",
30
+ "RobertML/edge-engine",
31
+ ]
32
+ [project.scripts]
33
+ start_inference = "main:main"
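
The [project.scripts] entry above means that installing this package exposes a start_inference command that simply imports and calls main() from src/main.py. A minimal sketch of what that console script resolves to (assuming the installed package makes the main module importable, which depends on setuptools configuration not shown here):

from main import main

if __name__ == "__main__":
    main()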
src/assets/SDXL_Cache_Diffusion_Img.png ADDED

Git LFS Details

  • SHA256: a5557eafc21d0ddf14ba1930c738f063f9c29b1b46aa83a1c0c6541490ad8644
  • Pointer size: 132 Bytes
  • Size of remote file: 1.59 MB
src/assets/sdxl_cache.png ADDED
src/cache_diffusion/cachify.py ADDED
@@ -0,0 +1,144 @@
1
+ # SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2
+ # SPDX-License-Identifier: MIT
3
+ #
4
+ # Permission is hereby granted, free of charge, to any person obtaining a
5
+ # copy of this software and associated documentation files (the "Software"),
6
+ # to deal in the Software without restriction, including without limitation
7
+ # the rights to use, copy, modify, merge, publish, distribute, sublicense,
8
+ # and/or sell copies of the Software, and to permit persons to whom the
9
+ # Software is furnished to do so, subject to the following conditions:
10
+ #
11
+ # The above copyright notice and this permission notice shall be included in
12
+ # all copies or substantial portions of the Software.
13
+ #
14
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17
+ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
19
+ # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
20
+ # DEALINGS IN THE SOFTWARE.
21
+
22
+ import fnmatch
23
+ from contextlib import contextmanager
24
+
25
+ from diffusers.models.attention import BasicTransformerBlock, JointTransformerBlock
26
+ from diffusers.models.transformers.pixart_transformer_2d import PixArtTransformer2DModel
27
+ from diffusers.models.transformers.transformer_sd3 import SD3Transformer2DModel
28
+ from diffusers.models.unets.unet_2d_blocks import (
29
+ CrossAttnDownBlock2D,
30
+ CrossAttnUpBlock2D,
31
+ DownBlock2D,
32
+ UNetMidBlock2DCrossAttn,
33
+ UpBlock2D,
34
+ )
35
+ from diffusers.models.unets.unet_2d_condition import UNet2DConditionModel
36
+ from diffusers.models.unets.unet_3d_blocks import (
37
+ CrossAttnDownBlockSpatioTemporal,
38
+ CrossAttnUpBlockSpatioTemporal,
39
+ DownBlockSpatioTemporal,
40
+ UNetMidBlockSpatioTemporal,
41
+ UpBlockSpatioTemporal,
42
+ )
43
+ from diffusers.models.unets.unet_spatio_temporal_condition import UNetSpatioTemporalConditionModel
44
+
45
+ from .module import CachedModule
46
+ from .utils import replace_module
47
+
48
+ CACHED_PIPE = {
49
+ UNet2DConditionModel: (
50
+ DownBlock2D,
51
+ CrossAttnDownBlock2D,
52
+ UNetMidBlock2DCrossAttn,
53
+ CrossAttnUpBlock2D,
54
+ UpBlock2D,
55
+ ),
56
+ PixArtTransformer2DModel: (BasicTransformerBlock),
57
+ UNetSpatioTemporalConditionModel: (
58
+ CrossAttnDownBlockSpatioTemporal,
59
+ DownBlockSpatioTemporal,
60
+ UpBlockSpatioTemporal,
61
+ CrossAttnUpBlockSpatioTemporal,
62
+ UNetMidBlockSpatioTemporal,
63
+ ),
64
+ SD3Transformer2DModel: (JointTransformerBlock),
65
+ }
66
+
67
+
68
+ def _apply_to_modules(model, action, modules=None, config_list=None):
69
+ if hasattr(model, "use_trt_infer") and model.use_trt_infer:
70
+ for key, module in model.engines.items():
71
+ if isinstance(module, CachedModule):
72
+ action(module)
73
+ elif config_list:
74
+ for config in config_list:
75
+ if _pass(key, config["wildcard_or_filter_func"]):
76
+ model.engines[key] = CachedModule(module, config["select_cache_step_func"])
77
+ else:
78
+ for name, module in model.named_modules():
79
+ if isinstance(module, CachedModule):
80
+ action(module)
81
+ elif modules and config_list:
82
+ for config in config_list:
83
+ if _pass(name, config["wildcard_or_filter_func"]) and isinstance(
84
+ module, modules
85
+ ):
86
+ replace_module(
87
+ model,
88
+ name,
89
+ CachedModule(module, config["select_cache_step_func"]),
90
+ )
91
+
92
+
93
+ def cachify(model, config_list, modules):
94
+ def cache_action(module):
95
+ pass # No action needed, caching is handled in the loop itself
96
+
97
+ _apply_to_modules(model, cache_action, modules, config_list)
98
+
99
+
100
+ def disable(pipe):
101
+ model = get_model(pipe)
102
+ _apply_to_modules(model, lambda module: module.disable_cache())
103
+
104
+
105
+ def enable(pipe):
106
+ model = get_model(pipe)
107
+ _apply_to_modules(model, lambda module: module.enable_cache())
108
+
109
+
110
+ def reset_status(pipe):
111
+ model = get_model(pipe)
112
+ _apply_to_modules(model, lambda module: setattr(module, "cur_step", 0))
113
+
114
+
115
+ def _pass(name, wildcard_or_filter_func):
116
+ if isinstance(wildcard_or_filter_func, str):
117
+ return fnmatch.fnmatch(name, wildcard_or_filter_func)
118
+ elif callable(wildcard_or_filter_func):
119
+ return wildcard_or_filter_func(name)
120
+ else:
121
+ raise NotImplementedError(f"Unsupported type {type(wildcard_or_filter_func)}")
122
+
123
+
124
+ def get_model(pipe):
125
+ if hasattr(pipe, "unet"):
126
+ return pipe.unet
127
+ elif hasattr(pipe, "transformer"):
128
+ return pipe.transformer
129
+ else:
130
+ raise KeyError
131
+
132
+
133
+ @contextmanager
134
+ def infer(pipe):
135
+ try:
136
+ yield pipe
137
+ finally:
138
+ reset_status(pipe)
139
+
140
+
141
+ def prepare(pipe, config_list):
142
+ model = get_model(pipe)
143
+ assert model.__class__ in CACHED_PIPE.keys(), f"{model.__class__} is not supported!"
144
+ cachify(model, config_list, CACHED_PIPE[model.__class__])
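
A minimal usage sketch of the API above, assuming the src layout is importable as the cache_diffusion package (deploy.py below imports it that way) and using the SDXL default config from utils.py; the pipeline class and sampling arguments are illustrative, not part of this file:

import torch
from diffusers import StableDiffusionXLPipeline
from cache_diffusion import cachify
from cache_diffusion.utils import SDXL_DEFAULT_CONFIG

pipe = StableDiffusionXLPipeline.from_pretrained(
    "stablediffusionapi/newdream-sdxl-20", torch_dtype=torch.float16
).to("cuda")
cachify.prepare(pipe, SDXL_DEFAULT_CONFIG)   # wraps matching UNet blocks in CachedModule
with cachify.infer(pipe) as cached_pipe:     # resets every module's cur_step on exit
    image = cached_pipe("a photo of a cat", num_inference_steps=20).images[0]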
src/cache_diffusion/module.py ADDED
@@ -0,0 +1,55 @@
1
+ # SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2
+ # SPDX-License-Identifier: MIT
3
+ #
4
+ # Permission is hereby granted, free of charge, to any person obtaining a
5
+ # copy of this software and associated documentation files (the "Software"),
6
+ # to deal in the Software without restriction, including without limitation
7
+ # the rights to use, copy, modify, merge, publish, distribute, sublicense,
8
+ # and/or sell copies of the Software, and to permit persons to whom the
9
+ # Software is furnished to do so, subject to the following conditions:
10
+ #
11
+ # The above copyright notice and this permission notice shall be included in
12
+ # all copies or substantial portions of the Software.
13
+ #
14
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17
+ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
19
+ # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
20
+ # DEALINGS IN THE SOFTWARE.
21
+
22
+ from torch import nn
23
+
24
+
25
+ class CachedModule(nn.Module):
26
+ def __init__(self, block, select_cache_step_func) -> None:
27
+ super().__init__()
28
+ self.block = block
29
+ self.select_cache_step_func = select_cache_step_func
30
+ self.cur_step = 0
31
+ self.cached_results = None
32
+ self.enabled = True
33
+
34
+ def __getattr__(self, name):
35
+ try:
36
+ return super().__getattr__(name)
37
+ except AttributeError:
38
+ return getattr(self.block, name)
39
+
40
+ def if_cache(self):
41
+ return self.select_cache_step_func(self.cur_step) and self.enabled
42
+
43
+ def enable_cache(self):
44
+ self.enabled = True
45
+
46
+ def disable_cache(self):
47
+ self.enabled = False
48
+ self.cur_step = 0
49
+
50
+ def forward(self, *args, **kwargs):
51
+ if not self.if_cache():
52
+ self.cached_results = self.block(*args, **kwargs)
53
+ if self.enabled:
54
+ self.cur_step += 1
55
+ return self.cached_results
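
A small self-contained sketch of the caching behaviour, assuming the package is importable as cache_diffusion: with select_cache_step_func = lambda step: step % 2 != 0 (the SDXL default), the wrapped block only executes on even steps and the cached result is returned on odd ones:

import torch
from torch import nn
from cache_diffusion.module import CachedModule

calls = {"n": 0}

class Dummy(nn.Module):
    def forward(self, x):
        calls["n"] += 1
        return x + 1

block = CachedModule(Dummy(), lambda step: step % 2 != 0)
for _ in range(4):
    block(torch.zeros(1))
print(calls["n"])  # 2 -> the real block ran only on steps 0 and 2; steps 1 and 3 reused the cache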
src/cache_diffusion/utils.py ADDED
@@ -0,0 +1,61 @@
1
+ # SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2
+ # SPDX-License-Identifier: MIT
3
+ #
4
+ # Permission is hereby granted, free of charge, to any person obtaining a
5
+ # copy of this software and associated documentation files (the "Software"),
6
+ # to deal in the Software without restriction, including without limitation
7
+ # the rights to use, copy, modify, merge, publish, distribute, sublicense,
8
+ # and/or sell copies of the Software, and to permit persons to whom the
9
+ # Software is furnished to do so, subject to the following conditions:
10
+ #
11
+ # The above copyright notice and this permission notice shall be included in
12
+ # all copies or substantial portions of the Software.
13
+ #
14
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17
+ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
19
+ # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
20
+ # DEALINGS IN THE SOFTWARE.
21
+
22
+ import re
23
+
24
+ SDXL_DEFAULT_CONFIG = [
25
+ {
26
+ "wildcard_or_filter_func": lambda name: "up_blocks.2" not in name,
27
+ "select_cache_step_func": lambda step: (step % 2) != 0,
28
+ }
29
+ ]
30
+
31
+ PIXART_DEFAULT_CONFIG = [
32
+ {
33
+ "wildcard_or_filter_func": lambda name: not re.search(
34
+ r"transformer_blocks\.(2[1-7])\.", name
35
+ ),
36
+ "select_cache_step_func": lambda step: (step % 3) != 0,
37
+ }
38
+ ]
39
+
40
+ SVD_DEFAULT_CONFIG = [
41
+ {
42
+ "wildcard_or_filter_func": lambda name: "up_blocks.3" not in name,
43
+ "select_cache_step_func": lambda step: (step % 2) != 0,
44
+ }
45
+ ]
46
+
47
+ SD3_DEFAULT_CONFIG = [
48
+ {
49
+ "wildcard_or_filter_func": lambda name: re.search(
50
+ r"^((?!transformer_blocks\.(1[6-9]|2[0-3])).)*$", name
51
+ ),
52
+ "select_cache_step_func": lambda step: (step % 2) != 0,
53
+ }
54
+ ]
55
+
56
+
57
+ def replace_module(parent, name_path, new_module):
58
+ path_parts = name_path.split(".")
59
+ for part in path_parts[:-1]:
60
+ parent = getattr(parent, part)
61
+ setattr(parent, path_parts[-1], new_module)
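
To make the default configs concrete: each entry pairs a module-name filter with a step predicate, so SDXL_DEFAULT_CONFIG caches every block except those under up_blocks.2 and reuses the cache on odd denoising steps:

cfg = SDXL_DEFAULT_CONFIG[0]
cfg["wildcard_or_filter_func"]("up_blocks.1")            # True  -> this block gets wrapped
cfg["wildcard_or_filter_func"]("up_blocks.2.resnets.0")  # False -> always recomputed
cfg["select_cache_step_func"](3)                         # True  -> reuse cache on step 3
cfg["select_cache_step_func"](4)                         # False -> recompute on step 4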
src/loss.py ADDED
@@ -0,0 +1,45 @@
1
+ _A=None
2
+ import torch
3
+ from tqdm import tqdm
4
+ class LossSchedulerModel(torch.nn.Module):
5
+ def __init__(A,wx,we):super(LossSchedulerModel,A).__init__();assert len(wx.shape)==1 and len(we.shape)==2;B=wx.shape[0];assert B==we.shape[0]and B==we.shape[1];A.register_parameter('wx',torch.nn.Parameter(wx));A.register_parameter('we',torch.nn.Parameter(we))
6
+ def forward(A,t,xT,e_prev):
7
+ B=e_prev;assert t-len(B)+1==0;C=xT*A.wx[t]
8
+ for(D,E)in zip(B,A.we[t]):C+=D*E
9
+ return C.to(xT.dtype)
10
+ class LossScheduler:
11
+ def __init__(A,timesteps,model):A.timesteps=timesteps;A.model=model;A.init_noise_sigma=1.;A.order=1
12
+ @staticmethod
13
+ def load(path):A,B,C=torch.load(path,map_location='cpu');D=LossSchedulerModel(B,C);return LossScheduler(A,D)
14
+ def save(A,path):B,C,D=A.timesteps,A.model.wx,A.model.we;torch.save((B,C,D),path)
15
+ def set_timesteps(A,num_inference_steps,device='cuda'):B=device;A.xT=_A;A.e_prev=[];A.t_prev=-1;A.model=A.model.to(B);A.timesteps=A.timesteps.to(B)
16
+ def scale_model_input(A,sample,*B,**C):return sample
17
+ @torch.no_grad()
18
+ def step(self,model_output,timestep,sample,*D,**E):
19
+ A=self;B=A.timesteps.tolist().index(timestep);assert A.t_prev==-1 or B==A.t_prev+1
20
+ if A.t_prev==-1:A.xT=sample
21
+ A.e_prev.append(model_output);C=A.model(B,A.xT,A.e_prev)
22
+ if B+1==len(A.timesteps):A.xT=_A;A.e_prev=[];A.t_prev=-1
23
+ else:A.t_prev=B
24
+ return C,
25
+ class SchedulerWrapper:
26
+ def __init__(A,scheduler,loss_params_path='loss_params.pth'):A.scheduler=scheduler;A.catch_x,A.catch_e,A.catch_x_={},{},{};A.loss_scheduler=_A;A.loss_params_path=loss_params_path
27
+ def set_timesteps(A,num_inference_steps,**C):
28
+ D=num_inference_steps
29
+ if A.loss_scheduler is _A:B=A.scheduler.set_timesteps(D,**C);A.timesteps=A.scheduler.timesteps;A.init_noise_sigma=A.scheduler.init_noise_sigma;A.order=A.scheduler.order;return B
30
+ else:B=A.loss_scheduler.set_timesteps(D,**C);A.timesteps=A.loss_scheduler.timesteps;A.init_noise_sigma=A.scheduler.init_noise_sigma;A.order=A.scheduler.order;return B
31
+ def step(B,model_output,timestep,sample,**F):
32
+ D=sample;E=model_output;A=timestep
33
+ if B.loss_scheduler is _A:
34
+ C=B.scheduler.step(E,A,D,**F);A=A.tolist()
35
+ if A not in B.catch_x:B.catch_x[A]=[];B.catch_e[A]=[];B.catch_x_[A]=[]
36
+ B.catch_x[A].append(D.clone().detach().cpu());B.catch_e[A].append(E.clone().detach().cpu());B.catch_x_[A].append(C[0].clone().detach().cpu());return C
37
+ else:C=B.loss_scheduler.step(E,A,D,**F);return C
38
+ def scale_model_input(A,sample,timestep):return sample
39
+ def add_noise(A,original_samples,noise,timesteps):B=A.scheduler.add_noise(original_samples,noise,timesteps);return B
40
+ def get_path(C):
41
+ A=sorted([A for A in C.catch_x],reverse=True);B,D=[],[]
42
+ for E in A:F=torch.cat(C.catch_x[E],dim=0);B.append(F);G=torch.cat(C.catch_e[E],dim=0);D.append(G)
43
+ H=A[-1];I=torch.cat(C.catch_x_[H],dim=0);B.append(I);A=torch.tensor(A,dtype=torch.int32);B=torch.stack(B);D=torch.stack(D);return A,B,D
44
+ def load_loss_params(A):B,C,D=torch.load(A.loss_params_path,map_location='cpu');A.loss_model=LossSchedulerModel(C,D);A.loss_scheduler=LossScheduler(B,A.loss_model)
45
+ def prepare_loss(A,num_accelerate_steps=15):A.load_loss_params()
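
The file above is minified. For readability, a de-minified sketch of LossSchedulerModel.forward, which predicts the next latent as a learned linear combination of the initial latent xT and all previous noise predictions (weights wx[t] and we[t]):

def loss_scheduler_forward(wx, we, t, xT, e_prev):
    # equivalent to the one-line forward above
    out = xT * wx[t]
    for e, w in zip(e_prev, we[t]):
        out = out + e * w
    return out.to(xT.dtype)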
src/main.py ADDED
@@ -0,0 +1,59 @@
1
+ import atexit
2
+ from io import BytesIO
3
+ from multiprocessing.connection import Listener
4
+ from os import chmod, remove
5
+ from os.path import abspath, exists
6
+ from pathlib import Path
7
+
8
+ import torch
9
+
10
+ from PIL.JpegImagePlugin import JpegImageFile
11
+ from pipelines.models import TextToImageRequest
12
+
13
+ from pipeline import load_pipeline, infer
14
+
15
+ SOCKET = abspath(Path(__file__).parent.parent / "inferences.sock")
16
+
17
+
18
+ def at_exit():
19
+ torch.cuda.empty_cache()
20
+
21
+
22
+ def main():
23
+ atexit.register(at_exit)
24
+
25
+ print(f"Loading pipeline")
26
+ pipeline = load_pipeline()
27
+
28
+ print(f"Pipeline loaded, creating socket at '{SOCKET}'")
29
+
30
+ if exists(SOCKET):
31
+ remove(SOCKET)
32
+
33
+ with Listener(SOCKET) as listener:
34
+ chmod(SOCKET, 0o777)
35
+
36
+ print(f"Awaiting connections")
37
+ with listener.accept() as connection:
38
+ print(f"Connected")
39
+
40
+ while True:
41
+ try:
42
+ request = TextToImageRequest.model_validate_json(connection.recv_bytes().decode("utf-8"))
43
+ except EOFError:
44
+ print(f"Inference socket exiting")
45
+
46
+ return
47
+
48
+ image = infer(request, pipeline)
49
+
50
+ data = BytesIO()
51
+ image.save(data, format=JpegImageFile.format)
52
+
53
+ packet = data.getvalue()
54
+
55
+ connection.send_bytes(packet)
56
+
57
+
58
+ if __name__ == '__main__':
59
+ main()
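
The server above speaks a simple message-based protocol over a Unix socket: it receives a JSON-encoded TextToImageRequest and replies with raw JPEG bytes. A hypothetical client sketch (the request field names are assumed from pipelines.models.TextToImageRequest and may differ):

from multiprocessing.connection import Client
from pathlib import Path

socket_path = str(Path("inferences.sock").resolve())
with Client(socket_path) as connection:
    connection.send_bytes(b'{"prompt": "a photo of a cat"}')
    Path("out.jpg").write_bytes(connection.recv_bytes())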
src/pipe/config.py ADDED
@@ -0,0 +1,162 @@
1
+ # SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2
+ # SPDX-License-Identifier: MIT
3
+ #
4
+ # Permission is hereby granted, free of charge, to any person obtaining a
5
+ # copy of this software and associated documentation files (the "Software"),
6
+ # to deal in the Software without restriction, including without limitation
7
+ # the rights to use, copy, modify, merge, publish, distribute, sublicense,
8
+ # and/or sell copies of the Software, and to permit persons to whom the
9
+ # Software is furnished to do so, subject to the following conditions:
10
+ #
11
+ # The above copyright notice and this permission notice shall be included in
12
+ # all copies or substantial portions of the Software.
13
+ #
14
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17
+ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
19
+ # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
20
+ # DEALINGS IN THE SOFTWARE.
21
+ from diffusers.models.transformers.transformer_sd3 import SD3Transformer2DModel
22
+ from diffusers.models.unets.unet_2d_condition import UNet2DConditionModel
23
+
24
+ sd3_common_transformer_block_config = {
25
+ "dummy_input": {
26
+ "hidden_states": (2, 4096, 1536),
27
+ "encoder_hidden_states": (2, 333, 1536),
28
+ "temb": (2, 1536),
29
+ },
30
+ "output_names": ["encoder_hidden_states_out", "hidden_states_out"],
31
+ "dynamic_axes": {
32
+ "hidden_states": {0: "batch_size"},
33
+ "encoder_hidden_states": {0: "batch_size"},
34
+ "temb": {0: "steps"},
35
+ },
36
+ }
37
+
38
+ ONNX_CONFIG = {
39
+ UNet2DConditionModel: {
40
+ "down_blocks.0": {
41
+ "dummy_input": {
42
+ "hidden_states": (2, 320, 128, 128),
43
+ "temb": (2, 1280),
44
+ },
45
+ "output_names": ["sample", "res_samples_0", "res_samples_1", "res_samples_2"],
46
+ "dynamic_axes": {
47
+ "hidden_states": {0: "batch_size"},
48
+ "temb": {0: "steps"},
49
+ },
50
+ },
51
+ "down_blocks.1": {
52
+ "dummy_input": {
53
+ "hidden_states": (2, 320, 64, 64),
54
+ "temb": (2, 1280),
55
+ "encoder_hidden_states": (2, 77, 2048),
56
+ },
57
+ "output_names": ["sample", "res_samples_0", "res_samples_1", "res_samples_2"],
58
+ "dynamic_axes": {
59
+ "hidden_states": {0: "batch_size"},
60
+ "temb": {0: "steps"},
61
+ "encoder_hidden_states": {0: "batch_size"},
62
+ },
63
+ },
64
+ "down_blocks.2": {
65
+ "dummy_input": {
66
+ "hidden_states": (2, 640, 32, 32),
67
+ "temb": (2, 1280),
68
+ "encoder_hidden_states": (2, 77, 2048),
69
+ },
70
+ "output_names": ["sample", "res_samples_0", "res_samples_1"],
71
+ "dynamic_axes": {
72
+ "hidden_states": {0: "batch_size"},
73
+ "temb": {0: "steps"},
74
+ "encoder_hidden_states": {0: "batch_size"},
75
+ },
76
+ },
77
+ "mid_block": {
78
+ "dummy_input": {
79
+ "hidden_states": (2, 1280, 32, 32),
80
+ "temb": (2, 1280),
81
+ "encoder_hidden_states": (2, 77, 2048),
82
+ },
83
+ "output_names": ["sample"],
84
+ "dynamic_axes": {
85
+ "hidden_states": {0: "batch_size"},
86
+ "temb": {0: "steps"},
87
+ "encoder_hidden_states": {0: "batch_size"},
88
+ },
89
+ },
90
+ "up_blocks.0": {
91
+ "dummy_input": {
92
+ "hidden_states": (2, 1280, 32, 32),
93
+ "res_hidden_states_0": (2, 640, 32, 32),
94
+ "res_hidden_states_1": (2, 1280, 32, 32),
95
+ "res_hidden_states_2": (2, 1280, 32, 32),
96
+ "temb": (2, 1280),
97
+ "encoder_hidden_states": (2, 77, 2048),
98
+ },
99
+ "output_names": ["sample"],
100
+ "dynamic_axes": {
101
+ "hidden_states": {0: "batch_size"},
102
+ "temb": {0: "steps"},
103
+ "encoder_hidden_states": {0: "batch_size"},
104
+ "res_hidden_states_0": {0: "batch_size"},
105
+ "res_hidden_states_1": {0: "batch_size"},
106
+ "res_hidden_states_2": {0: "batch_size"},
107
+ },
108
+ },
109
+ "up_blocks.1": {
110
+ "dummy_input": {
111
+ "hidden_states": (2, 1280, 64, 64),
112
+ "res_hidden_states_0": (2, 320, 64, 64),
113
+ "res_hidden_states_1": (2, 640, 64, 64),
114
+ "res_hidden_states_2": (2, 640, 64, 64),
115
+ "temb": (2, 1280),
116
+ "encoder_hidden_states": (2, 77, 2048),
117
+ },
118
+ "output_names": ["sample"],
119
+ "dynamic_axes": {
120
+ "hidden_states": {0: "batch_size"},
121
+ "temb": {0: "steps"},
122
+ "encoder_hidden_states": {0: "batch_size"},
123
+ "res_hidden_states_0": {0: "batch_size"},
124
+ "res_hidden_states_1": {0: "batch_size"},
125
+ "res_hidden_states_2": {0: "batch_size"},
126
+ },
127
+ },
128
+ "up_blocks.2": {
129
+ "dummy_input": {
130
+ "hidden_states": (2, 640, 128, 128),
131
+ "res_hidden_states_0": (2, 320, 128, 128),
132
+ "res_hidden_states_1": (2, 320, 128, 128),
133
+ "res_hidden_states_2": (2, 320, 128, 128),
134
+ "temb": (2, 1280),
135
+ },
136
+ "output_names": ["sample"],
137
+ "dynamic_axes": {
138
+ "hidden_states": {0: "batch_size"},
139
+ "temb": {0: "steps"},
140
+ "res_hidden_states_0": {0: "batch_size"},
141
+ "res_hidden_states_1": {0: "batch_size"},
142
+ "res_hidden_states_2": {0: "batch_size"},
143
+ },
144
+ },
145
+ },
146
+ SD3Transformer2DModel: {
147
+ **{f"transformer_blocks.{i}": sd3_common_transformer_block_config for i in range(23)},
148
+ "transformer_blocks.23": {
149
+ "dummy_input": {
150
+ "hidden_states": (2, 4096, 1536),
151
+ "encoder_hidden_states": (2, 333, 1536),
152
+ "temb": (2, 1536),
153
+ },
154
+ "output_names": ["hidden_states_out"],
155
+ "dynamic_axes": {
156
+ "hidden_states": {0: "batch_size"},
157
+ "encoder_hidden_states": {0: "batch_size"},
158
+ "temb": {0: "steps"},
159
+ },
160
+ },
161
+ },
162
+ }
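
Each block entry above is consumed by deploy.py below: the dummy_input shapes become half-precision CUDA tensors for the ONNX export and for the TensorRT optimization profiles, while output_names and dynamic_axes are passed straight to torch.onnx.export. A rough sketch for one block, mirroring get_input_info in deploy.py (illustrative only):

import torch

cfg = ONNX_CONFIG[UNet2DConditionModel]["up_blocks.0"]
dummy = {name: torch.ones(shape).half().cuda() for name, shape in cfg["dummy_input"].items()}
# torch.onnx.export(unet.up_blocks[0], args=dummy, f="onnx/up_blocks.0/model.onnx",
#                   input_names=list(cfg["dummy_input"]), output_names=cfg["output_names"],
#                   dynamic_axes=cfg["dynamic_axes"], opset_version=17)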
src/pipe/deploy.py ADDED
@@ -0,0 +1,210 @@
1
+ # SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2
+ # SPDX-License-Identifier: MIT
3
+ #
4
+ # Permission is hereby granted, free of charge, to any person obtaining a
5
+ # copy of this software and associated documentation files (the "Software"),
6
+ # to deal in the Software without restriction, including without limitation
7
+ # the rights to use, copy, modify, merge, publish, distribute, sublicense,
8
+ # and/or sell copies of the Software, and to permit persons to whom the
9
+ # Software is furnished to do so, subject to the following conditions:
10
+ #
11
+ # The above copyright notice and this permission notice shall be included in
12
+ # all copies or substantial portions of the Software.
13
+ #
14
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17
+ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
19
+ # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
20
+ # DEALINGS IN THE SOFTWARE.
21
+
22
+ import types
23
+ from pathlib import Path
24
+
25
+ import tensorrt as trt
26
+ import torch
27
+ from cache_diffusion.cachify import CACHED_PIPE, get_model
28
+ from cuda import cudart
29
+ from diffusers.models.transformers.transformer_sd3 import SD3Transformer2DModel
30
+ from diffusers.models.unets.unet_2d_condition import UNet2DConditionModel
31
+ from pipe.config import ONNX_CONFIG
32
+ from pipe.models.sd3 import sd3_forward
33
+ from pipe.models.sdxl import (
34
+ cachecrossattnupblock2d_forward,
35
+ cacheunet_forward,
36
+ cacheupblock2d_forward,
37
+ )
38
+ from polygraphy.backend.trt import (
39
+ CreateConfig,
40
+ Profile,
41
+ engine_from_network,
42
+ network_from_onnx_path,
43
+ save_engine,
44
+ )
45
+ from torch.onnx import export as onnx_export
46
+
47
+ from .utils import Engine
48
+
49
+
50
+ def replace_new_forward(backbone):
51
+ if backbone.__class__ == UNet2DConditionModel:
52
+ backbone.forward = types.MethodType(cacheunet_forward, backbone)
53
+ for upsample_block in backbone.up_blocks:
54
+ if (
55
+ hasattr(upsample_block, "has_cross_attention")
56
+ and upsample_block.has_cross_attention
57
+ ):
58
+ upsample_block.forward = types.MethodType(
59
+ cachecrossattnupblock2d_forward, upsample_block
60
+ )
61
+ else:
62
+ upsample_block.forward = types.MethodType(cacheupblock2d_forward, upsample_block)
63
+ elif backbone.__class__ == SD3Transformer2DModel:
64
+ backbone.forward = types.MethodType(sd3_forward, backbone)
65
+
66
+
67
+ def get_input_info(dummy_dict, info: str = None, batch_size: int = 1):
68
+ return_val = [] if info == "profile_shapes" or info == "input_names" else {}
69
+
70
+ def collect_leaf_keys(d):
71
+ for key, value in d.items():
72
+ if isinstance(value, dict):
73
+ collect_leaf_keys(value)
74
+ else:
75
+ value = (value[0] * batch_size,) + value[1:]
76
+ if info == "profile_shapes":
77
+ return_val.append((key, value)) # type: ignore
78
+ elif info == "profile_shapes_dict":
79
+ return_val[key] = value # type: ignore
80
+ elif info == "dummy_input":
81
+ return_val[key] = torch.ones(value).half().cuda() # type: ignore
82
+ elif info == "input_names":
83
+ return_val.append(key) # type: ignore
84
+
85
+ collect_leaf_keys(dummy_dict)
86
+ return return_val
87
+
88
+
89
+ def complie2trt(cls, onnx_path: Path, engine_path: Path, batch_size: int = 1):
90
+ subdirs = [f for f in onnx_path.iterdir() if f.is_dir()]
91
+ for subdir in subdirs:
92
+ if subdir.name not in ONNX_CONFIG[cls].keys():
93
+ continue
94
+ model_path = subdir / "model.onnx"
95
+ plan_path = engine_path / f"{subdir.name}.plan"
96
+ if not plan_path.exists():
97
+ print(f"Building {str(model_path)}")
98
+ build_profile = Profile()
99
+ profile_shapes = get_input_info(
100
+ ONNX_CONFIG[cls][subdir.name]["dummy_input"], "profile_shapes", batch_size
101
+ )
102
+ for input_name, input_shape in profile_shapes:
103
+ min_input_shape = (2,) + input_shape[1:]
104
+ build_profile.add(input_name, min_input_shape, input_shape, input_shape)
105
+ block_network = network_from_onnx_path(
106
+ str(model_path), flags=[trt.OnnxParserFlag.NATIVE_INSTANCENORM], strongly_typed=True
107
+ )
108
+ build_config = CreateConfig(
109
+ builder_optimization_level=6,
110
+ tf32=True,
111
+ #bf16=True,
112
+ profiles=[build_profile],
113
+ )
114
+ engine = engine_from_network(
115
+ block_network,
116
+ config=build_config,
117
+ )
118
+ save_engine(engine, path=plan_path)
119
+ else:
120
+ print(f"{str(model_path)} already exists!")
121
+
122
+
123
+ def get_total_device_memory(backbone):
124
+ max_device_memory = 0
125
+ for _, engine in backbone.engines.items():
126
+ max_device_memory = max(max_device_memory, engine.engine.device_memory_size)
127
+ return max_device_memory
128
+
129
+
130
+ def load_engines(backbone, engine_path: Path, batch_size: int = 1):
131
+ backbone.engines = {}
132
+ for f in engine_path.iterdir():
133
+ if f.is_file():
134
+ eng = Engine()
135
+ eng.load(str(f))
136
+ backbone.engines[f"{f.stem}"] = eng
137
+ _, shared_device_memory = cudart.cudaMalloc(get_total_device_memory(backbone))
138
+ for engine in backbone.engines.values():
139
+ engine.activate(shared_device_memory)
140
+ backbone.cuda_stream = cudart.cudaStreamCreate()[1]
141
+ for block_name in backbone.engines.keys():
142
+ backbone.engines[block_name].allocate_buffers(
143
+ shape_dict=get_input_info(
144
+ ONNX_CONFIG[backbone.__class__][block_name]["dummy_input"],
145
+ "profile_shapes_dict",
146
+ batch_size,
147
+ ),
148
+ device=backbone.device,
149
+ batch_size=batch_size,
150
+ )
151
+ # TODO: Free and clean up the origin pytorch cuda memory
152
+
153
+
154
+ def export_onnx(backbone, onnx_path: Path):
155
+ for name, module in backbone.named_modules():
156
+ if isinstance(module, CACHED_PIPE[backbone.__class__]):
157
+ _onnx_dir = onnx_path.joinpath(f"{name}")
158
+ _onnx_file = _onnx_dir.joinpath("model.onnx")
159
+ if not _onnx_file.exists():
160
+ _onnx_dir.mkdir(parents=True, exist_ok=True)
161
+ dummy_input = get_input_info(
162
+ ONNX_CONFIG[backbone.__class__][f"{name}"]["dummy_input"], "dummy_input"
163
+ )
164
+ input_names = get_input_info(
165
+ ONNX_CONFIG[backbone.__class__][f"{name}"]["dummy_input"], "input_names"
166
+ )
167
+ output_names = ONNX_CONFIG[backbone.__class__][f"{name}"]["output_names"]
168
+ onnx_export(
169
+ module,
170
+ args=dummy_input,
171
+ f=_onnx_file.as_posix(),
172
+ input_names=input_names,
173
+ output_names=output_names,
174
+ dynamic_axes=ONNX_CONFIG[backbone.__class__][f"{name}"]["dynamic_axes"],
175
+ do_constant_folding=True,
176
+ opset_version=17,
177
+ )
178
+ else:
179
+ print(f"{str(_onnx_file)} alread exists!")
180
+
181
+
182
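+ # One dummy pass per engine so kernels and workspaces are initialized before the first real request.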
+ def warm_up(backbone, batch_size: int = 1):
183
+ print("Warming-up TensorRT engines...")
184
+ for name, engine in backbone.engines.items():
185
+ dummy_input = get_input_info(
186
+ ONNX_CONFIG[backbone.__class__][name]["dummy_input"], "dummy_input", batch_size
187
+ )
188
+ _ = engine(dummy_input, backbone.cuda_stream)
189
+
190
+
191
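+ # Drop engine references and destroy the CUDA stream created in load_engines.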
+ def teardown(pipe):
192
+ backbone = get_model(pipe)
193
+ for engine in backbone.engines.values():
194
+ del engine
195
+
196
+ cudart.cudaStreamDestroy(backbone.cuda_stream)
197
+ del backbone.cuda_stream
198
+
199
+
200
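+ # End-to-end deployment: patch block forwards, export ONNX, build TensorRT engines, then load and warm them up.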
+ def compile(pipe, onnx_path: Path, engine_path: Path, batch_size: int = 1):
201
+ backbone = get_model(pipe)
202
+ onnx_path.mkdir(parents=True, exist_ok=True)
203
+ engine_path.mkdir(parents=True, exist_ok=True)
204
+
205
+ replace_new_forward(backbone)
206
+ export_onnx(backbone, onnx_path)
207
+ compile2trt(backbone.__class__, onnx_path, engine_path, batch_size)
208
+ load_engines(backbone, engine_path, batch_size)
209
+ warm_up(backbone, batch_size)
210
+ backbone.use_trt_infer = True
src/pipe/models/sd3.py ADDED
@@ -0,0 +1,159 @@
1
+ # SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2
+ # SPDX-License-Identifier: MIT
3
+ #
4
+ # Permission is hereby granted, free of charge, to any person obtaining a
5
+ # copy of this software and associated documentation files (the "Software"),
6
+ # to deal in the Software without restriction, including without limitation
7
+ # the rights to use, copy, modify, merge, publish, distribute, sublicense,
8
+ # and/or sell copies of the Software, and to permit persons to whom the
9
+ # Software is furnished to do so, subject to the following conditions:
10
+ #
11
+ # The above copyright notice and this permission notice shall be included in
12
+ # all copies or substantial portions of the Software.
13
+ #
14
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17
+ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
19
+ # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
20
+ # DEALINGS IN THE SOFTWARE.
21
+
22
+ from typing import Any, Dict, List, Optional, Union
23
+
24
+ import torch
25
+ from diffusers.models.modeling_outputs import Transformer2DModelOutput
26
+ from diffusers.utils import (
27
+ USE_PEFT_BACKEND,
28
+ is_torch_version,
29
+ scale_lora_layers,
30
+ unscale_lora_layers,
31
+ )
32
+
33
+
34
+ def sd3_forward(
35
+ self,
36
+ hidden_states: torch.FloatTensor,
37
+ encoder_hidden_states: torch.FloatTensor = None,
38
+ pooled_projections: torch.FloatTensor = None,
39
+ timestep: torch.LongTensor = None,
40
+ block_controlnet_hidden_states: List = None,
41
+ joint_attention_kwargs: Optional[Dict[str, Any]] = None,
42
+ return_dict: bool = True,
43
+ ) -> Union[torch.FloatTensor, Transformer2DModelOutput]:
44
+ """
45
+ The [`SD3Transformer2DModel`] forward method.
46
+
47
+ Args:
48
+ hidden_states (`torch.FloatTensor` of shape `(batch size, channel, height, width)`):
49
+ Input `hidden_states`.
50
+ encoder_hidden_states (`torch.FloatTensor` of shape `(batch size, sequence_len, embed_dims)`):
51
+ Conditional embeddings (embeddings computed from the input conditions such as prompts) to use.
52
+ pooled_projections (`torch.FloatTensor` of shape `(batch_size, projection_dim)`): Embeddings projected
53
+ from the embeddings of input conditions.
54
+ timestep ( `torch.LongTensor`):
55
+ Used to indicate denoising step.
56
+ block_controlnet_hidden_states: (`list` of `torch.Tensor`):
57
+ A list of tensors that if specified are added to the residuals of transformer blocks.
58
+ joint_attention_kwargs (`dict`, *optional*):
59
+ A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under
60
+ `self.processor` in
61
+ [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py).
62
+ return_dict (`bool`, *optional*, defaults to `True`):
63
+ Whether or not to return a [`~models.transformer_2d.Transformer2DModelOutput`] instead of a plain
64
+ tuple.
65
+
66
+ Returns:
67
+ If `return_dict` is True, an [`~models.transformer_2d.Transformer2DModelOutput`] is returned, otherwise a
68
+ `tuple` where the first element is the sample tensor.
69
+ """
70
+ if joint_attention_kwargs is not None:
71
+ joint_attention_kwargs = joint_attention_kwargs.copy()
72
+ lora_scale = joint_attention_kwargs.pop("scale", 1.0)
73
+ else:
74
+ lora_scale = 1.0
75
+
76
+ if USE_PEFT_BACKEND:
77
+ # weight the lora layers by setting `lora_scale` for each PEFT layer
78
+ scale_lora_layers(self, lora_scale)
79
+
80
+ height, width = hidden_states.shape[-2:]
81
+
82
+ hidden_states = self.pos_embed(hidden_states) # takes care of adding positional embeddings too.
83
+ temb = self.time_text_embed(timestep, pooled_projections)
84
+ encoder_hidden_states = self.context_embedder(encoder_hidden_states)
85
+
86
+ for index_block, block in enumerate(self.transformer_blocks):
87
+ if self.training and self.gradient_checkpointing:
88
+
89
+ def create_custom_forward(module, return_dict=None):
90
+ def custom_forward(*inputs):
91
+ if return_dict is not None:
92
+ return module(*inputs, return_dict=return_dict)
93
+ else:
94
+ return module(*inputs)
95
+
96
+ return custom_forward
97
+
98
+ ckpt_kwargs: Dict[str, Any] = (
99
+ {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {}
100
+ )
101
+ encoder_hidden_states, hidden_states = torch.utils.checkpoint.checkpoint(
102
+ create_custom_forward(block),
103
+ hidden_states,
104
+ encoder_hidden_states,
105
+ temb,
106
+ **ckpt_kwargs,
107
+ )
108
+
109
+ else:
110
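+ # Inference path: each transformer block runs through its TensorRT engine when use_trt_infer is set, otherwise through PyTorch.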
+ if hasattr(self, "use_trt_infer") and self.use_trt_infer:
111
+ feed_dict = {
112
+ "hidden_states": hidden_states,
113
+ "encoder_hidden_states": encoder_hidden_states,
114
+ "temb": temb,
115
+ }
116
+ _results = self.engines[f"transformer_blocks.{index_block}"](
117
+ feed_dict, self.cuda_stream
118
+ )
119
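+ # The last block (index 23) is context_pre_only, so its engine produces no encoder_hidden_states_out to consume.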
+ if index_block != 23:
120
+ encoder_hidden_states = _results["encoder_hidden_states_out"]
121
+ hidden_states = _results["hidden_states_out"]
122
+ else:
123
+ encoder_hidden_states, hidden_states = block(
124
+ hidden_states=hidden_states,
125
+ encoder_hidden_states=encoder_hidden_states,
126
+ temb=temb,
127
+ )
128
+
129
+ # controlnet residual
130
+ if block_controlnet_hidden_states is not None and block.context_pre_only is False:
131
+ interval_control = len(self.transformer_blocks) // len(block_controlnet_hidden_states)
132
+ hidden_states = (
133
+ hidden_states + block_controlnet_hidden_states[index_block // interval_control]
134
+ )
135
+
136
+ hidden_states = self.norm_out(hidden_states, temb)
137
+ hidden_states = self.proj_out(hidden_states)
138
+
139
+ # unpatchify
140
+ patch_size = self.config.patch_size
141
+ height = height // patch_size
142
+ width = width // patch_size
143
+
144
+ hidden_states = hidden_states.reshape(
145
+ shape=(hidden_states.shape[0], height, width, patch_size, patch_size, self.out_channels)
146
+ )
147
+ hidden_states = torch.einsum("nhwpqc->nchpwq", hidden_states)
148
+ output = hidden_states.reshape(
149
+ shape=(hidden_states.shape[0], self.out_channels, height * patch_size, width * patch_size)
150
+ )
151
+
152
+ if USE_PEFT_BACKEND:
153
+ # remove `lora_scale` from each PEFT layer
154
+ unscale_lora_layers(self, lora_scale)
155
+
156
+ if not return_dict:
157
+ return (output,)
158
+
159
+ return Transformer2DModelOutput(sample=output)
src/pipe/models/sdxl.py ADDED
@@ -0,0 +1,275 @@
1
+ # Adapted from
2
+ # https://github.com/huggingface/diffusers/blob/73acebb8cfbd1d2954cabe1af4185f9994e61917/src/diffusers/models/unets/unet_2d_condition.py#L1039-L1312
3
+ # https://github.com/huggingface/diffusers/blob/73acebb8cfbd1d2954cabe1af4185f9994e61917/src/diffusers/models/unets/unet_2d_blocks.py#L2482-L2564
4
+ # https://github.com/huggingface/diffusers/blob/73acebb8cfbd1d2954cabe1af4185f9994e61917/src/diffusers/models/unets/unet_2d_blocks.py#L2617-L2679
5
+
6
+ # Copyright 2024 The HuggingFace Team. All rights reserved.
7
+ #
8
+ # Licensed under the Apache License, Version 2.0 (the "License");
9
+ # you may not use this file except in compliance with the License.
10
+ # You may obtain a copy of the License at
11
+ #
12
+ # http://www.apache.org/licenses/LICENSE-2.0
13
+ #
14
+ # Unless required by applicable law or agreed to in writing, software
15
+ # distributed under the License is distributed on an "AS IS" BASIS,
16
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17
+ # See the License for the specific language governing permissions and
18
+ # limitations under the License.
19
+ #
20
+ # Not a contribution
21
+ # Changes made by NVIDIA CORPORATION & AFFILIATES or otherwise documented as
22
+ # NVIDIA-proprietary are not a contribution and subject to the following terms and conditions:
23
+ # SPDX-FileCopyrightText: Copyright (c) 2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
24
+ # SPDX-License-Identifier: LicenseRef-NvidiaProprietary
25
+ #
26
+ # NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
27
+ # property and proprietary rights in and to this material, related
28
+ # documentation and any modifications thereto. Any use, reproduction,
29
+ # disclosure or distribution of this material and related documentation
30
+ # without an express license agreement from NVIDIA CORPORATION or
31
+ # its affiliates is strictly prohibited.
32
+
33
+ from typing import Any, Dict, Optional, Tuple, Union
34
+
35
+ import torch
36
+ from diffusers.models.unets.unet_2d_condition import UNet2DConditionOutput
37
+
38
+
39
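+ # The residual tuple is flattened into three named inputs (res_hidden_states_0/1/2) so these blocks export cleanly to ONNX.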
+ def cachecrossattnupblock2d_forward(
40
+ self,
41
+ hidden_states: torch.FloatTensor,
42
+ res_hidden_states_0: torch.FloatTensor,
43
+ res_hidden_states_1: torch.FloatTensor,
44
+ res_hidden_states_2: torch.FloatTensor,
45
+ temb: Optional[torch.FloatTensor] = None,
46
+ encoder_hidden_states: Optional[torch.FloatTensor] = None,
47
+ cross_attention_kwargs: Optional[Dict[str, Any]] = None,
48
+ upsample_size: Optional[int] = None,
49
+ attention_mask: Optional[torch.FloatTensor] = None,
50
+ encoder_attention_mask: Optional[torch.FloatTensor] = None,
51
+ ) -> torch.FloatTensor:
52
+ res_hidden_states_tuple = (res_hidden_states_0, res_hidden_states_1, res_hidden_states_2)
53
+ for resnet, attn in zip(self.resnets, self.attentions):
54
+ # pop res hidden states
55
+ res_hidden_states = res_hidden_states_tuple[-1]
56
+ res_hidden_states_tuple = res_hidden_states_tuple[:-1]
57
+
58
+ hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1)
59
+
60
+ hidden_states = resnet(hidden_states, temb)
61
+ hidden_states = attn(
62
+ hidden_states,
63
+ encoder_hidden_states=encoder_hidden_states,
64
+ cross_attention_kwargs=cross_attention_kwargs,
65
+ attention_mask=attention_mask,
66
+ encoder_attention_mask=encoder_attention_mask,
67
+ return_dict=False,
68
+ )[0]
69
+
70
+ if self.upsamplers is not None:
71
+ for upsampler in self.upsamplers:
72
+ hidden_states = upsampler(hidden_states, upsample_size)
73
+
74
+ return hidden_states
75
+
76
+
77
+ def cacheupblock2d_forward(
78
+ self,
79
+ hidden_states: torch.FloatTensor,
80
+ res_hidden_states_0: torch.FloatTensor,
81
+ res_hidden_states_1: torch.FloatTensor,
82
+ res_hidden_states_2: torch.FloatTensor,
83
+ temb: Optional[torch.FloatTensor] = None,
84
+ upsample_size: Optional[int] = None,
85
+ ) -> torch.FloatTensor:
86
+ res_hidden_states_tuple = (res_hidden_states_0, res_hidden_states_1, res_hidden_states_2)
87
+ for resnet in self.resnets:
88
+ # pop res hidden states
89
+ res_hidden_states = res_hidden_states_tuple[-1]
90
+ res_hidden_states_tuple = res_hidden_states_tuple[:-1]
91
+
92
+ hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1)
93
+
94
+ hidden_states = resnet(hidden_states, temb)
95
+
96
+ if self.upsamplers is not None:
97
+ for upsampler in self.upsamplers:
98
+ hidden_states = upsampler(hidden_states, upsample_size)
99
+
100
+ return hidden_states
101
+
102
+
103
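+ # UNet forward that dispatches down/mid/up blocks to their TensorRT engines when use_trt_infer is set.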
+ def cacheunet_forward(
104
+ self,
105
+ sample: torch.FloatTensor,
106
+ timestep: Union[torch.Tensor, float, int],
107
+ encoder_hidden_states: torch.Tensor,
108
+ class_labels: Optional[torch.Tensor] = None,
109
+ timestep_cond: Optional[torch.Tensor] = None,
110
+ attention_mask: Optional[torch.Tensor] = None,
111
+ cross_attention_kwargs: Optional[Dict[str, Any]] = None,
112
+ added_cond_kwargs: Optional[Dict[str, torch.Tensor]] = None,
113
+ down_block_additional_residuals: Optional[Tuple[torch.Tensor]] = None,
114
+ mid_block_additional_residual: Optional[torch.Tensor] = None,
115
+ down_intrablock_additional_residuals: Optional[Tuple[torch.Tensor]] = None,
116
+ encoder_attention_mask: Optional[torch.Tensor] = None,
117
+ return_dict: bool = True,
118
+ ) -> Union[UNet2DConditionOutput, Tuple]:
119
+ # 1. time
120
+ t_emb = self.get_time_embed(sample=sample, timestep=timestep)
121
+ emb = self.time_embedding(t_emb, timestep_cond)
122
+ aug_emb = None
123
+
124
+ aug_emb = self.get_aug_embed(
125
+ emb=emb,
126
+ encoder_hidden_states=encoder_hidden_states,
127
+ added_cond_kwargs=added_cond_kwargs,
128
+ )
129
+
130
+ emb = emb + aug_emb if aug_emb is not None else emb
131
+
132
+ encoder_hidden_states = self.process_encoder_hidden_states(
133
+ encoder_hidden_states=encoder_hidden_states, added_cond_kwargs=added_cond_kwargs
134
+ )
135
+
136
+ # 2. pre-process
137
+ sample = self.conv_in(sample)
138
+
139
+ if hasattr(self, "_export_precess_onnx") and self._export_precess_onnx:
140
+ return (
141
+ sample,
142
+ encoder_hidden_states,
143
+ emb,
144
+ )
145
+
146
+ down_block_res_samples = (sample,)
147
+ for i, downsample_block in enumerate(self.down_blocks):
148
+ if (
149
+ hasattr(downsample_block, "has_cross_attention")
150
+ and downsample_block.has_cross_attention
151
+ ):
152
+ if hasattr(self, "use_trt_infer") and self.use_trt_infer:
153
+ feed_dict = {
154
+ "hidden_states": sample,
155
+ "temb": emb,
156
+ "encoder_hidden_states": encoder_hidden_states,
157
+ }
158
+ down_results = self.engines[f"down_blocks.{i}"](feed_dict, self.cuda_stream)
159
+ sample = down_results["sample"]
160
+ res_samples_0 = down_results["res_samples_0"]
161
+ res_samples_1 = down_results["res_samples_1"]
162
+ if "res_samples_2" in down_results.keys():
163
+ res_samples_2 = down_results["res_samples_2"]
164
+ else:
165
+ # For t2i-adapter CrossAttnDownBlock2D
166
+ additional_residuals = {}
167
+
168
+ sample, res_samples = downsample_block(
169
+ hidden_states=sample,
170
+ temb=emb,
171
+ encoder_hidden_states=encoder_hidden_states,
172
+ attention_mask=attention_mask,
173
+ cross_attention_kwargs=cross_attention_kwargs,
174
+ encoder_attention_mask=encoder_attention_mask,
175
+ **additional_residuals,
176
+ )
177
+ else:
178
+ if hasattr(self, "use_trt_infer") and self.use_trt_infer:
179
+ feed_dict = {"hidden_states": sample, "temb": emb}
180
+ down_results = self.engines[f"down_blocks.{i}"](feed_dict, self.cuda_stream)
181
+ sample = down_results["sample"]
182
+ res_samples_0 = down_results["res_samples_0"]
183
+ res_samples_1 = down_results["res_samples_1"]
184
+ if "res_samples_2" in down_results.keys():
185
+ res_samples_2 = down_results["res_samples_2"]
186
+ else:
187
+ sample, res_samples = downsample_block(hidden_states=sample, temb=emb)
188
+
189
+ if hasattr(self, "use_trt_infer") and self.use_trt_infer:
190
+ down_block_res_samples += (
191
+ res_samples_0,
192
+ res_samples_1,
193
+ )
194
+ if "res_samples_2" in down_results.keys():
195
+ down_block_res_samples += (res_samples_2,)
196
+ else:
197
+ down_block_res_samples += res_samples
198
+
199
+ if hasattr(self, "use_trt_infer") and self.use_trt_infer:
200
+ feed_dict = {
201
+ "hidden_states": sample,
202
+ "temb": emb,
203
+ "encoder_hidden_states": encoder_hidden_states,
204
+ }
205
+ mid_results = self.engines["mid_block"](feed_dict, self.cuda_stream)
206
+ sample = mid_results["sample"]
207
+ else:
208
+ sample = self.mid_block(
209
+ sample,
210
+ emb,
211
+ encoder_hidden_states=encoder_hidden_states,
212
+ attention_mask=attention_mask,
213
+ cross_attention_kwargs=cross_attention_kwargs,
214
+ encoder_attention_mask=encoder_attention_mask,
215
+ )
216
+
217
+ # 5. up
218
+ for i, upsample_block in enumerate(self.up_blocks):
219
+ res_samples = down_block_res_samples[-len(upsample_block.resnets) :]
220
+ down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)]
221
+
222
+ if hasattr(upsample_block, "has_cross_attention") and upsample_block.has_cross_attention:
223
+ if hasattr(self, "use_trt_infer") and self.use_trt_infer:
224
+ feed_dict = {
225
+ "hidden_states": sample,
226
+ "res_hidden_states_0": res_samples[0],
227
+ "res_hidden_states_1": res_samples[1],
228
+ "res_hidden_states_2": res_samples[2],
229
+ "temb": emb,
230
+ "encoder_hidden_states": encoder_hidden_states,
231
+ }
232
+ up_results = self.engines[f"up_blocks.{i}"](feed_dict, self.cuda_stream)
233
+ sample = up_results["sample"]
234
+ else:
235
+ sample = upsample_block(
236
+ hidden_states=sample,
237
+ temb=emb,
238
+ res_hidden_states_0=res_samples[0],
239
+ res_hidden_states_1=res_samples[1],
240
+ res_hidden_states_2=res_samples[2],
241
+ encoder_hidden_states=encoder_hidden_states,
242
+ cross_attention_kwargs=cross_attention_kwargs,
243
+ attention_mask=attention_mask,
244
+ encoder_attention_mask=encoder_attention_mask,
245
+ )
246
+ else:
247
+ if hasattr(self, "use_trt_infer") and self.use_trt_infer:
248
+ feed_dict = {
249
+ "hidden_states": sample,
250
+ "res_hidden_states_0": res_samples[0],
251
+ "res_hidden_states_1": res_samples[1],
252
+ "res_hidden_states_2": res_samples[2],
253
+ "temb": emb,
254
+ }
255
+ up_results = self.engines[f"up_blocks.{i}"](feed_dict, self.cuda_stream)
256
+ sample = up_results["sample"]
257
+ else:
258
+ sample = upsample_block(
259
+ hidden_states=sample,
260
+ temb=emb,
261
+ res_hidden_states_0=res_samples[0],
262
+ res_hidden_states_1=res_samples[1],
263
+ res_hidden_states_2=res_samples[2],
264
+ )
265
+
266
+ # 6. post-process
267
+ if self.conv_norm_out:
268
+ sample = self.conv_norm_out(sample)
269
+ sample = self.conv_act(sample)
270
+ sample = self.conv_out(sample)
271
+
272
+ if not return_dict:
273
+ return (sample,)
274
+
275
+ return UNet2DConditionOutput(sample=sample)
src/pipe/utils.py ADDED
@@ -0,0 +1,129 @@
1
+ # SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2
+ # SPDX-License-Identifier: MIT
3
+ #
4
+ # Permission is hereby granted, free of charge, to any person obtaining a
5
+ # copy of this software and associated documentation files (the "Software"),
6
+ # to deal in the Software without restriction, including without limitation
7
+ # the rights to use, copy, modify, merge, publish, distribute, sublicense,
8
+ # and/or sell copies of the Software, and to permit persons to whom the
9
+ # Software is furnished to do so, subject to the following conditions:
10
+ #
11
+ # The above copyright notice and this permission notice shall be included in
12
+ # all copies or substantial portions of the Software.
13
+ #
14
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17
+ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
19
+ # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
20
+ # DEALINGS IN THE SOFTWARE.
21
+
22
+ from collections import OrderedDict
23
+
24
+ import numpy as np
25
+ import tensorrt as trt
26
+ import torch
27
+ from cuda import cudart
28
+ from polygraphy.backend.common import bytes_from_path
29
+ from polygraphy.backend.trt import engine_from_bytes
30
+
31
+ numpy_to_torch_dtype_dict = {
32
+ np.uint8: torch.uint8,
33
+ np.int8: torch.int8,
34
+ np.int16: torch.int16,
35
+ np.int32: torch.int32,
36
+ np.int64: torch.int64,
37
+ np.float16: torch.float16,
38
+ np.float32: torch.float32,
39
+ np.float64: torch.float64,
40
+ np.complex64: torch.complex64,
41
+ np.complex128: torch.complex128,
42
+ }
43
+
44
+
45
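+ # Thin wrapper around a deserialized TensorRT engine: owns the execution context, I/O tensors and an optional CUDA graph.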
+ class Engine:
46
+ def __init__(
47
+ self,
48
+ ):
49
+ self.engine = None
50
+ self.context = None
51
+ self.buffers = OrderedDict()
52
+ self.tensors = OrderedDict()
53
+ self.cuda_graph_instance = None # cuda graph
54
+ self.has_cross_attention = False
55
+
56
+ def __del__(self):
57
+ del self.engine
58
+ del self.context
59
+ del self.buffers
60
+ del self.tensors
61
+
62
+ def load(self, engine_path):
63
+ self.engine = engine_from_bytes(bytes_from_path(engine_path))
64
+
65
+ def activate(self, reuse_device_memory=None):
66
+ if reuse_device_memory:
67
+ self.context = self.engine.create_execution_context_without_device_memory() # type: ignore
68
+ self.context.device_memory = reuse_device_memory
69
+ else:
70
+ self.context = self.engine.create_execution_context() # type: ignore
71
+
72
+ def allocate_buffers(self, shape_dict=None, device="cuda", batch_size=1):
73
+ for binding in range(self.engine.num_io_tensors): # type: ignore
74
+ name = self.engine.get_tensor_name(binding) # type: ignore
75
+ if shape_dict and name in shape_dict:
76
+ shape = shape_dict[name]
77
+ else:
78
+ shape = self.engine.get_tensor_shape(name) # type: ignore
79
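+ # The batch dimension is doubled for the classifier-free-guidance pair (conditional + unconditional) that shares one engine call.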
+ shape = (batch_size * 2,) + shape[1:]
80
+ dtype = trt.nptype(self.engine.get_tensor_dtype(name)) # type: ignore
81
+ if self.engine.get_tensor_mode(name) == trt.TensorIOMode.INPUT: # type: ignore
82
+ self.context.set_input_shape(name, shape) # type: ignore
83
+ tensor = torch.empty(tuple(shape), dtype=numpy_to_torch_dtype_dict[dtype]).to(
84
+ device=device
85
+ )
86
+ self.tensors[name] = tensor
87
+
88
+ def __call__(self, feed_dict, stream, use_cuda_graph=False):
89
+ for name, buf in feed_dict.items():
90
+ self.tensors[name].copy_(buf)
91
+
92
+ for name, tensor in self.tensors.items():
93
+ self.context.set_tensor_address(name, tensor.data_ptr()) # type: ignore
94
+
95
+ if use_cuda_graph:
96
+ if self.cuda_graph_instance is not None:
97
+ cuassert(cudart.cudaGraphLaunch(self.cuda_graph_instance, stream))
98
+ cuassert(cudart.cudaStreamSynchronize(stream))
99
+ else:
100
+ # do inference before CUDA graph capture
101
+ noerror = self.context.execute_async_v3(stream) # type: ignore
102
+ if not noerror:
103
+ raise ValueError("ERROR: inference failed.")
104
+ # capture cuda graph
105
+ cuassert(
106
+ cudart.cudaStreamBeginCapture(
107
+ stream, cudart.cudaStreamCaptureMode.cudaStreamCaptureModeGlobal
108
+ )
109
+ )
110
+ self.context.execute_async_v3(stream) # type: ignore
111
+ self.graph = cuassert(cudart.cudaStreamEndCapture(stream))
112
+ self.cuda_graph_instance = cuassert(cudart.cudaGraphInstantiate(self.graph, 0))
113
+ else:
114
+ noerror = self.context.execute_async_v3(stream) # type: ignore
115
+ if not noerror:
116
+ raise ValueError("ERROR: inference failed.")
117
+
118
+ return self.tensors
119
+
120
+
121
+ def cuassert(cuda_ret):
122
+ err = cuda_ret[0]
123
+ if err != cudart.cudaError_t.cudaSuccess:
124
+ raise RuntimeError(
125
+ f"CUDA ERROR: {err}, error code reference: https://nvidia.github.io/cuda-python/module/cudart.html#cuda.cudart.cudaError_t"
126
+ )
127
+ if len(cuda_ret) > 1:
128
+ return cuda_ret[1]
129
+ return None
src/pipeline.py ADDED
@@ -0,0 +1,54 @@
1
+ import torch
2
+ from pathlib import Path
3
+ from PIL.Image import Image
4
+ from diffusers import StableDiffusionXLPipeline, DDIMScheduler
5
+ from pipelines.models import TextToImageRequest
6
+ from torch import Generator
7
+ from cache_diffusion import cachify
8
+ from pipe.deploy import compile
9
+ from loss import SchedulerWrapper
10
+
11
+ generator = Generator(torch.device("cuda")).manual_seed(6969)
12
+ prompt = "Make submissions great again"
13
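+ # Cache config: the filter skips down_blocks.2/3 and up_blocks.2, and cached outputs are reused on odd steps >= 10.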
+ SDXL_DEFAULT_CONFIG = [
14
+ {
15
+ "wildcard_or_filter_func": lambda name: "down_blocks.2" not in name and"down_blocks.3" not in name and "up_blocks.2" not in name,
16
+ "select_cache_step_func": lambda step: (step % 2 != 0) and (step >= 10),
17
+ }]
18
+ def load_pipeline() -> StableDiffusionXLPipeline:
19
+ pipe = StableDiffusionXLPipeline.from_pretrained(
20
+ "stablediffusionapi/newdream-sdxl-20",torch_dtype=torch.float16, use_safetensors=True
21
+ ).to("cuda")
22
+ compile(
23
+ pipe,
24
+ onnx_path=Path("/home/sandbox/.cache/huggingface/hub/models--RobertML--edge-onnx/snapshots/d56fa8ea1dc675b87de08eece735bc5ec80a247f"),
25
+ engine_path=Path("/home/sandbox/.cache/huggingface/hub/models--RobertML--edge-engine/snapshots/e0dd02be0c58057947801857c41839f76df2fc88"),
26
+ batch_size=1,
27
+ )
28
+ cachify.prepare(pipe, SDXL_DEFAULT_CONFIG)
29
+ cachify.enable(pipe)
30
+ pipe.scheduler = SchedulerWrapper(DDIMScheduler.from_config(pipe.scheduler.config))
31
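+ # A few cached warm-up generations; SchedulerWrapper.prepare_loss() then finalizes whatever state the wrapper collected (see loss.py).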
+ with cachify.infer(pipe) as cached_pipe:
32
+ for _ in range(4):
33
+ pipe(prompt=prompt, num_inference_steps=20)
34
+ pipe.scheduler.prepare_loss()
35
+ cachify.disable(pipe)
36
+ return pipe
37
+
38
+ def infer(request: TextToImageRequest, pipeline: StableDiffusionXLPipeline) -> Image:
39
+
40
+ if request.seed is None:
41
+ generator = None
42
+ else:
43
+ generator = Generator(pipeline.device).manual_seed(request.seed)
44
+ cachify.enable(pipeline)
45
+ with cachify.infer(pipeline) as cached_pipe:
46
+ image = cached_pipe(
47
+ prompt=request.prompt,
48
+ negative_prompt=request.negative_prompt,
49
+ width=request.width,
50
+ height=request.height,
51
+ generator=generator,
52
+ num_inference_steps=13,
53
+ ).images[0]
54
+ return image
uv.lock ADDED
@@ -0,0 +1,816 @@
1
+ version = 1
2
+ requires-python = "==3.10.*"
3
+
4
+ [[package]]
5
+ name = "accelerate"
6
+ version = "0.31.0"
7
+ source = { registry = "https://pypi.org/simple" }
8
+ dependencies = [
9
+ { name = "huggingface-hub" },
10
+ { name = "numpy" },
11
+ { name = "packaging" },
12
+ { name = "psutil" },
13
+ { name = "pyyaml" },
14
+ { name = "safetensors" },
15
+ { name = "torch" },
16
+ ]
17
+ sdist = { url = "https://files.pythonhosted.org/packages/89/e2/94937840162a87baa6b56c82247bbb06690b290ad3da0f083192d7b539a9/accelerate-0.31.0.tar.gz", hash = "sha256:b5199865b26106ccf9205acacbe8e4b3b428ad585e7c472d6a46f6fb75b6c176", size = 307110 }
18
+ wheels = [
19
+ { url = "https://files.pythonhosted.org/packages/f0/62/9ebaf1fdd3d3c737a8814f9ae409d4ac04bc93b26a46a7dab456bb7e16f8/accelerate-0.31.0-py3-none-any.whl", hash = "sha256:0fc608dc49584f64d04711a39711d73cb0ad4ef3d21cddee7ef2216e29471144", size = 309428 },
20
+ ]
21
+
22
+ [[package]]
23
+ name = "annotated-types"
24
+ version = "0.7.0"
25
+ source = { registry = "https://pypi.org/simple" }
26
+ sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 }
27
+ wheels = [
28
+ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 },
29
+ ]
30
+
31
+ [[package]]
32
+ name = "antlr4-python3-runtime"
33
+ version = "4.9.3"
34
+ source = { registry = "https://pypi.org/simple" }
35
+ sdist = { url = "https://files.pythonhosted.org/packages/3e/38/7859ff46355f76f8d19459005ca000b6e7012f2f1ca597746cbcd1fbfe5e/antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b", size = 117034 }
36
+
37
+ [[package]]
38
+ name = "certifi"
39
+ version = "2024.8.30"
40
+ source = { registry = "https://pypi.org/simple" }
41
+ sdist = { url = "https://files.pythonhosted.org/packages/b0/ee/9b19140fe824b367c04c5e1b369942dd754c4c5462d5674002f75c4dedc1/certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9", size = 168507 }
42
+ wheels = [
43
+ { url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321 },
44
+ ]
45
+
46
+ [[package]]
47
+ name = "charset-normalizer"
48
+ version = "3.4.0"
49
+ source = { registry = "https://pypi.org/simple" }
50
+ sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620 }
51
+ wheels = [
52
+ { url = "https://files.pythonhosted.org/packages/69/8b/825cc84cf13a28bfbcba7c416ec22bf85a9584971be15b21dd8300c65b7f/charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6", size = 196363 },
53
+ { url = "https://files.pythonhosted.org/packages/23/81/d7eef6a99e42c77f444fdd7bc894b0ceca6c3a95c51239e74a722039521c/charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b", size = 125639 },
54
+ { url = "https://files.pythonhosted.org/packages/21/67/b4564d81f48042f520c948abac7079356e94b30cb8ffb22e747532cf469d/charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99", size = 120451 },
55
+ { url = "https://files.pythonhosted.org/packages/c2/72/12a7f0943dd71fb5b4e7b55c41327ac0a1663046a868ee4d0d8e9c369b85/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca", size = 140041 },
56
+ { url = "https://files.pythonhosted.org/packages/67/56/fa28c2c3e31217c4c52158537a2cf5d98a6c1e89d31faf476c89391cd16b/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d", size = 150333 },
57
+ { url = "https://files.pythonhosted.org/packages/f9/d2/466a9be1f32d89eb1554cf84073a5ed9262047acee1ab39cbaefc19635d2/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7", size = 142921 },
58
+ { url = "https://files.pythonhosted.org/packages/f8/01/344ec40cf5d85c1da3c1f57566c59e0c9b56bcc5566c08804a95a6cc8257/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3", size = 144785 },
59
+ { url = "https://files.pythonhosted.org/packages/73/8b/2102692cb6d7e9f03b9a33a710e0164cadfce312872e3efc7cfe22ed26b4/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907", size = 146631 },
60
+ { url = "https://files.pythonhosted.org/packages/d8/96/cc2c1b5d994119ce9f088a9a0c3ebd489d360a2eb058e2c8049f27092847/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b", size = 140867 },
61
+ { url = "https://files.pythonhosted.org/packages/c9/27/cde291783715b8ec30a61c810d0120411844bc4c23b50189b81188b273db/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912", size = 149273 },
62
+ { url = "https://files.pythonhosted.org/packages/3a/a4/8633b0fc1a2d1834d5393dafecce4a1cc56727bfd82b4dc18fc92f0d3cc3/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95", size = 152437 },
63
+ { url = "https://files.pythonhosted.org/packages/64/ea/69af161062166b5975ccbb0961fd2384853190c70786f288684490913bf5/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e", size = 150087 },
64
+ { url = "https://files.pythonhosted.org/packages/3b/fd/e60a9d9fd967f4ad5a92810138192f825d77b4fa2a557990fd575a47695b/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe", size = 145142 },
65
+ { url = "https://files.pythonhosted.org/packages/6d/02/8cb0988a1e49ac9ce2eed1e07b77ff118f2923e9ebd0ede41ba85f2dcb04/charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc", size = 94701 },
66
+ { url = "https://files.pythonhosted.org/packages/d6/20/f1d4670a8a723c46be695dff449d86d6092916f9e99c53051954ee33a1bc/charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749", size = 102191 },
67
+ { url = "https://files.pythonhosted.org/packages/bf/9b/08c0432272d77b04803958a4598a51e2a4b51c06640af8b8f0f908c18bf2/charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", size = 49446 },
68
+ ]
69
+
70
+ [[package]]
71
+ name = "colorama"
72
+ version = "0.4.6"
73
+ source = { registry = "https://pypi.org/simple" }
74
+ sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 }
75
+ wheels = [
76
+ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 },
77
+ ]
78
+
79
+ [[package]]
80
+ name = "cuda-python"
81
+ version = "12.6.0"
82
+ source = { registry = "https://pypi.org/simple" }
83
+ wheels = [
84
+ { url = "https://files.pythonhosted.org/packages/0b/a3/ad3148d068d78e8ad1e40094ab787338ea4bef06fbe2915cf1557a5c5f98/cuda_python-12.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dee03e2ba78a807a907a7939dddf089bb8a780faaf7ccbcbfc2461090af11e78", size = 23793330 },
85
+ { url = "https://files.pythonhosted.org/packages/86/93/f00a5f48eb67216d8a8818b93c0e8bbe5949f297add3367522081ec5223c/cuda_python-12.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e177f584094d9c9fd9c7d153168486a3966765c79cb2a80e86feb15e3b5adc14", size = 24223726 },
86
+ { url = "https://files.pythonhosted.org/packages/f6/e0/c2302ff6796eac6c6f1e1414f163c6a38deba62af0b7df2b77562656188c/cuda_python-12.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:3b1e9711c6455fabd947076d52eb21ea508ade95eb4dd33838b0339a84238125", size = 9995130 },
87
+ ]
88
+
89
+ [[package]]
90
+ name = "diffusers"
91
+ version = "0.30.2"
92
+ source = { registry = "https://pypi.org/simple" }
93
+ dependencies = [
94
+ { name = "filelock" },
95
+ { name = "huggingface-hub" },
96
+ { name = "importlib-metadata" },
97
+ { name = "numpy" },
98
+ { name = "pillow" },
99
+ { name = "regex" },
100
+ { name = "requests" },
101
+ { name = "safetensors" },
102
+ ]
103
+ sdist = { url = "https://files.pythonhosted.org/packages/04/ee/13a6327f04f21420ab4d8ada635aba7d884bf57b09f9b847b9af3818b348/diffusers-0.30.2.tar.gz", hash = "sha256:641875f78f36bdfa4b9af752b124d1fd6d431eadd5547fe0a3f354ae0af2636c", size = 2095560 }
104
+ wheels = [
105
+ { url = "https://files.pythonhosted.org/packages/2f/ee/f67b0888229be96a276257579a58eb2331733d246fdb8620e09ca7253971/diffusers-0.30.2-py3-none-any.whl", hash = "sha256:739826043147c2b59560944591dfdea5d24cd4fb15e751abbe20679a289bece8", size = 2636928 },
106
+ ]
107
+
108
+ [[package]]
109
+ name = "edge-maxxing-4090-newdream"
110
+ version = "7"
111
+ source = { editable = "." }
112
+ dependencies = [
113
+ { name = "accelerate" },
114
+ { name = "cuda-python" },
115
+ { name = "diffusers" },
116
+ { name = "edge-maxxing-pipelines" },
117
+ { name = "omegaconf" },
118
+ { name = "onnx" },
119
+ { name = "polygraphy" },
120
+ { name = "setuptools" },
121
+ { name = "tensorrt" },
122
+ { name = "tensorrt-cu12-bindings" },
123
+ { name = "tensorrt-cu12-libs" },
124
+ { name = "torch" },
125
+ { name = "transformers" },
126
+ { name = "wheel" },
127
+ ]
128
+
129
+ [package.metadata]
130
+ requires-dist = [
131
+ { name = "accelerate", specifier = "==0.31.0" },
132
+ { name = "cuda-python", specifier = ">=12.6.0" },
133
+ { name = "diffusers", specifier = "==0.30.2" },
134
+ { name = "edge-maxxing-pipelines", git = "https://github.com/womboai/edge-maxxing?subdirectory=pipelines&rev=8d8ff45863416484b5b4bc547782591bbdfc696a#8d8ff45863416484b5b4bc547782591bbdfc696a" },
135
+ { name = "omegaconf", specifier = "==2.3.0" },
136
+ { name = "onnx" },
137
+ { name = "polygraphy" },
138
+ { name = "setuptools", specifier = ">=75.2.0" },
139
+ { name = "tensorrt", specifier = ">=10.5.0" },
140
+ { name = "tensorrt-cu12-bindings", specifier = ">=10.5.0" },
141
+ { name = "tensorrt-cu12-libs", specifier = ">=10.5.0" },
142
+ { name = "torch", specifier = "==2.4.1" },
143
+ { name = "transformers", specifier = "==4.41.2" },
144
+ { name = "wheel" },
145
+ ]
146
+
147
+ [[package]]
148
+ name = "edge-maxxing-pipelines"
149
+ version = "1.0.0"
150
+ source = { git = "https://github.com/womboai/edge-maxxing?subdirectory=pipelines&rev=8d8ff45863416484b5b4bc547782591bbdfc696a#8d8ff45863416484b5b4bc547782591bbdfc696a" }
151
+ dependencies = [
152
+ { name = "pydantic" },
153
+ ]
154
+
155
+ [[package]]
156
+ name = "filelock"
157
+ version = "3.16.1"
158
+ source = { registry = "https://pypi.org/simple" }
159
+ sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037 }
160
+ wheels = [
161
+ { url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163 },
162
+ ]
163
+
164
+ [[package]]
165
+ name = "fsspec"
166
+ version = "2024.10.0"
167
+ source = { registry = "https://pypi.org/simple" }
168
+ sdist = { url = "https://files.pythonhosted.org/packages/a0/52/f16a068ebadae42526484c31f4398e62962504e5724a8ba5dc3409483df2/fsspec-2024.10.0.tar.gz", hash = "sha256:eda2d8a4116d4f2429db8550f2457da57279247dd930bb12f821b58391359493", size = 286853 }
169
+ wheels = [
170
+ { url = "https://files.pythonhosted.org/packages/c6/b2/454d6e7f0158951d8a78c2e1eb4f69ae81beb8dca5fee9809c6c99e9d0d0/fsspec-2024.10.0-py3-none-any.whl", hash = "sha256:03b9a6785766a4de40368b88906366755e2819e758b83705c88cd7cb5fe81871", size = 179641 },
171
+ ]
172
+
173
+ [[package]]
174
+ name = "huggingface-hub"
175
+ version = "0.26.1"
176
+ source = { registry = "https://pypi.org/simple" }
177
+ dependencies = [
178
+ { name = "filelock" },
179
+ { name = "fsspec" },
180
+ { name = "packaging" },
181
+ { name = "pyyaml" },
182
+ { name = "requests" },
183
+ { name = "tqdm" },
184
+ { name = "typing-extensions" },
185
+ ]
186
+ sdist = { url = "https://files.pythonhosted.org/packages/44/99/c8fdef6fe09a1719e5e5de24b012de5824889168c96143f5531cab5af42b/huggingface_hub-0.26.1.tar.gz", hash = "sha256:414c0d9b769eecc86c70f9d939d0f48bb28e8461dd1130021542eff0212db890", size = 375458 }
187
+ wheels = [
188
+ { url = "https://files.pythonhosted.org/packages/d7/4d/017d8d7cff5100092da8ea19139bcb1965bbadcbb5ddd0480e2badc299e8/huggingface_hub-0.26.1-py3-none-any.whl", hash = "sha256:5927a8fc64ae68859cd954b7cc29d1c8390a5e15caba6d3d349c973be8fdacf3", size = 447439 },
189
+ ]
190
+
191
+ [[package]]
192
+ name = "idna"
193
+ version = "3.10"
194
+ source = { registry = "https://pypi.org/simple" }
195
+ sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 }
196
+ wheels = [
197
+ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 },
198
+ ]
199
+
200
+ [[package]]
201
+ name = "importlib-metadata"
202
+ version = "8.5.0"
203
+ source = { registry = "https://pypi.org/simple" }
204
+ dependencies = [
205
+ { name = "zipp" },
206
+ ]
207
+ sdist = { url = "https://files.pythonhosted.org/packages/cd/12/33e59336dca5be0c398a7482335911a33aa0e20776128f038019f1a95f1b/importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7", size = 55304 }
208
+ wheels = [
209
+ { url = "https://files.pythonhosted.org/packages/a0/d9/a1e041c5e7caa9a05c925f4bdbdfb7f006d1f74996af53467bc394c97be7/importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b", size = 26514 },
210
+ ]
211
+
212
+ [[package]]
213
+ name = "jinja2"
214
+ version = "3.1.4"
215
+ source = { registry = "https://pypi.org/simple" }
216
+ dependencies = [
217
+ { name = "markupsafe" },
218
+ ]
219
+ sdist = { url = "https://files.pythonhosted.org/packages/ed/55/39036716d19cab0747a5020fc7e907f362fbf48c984b14e62127f7e68e5d/jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369", size = 240245 }
220
+ wheels = [
221
+ { url = "https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d", size = 133271 },
222
+ ]
223
+
224
+ [[package]]
225
+ name = "markupsafe"
226
+ version = "3.0.2"
227
+ source = { registry = "https://pypi.org/simple" }
228
+ sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 }
229
+ wheels = [
230
+ { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357 },
231
+ { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393 },
232
+ { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732 },
233
+ { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866 },
234
+ { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964 },
235
+ { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977 },
236
+ { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366 },
237
+ { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091 },
238
+ { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065 },
239
+ { url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514 },
240
+ ]
241
+
242
+ [[package]]
243
+ name = "mpmath"
244
+ version = "1.3.0"
245
+ source = { registry = "https://pypi.org/simple" }
246
+ sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106 }
247
+ wheels = [
248
+ { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198 },
249
+ ]
250
+
251
+ [[package]]
252
+ name = "networkx"
253
+ version = "3.4.2"
254
+ source = { registry = "https://pypi.org/simple" }
255
+ sdist = { url = "https://files.pythonhosted.org/packages/fd/1d/06475e1cd5264c0b870ea2cc6fdb3e37177c1e565c43f56ff17a10e3937f/networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1", size = 2151368 }
256
+ wheels = [
257
+ { url = "https://files.pythonhosted.org/packages/b9/54/dd730b32ea14ea797530a4479b2ed46a6fb250f682a9cfb997e968bf0261/networkx-3.4.2-py3-none-any.whl", hash = "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f", size = 1723263 },
258
+ ]
259
+
260
+ [[package]]
261
+ name = "numpy"
262
+ version = "2.1.2"
263
+ source = { registry = "https://pypi.org/simple" }
264
+ sdist = { url = "https://files.pythonhosted.org/packages/4b/d1/8a730ea07f4a37d94f9172f4ce1d81064b7a64766b460378be278952de75/numpy-2.1.2.tar.gz", hash = "sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c", size = 18878063 }
265
+ wheels = [
266
+ { url = "https://files.pythonhosted.org/packages/1c/a2/40a76d357f168e9f9f06d6cc2c8e22dd5fb2bfbe63fe2c433057258c145a/numpy-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee", size = 21150947 },
267
+ { url = "https://files.pythonhosted.org/packages/b5/d0/ba271ea9108d7278d3889a7eb38d77370a88713fb94339964e71ac184d4a/numpy-2.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884", size = 13758184 },
268
+ { url = "https://files.pythonhosted.org/packages/7c/b9/5c6507439cd756201010f7937bf90712c2469052ae094584af14557dd64f/numpy-2.1.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648", size = 5354091 },
269
+ { url = "https://files.pythonhosted.org/packages/60/21/7938cf724d9e84e45fb886f3fc794ab431d71facfebc261e3e9f19f3233a/numpy-2.1.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d", size = 6887169 },
270
+ { url = "https://files.pythonhosted.org/packages/09/8d/42a124657f5d31902fca73921b25a0d022cead2b32ce7e6975762cd2995a/numpy-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86", size = 13888165 },
271
+ { url = "https://files.pythonhosted.org/packages/fb/25/ba023652a39a2c127200e85aed975fc6119b421e2c348e5d0171e2046edb/numpy-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7", size = 16326954 },
272
+ { url = "https://files.pythonhosted.org/packages/34/58/23e6b07fad492b7c47cf09cd8bad6983658f0f925b6c535fd008e3e86274/numpy-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03", size = 16702916 },
273
+ { url = "https://files.pythonhosted.org/packages/91/24/37b5cf2dc7d385ac97f7b7fe50cba312abb70a2a5eac74c23af028811f73/numpy-2.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466", size = 14384372 },
274
+ { url = "https://files.pythonhosted.org/packages/ea/ec/0f6d471058a01d1a05a50d2793898de1549280fa715a8537987ee866b5d9/numpy-2.1.2-cp310-cp310-win32.whl", hash = "sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb", size = 6535361 },
275
+ { url = "https://files.pythonhosted.org/packages/c2/3d/293cc5927f916a7bc6bf74da8f6defab63d1b13f0959d7e21878ad8a20d8/numpy-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2", size = 12865501 },
276
+ { url = "https://files.pythonhosted.org/packages/73/c9/3e1d6bbe6d3d2e2c5a9483b24b2f29a229b323f62054278a3bba7fee11e5/numpy-2.1.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952", size = 20981945 },
277
+ { url = "https://files.pythonhosted.org/packages/6e/62/989c4988bde1a8e08117fccc3bab73d2886421fb98cde597168714f3c54e/numpy-2.1.2-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5", size = 6750558 },
278
+ { url = "https://files.pythonhosted.org/packages/53/b1/00ef9f30975f1312a53257f68e57b4513d14d537e03d507e2773a684b1e8/numpy-2.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7", size = 16141552 },
279
+ { url = "https://files.pythonhosted.org/packages/c0/ec/0c04903b48dfea6be1d7b47ba70f98709fb7198fd970784a1400c391d522/numpy-2.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e", size = 12789924 },
280
+ ]
281
+
282
+ [[package]]
283
+ name = "nvidia-cublas-cu12"
284
+ version = "12.1.3.1"
285
+ source = { registry = "https://pypi.org/simple" }
286
+ wheels = [
287
+ { url = "https://files.pythonhosted.org/packages/37/6d/121efd7382d5b0284239f4ab1fc1590d86d34ed4a4a2fdb13b30ca8e5740/nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:ee53ccca76a6fc08fb9701aa95b6ceb242cdaab118c3bb152af4e579af792728", size = 410594774 },
288
+ ]
289
+
290
+ [[package]]
291
+ name = "nvidia-cuda-cupti-cu12"
292
+ version = "12.1.105"
293
+ source = { registry = "https://pypi.org/simple" }
294
+ wheels = [
295
+ { url = "https://files.pythonhosted.org/packages/7e/00/6b218edd739ecfc60524e585ba8e6b00554dd908de2c9c66c1af3e44e18d/nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:e54fde3983165c624cb79254ae9818a456eb6e87a7fd4d56a2352c24ee542d7e", size = 14109015 },
296
+ ]
297
+
298
+ [[package]]
299
+ name = "nvidia-cuda-nvrtc-cu12"
300
+ version = "12.1.105"
301
+ source = { registry = "https://pypi.org/simple" }
302
+ wheels = [
303
+ { url = "https://files.pythonhosted.org/packages/b6/9f/c64c03f49d6fbc56196664d05dba14e3a561038a81a638eeb47f4d4cfd48/nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:339b385f50c309763ca65456ec75e17bbefcbbf2893f462cb8b90584cd27a1c2", size = 23671734 },
304
+ ]
305
+
306
+ [[package]]
307
+ name = "nvidia-cuda-runtime-cu12"
308
+ version = "12.1.105"
309
+ source = { registry = "https://pypi.org/simple" }
310
+ wheels = [
311
+ { url = "https://files.pythonhosted.org/packages/eb/d5/c68b1d2cdfcc59e72e8a5949a37ddb22ae6cade80cd4a57a84d4c8b55472/nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:6e258468ddf5796e25f1dc591a31029fa317d97a0a94ed93468fc86301d61e40", size = 823596 },
312
+ { url = "https://files.pythonhosted.org/packages/9f/e2/7a2b4b5064af56ea8ea2d8b2776c0f2960d95c88716138806121ae52a9c9/nvidia_cuda_runtime_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:dfb46ef84d73fababab44cf03e3b83f80700d27ca300e537f85f636fac474344", size = 821226 },
313
+ ]
314
+
315
+ [[package]]
316
+ name = "nvidia-cudnn-cu12"
317
+ version = "9.1.0.70"
318
+ source = { registry = "https://pypi.org/simple" }
319
+ dependencies = [
320
+ { name = "nvidia-cublas-cu12" },
321
+ ]
322
+ wheels = [
323
+ { url = "https://files.pythonhosted.org/packages/9f/fd/713452cd72343f682b1c7b9321e23829f00b842ceaedcda96e742ea0b0b3/nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f", size = 664752741 },
324
+ ]
325
+
326
+ [[package]]
327
+ name = "nvidia-cufft-cu12"
328
+ version = "11.0.2.54"
329
+ source = { registry = "https://pypi.org/simple" }
330
+ wheels = [
331
+ { url = "https://files.pythonhosted.org/packages/86/94/eb540db023ce1d162e7bea9f8f5aa781d57c65aed513c33ee9a5123ead4d/nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl", hash = "sha256:794e3948a1aa71fd817c3775866943936774d1c14e7628c74f6f7417224cdf56", size = 121635161 },
332
+ ]
333
+
334
+ [[package]]
335
+ name = "nvidia-curand-cu12"
336
+ version = "10.3.2.106"
337
+ source = { registry = "https://pypi.org/simple" }
338
+ wheels = [
339
+ { url = "https://files.pythonhosted.org/packages/44/31/4890b1c9abc496303412947fc7dcea3d14861720642b49e8ceed89636705/nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:9d264c5036dde4e64f1de8c50ae753237c12e0b1348738169cd0f8a536c0e1e0", size = 56467784 },
340
+ ]
341
+
342
+ [[package]]
343
+ name = "nvidia-cusolver-cu12"
344
+ version = "11.4.5.107"
345
+ source = { registry = "https://pypi.org/simple" }
346
+ dependencies = [
347
+ { name = "nvidia-cublas-cu12" },
348
+ { name = "nvidia-cusparse-cu12" },
349
+ { name = "nvidia-nvjitlink-cu12" },
350
+ ]
351
+ wheels = [
352
+ { url = "https://files.pythonhosted.org/packages/bc/1d/8de1e5c67099015c834315e333911273a8c6aaba78923dd1d1e25fc5f217/nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl", hash = "sha256:8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd", size = 124161928 },
353
+ ]
354
+
355
+ [[package]]
356
+ name = "nvidia-cusparse-cu12"
357
+ version = "12.1.0.106"
358
+ source = { registry = "https://pypi.org/simple" }
359
+ dependencies = [
360
+ { name = "nvidia-nvjitlink-cu12" },
361
+ ]
362
+ wheels = [
363
+ { url = "https://files.pythonhosted.org/packages/65/5b/cfaeebf25cd9fdec14338ccb16f6b2c4c7fa9163aefcf057d86b9cc248bb/nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c", size = 195958278 },
364
+ ]
365
+
366
+ [[package]]
367
+ name = "nvidia-nccl-cu12"
368
+ version = "2.20.5"
369
+ source = { registry = "https://pypi.org/simple" }
370
+ wheels = [
371
+ { url = "https://files.pythonhosted.org/packages/c1/bb/d09dda47c881f9ff504afd6f9ca4f502ded6d8fc2f572cacc5e39da91c28/nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1fc150d5c3250b170b29410ba682384b14581db722b2531b0d8d33c595f33d01", size = 176238458 },
372
+ { url = "https://files.pythonhosted.org/packages/4b/2a/0a131f572aa09f741c30ccd45a8e56316e8be8dfc7bc19bf0ab7cfef7b19/nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:057f6bf9685f75215d0c53bf3ac4a10b3e6578351de307abad9e18a99182af56", size = 176249402 },
373
+ ]
374
+
375
+ [[package]]
376
+ name = "nvidia-nvjitlink-cu12"
377
+ version = "12.6.77"
378
+ source = { registry = "https://pypi.org/simple" }
379
+ wheels = [
380
+ { url = "https://files.pythonhosted.org/packages/11/8c/386018fdffdce2ff8d43fedf192ef7d14cab7501cbf78a106dd2e9f1fc1f/nvidia_nvjitlink_cu12-12.6.77-py3-none-manylinux2014_aarch64.whl", hash = "sha256:3bf10d85bb1801e9c894c6e197e44dd137d2a0a9e43f8450e9ad13f2df0dd52d", size = 19270432 },
381
+ { url = "https://files.pythonhosted.org/packages/fe/e4/486de766851d58699bcfeb3ba6a3beb4d89c3809f75b9d423b9508a8760f/nvidia_nvjitlink_cu12-12.6.77-py3-none-manylinux2014_x86_64.whl", hash = "sha256:9ae346d16203ae4ea513be416495167a0101d33d2d14935aa9c1829a3fb45142", size = 19745114 },
382
+ ]
383
+
384
+ [[package]]
385
+ name = "nvidia-nvtx-cu12"
386
+ version = "12.1.105"
387
+ source = { registry = "https://pypi.org/simple" }
388
+ wheels = [
389
+ { url = "https://files.pythonhosted.org/packages/da/d3/8057f0587683ed2fcd4dbfbdfdfa807b9160b809976099d36b8f60d08f03/nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:dc21cf308ca5691e7c04d962e213f8a4aa9bbfa23d95412f452254c2caeb09e5", size = 99138 },
390
+ ]
391
+
392
+ [[package]]
393
+ name = "omegaconf"
394
+ version = "2.3.0"
395
+ source = { registry = "https://pypi.org/simple" }
396
+ dependencies = [
397
+ { name = "antlr4-python3-runtime" },
398
+ { name = "pyyaml" },
399
+ ]
400
+ sdist = { url = "https://files.pythonhosted.org/packages/09/48/6388f1bb9da707110532cb70ec4d2822858ddfb44f1cdf1233c20a80ea4b/omegaconf-2.3.0.tar.gz", hash = "sha256:d5d4b6d29955cc50ad50c46dc269bcd92c6e00f5f90d23ab5fee7bfca4ba4cc7", size = 3298120 }
401
+ wheels = [
402
+ { url = "https://files.pythonhosted.org/packages/e3/94/1843518e420fa3ed6919835845df698c7e27e183cb997394e4a670973a65/omegaconf-2.3.0-py3-none-any.whl", hash = "sha256:7b4df175cdb08ba400f45cae3bdcae7ba8365db4d165fc65fd04b050ab63b46b", size = 79500 },
403
+ ]
404
+
405
+ [[package]]
406
+ name = "onnx"
407
+ version = "1.17.0"
408
+ source = { registry = "https://pypi.org/simple" }
409
+ dependencies = [
410
+ { name = "numpy" },
411
+ { name = "protobuf" },
412
+ ]
413
+ sdist = { url = "https://files.pythonhosted.org/packages/9a/54/0e385c26bf230d223810a9c7d06628d954008a5e5e4b73ee26ef02327282/onnx-1.17.0.tar.gz", hash = "sha256:48ca1a91ff73c1d5e3ea2eef20ae5d0e709bb8a2355ed798ffc2169753013fd3", size = 12165120 }
414
+ wheels = [
415
+ { url = "https://files.pythonhosted.org/packages/2e/29/57053ba7787788ac75efb095cfc1ae290436b6d3a26754693cd7ed1b4fac/onnx-1.17.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:38b5df0eb22012198cdcee527cc5f917f09cce1f88a69248aaca22bd78a7f023", size = 16645616 },
416
+ { url = "https://files.pythonhosted.org/packages/75/0d/831807a18db2a5e8f7813848c59272b904a4ef3939fe4d1288cbce9ea735/onnx-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d545335cb49d4d8c47cc803d3a805deb7ad5d9094dc67657d66e568610a36d7d", size = 15908420 },
417
+ { url = "https://files.pythonhosted.org/packages/dd/5b/c4f95dbe652d14aeba9afaceb177e9ffc48ac3c03048dd3f872f26f07e34/onnx-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3193a3672fc60f1a18c0f4c93ac81b761bc72fd8a6c2035fa79ff5969f07713e", size = 16046244 },
418
+ { url = "https://files.pythonhosted.org/packages/08/a9/c1f218085043dccc6311460239e253fa6957cf12ee4b0a56b82014938d0b/onnx-1.17.0-cp310-cp310-win32.whl", hash = "sha256:0141c2ce806c474b667b7e4499164227ef594584da432fd5613ec17c1855e311", size = 14423516 },
419
+ { url = "https://files.pythonhosted.org/packages/0e/d3/d26ebf590a65686dde6b27fef32493026c5be9e42083340d947395f93405/onnx-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:dfd777d95c158437fda6b34758f0877d15b89cbe9ff45affbedc519b35345cf9", size = 14528496 },
420
+ ]
421
+
422
+ [[package]]
423
+ name = "packaging"
424
+ version = "24.1"
425
+ source = { registry = "https://pypi.org/simple" }
426
+ sdist = { url = "https://files.pythonhosted.org/packages/51/65/50db4dda066951078f0a96cf12f4b9ada6e4b811516bf0262c0f4f7064d4/packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002", size = 148788 }
427
+ wheels = [
428
+ { url = "https://files.pythonhosted.org/packages/08/aa/cc0199a5f0ad350994d660967a8efb233fe0416e4639146c089643407ce6/packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124", size = 53985 },
429
+ ]
430
+
431
+ [[package]]
432
+ name = "pillow"
433
+ version = "11.0.0"
434
+ source = { registry = "https://pypi.org/simple" }
435
+ sdist = { url = "https://files.pythonhosted.org/packages/a5/26/0d95c04c868f6bdb0c447e3ee2de5564411845e36a858cfd63766bc7b563/pillow-11.0.0.tar.gz", hash = "sha256:72bacbaf24ac003fea9bff9837d1eedb6088758d41e100c1552930151f677739", size = 46737780 }
436
+ wheels = [
437
+ { url = "https://files.pythonhosted.org/packages/98/fb/a6ce6836bd7fd93fbf9144bf54789e02babc27403b50a9e1583ee877d6da/pillow-11.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:6619654954dc4936fcff82db8eb6401d3159ec6be81e33c6000dfd76ae189947", size = 3154708 },
438
+ { url = "https://files.pythonhosted.org/packages/6a/1d/1f51e6e912d8ff316bb3935a8cda617c801783e0b998bf7a894e91d3bd4c/pillow-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b3c5ac4bed7519088103d9450a1107f76308ecf91d6dabc8a33a2fcfb18d0fba", size = 2979223 },
439
+ { url = "https://files.pythonhosted.org/packages/90/83/e2077b0192ca8a9ef794dbb74700c7e48384706467067976c2a95a0f40a1/pillow-11.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a65149d8ada1055029fcb665452b2814fe7d7082fcb0c5bed6db851cb69b2086", size = 4183167 },
440
+ { url = "https://files.pythonhosted.org/packages/0e/74/467af0146970a98349cdf39e9b79a6cc8a2e7558f2c01c28a7b6b85c5bda/pillow-11.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88a58d8ac0cc0e7f3a014509f0455248a76629ca9b604eca7dc5927cc593c5e9", size = 4283912 },
441
+ { url = "https://files.pythonhosted.org/packages/85/b1/d95d4f7ca3a6c1ae120959605875a31a3c209c4e50f0029dc1a87566cf46/pillow-11.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c26845094b1af3c91852745ae78e3ea47abf3dbcd1cf962f16b9a5fbe3ee8488", size = 4195815 },
442
+ { url = "https://files.pythonhosted.org/packages/41/c3/94f33af0762ed76b5a237c5797e088aa57f2b7fa8ee7932d399087be66a8/pillow-11.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:1a61b54f87ab5786b8479f81c4b11f4d61702830354520837f8cc791ebba0f5f", size = 4366117 },
443
+ { url = "https://files.pythonhosted.org/packages/ba/3c/443e7ef01f597497268899e1cca95c0de947c9bbf77a8f18b3c126681e5d/pillow-11.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:674629ff60030d144b7bca2b8330225a9b11c482ed408813924619c6f302fdbb", size = 4278607 },
444
+ { url = "https://files.pythonhosted.org/packages/26/95/1495304448b0081e60c0c5d63f928ef48bb290acee7385804426fa395a21/pillow-11.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:598b4e238f13276e0008299bd2482003f48158e2b11826862b1eb2ad7c768b97", size = 4410685 },
445
+ { url = "https://files.pythonhosted.org/packages/45/da/861e1df971ef0de9870720cb309ca4d553b26a9483ec9be3a7bf1de4a095/pillow-11.0.0-cp310-cp310-win32.whl", hash = "sha256:9a0f748eaa434a41fccf8e1ee7a3eed68af1b690e75328fd7a60af123c193b50", size = 2249185 },
446
+ { url = "https://files.pythonhosted.org/packages/d5/4e/78f7c5202ea2a772a5ab05069c1b82503e6353cd79c7e474d4945f4b82c3/pillow-11.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5629742881bcbc1f42e840af185fd4d83a5edeb96475a575f4da50d6ede337c", size = 2566726 },
447
+ { url = "https://files.pythonhosted.org/packages/77/e4/6e84eada35cbcc646fc1870f72ccfd4afacb0fae0c37ffbffe7f5dc24bf1/pillow-11.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:ee217c198f2e41f184f3869f3e485557296d505b5195c513b2bfe0062dc537f1", size = 2254585 },
448
+ { url = "https://files.pythonhosted.org/packages/36/57/42a4dd825eab762ba9e690d696d894ba366e06791936056e26e099398cda/pillow-11.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1187739620f2b365de756ce086fdb3604573337cc28a0d3ac4a01ab6b2d2a6d2", size = 3119239 },
449
+ { url = "https://files.pythonhosted.org/packages/98/f7/25f9f9e368226a1d6cf3507081a1a7944eddd3ca7821023377043f5a83c8/pillow-11.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fbbcb7b57dc9c794843e3d1258c0fbf0f48656d46ffe9e09b63bbd6e8cd5d0a2", size = 2950803 },
450
+ { url = "https://files.pythonhosted.org/packages/59/01/98ead48a6c2e31e6185d4c16c978a67fe3ccb5da5c2ff2ba8475379bb693/pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d203af30149ae339ad1b4f710d9844ed8796e97fda23ffbc4cc472968a47d0b", size = 3281098 },
451
+ { url = "https://files.pythonhosted.org/packages/51/c0/570255b2866a0e4d500a14f950803a2ec273bac7badc43320120b9262450/pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a0d3b115009ebb8ac3d2ebec5c2982cc693da935f4ab7bb5c8ebe2f47d36f2", size = 3323665 },
452
+ { url = "https://files.pythonhosted.org/packages/0e/75/689b4ec0483c42bfc7d1aacd32ade7a226db4f4fac57c6fdcdf90c0731e3/pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:73853108f56df97baf2bb8b522f3578221e56f646ba345a372c78326710d3830", size = 3310533 },
453
+ { url = "https://files.pythonhosted.org/packages/3d/30/38bd6149cf53da1db4bad304c543ade775d225961c4310f30425995cb9ec/pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e58876c91f97b0952eb766123bfef372792ab3f4e3e1f1a2267834c2ab131734", size = 3414886 },
454
+ { url = "https://files.pythonhosted.org/packages/ec/3d/c32a51d848401bd94cabb8767a39621496491ee7cd5199856b77da9b18ad/pillow-11.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:224aaa38177597bb179f3ec87eeefcce8e4f85e608025e9cfac60de237ba6316", size = 2567508 },
455
+ ]
456
+
457
+ [[package]]
458
+ name = "polygraphy"
459
+ version = "0.49.9"
460
+ source = { registry = "https://pypi.org/simple" }
461
+ wheels = [
462
+ { url = "https://files.pythonhosted.org/packages/4a/f5/a2b20c677c1a856cc9e08cd0b5a5105450ed5253e369e938ddd31d91c547/polygraphy-0.49.9-py2.py3-none-any.whl", hash = "sha256:62ae22825efdd3288222e5b1d2d791fe58e87844fcd848bcd1251fbce02ba956", size = 346910 },
463
+ ]
464
+
465
+ [[package]]
466
+ name = "protobuf"
467
+ version = "5.28.3"
468
+ source = { registry = "https://pypi.org/simple" }
469
+ sdist = { url = "https://files.pythonhosted.org/packages/74/6e/e69eb906fddcb38f8530a12f4b410699972ab7ced4e21524ece9d546ac27/protobuf-5.28.3.tar.gz", hash = "sha256:64badbc49180a5e401f373f9ce7ab1d18b63f7dd4a9cdc43c92b9f0b481cef7b", size = 422479 }
470
+ wheels = [
471
+ { url = "https://files.pythonhosted.org/packages/d1/c5/05163fad52d7c43e124a545f1372d18266db36036377ad29de4271134a6a/protobuf-5.28.3-cp310-abi3-win32.whl", hash = "sha256:0c4eec6f987338617072592b97943fdbe30d019c56126493111cf24344c1cc24", size = 419624 },
472
+ { url = "https://files.pythonhosted.org/packages/9c/4c/4563ebe001ff30dca9d7ed12e471fa098d9759712980cde1fd03a3a44fb7/protobuf-5.28.3-cp310-abi3-win_amd64.whl", hash = "sha256:91fba8f445723fcf400fdbe9ca796b19d3b1242cd873907979b9ed71e4afe868", size = 431464 },
473
+ { url = "https://files.pythonhosted.org/packages/1c/f2/baf397f3dd1d3e4af7e3f5a0382b868d25ac068eefe1ebde05132333436c/protobuf-5.28.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a3f6857551e53ce35e60b403b8a27b0295f7d6eb63d10484f12bc6879c715687", size = 414743 },
474
+ { url = "https://files.pythonhosted.org/packages/85/50/cd61a358ba1601f40e7d38bcfba22e053f40ef2c50d55b55926aecc8fec7/protobuf-5.28.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:3fa2de6b8b29d12c61911505d893afe7320ce7ccba4df913e2971461fa36d584", size = 316511 },
475
+ { url = "https://files.pythonhosted.org/packages/5d/ae/3257b09328c0b4e59535e497b0c7537d4954038bdd53a2f0d2f49d15a7c4/protobuf-5.28.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:712319fbdddb46f21abb66cd33cb9e491a5763b2febd8f228251add221981135", size = 316624 },
476
+ { url = "https://files.pythonhosted.org/packages/ad/c3/2377c159e28ea89a91cf1ca223f827ae8deccb2c9c401e5ca233cd73002f/protobuf-5.28.3-py3-none-any.whl", hash = "sha256:cee1757663fa32a1ee673434fcf3bf24dd54763c79690201208bafec62f19eed", size = 169511 },
477
+ ]
478
+
479
+ [[package]]
480
+ name = "psutil"
481
+ version = "6.1.0"
482
+ source = { registry = "https://pypi.org/simple" }
483
+ sdist = { url = "https://files.pythonhosted.org/packages/26/10/2a30b13c61e7cf937f4adf90710776b7918ed0a9c434e2c38224732af310/psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a", size = 508565 }
484
+ wheels = [
485
+ { url = "https://files.pythonhosted.org/packages/01/9e/8be43078a171381953cfee33c07c0d628594b5dbfc5157847b85022c2c1b/psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688", size = 247762 },
486
+ { url = "https://files.pythonhosted.org/packages/1d/cb/313e80644ea407f04f6602a9e23096540d9dc1878755f3952ea8d3d104be/psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e", size = 248777 },
487
+ { url = "https://files.pythonhosted.org/packages/65/8e/bcbe2025c587b5d703369b6a75b65d41d1367553da6e3f788aff91eaf5bd/psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38", size = 284259 },
488
+ { url = "https://files.pythonhosted.org/packages/58/4d/8245e6f76a93c98aab285a43ea71ff1b171bcd90c9d238bf81f7021fb233/psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b", size = 287255 },
489
+ { url = "https://files.pythonhosted.org/packages/27/c2/d034856ac47e3b3cdfa9720d0e113902e615f4190d5d1bdb8df4b2015fb2/psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a", size = 288804 },
490
+ { url = "https://files.pythonhosted.org/packages/ea/55/5389ed243c878725feffc0d6a3bc5ef6764312b6fc7c081faaa2cfa7ef37/psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e", size = 250386 },
491
+ { url = "https://files.pythonhosted.org/packages/11/91/87fa6f060e649b1e1a7b19a4f5869709fbf750b7c8c262ee776ec32f3028/psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be", size = 254228 },
492
+ ]
493
+
494
+ [[package]]
495
+ name = "pydantic"
496
+ version = "2.9.2"
497
+ source = { registry = "https://pypi.org/simple" }
498
+ dependencies = [
499
+ { name = "annotated-types" },
500
+ { name = "pydantic-core" },
501
+ { name = "typing-extensions" },
502
+ ]
503
+ sdist = { url = "https://files.pythonhosted.org/packages/a9/b7/d9e3f12af310e1120c21603644a1cd86f59060e040ec5c3a80b8f05fae30/pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f", size = 769917 }
504
+ wheels = [
505
+ { url = "https://files.pythonhosted.org/packages/df/e4/ba44652d562cbf0bf320e0f3810206149c8a4e99cdbf66da82e97ab53a15/pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12", size = 434928 },
506
+ ]
507
+
508
+ [[package]]
509
+ name = "pydantic-core"
510
+ version = "2.23.4"
511
+ source = { registry = "https://pypi.org/simple" }
512
+ dependencies = [
513
+ { name = "typing-extensions" },
514
+ ]
515
+ sdist = { url = "https://files.pythonhosted.org/packages/e2/aa/6b6a9b9f8537b872f552ddd46dd3da230367754b6f707b8e1e963f515ea3/pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863", size = 402156 }
516
+ wheels = [
517
+ { url = "https://files.pythonhosted.org/packages/5c/8b/d3ae387f66277bd8104096d6ec0a145f4baa2966ebb2cad746c0920c9526/pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b", size = 1867835 },
518
+ { url = "https://files.pythonhosted.org/packages/46/76/f68272e4c3a7df8777798282c5e47d508274917f29992d84e1898f8908c7/pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166", size = 1776689 },
519
+ { url = "https://files.pythonhosted.org/packages/cc/69/5f945b4416f42ea3f3bc9d2aaec66c76084a6ff4ff27555bf9415ab43189/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb", size = 1800748 },
520
+ { url = "https://files.pythonhosted.org/packages/50/ab/891a7b0054bcc297fb02d44d05c50e68154e31788f2d9d41d0b72c89fdf7/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916", size = 1806469 },
521
+ { url = "https://files.pythonhosted.org/packages/31/7c/6e3fa122075d78f277a8431c4c608f061881b76c2b7faca01d317ee39b5d/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07", size = 2002246 },
522
+ { url = "https://files.pythonhosted.org/packages/ad/6f/22d5692b7ab63fc4acbc74de6ff61d185804a83160adba5e6cc6068e1128/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232", size = 2659404 },
523
+ { url = "https://files.pythonhosted.org/packages/11/ac/1e647dc1121c028b691028fa61a4e7477e6aeb5132628fde41dd34c1671f/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2", size = 2053940 },
524
+ { url = "https://files.pythonhosted.org/packages/91/75/984740c17f12c3ce18b5a2fcc4bdceb785cce7df1511a4ce89bca17c7e2d/pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f", size = 1921437 },
525
+ { url = "https://files.pythonhosted.org/packages/a0/74/13c5f606b64d93f0721e7768cd3e8b2102164866c207b8cd6f90bb15d24f/pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3", size = 1966129 },
526
+ { url = "https://files.pythonhosted.org/packages/18/03/9c4aa5919457c7b57a016c1ab513b1a926ed9b2bb7915bf8e506bf65c34b/pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071", size = 2110908 },
527
+ { url = "https://files.pythonhosted.org/packages/92/2c/053d33f029c5dc65e5cf44ff03ceeefb7cce908f8f3cca9265e7f9b540c8/pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119", size = 1735278 },
528
+ { url = "https://files.pythonhosted.org/packages/de/81/7dfe464eca78d76d31dd661b04b5f2036ec72ea8848dd87ab7375e185c23/pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f", size = 1917453 },
529
+ { url = "https://files.pythonhosted.org/packages/13/a9/5d582eb3204464284611f636b55c0a7410d748ff338756323cb1ce721b96/pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5", size = 1857135 },
530
+ { url = "https://files.pythonhosted.org/packages/2c/57/faf36290933fe16717f97829eabfb1868182ac495f99cf0eda9f59687c9d/pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec", size = 1740583 },
531
+ { url = "https://files.pythonhosted.org/packages/91/7c/d99e3513dc191c4fec363aef1bf4c8af9125d8fa53af7cb97e8babef4e40/pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480", size = 1793637 },
532
+ { url = "https://files.pythonhosted.org/packages/29/18/812222b6d18c2d13eebbb0f7cdc170a408d9ced65794fdb86147c77e1982/pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068", size = 1941963 },
533
+ { url = "https://files.pythonhosted.org/packages/0f/36/c1f3642ac3f05e6bb4aec3ffc399fa3f84895d259cf5f0ce3054b7735c29/pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801", size = 1915332 },
534
+ { url = "https://files.pythonhosted.org/packages/f7/ca/9c0854829311fb446020ebb540ee22509731abad886d2859c855dd29b904/pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728", size = 1957926 },
535
+ { url = "https://files.pythonhosted.org/packages/c0/1c/7836b67c42d0cd4441fcd9fafbf6a027ad4b79b6559f80cf11f89fd83648/pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433", size = 2100342 },
536
+ { url = "https://files.pythonhosted.org/packages/a9/f9/b6bcaf874f410564a78908739c80861a171788ef4d4f76f5009656672dfe/pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753", size = 1920344 },
537
+ ]
538
+
539
+ [[package]]
540
+ name = "pyyaml"
541
+ version = "6.0.2"
542
+ source = { registry = "https://pypi.org/simple" }
543
+ sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 }
544
+ wheels = [
545
+ { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199 },
546
+ { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758 },
547
+ { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463 },
548
+ { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280 },
549
+ { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239 },
550
+ { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802 },
551
+ { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527 },
552
+ { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052 },
553
+ { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774 },
554
+ ]
555
+
556
+ [[package]]
557
+ name = "regex"
558
+ version = "2024.9.11"
559
+ source = { registry = "https://pypi.org/simple" }
560
+ sdist = { url = "https://files.pythonhosted.org/packages/f9/38/148df33b4dbca3bd069b963acab5e0fa1a9dbd6820f8c322d0dd6faeff96/regex-2024.9.11.tar.gz", hash = "sha256:6c188c307e8433bcb63dc1915022deb553b4203a70722fc542c363bf120a01fd", size = 399403 }
561
+ wheels = [
562
+ { url = "https://files.pythonhosted.org/packages/63/12/497bd6599ce8a239ade68678132296aec5ee25ebea45fc8ba91aa60fceec/regex-2024.9.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1494fa8725c285a81d01dc8c06b55287a1ee5e0e382d8413adc0a9197aac6408", size = 482488 },
563
+ { url = "https://files.pythonhosted.org/packages/c1/24/595ddb9bec2a9b151cdaf9565b0c9f3da9f0cb1dca6c158bc5175332ddf8/regex-2024.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0e12c481ad92d129c78f13a2a3662317e46ee7ef96c94fd332e1c29131875b7d", size = 287443 },
564
+ { url = "https://files.pythonhosted.org/packages/69/a8/b2fb45d9715b1469383a0da7968f8cacc2f83e9fbbcd6b8713752dd980a6/regex-2024.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16e13a7929791ac1216afde26f712802e3df7bf0360b32e4914dca3ab8baeea5", size = 284561 },
565
+ { url = "https://files.pythonhosted.org/packages/88/87/1ce4a5357216b19b7055e7d3b0efc75a6e426133bf1e7d094321df514257/regex-2024.9.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46989629904bad940bbec2106528140a218b4a36bb3042d8406980be1941429c", size = 783177 },
566
+ { url = "https://files.pythonhosted.org/packages/3c/65/b9f002ab32f7b68e7d1dcabb67926f3f47325b8dbc22cc50b6a043e1d07c/regex-2024.9.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a906ed5e47a0ce5f04b2c981af1c9acf9e8696066900bf03b9d7879a6f679fc8", size = 823193 },
567
+ { url = "https://files.pythonhosted.org/packages/22/91/8339dd3abce101204d246e31bc26cdd7ec07c9f91598472459a3a902aa41/regex-2024.9.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a091b0550b3b0207784a7d6d0f1a00d1d1c8a11699c1a4d93db3fbefc3ad35", size = 809950 },
568
+ { url = "https://files.pythonhosted.org/packages/cb/19/556638aa11c2ec9968a1da998f07f27ec0abb9bf3c647d7c7985ca0b8eea/regex-2024.9.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ddcd9a179c0a6fa8add279a4444015acddcd7f232a49071ae57fa6e278f1f71", size = 782661 },
569
+ { url = "https://files.pythonhosted.org/packages/d1/e9/7a5bc4c6ef8d9cd2bdd83a667888fc35320da96a4cc4da5fa084330f53db/regex-2024.9.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b41e1adc61fa347662b09398e31ad446afadff932a24807d3ceb955ed865cc8", size = 772348 },
570
+ { url = "https://files.pythonhosted.org/packages/f1/0b/29f2105bfac3ed08e704914c38e93b07c784a6655f8a015297ee7173e95b/regex-2024.9.11-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ced479f601cd2f8ca1fd7b23925a7e0ad512a56d6e9476f79b8f381d9d37090a", size = 697460 },
571
+ { url = "https://files.pythonhosted.org/packages/71/3a/52ff61054d15a4722605f5872ad03962b319a04c1ebaebe570b8b9b7dde1/regex-2024.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:635a1d96665f84b292e401c3d62775851aedc31d4f8784117b3c68c4fcd4118d", size = 769151 },
572
+ { url = "https://files.pythonhosted.org/packages/97/07/37e460ab5ca84be8e1e197c3b526c5c86993dcc9e13cbc805c35fc2463c1/regex-2024.9.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c0256beda696edcf7d97ef16b2a33a8e5a875affd6fa6567b54f7c577b30a137", size = 777478 },
573
+ { url = "https://files.pythonhosted.org/packages/65/7b/953075723dd5ab00780043ac2f9de667306ff9e2a85332975e9f19279174/regex-2024.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ce4f1185db3fbde8ed8aa223fc9620f276c58de8b0d4f8cc86fd1360829edb6", size = 845373 },
574
+ { url = "https://files.pythonhosted.org/packages/40/b8/3e9484c6230b8b6e8f816ab7c9a080e631124991a4ae2c27a81631777db0/regex-2024.9.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:09d77559e80dcc9d24570da3745ab859a9cf91953062e4ab126ba9d5993688ca", size = 845369 },
575
+ { url = "https://files.pythonhosted.org/packages/b7/99/38434984d912edbd2e1969d116257e869578f67461bd7462b894c45ed874/regex-2024.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a22ccefd4db3f12b526eccb129390942fe874a3a9fdbdd24cf55773a1faab1a", size = 773935 },
576
+ { url = "https://files.pythonhosted.org/packages/ab/67/43174d2b46fa947b7b9dfe56b6c8a8a76d44223f35b1d64645a732fd1d6f/regex-2024.9.11-cp310-cp310-win32.whl", hash = "sha256:f745ec09bc1b0bd15cfc73df6fa4f726dcc26bb16c23a03f9e3367d357eeedd0", size = 261624 },
577
+ { url = "https://files.pythonhosted.org/packages/c4/2a/4f9c47d9395b6aff24874c761d8d620c0232f97c43ef3cf668c8b355e7a7/regex-2024.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:01c2acb51f8a7d6494c8c5eafe3d8e06d76563d8a8a4643b37e9b2dd8a2ff623", size = 274020 },
578
+ ]
579
+
580
+ [[package]]
581
+ name = "requests"
582
+ version = "2.32.3"
583
+ source = { registry = "https://pypi.org/simple" }
584
+ dependencies = [
585
+ { name = "certifi" },
586
+ { name = "charset-normalizer" },
587
+ { name = "idna" },
588
+ { name = "urllib3" },
589
+ ]
590
+ sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 }
591
+ wheels = [
592
+ { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 },
593
+ ]
594
+
595
+ [[package]]
596
+ name = "safetensors"
597
+ version = "0.4.5"
598
+ source = { registry = "https://pypi.org/simple" }
599
+ sdist = { url = "https://files.pythonhosted.org/packages/cb/46/a1c56ed856c6ac3b1a8b37abe5be0cac53219367af1331e721b04d122577/safetensors-0.4.5.tar.gz", hash = "sha256:d73de19682deabb02524b3d5d1f8b3aaba94c72f1bbfc7911b9b9d5d391c0310", size = 65702 }
600
+ wheels = [
601
+ { url = "https://files.pythonhosted.org/packages/38/10/0798ec2c8704c2d172620d8a3725bed92cdd75516357b1a3e64d4229ea4e/safetensors-0.4.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a63eaccd22243c67e4f2b1c3e258b257effc4acd78f3b9d397edc8cf8f1298a7", size = 392312 },
602
+ { url = "https://files.pythonhosted.org/packages/2b/9e/9648d8dbb485c40a4a0212b7537626ae440b48156cc74601ca0b7a7615e0/safetensors-0.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:23fc9b4ec7b602915cbb4ec1a7c1ad96d2743c322f20ab709e2c35d1b66dad27", size = 381858 },
603
+ { url = "https://files.pythonhosted.org/packages/8b/67/49556aeacc00df353767ed31d68b492fecf38c3f664c52692e4d92aa0032/safetensors-0.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6885016f34bef80ea1085b7e99b3c1f92cb1be78a49839203060f67b40aee761", size = 441382 },
604
+ { url = "https://files.pythonhosted.org/packages/5d/ce/e9f4869a37bb11229e6cdb4e73a6ef23b4f360eee9dca5f7e40982779704/safetensors-0.4.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:133620f443450429322f238fda74d512c4008621227fccf2f8cf4a76206fea7c", size = 439001 },
605
+ { url = "https://files.pythonhosted.org/packages/a0/27/aee8cf031b89c34caf83194ec6b7f2eed28d053fff8b6da6d00c85c56035/safetensors-0.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4fb3e0609ec12d2a77e882f07cced530b8262027f64b75d399f1504ffec0ba56", size = 478026 },
606
+ { url = "https://files.pythonhosted.org/packages/da/33/1d9fc4805c623636e7d460f28eec92ebd1856f7a552df8eb78398a1ef4de/safetensors-0.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0f1dd769f064adc33831f5e97ad07babbd728427f98e3e1db6902e369122737", size = 495545 },
607
+ { url = "https://files.pythonhosted.org/packages/b9/df/6f766b56690709d22e83836e4067a1109a7d84ea152a6deb5692743a2805/safetensors-0.4.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6d156bdb26732feada84f9388a9f135528c1ef5b05fae153da365ad4319c4c5", size = 435016 },
608
+ { url = "https://files.pythonhosted.org/packages/90/fa/7bc3f18086201b1e55a42c88b822ae197d0158e12c54cd45c887305f1b7e/safetensors-0.4.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e347d77e2c77eb7624400ccd09bed69d35c0332f417ce8c048d404a096c593b", size = 456273 },
609
+ { url = "https://files.pythonhosted.org/packages/3e/59/2ae50150d37a65c1c5f01aec74dc737707b8bbecdc76307e5a1a12c8a376/safetensors-0.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9f556eea3aec1d3d955403159fe2123ddd68e880f83954ee9b4a3f2e15e716b6", size = 619669 },
610
+ { url = "https://files.pythonhosted.org/packages/fe/43/10f0bb597aef62c9c154152e265057089f3c729bdd980e6c32c3ec2407a4/safetensors-0.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9483f42be3b6bc8ff77dd67302de8ae411c4db39f7224dec66b0eb95822e4163", size = 605212 },
611
+ { url = "https://files.pythonhosted.org/packages/7c/75/ede6887ea0ceaba55730988bfc7668dc147a8758f907fa6db26fbb681b8e/safetensors-0.4.5-cp310-none-win32.whl", hash = "sha256:7389129c03fadd1ccc37fd1ebbc773f2b031483b04700923c3511d2a939252cc", size = 272652 },
612
+ { url = "https://files.pythonhosted.org/packages/ba/f0/919c72a9eef843781e652d0650f2819039943e69b69d5af2d0451a23edc3/safetensors-0.4.5-cp310-none-win_amd64.whl", hash = "sha256:e98ef5524f8b6620c8cdef97220c0b6a5c1cef69852fcd2f174bb96c2bb316b1", size = 285879 },
613
+ { url = "https://files.pythonhosted.org/packages/cf/ff/037ae4c0ee32db496669365e66079b6329906c6814722b159aa700e67208/safetensors-0.4.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fdadf66b5a22ceb645d5435a0be7a0292ce59648ca1d46b352f13cff3ea80410", size = 392951 },
614
+ { url = "https://files.pythonhosted.org/packages/f1/d6/6621e16b35bf83ae099eaab07338f04991a26c9aa43879d05f19f35e149c/safetensors-0.4.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d42ffd4c2259f31832cb17ff866c111684c87bd930892a1ba53fed28370c918c", size = 383417 },
615
+ { url = "https://files.pythonhosted.org/packages/ae/88/3068e1bb16f5e9f9068901de3cf7b3db270b9bfe6e7d51d4b55c1da0425d/safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd8a1f6d2063a92cd04145c7fd9e31a1c7d85fbec20113a14b487563fdbc0597", size = 442311 },
616
+ { url = "https://files.pythonhosted.org/packages/f7/15/a2bb77ebbaa76b61ec2e9f731fe4db7f9473fd855d881957c51b3a168892/safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:951d2fcf1817f4fb0ef0b48f6696688a4e852a95922a042b3f96aaa67eedc920", size = 436678 },
617
+ { url = "https://files.pythonhosted.org/packages/ec/79/9608c4546cdbfe3860dd7aa59e3562c9289113398b1a0bd89b68ce0a9d41/safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ac85d9a8c1af0e3132371d9f2d134695a06a96993c2e2f0bbe25debb9e3f67a", size = 457316 },
618
+ { url = "https://files.pythonhosted.org/packages/0f/23/b17b483f2857835962ad33e38014efd4911791187e177bc23b057d35bee8/safetensors-0.4.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e3cec4a29eb7fe8da0b1c7988bc3828183080439dd559f720414450de076fcab", size = 620565 },
619
+ { url = "https://files.pythonhosted.org/packages/19/46/5d11dc300feaad285c2f1bd784ff3f689f5e0ab6be49aaf568f3a77019eb/safetensors-0.4.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:21742b391b859e67b26c0b2ac37f52c9c0944a879a25ad2f9f9f3cd61e7fda8f", size = 606660 },
620
+ ]
621
+
622
+ [[package]]
623
+ name = "setuptools"
624
+ version = "75.2.0"
625
+ source = { registry = "https://pypi.org/simple" }
626
+ sdist = { url = "https://files.pythonhosted.org/packages/07/37/b31be7e4b9f13b59cde9dcaeff112d401d49e0dc5b37ed4a9fc8fb12f409/setuptools-75.2.0.tar.gz", hash = "sha256:753bb6ebf1f465a1912e19ed1d41f403a79173a9acf66a42e7e6aec45c3c16ec", size = 1350308 }
627
+ wheels = [
628
+ { url = "https://files.pythonhosted.org/packages/31/2d/90165d51ecd38f9a02c6832198c13a4e48652485e2ccf863ebb942c531b6/setuptools-75.2.0-py3-none-any.whl", hash = "sha256:a7fcb66f68b4d9e8e66b42f9876150a3371558f98fa32222ffaa5bced76406f8", size = 1249825 },
629
+ ]
630
+
631
+ [[package]]
632
+ name = "sympy"
633
+ version = "1.13.3"
634
+ source = { registry = "https://pypi.org/simple" }
635
+ dependencies = [
636
+ { name = "mpmath" },
637
+ ]
638
+ sdist = { url = "https://files.pythonhosted.org/packages/11/8a/5a7fd6284fa8caac23a26c9ddf9c30485a48169344b4bd3b0f02fef1890f/sympy-1.13.3.tar.gz", hash = "sha256:b27fd2c6530e0ab39e275fc9b683895367e51d5da91baa8d3d64db2565fec4d9", size = 7533196 }
639
+ wheels = [
640
+ { url = "https://files.pythonhosted.org/packages/99/ff/c87e0622b1dadea79d2fb0b25ade9ed98954c9033722eb707053d310d4f3/sympy-1.13.3-py3-none-any.whl", hash = "sha256:54612cf55a62755ee71824ce692986f23c88ffa77207b30c1368eda4a7060f73", size = 6189483 },
641
+ ]
642
+
643
+ [[package]]
644
+ name = "tensorrt"
645
+ version = "10.5.0"
646
+ source = { registry = "https://pypi.org/simple" }
647
+ dependencies = [
648
+ { name = "tensorrt-cu12" },
649
+ ]
650
+ sdist = { url = "https://files.pythonhosted.org/packages/ee/b9/f917eb7dfe02da30bc91206a464c850f4b94a1e14b8f95870074c9b9abea/tensorrt-10.5.0.tar.gz", hash = "sha256:d5c6338d44aeda20250fdbe31f9df8ca152b830f811aaf19d6c4d1dafd18c84b", size = 16401 }
651
+
652
+ [[package]]
653
+ name = "tensorrt-cu12"
654
+ version = "10.5.0"
655
+ source = { registry = "https://pypi.org/simple" }
656
+ sdist = { url = "https://files.pythonhosted.org/packages/22/d5/a4c3e22482d4273e151123990934d7c8d0ba1e4efb9a483eba807cdce279/tensorrt-cu12-10.5.0.tar.gz", hash = "sha256:46edbda08c54c8ffa88c75d75b4761eb9839e81678135e8d1530adc8cef6a61b", size = 18341 }
657
+
658
+ [[package]]
659
+ name = "tensorrt-cu12-bindings"
660
+ version = "10.5.0"
661
+ source = { registry = "https://pypi.org/simple" }
662
+ wheels = [
663
+ { url = "https://files.pythonhosted.org/packages/21/be/cab39a2c387887fa87bb8f199d113a10ebd0ba8b052927c2ae43b1495cf6/tensorrt_cu12_bindings-10.5.0-cp310-none-manylinux_2_17_x86_64.whl", hash = "sha256:45a31cc3f25489bb05fc9cb8dae0e63b205bf3da1656c44430f97cf263d5720c", size = 1117215 },
664
+ { url = "https://files.pythonhosted.org/packages/02/49/36db3b3c0bd0c7dc68964c75b1691b46abe8388708b4da04c3261f8ab7c0/tensorrt_cu12_bindings-10.5.0-cp310-none-manylinux_2_31_aarch64.whl", hash = "sha256:900b87824ebbc9e1059a4a9a5ed3040eb9d74ba9a601674086030d373996692a", size = 1091646 },
665
+ { url = "https://files.pythonhosted.org/packages/17/df/e95a92fa4d43df918cc8bc681697b1423a988db339af25bafe25068c522d/tensorrt_cu12_bindings-10.5.0-cp310-none-win_amd64.whl", hash = "sha256:2bf2eb6d36ed9fe44a4b416def538775012abec34fdb5a6fb8461dd569717055", size = 769305 },
666
+ ]
667
+
668
+ [[package]]
669
+ name = "tensorrt-cu12-libs"
670
+ version = "10.5.0"
671
+ source = { registry = "https://pypi.org/simple" }
672
+ dependencies = [
673
+ { name = "nvidia-cuda-runtime-cu12" },
674
+ ]
675
+ sdist = { url = "https://files.pythonhosted.org/packages/ff/d2/28d4bdadcb4690e7c051ae23ac5559dffca7ee6bf859ea76c9ab9931ba53/tensorrt_cu12_libs-10.5.0.tar.gz", hash = "sha256:358b3a36c30ab74ad710f227b410206ae94e8d1003c09b75216e39813dac0d9d", size = 630 }
676
+
677
+ [[package]]
678
+ name = "tokenizers"
679
+ version = "0.19.1"
680
+ source = { registry = "https://pypi.org/simple" }
681
+ dependencies = [
682
+ { name = "huggingface-hub" },
683
+ ]
684
+ sdist = { url = "https://files.pythonhosted.org/packages/48/04/2071c150f374aab6d5e92aaec38d0f3c368d227dd9e0469a1f0966ac68d1/tokenizers-0.19.1.tar.gz", hash = "sha256:ee59e6680ed0fdbe6b724cf38bd70400a0c1dd623b07ac729087270caeac88e3", size = 321039 }
685
+ wheels = [
686
+ { url = "https://files.pythonhosted.org/packages/c1/60/91cac8d496b304ec5a22f07606893cad35ea8e1a8406dc8909e365f97a80/tokenizers-0.19.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:952078130b3d101e05ecfc7fc3640282d74ed26bcf691400f872563fca15ac97", size = 2533301 },
687
+ { url = "https://files.pythonhosted.org/packages/4c/12/9cb68762ff5fee1efd51aefe2f62cb225f26f060a68a3779e1060bbc7a59/tokenizers-0.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82c8b8063de6c0468f08e82c4e198763e7b97aabfe573fd4cf7b33930ca4df77", size = 2440223 },
688
+ { url = "https://files.pythonhosted.org/packages/e4/03/b2020e6a78fb994cff1ec962adc157c23109172a46b4fe451d6d0dd33fdb/tokenizers-0.19.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f03727225feaf340ceeb7e00604825addef622d551cbd46b7b775ac834c1e1c4", size = 3683779 },
689
+ { url = "https://files.pythonhosted.org/packages/50/4e/2e5549a26dc6f9e434f83bebf16c2d7dc9dc3477cc0ec8b23ede4d465b90/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:453e4422efdfc9c6b6bf2eae00d5e323f263fff62b29a8c9cd526c5003f3f642", size = 3569431 },
690
+ { url = "https://files.pythonhosted.org/packages/75/79/158626bd794e75551e0c6bb93f1cd3c9ba08ba14b181b98f09e95994f609/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:02e81bf089ebf0e7f4df34fa0207519f07e66d8491d963618252f2e0729e0b46", size = 3424739 },
691
+ { url = "https://files.pythonhosted.org/packages/65/8e/5f4316976c26009f1ae0b6543f3d97af29afa5ba5dc145251e6a07314618/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b07c538ba956843833fee1190cf769c60dc62e1cf934ed50d77d5502194d63b1", size = 3965791 },
692
+ { url = "https://files.pythonhosted.org/packages/6a/e1/5dbac9618709972434eea072670cd69fba1aa988e6200f16057722b4bf96/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28cab1582e0eec38b1f38c1c1fb2e56bce5dc180acb1724574fc5f47da2a4fe", size = 4049879 },
693
+ { url = "https://files.pythonhosted.org/packages/40/4f/eb78de4af3b17b589f43a369cbf0c3a7173f25c3d2cd93068852c07689aa/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b01afb7193d47439f091cd8f070a1ced347ad0f9144952a30a41836902fe09e", size = 3607049 },
694
+ { url = "https://files.pythonhosted.org/packages/f5/f8/141dcb0f88e9452af8d20d14dd53aab5937222a2bb4f2c04bfed6829263c/tokenizers-0.19.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7fb297edec6c6841ab2e4e8f357209519188e4a59b557ea4fafcf4691d1b4c98", size = 9634084 },
695
+ { url = "https://files.pythonhosted.org/packages/2e/be/debb7caa3f88ed54015170db16e07aa3a5fea2d3983d0dde92f98d888dc8/tokenizers-0.19.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e8a3dd055e515df7054378dc9d6fa8c8c34e1f32777fb9a01fea81496b3f9d3", size = 9949480 },
696
+ { url = "https://files.pythonhosted.org/packages/7a/e7/26bedf5d270d293d572a90bd66b0b030012aedb95d8ee87e8bcd446b76fb/tokenizers-0.19.1-cp310-none-win32.whl", hash = "sha256:7ff898780a155ea053f5d934925f3902be2ed1f4d916461e1a93019cc7250837", size = 2041462 },
697
+ { url = "https://files.pythonhosted.org/packages/f4/85/d999b9a05fd101d48f1a365d68be0b109277bb25c89fb37a389d669f9185/tokenizers-0.19.1-cp310-none-win_amd64.whl", hash = "sha256:bea6f9947e9419c2fda21ae6c32871e3d398cba549b93f4a65a2d369662d9403", size = 2220036 },
698
+ { url = "https://files.pythonhosted.org/packages/cf/7b/38fb7207cde3d1dc5272411cd18178e6437cdc1ef08cac5d0e8cfd57f38c/tokenizers-0.19.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b11853f17b54c2fe47742c56d8a33bf49ce31caf531e87ac0d7d13d327c9334", size = 2532668 },
699
+ { url = "https://files.pythonhosted.org/packages/1d/0d/2c452fe17fc17f0cdb713acb811eebb1f714b8c21d497c4672af4f491229/tokenizers-0.19.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d26194ef6c13302f446d39972aaa36a1dda6450bc8949f5eb4c27f51191375bd", size = 2438321 },
700
+ { url = "https://files.pythonhosted.org/packages/19/e0/f9e915d028b45798723eab59c253da28040aa66b9f31dcb7cfc3be88fa37/tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e8d1ed93beda54bbd6131a2cb363a576eac746d5c26ba5b7556bc6f964425594", size = 3682304 },
701
+ { url = "https://files.pythonhosted.org/packages/ce/2b/db8a94608c392752681c2ca312487b7cd5bcc4f77e24a90daa4916138271/tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca407133536f19bdec44b3da117ef0d12e43f6d4b56ac4c765f37eca501c7bda", size = 3566208 },
702
+ { url = "https://files.pythonhosted.org/packages/d8/58/2e998462677c4c0eb5123ce386bcb488a155664d273d0283122866515f09/tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce05fde79d2bc2e46ac08aacbc142bead21614d937aac950be88dc79f9db9022", size = 3605791 },
703
+ { url = "https://files.pythonhosted.org/packages/83/ac/26bc2e2bb2a054dc2e51699628936f5474e093b68da6ccdde04b2fc39ab8/tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:35583cd46d16f07c054efd18b5d46af4a2f070a2dd0a47914e66f3ff5efb2b1e", size = 9632867 },
704
+ { url = "https://files.pythonhosted.org/packages/45/b6/36c1bb106bbe96012c9367df89ed01599cada036c0b96d38fbbdbeb75c9f/tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:43350270bfc16b06ad3f6f07eab21f089adb835544417afda0f83256a8bf8b75", size = 9945103 },
705
+ ]
706
+
707
+ [[package]]
708
+ name = "torch"
709
+ version = "2.4.1"
710
+ source = { registry = "https://pypi.org/simple" }
711
+ dependencies = [
712
+ { name = "filelock" },
713
+ { name = "fsspec" },
714
+ { name = "jinja2" },
715
+ { name = "networkx" },
716
+ { name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
717
+ { name = "nvidia-cuda-cupti-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
718
+ { name = "nvidia-cuda-nvrtc-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
719
+ { name = "nvidia-cuda-runtime-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
720
+ { name = "nvidia-cudnn-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
721
+ { name = "nvidia-cufft-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
722
+ { name = "nvidia-curand-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
723
+ { name = "nvidia-cusolver-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
724
+ { name = "nvidia-cusparse-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
725
+ { name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
726
+ { name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
727
+ { name = "sympy" },
728
+ { name = "triton", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
729
+ { name = "typing-extensions" },
730
+ ]
731
+ wheels = [
732
+ { url = "https://files.pythonhosted.org/packages/41/05/d540049b1832d1062510efc6829634b7fbef5394c757d8312414fb65a3cb/torch-2.4.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:362f82e23a4cd46341daabb76fba08f04cd646df9bfaf5da50af97cb60ca4971", size = 797072810 },
733
+ { url = "https://files.pythonhosted.org/packages/a0/12/2162df9c47386ae7cedbc938f9703fee4792d93504fab8608d541e71ece3/torch-2.4.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:e8ac1985c3ff0f60d85b991954cfc2cc25f79c84545aead422763148ed2759e3", size = 89699259 },
734
+ { url = "https://files.pythonhosted.org/packages/5d/4c/b2a59ff0e265f5ee154f0d81e948b1518b94f545357731e1a3245ee5d45b/torch-2.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:91e326e2ccfb1496e3bee58f70ef605aeb27bd26be07ba64f37dcaac3d070ada", size = 199433813 },
735
+ { url = "https://files.pythonhosted.org/packages/dc/fb/1333ba666bbd53846638dd75a7a1d4eaf964aff1c482fc046e2311a1b499/torch-2.4.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:d36a8ef100f5bff3e9c3cea934b9e0d7ea277cb8210c7152d34a9a6c5830eadd", size = 62139309 },
736
+ ]
737
+
738
+ [[package]]
739
+ name = "tqdm"
740
+ version = "4.66.5"
741
+ source = { registry = "https://pypi.org/simple" }
742
+ dependencies = [
743
+ { name = "colorama", marker = "platform_system == 'Windows'" },
744
+ ]
745
+ sdist = { url = "https://files.pythonhosted.org/packages/58/83/6ba9844a41128c62e810fddddd72473201f3eacde02046066142a2d96cc5/tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad", size = 169504 }
746
+ wheels = [
747
+ { url = "https://files.pythonhosted.org/packages/48/5d/acf5905c36149bbaec41ccf7f2b68814647347b72075ac0b1fe3022fdc73/tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd", size = 78351 },
748
+ ]
749
+
750
+ [[package]]
751
+ name = "transformers"
752
+ version = "4.41.2"
753
+ source = { registry = "https://pypi.org/simple" }
754
+ dependencies = [
755
+ { name = "filelock" },
756
+ { name = "huggingface-hub" },
757
+ { name = "numpy" },
758
+ { name = "packaging" },
759
+ { name = "pyyaml" },
760
+ { name = "regex" },
761
+ { name = "requests" },
762
+ { name = "safetensors" },
763
+ { name = "tokenizers" },
764
+ { name = "tqdm" },
765
+ ]
766
+ sdist = { url = "https://files.pythonhosted.org/packages/50/c9/b8acdf584f19558e29f46d36b5ed80954c2d3831811f129a5a6e84c4537b/transformers-4.41.2.tar.gz", hash = "sha256:80a4db216533d573e9cc7388646c31ed9480918feb7c55eb211249cb23567f87", size = 7841904 }
767
+ wheels = [
768
+ { url = "https://files.pythonhosted.org/packages/d9/b7/98f821d70102e2d38483bbb7013a689d2d646daa4495377bc910374ad727/transformers-4.41.2-py3-none-any.whl", hash = "sha256:05555d20e43f808de1ef211ab64803cdb513170cef70d29a888b589caebefc67", size = 9092643 },
769
+ ]
770
+
771
+ [[package]]
772
+ name = "triton"
773
+ version = "3.0.0"
774
+ source = { registry = "https://pypi.org/simple" }
775
+ dependencies = [
776
+ { name = "filelock" },
777
+ ]
778
+ wheels = [
779
+ { url = "https://files.pythonhosted.org/packages/45/27/14cc3101409b9b4b9241d2ba7deaa93535a217a211c86c4cc7151fb12181/triton-3.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1efef76935b2febc365bfadf74bcb65a6f959a9872e5bddf44cc9e0adce1e1a", size = 209376304 },
780
+ ]
781
+
782
+ [[package]]
783
+ name = "typing-extensions"
784
+ version = "4.12.2"
785
+ source = { registry = "https://pypi.org/simple" }
786
+ sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 }
787
+ wheels = [
788
+ { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 },
789
+ ]
790
+
791
+ [[package]]
792
+ name = "urllib3"
793
+ version = "2.2.3"
794
+ source = { registry = "https://pypi.org/simple" }
795
+ sdist = { url = "https://files.pythonhosted.org/packages/ed/63/22ba4ebfe7430b76388e7cd448d5478814d3032121827c12a2cc287e2260/urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9", size = 300677 }
796
+ wheels = [
797
+ { url = "https://files.pythonhosted.org/packages/ce/d9/5f4c13cecde62396b0d3fe530a50ccea91e7dfc1ccf0e09c228841bb5ba8/urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", size = 126338 },
798
+ ]
799
+
800
+ [[package]]
801
+ name = "wheel"
802
+ version = "0.44.0"
803
+ source = { registry = "https://pypi.org/simple" }
804
+ sdist = { url = "https://files.pythonhosted.org/packages/b7/a0/95e9e962c5fd9da11c1e28aa4c0d8210ab277b1ada951d2aee336b505813/wheel-0.44.0.tar.gz", hash = "sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49", size = 100733 }
805
+ wheels = [
806
+ { url = "https://files.pythonhosted.org/packages/1b/d1/9babe2ccaecff775992753d8686970b1e2755d21c8a63be73aba7a4e7d77/wheel-0.44.0-py3-none-any.whl", hash = "sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f", size = 67059 },
807
+ ]
808
+
809
+ [[package]]
810
+ name = "zipp"
811
+ version = "3.20.2"
812
+ source = { registry = "https://pypi.org/simple" }
813
+ sdist = { url = "https://files.pythonhosted.org/packages/54/bf/5c0000c44ebc80123ecbdddba1f5dcd94a5ada602a9c225d84b5aaa55e86/zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29", size = 24199 }
814
+ wheels = [
815
+ { url = "https://files.pythonhosted.org/packages/62/8b/5ba542fa83c90e09eac972fc9baca7a88e7e7ca4b221a89251954019308b/zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350", size = 9200 },
816
+ ]