Spaces: Running on T4
Update modules/model.py
Browse files- modules/model.py +1 -1
modules/model.py
CHANGED
@@ -179,7 +179,7 @@ class CrossAttnProcessor(nn.Module):
 179         k_bucket_size = 1024
 180
 181         # use flash-attention
-182         hidden_states = FlashAttentionFunction
+182         hidden_states = FlashAttentionFunction(
 183             query.contiguous(), key.contiguous(), value.contiguous(),
 184             attention_mask, causal=False, q_bucket_size=q_bucket_size, k_bucket_size=k_bucket_size
 185         )