Spaces:
Runtime error
Runtime error
attention-refocusing
committed on
Commit
·
99d6c89
1
Parent(s):
138fffd
Update gligen/ldm/modules/attention.py
Browse files
gligen/ldm/modules/attention.py
CHANGED
@@ -371,10 +371,10 @@ class BasicTransformerBlock(nn.Module):
|
|
371 |
def forward(self, x, context, objs,t):
|
372 |
# return checkpoint(self._forward, (x, context, objs), self.parameters(), self.use_checkpoint)
|
373 |
# import pdb; pdb.set_trace()
|
374 |
-
if self.use_checkpoint and x.requires_grad:
|
375 |
-
|
376 |
-
else:
|
377 |
-
|
378 |
|
379 |
def _forward(self, x, context, objs,t):
|
380 |
# self_att_grounding = []
|
|
|
371 |
def forward(self, x, context, objs,t):
|
372 |
# return checkpoint(self._forward, (x, context, objs), self.parameters(), self.use_checkpoint)
|
373 |
# import pdb; pdb.set_trace()
|
374 |
+
# if self.use_checkpoint and x.requires_grad:
|
375 |
+
# return checkpoint.checkpoint(self._forward, x, context, objs,t)
|
376 |
+
# else:
|
377 |
+
return self._forward(x, context, objs,t)
|
378 |
|
379 |
def _forward(self, x, context, objs,t):
|
380 |
# self_att_grounding = []
|