attention-refocusing committed on
Commit 99d6c89 · 1 Parent(s): 138fffd

Update gligen/ldm/modules/attention.py

Files changed (1)
  1. gligen/ldm/modules/attention.py +4 -4
gligen/ldm/modules/attention.py CHANGED
@@ -371,10 +371,10 @@ class BasicTransformerBlock(nn.Module):
     def forward(self, x, context, objs,t):
         # return checkpoint(self._forward, (x, context, objs), self.parameters(), self.use_checkpoint)
         # import pdb; pdb.set_trace()
-        if self.use_checkpoint and x.requires_grad:
-            return checkpoint.checkpoint(self._forward, x, context, objs,t)
-        else:
-            return self._forward(x, context, objs,t)
+        # if self.use_checkpoint and x.requires_grad:
+        #     return checkpoint.checkpoint(self._forward, x, context, objs,t)
+        # else:
+        return self._forward(x, context, objs,t)
 
     def _forward(self, x, context, objs,t):
         # self_att_grounding = []
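
The change comments out the gradient-checkpointing branch, so forward() now always calls _forward() directly and intermediate activations stay in memory during backward. For context, below is a minimal, self-contained sketch (not from this repo; the Block module, dimensions, and use_reentrant flag are illustrative) of what the disabled branch did with torch.utils.checkpoint.checkpoint:

# Minimal sketch (illustrative, not the repo's module): the checkpointed path
# recomputes activations inside _forward during backward, trading extra
# compute for lower memory; the plain path matches the updated forward() above.
import torch
import torch.nn as nn
from torch.utils import checkpoint


class Block(nn.Module):
    def __init__(self, dim=64, use_checkpoint=True):
        super().__init__()
        self.use_checkpoint = use_checkpoint
        self.net = nn.Sequential(nn.Linear(dim, dim), nn.GELU(), nn.Linear(dim, dim))

    def forward(self, x):
        if self.use_checkpoint and x.requires_grad:
            # Checkpointed path: activations inside _forward are not stored;
            # they are recomputed when backward reaches this block.
            return checkpoint.checkpoint(self._forward, x, use_reentrant=False)
        # Plain path: activations are kept, as in the updated forward() above.
        return self._forward(x)

    def _forward(self, x):
        return self.net(x) + x


x = torch.randn(2, 64, requires_grad=True)
Block()(x).sum().backward()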