PeteBleackley committed on
Commit
7b60f84
·
1 Parent(s): 5f107ac

Ensure attention mask is assigned to GPU

Browse files
Files changed (1) hide show
  1. qarac/corpora/CombinedCorpus.py +1 -1
qarac/corpora/CombinedCorpus.py CHANGED
@@ -181,7 +181,7 @@ class CombinedCorpus(torch.utils.data.IterableDataset):
181
  result = input_ids
182
  if inputs:
183
  attention_mask = torch.not_equal(input_ids,
184
- self.pad_token)
185
  result = transformers.BatchEncoding({'input_ids':input_ids,
186
  'attention_mask':attention_mask})
187
  return result
 
181
  result = input_ids
182
  if inputs:
183
  attention_mask = torch.not_equal(input_ids,
184
+ self.pad_token).to(self.device)
185
  result = transformers.BatchEncoding({'input_ids':input_ids,
186
  'attention_mask':attention_mask})
187
  return result