rdiehlmartinez committed
Commit ed98a53
Parent: 4d83749

removing tqdm logging

Files changed (1)
  1. perplexity.py +1 -2
perplexity.py CHANGED
@@ -20,7 +20,6 @@ from torch.nn import CrossEntropyLoss
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
 import evaluate
-from evaluate import logging
 
 
 _CITATION = """\
@@ -161,7 +160,7 @@ class Perplexity(evaluate.Metric):
         ppls = []
         loss_fct = CrossEntropyLoss(reduction="none")
 
-        for start_index in logging.tqdm(range(0, len(encoded_texts), batch_size)):
+        for start_index in range(0, len(encoded_texts), batch_size):
             end_index = min(start_index + batch_size, len(encoded_texts))
             encoded_batch = encoded_texts[start_index:end_index]
             attn_mask = attn_masks[start_index:end_index]
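For context, a minimal sketch of how a metric like this is typically invoked through the `evaluate` API. With this change, `compute` iterates over batches with a plain `range`, so no tqdm progress bar is printed while scoring. The load path, model name, and input texts below are illustrative assumptions, not taken from this commit.

```python
import evaluate

# Load the perplexity metric module. The path "perplexity" is an
# assumption -- point evaluate.load at wherever this perplexity.py
# module is actually hosted.
perplexity = evaluate.load("perplexity", module_type="metric")

# "gpt2" and the example texts are placeholders for illustration.
results = perplexity.compute(
    model_id="gpt2",
    predictions=["Hello world.", "Perplexity measures model surprise."],
    batch_size=2,
)
print(results["mean_perplexity"])
```

A likely motivation for the change: when the metric runs repeatedly inside a training or evaluation loop, per-batch progress bars clutter the logs, so the loop now stays silent at the cost of no progress display.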