ybelkada committed on
Commit 6efe809
1 Parent(s): 3e62415

Update toxicity.py

Files changed (1):
  1. toxicity.py +1 -1
toxicity.py CHANGED
@@ -131,7 +131,7 @@ class Toxicity(evaluate.Measurement):
         model_name = self.config_name
         self.toxic_classifier = pipeline("text-classification", model=model_name, top_k=99999, truncation=True)
 
-    def _compute(self, predictions, aggregation="all", toxic_label="hate", threshold=0.5):
+    def _compute(self, predictions, aggregation="all", toxic_label="offensive", threshold=0.5):
         scores = toxicity(predictions, self.toxic_classifier, toxic_label)
         if aggregation == "ratio":
             return {"toxicity_ratio": sum(i >= threshold for i in scores) / len(scores)}