tweetpie committed on
Commit
698c152
·
1 Parent(s): 96f6b7d

- updates for webm generation

Browse files
Files changed (1) hide show
  1. app.py +3 -2
app.py CHANGED
@@ -70,14 +70,15 @@ if generate_button:
70
  with st.spinner('Classifying the toxicity...'):
71
  time.sleep(2) # Simulating delay
72
  model_output = [[{'label': 'LABEL_0', 'score': 0.9999998807907104},
73
- {'label': 'LABEL_1', 'score': 1.1919785395889282e-07}]]
 
74
  output = model_output[0]
75
 
76
  st.write("Toxicity Classifier Output:")
77
  for i in range(len(output)):
78
  if output[i]['label'] == 'LABEL_0':
79
  st.write(f"Non-Toxic Content: {output[i]['score'] * 100:.1f}%")
80
- elif output[i]['label'] == 'LABEL_1':
81
  st.write(f"Toxic Content: {output[i]['score'] * 100:.1f}%")
82
  else:
83
  continue
 
70
  with st.spinner('Classifying the toxicity...'):
71
  time.sleep(2) # Simulating delay
72
  model_output = [[{'label': 'LABEL_0', 'score': 0.9999998807907104},
73
+ {'label': 'LABEL_1', 'score': 1.1919785395889282e-07},
74
+ {'label': 'LABEL_2', 'score': 1.1919785395889282e-07}]]
75
  output = model_output[0]
76
 
77
  st.write("Toxicity Classifier Output:")
78
  for i in range(len(output)):
79
  if output[i]['label'] == 'LABEL_0':
80
  st.write(f"Non-Toxic Content: {output[i]['score'] * 100:.1f}%")
81
+ elif output[i]['label'] == 'LABEL_2':
82
  st.write(f"Toxic Content: {output[i]['score'] * 100:.1f}%")
83
  else:
84
  continue