Jensen-holm committed on
Commit 28a7ac6
Parent: b471057

adding Sigmoid as an option for output layer activation

Files changed (1): app.py (+1 -1)
app.py CHANGED

@@ -134,7 +134,7 @@ if __name__ == "__main__":
             choices=["Relu", "Sigmoid", "TanH"],
             label="Hidden Layer Activation",
         ),
-        gr.Dropdown(choices=["SoftMax"], label="Output Activation"),
+        gr.Dropdown(choices=["SoftMax", "Sigmoid"], label="Output Activation"),
         gr.Dropdown(
             choices=["CrossEntropy", "CrossEntropyWithLogitsLoss"],
             label="Loss Function",