Jensen Holm committed · Commit ff1254a · 1 Parent(s): 9249567

adding unit tests for activation functions (using pytest)
Files changed:
- numpyneuron/__init__.py +7 -0
- numpyneuron/activation.py +3 -10
- requirements.txt +8 -7
- test/__init__.py +0 -0
- test/test_activation.py +66 -0
numpyneuron/__init__.py CHANGED

@@ -1,3 +1,10 @@
 from .loss import *
 from .activation import *
 from .nn import *
+
+ACTIVATIONS: dict[str, Activation] = {
+    "Relu": Relu(),
+    "Sigmoid": Sigmoid(),
+    "TanH": TanH(),
+    "SoftMax": SoftMax(),
+}
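The registry moved into __init__.py lets callers pick an activation by name at network-construction time. A minimal usage sketch (my example, not part of the commit; it assumes the package is importable as numpyneuron):

import numpy as np
from numpyneuron import ACTIVATIONS

relu = ACTIVATIONS["Relu"]                         # look up an activation by its string key
print(relu.forward(np.array([-1.0, 0.5, 2.0])))    # ReLU zeroes negatives: [0.  0.5 2. ]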
numpyneuron/activation.py CHANGED

@@ -39,18 +39,11 @@ class Sigmoid(Activation):
 
 class SoftMax(Activation):
     def forward(self, X: np.ndarray) -> np.ndarray:
+        ax = 1 if X.ndim > 1 else 0
         exps = np.exp(
-            X - np.max(X, axis=
+            X - np.max(X, axis=ax, keepdims=True)
         )  # Avoid numerical instability
-        return exps / np.sum(exps, axis=
+        return exps / np.sum(exps, axis=ax, keepdims=True)
 
     def backward(self, X: np.ndarray) -> np.ndarray:
         return X
-
-
-ACTIVATIONS: dict[str, Activation] = {
-    "Relu": Relu(),
-    "Sigmoid": Sigmoid(),
-    "TanH": TanH(),
-    "SoftMax": SoftMax(),
-}
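Subtracting the per-row maximum before exponentiating keeps exp from overflowing on large logits, and keepdims=True preserves the reduced axis so the division broadcasts over a batch. A quick standalone illustration in plain NumPy (my sketch, not code from the repository):

import numpy as np

X = np.array([[1000.0, 1001.0, 1002.0]])                    # large logits
# naive softmax: exp(1000) overflows to inf, so the result is nan
naive = np.exp(X) / np.sum(np.exp(X), axis=1, keepdims=True)

# max-shifted softmax: mathematically identical, numerically stable
shifted = X - np.max(X, axis=1, keepdims=True)
stable = np.exp(shifted) / np.sum(np.exp(shifted), axis=1, keepdims=True)

print(naive)    # [[nan nan nan]]
print(stable)   # [[0.09003057 0.24472847 0.66524096]]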
requirements.txt CHANGED

@@ -1,7 +1,8 @@
-gradio==4.
-matplotlib==3.
-numpy==
-plotly==5.
-
-
-
+gradio==4.39.0
+matplotlib==3.6.3
+numpy==2.0.1
+plotly==5.22.0
+pytest==7.4.4
+scikit_learn==1.5.1
+setuptools==68.1.2
+tqdm==4.66.4
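With pytest pinned here, the test dependency installs alongside the app's requirements in the usual way (standard pip usage; the commit itself doesn't prescribe a command):

pip install -r requirements.txt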
test/__init__.py ADDED

(empty file; marks test/ as a Python package)
test/test_activation.py ADDED

@@ -0,0 +1,66 @@
+import os
+import sys
+import pytest
+
+sys.path.append(os.path.abspath(".."))
+
+import random
+import numpy as np
+from numpyneuron import (
+    TanH,
+    Sigmoid,
+    Relu,
+    SoftMax,
+    Sigmoid,
+)
+
+# these functions are meant to work with np.ndarray
+# objects, but they will also work with numbers which
+# makes testing a little bit simpler
+
+
+def test_tanh() -> None:
+    """
+    tanh(1) =~ 0.76
+    tanh'(1) =~ sech^2(1) =~ 0.419
+    """
+    tanh = TanH()
+    assert tanh.forward(1) == pytest.approx(np.tanh(1))
+    assert tanh.forward(1) == pytest.approx(0.7615941559557649)
+    assert tanh.backward(1) == pytest.approx(0.41997434161402614)
+
+
+def test_sigmoid() -> None:
+    """
+    sigmoid(1) =~ 0.73105
+    sigmoid'(1) =~ 0.1966
+    """
+    sigmoid = Sigmoid()
+    assert sigmoid.forward(1) == pytest.approx(0.7310585786300049)
+    assert sigmoid.backward(1) == pytest.approx(0.4621171572600098)
+
+
+def test_relu() -> None:
+    """
+    relu(n > 0) = n
+    relu(n < 0) = 0
+    relu'(n > 0) = 1
+    relu'(n < 0) = 0
+    """
+    relu = Relu()
+    random_n = random.randint(1, 100)
+    assert relu.forward(random_n) == random_n
+    assert relu.backward(random_n) == 1
+
+
+def test_softmax() -> None:
+    """
+    softmax([1, 2, 3]) = [0.090031, 0.244728, 0.665241]
+    """
+    softmax = SoftMax()
+    vec = np.array([1, 2, 3])
+    assert np.allclose(
+        softmax.forward(vec),
+        np.array([0.090031, 0.244728, 0.665241]),
+    )
+    assert np.allclose(softmax.backward(vec), vec)
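One common way to run the new suite from the repository root (the commit itself does not document an invocation):

python -m pytest test/ -v

Running via python -m puts the current directory on sys.path, which makes numpyneuron importable without installing it; the sys.path.append(os.path.abspath("..")) line at the top of the test file resolves against the working directory and appears intended for the same purpose when the tests are launched from inside test/.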