bastasie committed on
Commit
e3081e3
1 Parent(s): d3fbdb9

Upload CustomModel.py

Browse files
Files changed (1) hide show
  1. CustomModel.py +33 -0
CustomModel.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
def ba_activation(x, weights, a, epsilon):
    """Ba-inspired activation: a damped cosine of a log-modulated fractional power.

    The input is scaled by ``weights``, clamped to [-1, 1], raised to its own
    power (|x|^x), and fed through ``epsilon * cos(pi * a * f * log(|f| + 1e-7))``,
    with a final tanh to keep the output bounded.

    Args:
        x: Input values; anything ``torch.as_tensor`` accepts (converted to float32).
        weights: Multiplicative modulation applied to ``x`` (scalar or broadcastable tensor).
        a: Frequency-like scalar inside the cosine.
        epsilon: Amplitude scalar for the cosine term.

    Returns:
        A float32 tensor of the same broadcast shape as ``weights * x``,
        with values in (-1, 1) due to the final tanh.
    """
    tensor_in = torch.as_tensor(x, dtype=torch.float32)

    # Modulate the input before the non-linearity.
    modulated = weights * tensor_in

    # Clamp to [-1, 1] so the fractional power stays well-behaved.
    clamped = torch.clamp(modulated, -1, 1)
    frac = torch.pow(torch.abs(clamped), clamped)

    # 1e-7 guards the log against a zero argument.
    phase = np.pi * a * frac * torch.log(torch.abs(frac) + 1e-7)

    # tanh keeps the result numerically stable and bounded.
    return torch.tanh(epsilon * torch.cos(phase))
# A small two-layer network wired through the Ba-inspired activation.
class CustomModel(nn.Module):
    """Two linear layers with the Ba-inspired activation in between.

    Args:
        input_size: Width of the input features.
        hidden_size: Width of the hidden layer (also the size of the
            per-unit modulation weights used by the activation).
        output_size: Width of the output layer.
    """

    def __init__(self, input_size, hidden_size, output_size):
        super().__init__()
        # Layer creation order is kept stable so parameter registration
        # (state_dict keys) and random initialization are unchanged.
        self.linear1 = nn.Linear(input_size, hidden_size)
        self.linear2 = nn.Linear(hidden_size, output_size)
        # Learnable per-hidden-unit modulation for the activation.
        self.weights = nn.Parameter(torch.randn(hidden_size))
        self.a = 0.5  # Frequency scalar for the Ba-inspired activation.
        self.epsilon = 0.1  # Amplitude scalar for the Ba-inspired activation.

    def forward(self, x):
        hidden = self.linear1(x)
        activated = ba_activation(hidden, self.weights, self.a, self.epsilon)
        return self.linear2(activated)