Impulse2000 committed on
Commit 441a814 · unverified · 1 Parent(s): 554a75a

Added model arch into example

Files changed (1)
  1. README.md +20 -1
README.md CHANGED
@@ -33,10 +33,29 @@ The model's bidirectional nature and architectural components make it adept at u
 # Example Code
 
 ```python
+import torch.nn as nn
+import torch.nn.functional as F
+
+class CodeClassifier(nn.Module):
+    def __init__(self, num_classes, embedding_dim, hidden_dim, num_layers, bidirectional=False):
+        super(CodeClassifier, self).__init__()
+        self.feature_extractor = nn.LSTM(embedding_dim, hidden_dim, num_layers, batch_first=True, bidirectional=bidirectional)
+        self.dropout = nn.Dropout(0.5)  # Reintroduce dropout
+        self.fc1 = nn.Linear(hidden_dim * (2 if bidirectional else 1), hidden_dim)  # Intermediate layer
+        self.fc2 = nn.Linear(hidden_dim, num_classes)  # Output layer
+
+    def forward(self, x):
+        x = x.unsqueeze(1)  # Add sequence dimension
+        x, _ = self.feature_extractor(x)
+        x = x.squeeze(1)  # Remove sequence dimension
+        x = self.fc1(x)
+        x = self.dropout(x)  # Apply dropout
+        x = self.fc2(x)
+        return x
+
 import torch
 from transformers import AutoTokenizer, AutoModel
 from pathlib import Path
-from model import CodeClassifier
 
 def infer(text, model_path, embedding_model_name):
     device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
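For context, here is a minimal sketch of how the `CodeClassifier` defined in this commit might be instantiated and called. It assumes the class from the diff above is already in scope (as in the updated README); the dimensions used (768-dimensional embeddings, 256 hidden units, 2 layers, 5 classes) are illustrative assumptions, not values taken from the diff.

```python
import torch

# Sketch only: assumes the CodeClassifier class from the README example above is
# defined in this module. The sizes below are placeholders; embedding_dim=768
# matches many BERT-style encoders.
model = CodeClassifier(num_classes=5, embedding_dim=768, hidden_dim=256, num_layers=2, bidirectional=True)
model.eval()

# forward() expects one pooled embedding per sample: shape (batch_size, embedding_dim).
dummy_embeddings = torch.randn(4, 768)
with torch.no_grad():
    logits = model(dummy_embeddings)  # shape: (4, 5)
print(logits.shape)
```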