from torch import nn
from transformers import BertModel, BertConfig
from transformers.modeling_outputs import SequenceClassifierOutput


class BertClassifier(nn.Module):
    """BERT encoder with a two-layer MLP head for sequence classification."""

    def __init__(self, num_labels=2, dropout=0.1, bert_model=None):
        super().__init__()
        # Load pretrained weights when a model name/path is given; otherwise
        # build a randomly initialized BERT from a fresh config.
        if bert_model:
            self.bert = BertModel.from_pretrained(bert_model)
        else:
            config = BertConfig(vocab_size=34688, max_position_embeddings=512)
            self.bert = BertModel(config=config)

        self.num_labels = num_labels
        self.classifier = nn.Sequential(
            nn.Linear(self.bert.config.hidden_size, self.bert.config.hidden_size),
            nn.ReLU(),
            nn.Dropout(dropout),
            nn.Linear(self.bert.config.hidden_size, num_labels),
        )

    def forward(self, input_ids=None, attention_mask=None, labels=None):
        output = self.bert(input_ids, attention_mask=attention_mask)
        # Classify from the pooled [CLS] representation (one vector per sequence).
        logits = self.classifier(output.pooler_output)
        loss = None
        # `if labels:` raises "Boolean value of Tensor is ambiguous" on batched
        # label tensors, so compare against None explicitly.
        if labels is not None:
            loss_fct = nn.CrossEntropyLoss()
            loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
        # The logits are sequence-level, so SequenceClassifierOutput is the
        # matching output type; it carries loss, logits, hidden_states, attentions.
        return SequenceClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=output.hidden_states,
            attentions=output.attentions,
        )
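

# --- Minimal usage sketch (illustrative, not part of the module) ---
# Assumptions: the randomly initialized variant above (no checkpoint) and a
# dummy batch of token ids. With real data you would pass a tokenizer's output
# here and a pretrained name/path via `bert_model` in the constructor.
if __name__ == "__main__":
    import torch

    model = BertClassifier(num_labels=2, dropout=0.1)
    model.eval()

    # Dummy batch: 4 sequences of 16 ids, all below vocab_size=34688.
    input_ids = torch.randint(0, 34688, (4, 16))
    attention_mask = torch.ones_like(input_ids)
    labels = torch.tensor([0, 1, 0, 1])

    with torch.no_grad():
        out = model(input_ids=input_ids, attention_mask=attention_mask, labels=labels)
    print(out.logits.shape)  # torch.Size([4, 2])
    print(out.loss)          # scalar cross-entropy loss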