import torch.nn as nn
from transformers import RobertaModel

class RoBERTaClassifier(nn.Module):
    def __init__(self, num_labels):
        super(RoBERTaClassifier, self).__init__()
        # Pretrained RoBERTa encoder
        self.roberta = RobertaModel.from_pretrained('roberta-base')
        self.dropout = nn.Dropout(0.2)
        # Classification head mapping the pooled representation to num_labels
        self.linear = nn.Linear(self.roberta.config.hidden_size, num_labels)
        self.sigmoid = nn.Sigmoid()

    def forward(self, input_ids, attention_mask):
        outputs = self.roberta(input_ids=input_ids, attention_mask=attention_mask)
        # Pooled sentence-level representation from RoBERTa's pooler
        pooled_output = outputs.pooler_output
        pooled_output = self.dropout(pooled_output)
        logits = self.linear(pooled_output)
        # Sigmoid yields independent per-label probabilities (multi-label style output)
        return self.sigmoid(logits)
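
A minimal usage sketch of the classifier above, assuming the matching 'roberta-base' tokenizer; the two-label setup and the example sentence are illustrative assumptions, not part of the original.

import torch
from transformers import RobertaTokenizer

# Assumption: two output labels, purely for illustration
tokenizer = RobertaTokenizer.from_pretrained('roberta-base')
model = RoBERTaClassifier(num_labels=2)
model.eval()

# Tokenize an example sentence (hypothetical input)
encoded = tokenizer("This is an example sentence.",
                    return_tensors='pt', padding=True, truncation=True)

with torch.no_grad():
    probs = model(input_ids=encoded['input_ids'],
                  attention_mask=encoded['attention_mask'])

print(probs)  # per-label sigmoid probabilities, shape (1, num_labels)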