import torch
from transformers import DistilBertModel, PreTrainedModel

from .configuration_essay_clarity import DistillBERTClassClarityConfig


class DistillBERTClassClarity(PreTrainedModel):
    """DistilBERT encoder with a single-logit head for scoring essay clarity."""

    config_class = DistillBERTClassClarityConfig

    def __init__(self, config):
        super().__init__(config)
        # Pretrained DistilBERT backbone (hidden size 768).
        self.l1 = DistilBertModel.from_pretrained("distilbert-base-uncased")
        self.pre_classifier = torch.nn.Linear(768, 768)
        self.dropout = torch.nn.Dropout(0.3)
        # Single output logit for the clarity score.
        self.classifier = torch.nn.Linear(768, 1)

    def forward(self, ids=None, mask=None):
        # Encode the token ids; the first element of the encoder output is the
        # last hidden state, shaped (batch, seq_len, 768).
        output_1 = self.l1(input_ids=ids, attention_mask=mask)
        hidden_state = output_1[0]
        # Use the [CLS] token's hidden state as a pooled sequence representation.
        pooler = hidden_state[:, 0]
        pooler = self.pre_classifier(pooler)
        pooler = torch.nn.functional.relu(pooler)
        pooler = self.dropout(pooler)
        output = self.classifier(pooler)
        return output
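

if __name__ == "__main__":
    # Minimal smoke test (a sketch, not part of the library API). It assumes
    # DistillBERTClassClarityConfig can be constructed with default arguments
    # and that the standard "distilbert-base-uncased" tokenizer matches the
    # backbone loaded in __init__.
    from transformers import DistilBertTokenizer

    tokenizer = DistilBertTokenizer.from_pretrained("distilbert-base-uncased")
    model = DistillBERTClassClarity(DistillBERTClassClarityConfig())
    model.eval()

    enc = tokenizer("An example essay sentence.", return_tensors="pt")
    with torch.no_grad():
        score = model(ids=enc["input_ids"], mask=enc["attention_mask"])
    print(score.shape)  # torch.Size([1, 1]) -- one clarity logit per input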