# NOTE(review): the following lines are file-viewer page chrome that was
# scraped along with the source (commit "init commit without .pth",
# dee113c, "raw / history blame", "1.25 kB"). They are not Python and are
# commented out here so the module parses.
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import torch
import torch.nn as nn
import torch
from torch.autograd import Variable
import copy
from torch.nn import CrossEntropyLoss, MSELoss
class Model(nn.Module):
    """Binary classifier head wrapped around a pretrained encoder.

    The encoder is run over ``input_ids`` with an attention mask that
    treats token id 1 as padding (presumably the tokenizer's pad id --
    TODO confirm against the caller). Its first output is passed through
    dropout and a sigmoid to produce per-example probabilities; when
    ``labels`` are given, a manual binary cross-entropy loss is computed
    on the first probability column.
    """

    def __init__(self, encoder, config, tokenizer, args):
        super(Model, self).__init__()
        self.encoder = encoder
        self.config = config
        self.tokenizer = tokenizer
        self.args = args
        # Dropout probability is supplied via command-line args.
        self.dropout = nn.Dropout(args.dropout_probability)

    def forward(self, input_ids=None, labels=None, return_vec=None):
        # Token id 1 is masked out as padding.
        enc_out = self.encoder(input_ids, attention_mask=input_ids.ne(1))

        # Optionally expose the encoder's pooled vector instead of a score.
        if return_vec:
            return enc_out.pooler_output

        # First encoder output -> dropout -> sigmoid probabilities.
        logits = self.dropout(enc_out[0])
        prob = torch.sigmoid(logits)

        if labels is None:
            return prob

        # Manual BCE on column 0; the 1e-10 epsilon keeps log() finite
        # when the probability saturates at exactly 0 or 1.
        labels = labels.float()
        pos_term = torch.log(prob[:, 0] + 1e-10) * labels
        neg_term = torch.log((1 - prob)[:, 0] + 1e-10) * (1 - labels)
        loss = -(pos_term + neg_term).mean()
        return loss, prob