import torch
from torch import nn

class SigmoidBinaryCrossEntropyLoss(nn.Module):
    def __init__(self):
        # reduction options for BCE are 'none' / 'mean' / 'sum'; we use 'none' below
        super(SigmoidBinaryCrossEntropyLoss, self).__init__()

    def forward(self, inputs, targets, mask=None):
        """
        inputs – Tensor of shape (batch_size, len)
        targets – Tensor of the same shape as inputs
        """
        inputs, targets, mask = inputs.float(), targets.float(), mask.float()
        res = nn.functional.binary_cross_entropy_with_logits(
            inputs, targets, reduction="none", weight=mask)
        return res.mean(dim=1)

loss = SigmoidBinaryCrossEntropyLoss()

pred = torch.tensor([[1.5, 0.3, -1, 2], [1.1, -0.6, 2.2, 0.4]])
# In the label variable, 1 and 0 mark context words and noise words respectively
label = torch.tensor([[1, 0, 0, 0], [1, 1, 0, 0]])
mask = torch.tensor([[1, 1, 1, 1], [1, 1, 1, 0]])  # mask variable
# Rescale so each row is averaged over its valid (unmasked) positions only
loss(pred, label, mask) * mask.shape[1] / mask.float().sum(dim=1)
Output: tensor([0.8740, 1.2100])
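For reference, here is a minimal sketch (the names elementwise, masked, and per_row are my own) that expands what binary_cross_entropy_with_logits with weight=mask computes element by element, assuming the pred, label, and mask tensors defined above:

import torch

# Per-element BCE with logits: -[y*log σ(x) + (1-y)*log(1-σ(x))],
# written with log σ(-x) via the identity 1 - σ(x) = σ(-x)
elementwise = -(label.float() * torch.nn.functional.logsigmoid(pred)
                + (1 - label.float()) * torch.nn.functional.logsigmoid(-pred))
masked = elementwise * mask.float()                    # weight=mask zeroes out padded positions
per_row = masked.sum(dim=1) / mask.float().sum(dim=1)  # average over valid positions only
print(per_row)  # should match tensor([0.8740, 1.2100])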
The computation above is equivalent to:
import math

def sigmd(x):
    # Despite the name, this returns -log(sigmoid(x)), i.e. one BCE term
    return -math.log(1 / (1 + math.exp(-x)))

print('%.4f' % ((sigmd(1.5) + sigmd(-0.3) + sigmd(1) + sigmd(-2)) / 4))  # note: 1 - sigmoid(x) = sigmoid(-x)
print('%.4f' % ((sigmd(1.1) + sigmd(-0.6) + sigmd(-2.2)) / 3))
Output
0.8740
1.2100
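The note that 1 - sigmoid(x) = sigmoid(-x) is what lets sigmd handle the label-0 terms by simply negating the logit. A one-line derivation of that identity:

$$1 - \sigma(x) = 1 - \frac{1}{1+e^{-x}} = \frac{e^{-x}}{1+e^{-x}} = \frac{1}{1+e^{x}} = \sigma(-x)$$

So for a noise word (label 0) the loss term $-\log(1-\sigma(x))$ equals $-\log\sigma(-x)$, i.e. sigmd(-x), which is why the predictions for the label-0 positions appear negated in the print statements above.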