Commit
fix up cross entropy loss
darkliang committed Mar 28, 2023
1 parent 3aaf750 commit 5c61842
Showing 2 changed files with 32 additions and 46 deletions.
32 changes: 32 additions & 0 deletions opengait/modeling/losses/ce.py
@@ -0,0 +1,32 @@
import torch.nn.functional as F

from .base import BaseLoss


class CrossEntropyLoss(BaseLoss):
    def __init__(self, scale=2**4, label_smooth=True, eps=0.1, loss_term_weight=1.0, log_accuracy=False):
        super(CrossEntropyLoss, self).__init__(loss_term_weight)
        self.scale = scale
        self.label_smooth = label_smooth
        self.eps = eps
        self.log_accuracy = log_accuracy

    def forward(self, logits, labels):
        """
        logits: [n, c, p]
        labels: [n]
        """
        n, c, p = logits.size()
        logits = logits.float()
        labels = labels.unsqueeze(1)
        if self.label_smooth:
            loss = F.cross_entropy(
                logits*self.scale, labels.repeat(1, p), label_smoothing=self.eps)
        else:
            loss = F.cross_entropy(logits*self.scale, labels.repeat(1, p))
        self.info.update({'loss': loss.detach().clone()})
        if self.log_accuracy:
            pred = logits.argmax(dim=1)  # [n, p]
            accu = (pred == labels).float().mean()
            self.info.update({'accuracy': accu})
        return loss, self.info
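
A minimal usage sketch (not part of the commit), assuming BaseLoss provides the info dict and that loss_term_weight is applied outside this forward pass; the shapes and values below are hypothetical, chosen only to illustrate the expected [n, c, p] logits and [n] labels:

import torch

# hypothetical shapes: n=4 sequences, c=10 identity classes, p=16 horizontal parts
logits = torch.randn(4, 10, 16)       # [n, c, p]
labels = torch.randint(0, 10, (4,))   # [n]

loss_fn = CrossEntropyLoss(scale=2**4, label_smooth=True, eps=0.1, log_accuracy=True)
loss, info = loss_fn(logits, labels)  # scalar loss averaged over the p parts; info holds 'loss' and 'accuracy'

Note that the label_smoothing argument of F.cross_entropy relied on here requires PyTorch 1.10 or later.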
46 changes: 0 additions & 46 deletions opengait/modeling/losses/softmax.py

This file was deleted.
