Test additional losses and move them to a meaningful place
Opened this issue · 0 comments
einbandi commented
Currently, kullback_leibler_divergence and cross_entropy_loss are located in paradime.loss. Should they be moved to paradime.routines instead? What about the following losses from the old ptsne.py file:
def kullback_leibler_loss(p, q, eps=1.0e-7):
    """Summed element-wise Kullback-Leibler divergence between p and q.

    Both inputs are smoothed by `eps` before taking logs to avoid
    log(0); diagonal entries are zeroed out before summing (self-pairs
    are excluded, as is usual for pairwise-affinity matrices).
    """
    smoothing = torch.tensor(eps, dtype=p.dtype)
    divergence = p * (torch.log(p + smoothing) - torch.log(q + smoothing))
    # Exclude the diagonal (self-comparisons) from the total.
    divergence.fill_diagonal_(0.)
    return divergence.sum()
def kullback_leibler_reverse_loss(p, q, eps=1.0e-7):
    """Reverse KL divergence: the forward KL loss with p and q swapped."""
    return kullback_leibler_loss(p=q, q=p, eps=eps)
def jensen_shannon_loss(p, q, eps=1.0e-7):
    """Jensen-Shannon divergence: mean of the KL divergences of p and q
    against their midpoint distribution m = (p + q) / 2.
    """
    midpoint = 0.5 * (p + q)
    left = kullback_leibler_loss(p, midpoint, eps)
    right = kullback_leibler_loss(q, midpoint, eps)
    return 0.5 * (left + right)
def frobenius_loss(p, q):
    """Squared Frobenius norm of the difference p - q (sum of squared
    element-wise differences)."""
    diff = p - q
    return (diff * diff).sum()
def total_variational_loss(p, q):
    """Sum of absolute element-wise differences between p and q
    (an unnormalized total-variation-style distance)."""
    return (p - q).abs().sum()