Module dimdrop.losses

Source code
from .tsne_loss import TSNELoss
from .vae_loss import VAELoss

__all__ = [
    'TSNELoss', 'VAELoss'
]

Sub-modules

dimdrop.losses.tsne_loss
dimdrop.losses.vae_loss

Classes

class TSNELoss (dim, batch_size)

Custom keras loss function implementing the cost function of t-SNE

Parameters

dim : int
The dimension of the output of the network
batch_size : int
The batch size of the network
Source code
class TSNELoss:
    """
    Custom keras loss function implementing the cost function of t-SNE.

    The callable computes the KL divergence between the target affinity
    distribution ``y_true`` (P) and the Student-t based affinities of the
    network output ``y_pred`` (Q).

    Parameters
    ----------
    dim : int
        The dimension of the output of the network
    batch_size : int
        The batch size of the network
    """
    __name__ = 'tsne_loss'

    def __init__(self, dim, batch_size):
        self.dim = dim
        self.batch_size = batch_size

    def __call__(self, y_true, y_pred):
        # Degrees of freedom of the Student-t kernel: output dim minus one.
        dof = self.dim - 1.
        tiny = K.variable(10e-15)

        # Pairwise squared Euclidean distances between embedded points via
        # the expansion ||a - b||^2 = ||a||^2 - 2 a.b + ||b||^2.
        sq_norms = K.sum(K.square(y_pred), axis=1)
        dists = K.reshape(sq_norms, [-1, 1]) + -2 * \
            K.dot(y_pred, K.transpose(y_pred))
        dists = (sq_norms + dists) / dof

        # Student-t similarities; zero the diagonal (self-affinities) and
        # normalise the matrix into a probability distribution Q.
        q_mat = K.pow(1 + dists, -(dof + 1) / 2)
        q_mat *= K.variable(1 - np.eye(self.batch_size))
        q_mat /= K.sum(q_mat)
        q_mat = K.maximum(q_mat, tiny)

        # KL(P || Q), with epsilon guards against log(0) / division by zero.
        log_ratio = K.log((y_true + tiny) / (q_mat + tiny))
        return K.sum(y_true * log_ratio)
class VAELoss (in_dim, z_log_var, z_mean)

Custom loss function for a variational autoencoder.

Parameters

in_dim : int
The dimension of the input
z_log_var : tensor
The variance tensor
z_mean : tensor
The mean tensor
Source code
class VAELoss:
    """Custom loss function for a variational autoencoder.

    The loss is the sum of a reconstruction term (binary cross-entropy
    scaled by the input dimension) and the KL divergence of the latent
    distribution from a standard normal prior.

    Parameters
    ----------
    in_dim : int
        The dimension of the input
    z_log_var : tensor or None
        The log-variance tensor; when ``None``, a constant log-variance
        of 1 is substituted in the KL term
    z_mean : tensor
        The mean tensor
    """
    __name__ = 'vae_loss'

    def __init__(self, in_dim, z_log_var, z_mean):

        self.in_dim = in_dim
        self.z_log_var = z_log_var
        self.z_mean = z_mean

    def __call__(self, x, x_decoded_mean):
        # Reconstruction term: per-sample binary cross-entropy, scaled back
        # up by the input dimension (Keras averages over features).
        xent_loss = self.in_dim * \
            binary_crossentropy(x, x_decoded_mean)
        # BUG FIX: the original wrote `if self.z_log_var:`, which evaluates
        # the truthiness of a tensor — ambiguous, and it raises in
        # TensorFlow ("truth value of a tensor is undefined"). The intent,
        # shown by the else branch substituting the constant 1 for
        # z_log_var, is to test for the tensor's absence.
        if self.z_log_var is not None:
            kl_loss = - 0.5 * \
                K.sum(1 + self.z_log_var - K.square(self.z_mean) -
                      K.exp(self.z_log_var), axis=-1)
        else:
            # Same KL expression with a fixed log-variance of 1.
            kl_loss = - 0.5 * \
                K.sum(1 + 1 - K.square(self.z_mean) - K.exp(1.0), axis=-1)
        return K.mean(xent_loss + kl_loss)