Skip to content

Loss Layers

simplegrad.nn.loss_layers.CELoss

Bases: Module

Cross-entropy loss layer with built-in softmax.

Parameters:

Name Type Description Default
dim int

Class dimension. Defaults to -1 (last dim).

-1
reduction str | None

"mean", "sum", or None. Defaults to "mean".

'mean'
Source code in simplegrad/nn/loss_layers.py
class CELoss(Module):
    """Softmax cross-entropy loss layer.

    The softmax is applied internally, so inputs are expected to be raw
    logits rather than probabilities.

    Args:
        dim: Class dimension. Defaults to -1 (last dim).
        reduction: ``"mean"``, ``"sum"``, or ``None``. Defaults to ``"mean"``.
    """

    def __init__(self, dim: int = -1, reduction: str | None = "mean") -> None:
        super().__init__()
        self.dim = dim
        self.reduction = reduction

    def forward(self, z: Tensor, y: Tensor) -> Tensor:
        """Compute the cross-entropy loss of logits against targets.

        Args:
            z: Logits tensor.
            y: Target probability distribution, same shape as ``z``.

        Returns:
            Scalar loss tensor.
        """
        # Delegate to the functional form with this layer's configuration.
        loss = ce_loss(z, y, dim=self.dim, reduction=self.reduction)
        return loss

    def __str__(self):
        return "CELoss(dim={}, reduction={})".format(self.dim, self.reduction)

forward(z: Tensor, y: Tensor) -> Tensor

Compute cross-entropy loss.

Parameters:

Name Type Description Default
z Tensor

Logits tensor.

required
y Tensor

Target probability distribution, same shape as z.

required

Returns:

Type Description
Tensor

Scalar loss tensor.

Source code in simplegrad/nn/loss_layers.py
def forward(self, z: Tensor, y: Tensor) -> Tensor:
    """Return the cross-entropy loss of logits ``z`` against targets ``y``.

    Args:
        z: Logits tensor.
        y: Target probability distribution, same shape as ``z``.

    Returns:
        Scalar loss tensor.
    """
    # Forward the layer's configured dim/reduction to the functional loss.
    result = ce_loss(z, y, dim=self.dim, reduction=self.reduction)
    return result

simplegrad.nn.loss_layers.MSELoss

Bases: Module

Mean squared error loss layer.

Parameters:

Name Type Description Default
reduction str | None

"mean", "sum", or None. Defaults to "mean".

'mean'
Source code in simplegrad/nn/loss_layers.py
class MSELoss(Module):
    """Mean squared error loss layer.

    Args:
        reduction: ``"mean"``, ``"sum"``, or ``None``. Defaults to ``"mean"``.
    """

    def __init__(self, reduction: str | None = "mean") -> None:
        super().__init__()
        self.reduction = reduction

    def forward(self, p: Tensor, y: Tensor) -> Tensor:
        """Compute the mean squared error between predictions and targets.

        Args:
            p: Predictions tensor.
            y: Targets tensor, same shape as ``p``.

        Returns:
            Scalar loss tensor.
        """
        # Delegate to the functional form with this layer's reduction mode.
        out = mse_loss(p, y, reduction=self.reduction)
        return out

    def __str__(self):
        return "MSELoss(reduction={})".format(self.reduction)

forward(p: Tensor, y: Tensor) -> Tensor

Compute MSE loss.

Parameters:

Name Type Description Default
p Tensor

Predictions tensor.

required
y Tensor

Targets tensor, same shape as p.

required

Returns:

Type Description
Tensor

Scalar loss tensor.

Source code in simplegrad/nn/loss_layers.py
def forward(self, p: Tensor, y: Tensor) -> Tensor:
    """Return the MSE loss of predictions ``p`` against targets ``y``.

    Args:
        p: Predictions tensor.
        y: Targets tensor, same shape as ``p``.

    Returns:
        Scalar loss tensor.
    """
    # Forward the layer's configured reduction to the functional loss.
    result = mse_loss(p, y, reduction=self.reduction)
    return result