Activation Layers

simplegrad.nn.activation_layers.ReLU

Bases: Module

ReLU activation layer: max(0, x).

Source code in simplegrad/nn/activation_layers.py
class ReLU(Module):
    """ReLU activation layer: max(0, x)."""

    def forward(self, x: Tensor) -> Tensor:
        """Apply ReLU element-wise."""
        return relu(x)

    def __str__(self):
        return "ReLU"

forward(x: Tensor) -> Tensor

Apply ReLU element-wise.

Source code in simplegrad/nn/activation_layers.py
def forward(self, x: Tensor) -> Tensor:
    """Apply ReLU element-wise."""
    return relu(x)
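
A minimal usage sketch (not from the source): it assumes Tensor is importable from the top-level simplegrad package and can be constructed from a Python list.

from simplegrad import Tensor                      # assumed import path
from simplegrad.nn.activation_layers import ReLU

layer = ReLU()
x = Tensor([-2.0, -0.5, 0.0, 1.5])                 # assumed list constructor
y = layer.forward(x)                               # negatives clamped to 0: [0.0, 0.0, 0.0, 1.5]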

simplegrad.nn.activation_layers.Softmax

Bases: Module

Softmax activation layer.

Parameters:

    dim (int | None): Dimension to normalize over. Defaults to None (all elements).
Source code in simplegrad/nn/activation_layers.py
class Softmax(Module):
    """Softmax activation layer.

    Args:
        dim: Dimension to normalize over. Defaults to None (all elements).
    """

    def __init__(self, dim: int | None = None):
        super().__init__()
        self.dim = dim

    def forward(self, x: Tensor):
        """Apply softmax along ``dim``."""
        return softmax(x, self.dim)

    def __str__(self):
        return f"Softmax(dim={self.dim})"

forward(x: Tensor)

Apply softmax along dim.

Source code in simplegrad/nn/activation_layers.py
def forward(self, x: Tensor):
    """Apply softmax along ``dim``."""
    return softmax(x, self.dim)
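
A hedged usage sketch for Softmax, assuming the same Tensor constructor as above and that dim accepts a non-negative axis index.

from simplegrad import Tensor                      # assumed import path
from simplegrad.nn.activation_layers import Softmax

layer = Softmax(dim=1)                             # normalize over the second axis
x = Tensor([[1.0, 2.0, 3.0],
            [0.0, 0.0, 0.0]])                      # assumed 2-D list constructor
probs = layer.forward(x)                           # each row now sums to 1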

simplegrad.nn.activation_layers.Tanh

Bases: Module

Tanh activation layer: tanh(x).

Source code in simplegrad/nn/activation_layers.py
class Tanh(Module):
    """Tanh activation layer: tanh(x)."""

    def forward(self, x: Tensor):
        """Apply tanh element-wise."""
        return tanh(x)

    def __str__(self):
        return "Tanh"

forward(x: Tensor)

Apply tanh element-wise.

Source code in simplegrad/nn/activation_layers.py
def forward(self, x: Tensor):
    """Apply tanh element-wise."""
    return tanh(x)
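
A hedged usage sketch for Tanh, under the same Tensor assumption.

from simplegrad import Tensor                      # assumed import path
from simplegrad.nn.activation_layers import Tanh

layer = Tanh()
x = Tensor([-3.0, 0.0, 3.0])                       # assumed list constructor
y = layer.forward(x)                               # values squashed into (-1, 1); tanh(0) = 0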

simplegrad.nn.activation_layers.Sigmoid

Bases: Module

Sigmoid activation layer: 1 / (1 + exp(-x)).

Source code in simplegrad/nn/activation_layers.py
class Sigmoid(Module):
    """Sigmoid activation layer: 1 / (1 + exp(-x))."""

    def forward(self, x: Tensor):
        """Apply sigmoid element-wise."""
        return sigmoid(x)

    def __str__(self):
        return "Sigmoid"

forward(x: Tensor)

Apply sigmoid element-wise.

Source code in simplegrad/nn/activation_layers.py
def forward(self, x: Tensor):
    """Apply sigmoid element-wise."""
    return sigmoid(x)
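
A hedged usage sketch for Sigmoid, under the same Tensor assumption.

from simplegrad import Tensor                      # assumed import path
from simplegrad.nn.activation_layers import Sigmoid

layer = Sigmoid()
x = Tensor([-4.0, 0.0, 4.0])                       # assumed list constructor
y = layer.forward(x)                               # values mapped into (0, 1); sigmoid(0) = 0.5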