import torch
from torch.nn.parameter import Parameter


class AdaptiveSoftplus(torch.nn.Module):
    """Softplus activation with a single trainable output scale.

    Computes ``scale * softplus(x)`` elementwise, where ``scale`` is a
    one-element trainable parameter initialized uniformly in [0, 1).

    Shape:
        - Input: (N, *) where * means any number of additional dimensions
        - Output: (N, *), same shape as the input

    References:
        - https://arxiv.org/pdf/1602.01321.pdf
          NOTE(review): that paper describes the *soft exponential*
          activation, but this class implements a scaled softplus —
          confirm the intended reference.

    Examples:
        >>> act = AdaptiveSoftplus()
        >>> x = torch.randn(256)
        >>> y = act(x)
    """

    def __init__(self):
        """Initialize the softplus op and the trainable ``scale`` parameter.

        ``Parameter`` already sets ``requires_grad=True``; the original
        code assigned a misspelled ``requiresGrad`` attribute, which had
        no effect and has been removed.
        """
        super().__init__()
        self.soft = torch.nn.Softplus()
        # Registered as a Parameter so it is tracked by the optimizer
        # and receives gradients automatically.
        self.scale = Parameter(torch.rand(1))

    def forward(self, x):
        """Apply ``softplus`` elementwise and multiply by the learned scale."""
        return self.soft(x) * self.scale