Adaptive Functions (#272)
* adaptive function improvement

Co-authored-by: Dario Coscia <dariocoscia@Dario-Coscia.local>
tests/test_layers/test_adaptive_func.py (new file, 48 lines added)
@@ -0,0 +1,48 @@
import torch
import pytest

from pina.model.layers.adaptive_func import AdaptiveActivationFunction

x = torch.rand(5)
torchfunc = torch.nn.Tanh()


def test_constructor():
    # default construction
    AdaptiveActivationFunction(torchfunc)

    # setting values: all three coefficients are trainable parameters
    af = AdaptiveActivationFunction(torchfunc, alpha=1., beta=2., gamma=3.)
    assert af.alpha.requires_grad
    assert af.beta.requires_grad
    assert af.gamma.requires_grad
    assert af.alpha == 1.
    assert af.beta == 2.
    assert af.gamma == 3.

    # fixed variables: names listed in `fixed` are frozen (no gradient)
    af = AdaptiveActivationFunction(torchfunc, alpha=1., beta=2.,
                                    gamma=3., fixed=['alpha'])
    assert af.alpha.requires_grad is False
    assert af.beta.requires_grad
    assert af.gamma.requires_grad
    assert af.alpha == 1.
    assert af.beta == 2.
    assert af.gamma == 3.

    # unknown name in `fixed`
    with pytest.raises(TypeError):
        AdaptiveActivationFunction(torchfunc, alpha=1., beta=2.,
                                   gamma=3., fixed=['delta'])

    # invalid argument types; each call gets its own block, since any
    # statement after the first raise inside a single `pytest.raises`
    # context would never execute
    with pytest.raises(ValueError):
        AdaptiveActivationFunction(torchfunc, alpha='s')
    with pytest.raises(ValueError):
        AdaptiveActivationFunction(torchfunc, alpha=1., fixed='alpha')
    with pytest.raises(ValueError):
        AdaptiveActivationFunction(torchfunc, alpha=1)


def test_forward():
    af = AdaptiveActivationFunction(torchfunc)
    af(x)


def test_backward():
    af = AdaptiveActivationFunction(torchfunc)
    y = af(x)
    y.mean().backward()
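For context, here is a minimal sketch of a wrapper that would satisfy these tests. The class name AdaptiveActivationFunctionSketch, the default coefficient values, and the forward rule alpha * f(beta * x + gamma) are assumptions (in the spirit of the adaptive activations of Jagtap et al.), not necessarily what PINA's pina.model.layers.adaptive_func actually implements.

import torch


class AdaptiveActivationFunctionSketch(torch.nn.Module):
    """Hypothetical adaptive activation wrapper, inferred from the tests."""

    def __init__(self, func, alpha=1.0, beta=1.0, gamma=0.0, fixed=None):
        super().__init__()
        # strict type checks, mirroring the errors exercised above
        for name, value in zip(('alpha', 'beta', 'gamma'),
                               (alpha, beta, gamma)):
            if not isinstance(value, float):
                raise ValueError(f'{name} must be a float, got {type(value)}')
        if fixed is not None and not isinstance(fixed, (list, tuple)):
            raise ValueError('fixed must be a list of coefficient names')
        fixed = fixed or []
        for name in fixed:
            if name not in ('alpha', 'beta', 'gamma'):
                raise TypeError(f'unknown coefficient {name!r} in fixed')

        self.func = func
        # each coefficient is trainable unless listed in `fixed`
        self.alpha = torch.nn.Parameter(torch.tensor(alpha),
                                        requires_grad='alpha' not in fixed)
        self.beta = torch.nn.Parameter(torch.tensor(beta),
                                       requires_grad='beta' not in fixed)
        self.gamma = torch.nn.Parameter(torch.tensor(gamma),
                                        requires_grad='gamma' not in fixed)

    def forward(self, x):
        # assumed form: scale the input by beta, shift by gamma, then
        # rescale the activation output by alpha
        return self.alpha * self.func(self.beta * x + self.gamma)

Under this sketch, AdaptiveActivationFunctionSketch(torch.nn.Tanh(), alpha=1., beta=2., gamma=3.) passes every assertion in test_constructor above.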