add models and layers backward test

cyberguli
2024-02-19 23:09:10 +01:00
committed by Nicola Demo
parent cbb43a5392
commit eb1af0b50e
10 changed files with 308 additions and 1 deletion


@@ -22,6 +22,17 @@ def test_forward_residual_block():
    assert y.shape[1] == 3
    assert y.shape[0] == x.shape[0]


def test_backward_residual_block():
    res_block = ResidualBlock(input_dim=10, output_dim=3, hidden_dim=4)
    x = torch.rand(size=(80, 10))
    x.requires_grad = True
    y = res_block(x)
    loss = torch.mean(y)  # scalar loss, so backward() needs no gradient argument
    loss.backward()
    assert x.grad.shape == torch.Size([80, 10])


def test_constructor_no_activation_no_dropout():
    linear_layer = nn.Linear(10, 20)
    enhanced_linear = EnhancedLinear(linear_layer)
@@ -59,6 +70,17 @@ def test_forward_enhanced_linear_no_dropout():
    assert y.shape[1] == 3
    assert y.shape[0] == x.shape[0]


def test_backward_enhanced_linear_no_dropout():
    enhanced_linear = EnhancedLinear(nn.Linear(10, 3))
    x = torch.rand(size=(80, 10))
    x.requires_grad = True
    y = enhanced_linear(x)
    loss = torch.mean(y)
    loss.backward()
    assert x.grad.shape == torch.Size([80, 10])


def test_forward_enhanced_linear_dropout():
    enhanced_linear = EnhancedLinear(nn.Linear(10, 3), dropout=0.5)
@@ -66,4 +88,15 @@ def test_forward_enhanced_linear_dropout():
    x = torch.rand(size=(80, 10))
    y = enhanced_linear(x)
    assert y.shape[1] == 3
    assert y.shape[0] == x.shape[0]


def test_backward_enhanced_linear_dropout():
    enhanced_linear = EnhancedLinear(nn.Linear(10, 3), dropout=0.5)
    x = torch.rand(size=(80, 10))
    x.requires_grad = True
    y = enhanced_linear(x)
    loss = torch.mean(y)
    loss.backward()  # dropout still propagates gradients to the input
    assert x.grad.shape == torch.Size([80, 10])
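
All three new tests follow the same backward-pass pattern: run a forward pass on an input with `requires_grad=True`, reduce the output to a scalar with `torch.mean`, call `backward()`, and assert that the gradient accumulated on the input matches the input's shape. A minimal standalone sketch of that pattern, using a plain `nn.Linear` as a stand-in for `ResidualBlock`/`EnhancedLinear` (assumption: nothing beyond `torch` is needed):

```python
import torch
import torch.nn as nn

# Stand-in module; the new tests apply the identical pattern
# to ResidualBlock and EnhancedLinear.
layer = nn.Linear(10, 3)

x = torch.rand(80, 10, requires_grad=True)
y = layer(x)
loss = torch.mean(y)  # scalar, so backward() needs no explicit gradient
loss.backward()       # autograd populates x.grad

# Gradient w.r.t. the input has the same shape as the input itself.
assert x.grad.shape == torch.Size([80, 10])
```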