Documentation for v0.1 version (#199)

* Add Equations, fix typos
* Improve _code.rst
* Add the team rst and restructure index.rst
* Fix errors

---------

Co-authored-by: Dario Coscia <dariocoscia@dhcp-015.eduroam.sissa.it>
Author: Dario Coscia
Date: 2023-11-08 14:39:00 +01:00
Committed by: Nicola Demo
Parent: 3f9305d475
Commit: 8b7b61b3bd
144 changed files with 2741 additions and 1766 deletions


@@ -1,26 +1,69 @@
-from pina.model.layers import ResidualBlock
+from pina.model.layers import ResidualBlock, EnhancedLinear
 import torch
 import torch.nn as nn
 
 
-def test_constructor():
-    res_block = ResidualBlock(input_dim=10, output_dim=3, hidden_dim=4)
+def test_constructor_residual_block():
+    res_block = ResidualBlock(input_dim=10,
+                              output_dim=3,
+                              hidden_dim=4)
+    res_block = ResidualBlock(input_dim=10,
+                              output_dim=3,
+                              hidden_dim=4,
+                              spectral_norm=True)
 
 
-def test_forward():
-    res_block = ResidualBlock(input_dim=10, output_dim=3, hidden_dim=4)
+def test_forward_residual_block():
+    res_block = ResidualBlock(input_dim=10,
+                              output_dim=3,
+                              hidden_dim=4)
     x = torch.rand(size=(80, 10))
     y = res_block(x)
-    assert y.shape[1]==3
-    assert y.shape[0]==x.shape[0]
+    assert y.shape[1] == 3
+    assert y.shape[0] == x.shape[0]
+
+
+def test_constructor_no_activation_no_dropout():
+    linear_layer = nn.Linear(10, 20)
+    enhanced_linear = EnhancedLinear(linear_layer)
+    assert len(list(enhanced_linear.parameters())) == len(list(linear_layer.parameters()))
+
+
+def test_constructor_with_activation_no_dropout():
+    linear_layer = nn.Linear(10, 20)
+    activation = nn.ReLU()
+    enhanced_linear = EnhancedLinear(linear_layer, activation)
+    assert len(list(enhanced_linear.parameters())) == len(list(linear_layer.parameters())) + len(list(activation.parameters()))
+
+
+def test_constructor_no_activation_with_dropout():
+    linear_layer = nn.Linear(10, 20)
+    dropout_prob = 0.5
+    enhanced_linear = EnhancedLinear(linear_layer, dropout=dropout_prob)
+    assert len(list(enhanced_linear.parameters())) == len(list(linear_layer.parameters()))
+
+
+def test_constructor_with_activation_with_dropout():
+    linear_layer = nn.Linear(10, 20)
+    activation = nn.ReLU()
+    dropout_prob = 0.5
+    enhanced_linear = EnhancedLinear(linear_layer, activation, dropout_prob)
+    assert len(list(enhanced_linear.parameters())) == len(list(linear_layer.parameters())) + len(list(activation.parameters()))
+
+
+def test_forward_enhanced_linear_no_dropout():
+    enhanced_linear = EnhancedLinear(nn.Linear(10, 3))
+    x = torch.rand(size=(80, 10))
+    y = enhanced_linear(x)
+    assert y.shape[1] == 3
+    assert y.shape[0] == x.shape[0]
+
+
+def test_forward_enhanced_linear_dropout():
+    enhanced_linear = EnhancedLinear(nn.Linear(10, 3), dropout=0.5)
+    x = torch.rand(size=(80, 10))
+    y = enhanced_linear(x)
+    assert y.shape[1] == 3
+    assert y.shape[0] == x.shape[0]
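
For quick reference, a minimal usage sketch of the ResidualBlock exercised by the tests above. Only the import path and the constructor arguments (input_dim, output_dim, hidden_dim, spectral_norm) are taken from the diff; the comment on spectral_norm is an inference from the keyword name, not confirmed behavior.

    import torch
    from pina.model.layers import ResidualBlock  # import path as in the diff

    # Same arguments the tests pass; spectral_norm presumably applies spectral
    # normalization to the block's internal linear maps (assumption from the name).
    res_block = ResidualBlock(input_dim=10, output_dim=3, hidden_dim=4,
                              spectral_norm=True)

    x = torch.rand(size=(80, 10))  # batch of 80 samples, 10 features each
    y = res_block(x)
    assert y.shape == (80, 3)      # batch preserved, features mapped to output_dim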
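Likewise, a hedged sketch of the new EnhancedLinear layer. Judging from the constructor combinations covered above, it wraps an existing layer and optionally appends an activation and a dropout; anything beyond what the tests show is an assumption.

    import torch
    import torch.nn as nn
    from pina.model.layers import EnhancedLinear  # import path as in the diff

    # Bare wrap: should behave like the wrapped linear layer itself.
    plain = EnhancedLinear(nn.Linear(10, 3))

    # Wrap plus ReLU activation and dropout, mirroring
    # test_constructor_with_activation_with_dropout.
    enhanced = EnhancedLinear(nn.Linear(10, 3), nn.ReLU(), 0.5)

    x = torch.rand(size=(80, 10))
    assert plain(x).shape == enhanced(x).shape == (80, 3)  # shape contract from the tests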