Dev Update (#582)

* Fix adaptive refinement (#571)


---------

Co-authored-by: Dario Coscia <93731561+dario-coscia@users.noreply.github.com>

* Remove collector

* Fixes

* Fixes

* rm unnecessary comment

* fix advection (#581)

* Fix tutorial .html link (#580)

* fix problem data collection for v0.1 (#584)

* Message Passing Module (#516); see the usage sketch after the commit metadata below

* add deep tensor network block

* add interaction network block

* add radial field network block

* add schnet block

* add equivariant network block

* fix + tests + doc files

* fix egnn + equivariance/invariance tests

Co-authored-by: Dario Coscia <dariocos99@gmail.com>

---------

Co-authored-by: giovanni <giovanni.canali98@yahoo.it>
Co-authored-by: AleDinve <giuseppealessio.d@student.unisi.it>

* add type checker (#527)

---------

Co-authored-by: Filippo Olivo <filippo@filippoolivo.com>
Co-authored-by: Giovanni Canali <115086358+GiovanniCanali@users.noreply.github.com>
Co-authored-by: giovanni <giovanni.canali98@yahoo.it>
Co-authored-by: AleDinve <giuseppealessio.d@student.unisi.it>
Author: Dario Coscia
Date: 2025-06-13 17:34:37 +02:00 (committed by GitHub)
Commit: 7bf7d34d0f (parent: 6b355b45de)
40 changed files with 1963 additions and 581 deletions
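
For orientation, here is a minimal usage sketch of one of the new message-passing blocks. It is a sketch, not documentation: it relies only on the import path, constructor arguments, and forward signature exercised in the test files below, and the graph data is illustrative.

import torch
from pina.model.block.message_passing import DeepTensorNetworkBlock

# Toy graph: 10 nodes with 3 features each, 20 random edges with 2 edge features.
x = torch.rand(10, 3)
edge_index = torch.randint(0, 10, (2, 20))
edge_attr = torch.randn(20, 2)

# Constructor and forward call as exercised in the tests below.
block = DeepTensorNetworkBlock(node_feature_dim=3, edge_feature_dim=2)
updated_x = block(edge_index=edge_index, x=x, edge_attr=edge_attr)
assert updated_x.shape == x.shape  # the block returns updated node features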


@@ -0,0 +1,59 @@
import pytest
import torch
from pina.model.block.message_passing import DeepTensorNetworkBlock

# Data for testing
x = torch.rand(10, 3)
edge_index = torch.randint(0, 10, (2, 20))
edge_attr = torch.randn(20, 2)


@pytest.mark.parametrize("node_feature_dim", [1, 3])
@pytest.mark.parametrize("edge_feature_dim", [3, 5])
def test_constructor(node_feature_dim, edge_feature_dim):
    DeepTensorNetworkBlock(
        node_feature_dim=node_feature_dim,
        edge_feature_dim=edge_feature_dim,
    )

    # Should fail if node_feature_dim is negative
    with pytest.raises(AssertionError):
        DeepTensorNetworkBlock(
            node_feature_dim=-1, edge_feature_dim=edge_feature_dim
        )

    # Should fail if edge_feature_dim is negative
    with pytest.raises(AssertionError):
        DeepTensorNetworkBlock(
            node_feature_dim=node_feature_dim, edge_feature_dim=-1
        )


def test_forward():
    model = DeepTensorNetworkBlock(
        node_feature_dim=x.shape[1],
        edge_feature_dim=edge_attr.shape[1],
    )
    output_ = model(edge_index=edge_index, x=x, edge_attr=edge_attr)
    assert output_.shape == x.shape


def test_backward():
    model = DeepTensorNetworkBlock(
        node_feature_dim=x.shape[1],
        edge_feature_dim=edge_attr.shape[1],
    )
    output_ = model(
        edge_index=edge_index,
        x=x.requires_grad_(),
        edge_attr=edge_attr.requires_grad_(),
    )
    loss = torch.mean(output_)
    loss.backward()
    assert x.grad.shape == x.shape


@@ -0,0 +1,165 @@
import pytest
import torch
from pina.model.block.message_passing import EnEquivariantNetworkBlock

# Data for testing
x = torch.rand(10, 4)
pos = torch.rand(10, 3)
edge_index = torch.randint(0, 10, (2, 20))
edge_attr = torch.randn(20, 2)


@pytest.mark.parametrize("node_feature_dim", [1, 3])
@pytest.mark.parametrize("edge_feature_dim", [0, 2])
@pytest.mark.parametrize("pos_dim", [2, 3])
def test_constructor(node_feature_dim, edge_feature_dim, pos_dim):
    EnEquivariantNetworkBlock(
        node_feature_dim=node_feature_dim,
        edge_feature_dim=edge_feature_dim,
        pos_dim=pos_dim,
        hidden_dim=64,
        n_message_layers=2,
        n_update_layers=2,
    )

    # Should fail if node_feature_dim is negative
    with pytest.raises(AssertionError):
        EnEquivariantNetworkBlock(
            node_feature_dim=-1,
            edge_feature_dim=edge_feature_dim,
            pos_dim=pos_dim,
        )

    # Should fail if edge_feature_dim is negative
    with pytest.raises(AssertionError):
        EnEquivariantNetworkBlock(
            node_feature_dim=node_feature_dim,
            edge_feature_dim=-1,
            pos_dim=pos_dim,
        )

    # Should fail if pos_dim is negative
    with pytest.raises(AssertionError):
        EnEquivariantNetworkBlock(
            node_feature_dim=node_feature_dim,
            edge_feature_dim=edge_feature_dim,
            pos_dim=-1,
        )

    # Should fail if hidden_dim is negative
    with pytest.raises(AssertionError):
        EnEquivariantNetworkBlock(
            node_feature_dim=node_feature_dim,
            edge_feature_dim=edge_feature_dim,
            pos_dim=pos_dim,
            hidden_dim=-1,
        )

    # Should fail if n_message_layers is negative
    with pytest.raises(AssertionError):
        EnEquivariantNetworkBlock(
            node_feature_dim=node_feature_dim,
            edge_feature_dim=edge_feature_dim,
            pos_dim=pos_dim,
            n_message_layers=-1,
        )

    # Should fail if n_update_layers is negative
    with pytest.raises(AssertionError):
        EnEquivariantNetworkBlock(
            node_feature_dim=node_feature_dim,
            edge_feature_dim=edge_feature_dim,
            pos_dim=pos_dim,
            n_update_layers=-1,
        )


@pytest.mark.parametrize("edge_feature_dim", [0, 2])
def test_forward(edge_feature_dim):
    model = EnEquivariantNetworkBlock(
        node_feature_dim=x.shape[1],
        edge_feature_dim=edge_feature_dim,
        pos_dim=pos.shape[1],
        hidden_dim=64,
        n_message_layers=2,
        n_update_layers=2,
    )
    if edge_feature_dim == 0:
        output_ = model(edge_index=edge_index, x=x, pos=pos)
    else:
        output_ = model(
            edge_index=edge_index, x=x, pos=pos, edge_attr=edge_attr
        )
    assert output_[0].shape == x.shape
    assert output_[1].shape == pos.shape


@pytest.mark.parametrize("edge_feature_dim", [0, 2])
def test_backward(edge_feature_dim):
    model = EnEquivariantNetworkBlock(
        node_feature_dim=x.shape[1],
        edge_feature_dim=edge_feature_dim,
        pos_dim=pos.shape[1],
        hidden_dim=64,
        n_message_layers=2,
        n_update_layers=2,
    )
    if edge_feature_dim == 0:
        output_ = model(
            edge_index=edge_index,
            x=x.requires_grad_(),
            pos=pos.requires_grad_(),
        )
    else:
        output_ = model(
            edge_index=edge_index,
            x=x.requires_grad_(),
            pos=pos.requires_grad_(),
            edge_attr=edge_attr.requires_grad_(),
        )
    loss = torch.mean(output_[0])
    loss.backward()
    assert x.grad.shape == x.shape
    assert pos.grad.shape == pos.shape


def test_equivariance():
    # Graph to be fully connected and undirected
    edge_index = torch.combinations(torch.arange(x.shape[0]), r=2).T
    edge_index = torch.cat([edge_index, edge_index.flip(0)], dim=1)

    # Random rotation (det(rotation) should be 1)
    rotation = torch.linalg.qr(torch.rand(pos.shape[-1], pos.shape[-1])).Q
    if torch.det(rotation) < 0:
        rotation[:, 0] *= -1

    # Random translation
    translation = torch.rand(1, pos.shape[-1])

    model = EnEquivariantNetworkBlock(
        node_feature_dim=x.shape[1],
        edge_feature_dim=0,
        pos_dim=pos.shape[1],
        hidden_dim=64,
        n_message_layers=2,
        n_update_layers=2,
    ).eval()

    h1, pos1 = model(edge_index=edge_index, x=x, pos=pos)
    h2, pos2 = model(
        edge_index=edge_index, x=x, pos=pos @ rotation.T + translation
    )

    # Transform model output
    pos1_transformed = (pos1 @ rotation.T) + translation
    assert torch.allclose(pos2, pos1_transformed, atol=1e-5)
    assert torch.allclose(h1, h2, atol=1e-5)
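
As a restatement of what test_equivariance above verifies (the symbol \phi for the block is notation introduced here, not taken from the code): with positions stored as rows of p, a proper rotation R (det R = 1), and a translation t broadcast over rows,

\[
(h_1,\, p_1) = \phi(x,\, p), \qquad (h_2,\, p_2) = \phi\!\left(x,\; p R^{\top} + t\right),
\]
\[
\text{asserted:} \qquad p_2 \approx p_1 R^{\top} + t, \qquad h_2 \approx h_1 \qquad (\text{atol} = 10^{-5}),
\]

i.e. the output positions are E(n)-equivariant and the node features are invariant under rotations and translations of the input coordinates.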


@@ -0,0 +1,84 @@
import pytest
import torch
from pina.model.block.message_passing import InteractionNetworkBlock

# Data for testing
x = torch.rand(10, 3)
edge_index = torch.randint(0, 10, (2, 20))
edge_attr = torch.randn(20, 2)


@pytest.mark.parametrize("node_feature_dim", [1, 3])
@pytest.mark.parametrize("edge_feature_dim", [0, 2])
def test_constructor(node_feature_dim, edge_feature_dim):
    InteractionNetworkBlock(
        node_feature_dim=node_feature_dim,
        edge_feature_dim=edge_feature_dim,
        hidden_dim=64,
        n_message_layers=2,
        n_update_layers=2,
    )

    # Should fail if node_feature_dim is negative
    with pytest.raises(AssertionError):
        InteractionNetworkBlock(node_feature_dim=-1)

    # Should fail if edge_feature_dim is negative
    with pytest.raises(AssertionError):
        InteractionNetworkBlock(node_feature_dim=3, edge_feature_dim=-1)

    # Should fail if hidden_dim is negative
    with pytest.raises(AssertionError):
        InteractionNetworkBlock(node_feature_dim=3, hidden_dim=-1)

    # Should fail if n_message_layers is negative
    with pytest.raises(AssertionError):
        InteractionNetworkBlock(node_feature_dim=3, n_message_layers=-1)

    # Should fail if n_update_layers is negative
    with pytest.raises(AssertionError):
        InteractionNetworkBlock(node_feature_dim=3, n_update_layers=-1)


@pytest.mark.parametrize("edge_feature_dim", [0, 2])
def test_forward(edge_feature_dim):
    model = InteractionNetworkBlock(
        node_feature_dim=x.shape[1],
        edge_feature_dim=edge_feature_dim,
        hidden_dim=64,
        n_message_layers=2,
        n_update_layers=2,
    )
    if edge_feature_dim == 0:
        output_ = model(edge_index=edge_index, x=x)
    else:
        output_ = model(edge_index=edge_index, x=x, edge_attr=edge_attr)
    assert output_.shape == x.shape


@pytest.mark.parametrize("edge_feature_dim", [0, 2])
def test_backward(edge_feature_dim):
    model = InteractionNetworkBlock(
        node_feature_dim=x.shape[1],
        edge_feature_dim=edge_feature_dim,
        hidden_dim=64,
        n_message_layers=2,
        n_update_layers=2,
    )
    if edge_feature_dim == 0:
        output_ = model(edge_index=edge_index, x=x.requires_grad_())
    else:
        output_ = model(
            edge_index=edge_index,
            x=x.requires_grad_(),
            edge_attr=edge_attr.requires_grad_(),
        )
    loss = torch.mean(output_)
    loss.backward()
    assert x.grad.shape == x.shape


@@ -0,0 +1,92 @@
import pytest
import torch
from pina.model.block.message_passing import RadialFieldNetworkBlock

# Data for testing
x = torch.rand(10, 3)
edge_index = torch.randint(0, 10, (2, 20))


@pytest.mark.parametrize("node_feature_dim", [1, 3])
def test_constructor(node_feature_dim):
    RadialFieldNetworkBlock(
        node_feature_dim=node_feature_dim,
        hidden_dim=64,
        n_layers=2,
    )

    # Should fail if node_feature_dim is negative
    with pytest.raises(AssertionError):
        RadialFieldNetworkBlock(
            node_feature_dim=-1,
            hidden_dim=64,
            n_layers=2,
        )

    # Should fail if hidden_dim is negative
    with pytest.raises(AssertionError):
        RadialFieldNetworkBlock(
            node_feature_dim=node_feature_dim,
            hidden_dim=-1,
            n_layers=2,
        )

    # Should fail if n_layers is negative
    with pytest.raises(AssertionError):
        RadialFieldNetworkBlock(
            node_feature_dim=node_feature_dim,
            hidden_dim=64,
            n_layers=-1,
        )


def test_forward():
    model = RadialFieldNetworkBlock(
        node_feature_dim=x.shape[1],
        hidden_dim=64,
        n_layers=2,
    )
    output_ = model(edge_index=edge_index, x=x)
    assert output_.shape == x.shape


def test_backward():
    model = RadialFieldNetworkBlock(
        node_feature_dim=x.shape[1],
        hidden_dim=64,
        n_layers=2,
    )
    output_ = model(edge_index=edge_index, x=x.requires_grad_())
    loss = torch.mean(output_)
    loss.backward()
    assert x.grad.shape == x.shape


def test_equivariance():
    # Graph to be fully connected and undirected
    edge_index = torch.combinations(torch.arange(x.shape[0]), r=2).T
    edge_index = torch.cat([edge_index, edge_index.flip(0)], dim=1)

    # Random rotation (det(rotation) should be 1)
    rotation = torch.linalg.qr(torch.rand(x.shape[-1], x.shape[-1])).Q
    if torch.det(rotation) < 0:
        rotation[:, 0] *= -1

    # Random translation
    translation = torch.rand(1, x.shape[-1])

    model = RadialFieldNetworkBlock(node_feature_dim=x.shape[1]).eval()
    pos1 = model(edge_index=edge_index, x=x)
    pos2 = model(edge_index=edge_index, x=x @ rotation.T + translation)

    # Transform model output
    pos1_transformed = (pos1 @ rotation.T) + translation
    assert torch.allclose(pos2, pos1_transformed, atol=1e-5)
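
Both equivariance tests build their random rotation the same way. As a side note, the construction works because the Q factor of a QR factorization is orthogonal, so its determinant is +1 or -1, and negating one column flips the determinant's sign. A standalone sketch of that fact, independent of pina:

import torch

# Standalone check of the rotation construction used in the tests above.
Q = torch.linalg.qr(torch.rand(3, 3)).Q  # Q is orthogonal, det(Q) is +1 or -1
if torch.det(Q) < 0:
    Q[:, 0] *= -1  # negating one column flips the sign of the determinant
assert torch.allclose(Q @ Q.T, torch.eye(3), atol=1e-5)  # orthogonality
assert torch.det(Q) > 0  # proper rotation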