Rename classes and modules for GNO
Commit 6964f4e7d9 (parent bd24b0c1c2), committed by Nicola Demo.
@@ -1,7 +1,7 @@
|
||||
import pytest
|
||||
import torch
|
||||
from pina.graph import KNNGraph
|
||||
from pina.model import GNO
|
||||
from pina.model import GraphNeuralOperator
|
||||
from torch_geometric.data import Batch
|
||||
|
||||
x = [torch.rand(100, 6) for _ in range(10)]
|
||||
@@ -10,7 +10,6 @@ graph = KNNGraph(x=x, pos=pos, build_edge_attr=True, k=6)
|
||||
input_ = Batch.from_data_list(graph.data)
|
||||
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"shared_weights",
|
||||
[
|
||||
@@ -21,29 +20,29 @@ input_ = Batch.from_data_list(graph.data)
|
||||
def test_constructor(shared_weights):
|
||||
lifting_operator = torch.nn.Linear(6, 16)
|
||||
projection_operator = torch.nn.Linear(16, 3)
|
||||
GNO(lifting_operator=lifting_operator,
|
||||
projection_operator=projection_operator,
|
||||
edge_features=3,
|
||||
internal_layers=[16, 16],
|
||||
shared_weights=shared_weights)
|
||||
GraphNeuralOperator(lifting_operator=lifting_operator,
|
||||
projection_operator=projection_operator,
|
||||
edge_features=3,
|
||||
internal_layers=[16, 16],
|
||||
shared_weights=shared_weights)
|
||||
|
||||
GNO(lifting_operator=lifting_operator,
|
||||
projection_operator=projection_operator,
|
||||
edge_features=3,
|
||||
inner_size=16,
|
||||
internal_n_layers=10,
|
||||
shared_weights=shared_weights)
|
||||
GraphNeuralOperator(lifting_operator=lifting_operator,
|
||||
projection_operator=projection_operator,
|
||||
edge_features=3,
|
||||
inner_size=16,
|
||||
internal_n_layers=10,
|
||||
shared_weights=shared_weights)
|
||||
|
||||
int_func = torch.nn.Softplus
|
||||
ext_func = torch.nn.ReLU
|
||||
|
||||
GNO(lifting_operator=lifting_operator,
|
||||
projection_operator=projection_operator,
|
||||
edge_features=3,
|
||||
internal_n_layers=10,
|
||||
shared_weights=shared_weights,
|
||||
internal_func=int_func,
|
||||
external_func=ext_func)
|
||||
GraphNeuralOperator(lifting_operator=lifting_operator,
|
||||
projection_operator=projection_operator,
|
||||
edge_features=3,
|
||||
internal_n_layers=10,
|
||||
shared_weights=shared_weights,
|
||||
internal_func=int_func,
|
||||
external_func=ext_func)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
@@ -56,14 +55,15 @@ def test_constructor(shared_weights):
|
||||
def test_forward_1(shared_weights):
|
||||
lifting_operator = torch.nn.Linear(6, 16)
|
||||
projection_operator = torch.nn.Linear(16, 3)
|
||||
model = GNO(lifting_operator=lifting_operator,
|
||||
projection_operator=projection_operator,
|
||||
edge_features=3,
|
||||
internal_layers=[16, 16],
|
||||
shared_weights=shared_weights)
|
||||
model = GraphNeuralOperator(lifting_operator=lifting_operator,
|
||||
projection_operator=projection_operator,
|
||||
edge_features=3,
|
||||
internal_layers=[16, 16],
|
||||
shared_weights=shared_weights)
|
||||
output_ = model(input_)
|
||||
assert output_.shape == torch.Size([1000, 3])
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"shared_weights",
|
||||
[
|
||||
@@ -74,15 +74,16 @@ def test_forward_1(shared_weights):
|
||||
def test_forward_2(shared_weights):
|
||||
lifting_operator = torch.nn.Linear(6, 16)
|
||||
projection_operator = torch.nn.Linear(16, 3)
|
||||
model = GNO(lifting_operator=lifting_operator,
|
||||
projection_operator=projection_operator,
|
||||
edge_features=3,
|
||||
inner_size=32,
|
||||
internal_n_layers=2,
|
||||
shared_weights=shared_weights)
|
||||
model = GraphNeuralOperator(lifting_operator=lifting_operator,
|
||||
projection_operator=projection_operator,
|
||||
edge_features=3,
|
||||
inner_size=32,
|
||||
internal_n_layers=2,
|
||||
shared_weights=shared_weights)
|
||||
output_ = model(input_)
|
||||
assert output_.shape == torch.Size([1000, 3])
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"shared_weights",
|
||||
[
|
||||
@@ -93,17 +94,18 @@ def test_forward_2(shared_weights):
|
||||
def test_backward(shared_weights):
|
||||
lifting_operator = torch.nn.Linear(6, 16)
|
||||
projection_operator = torch.nn.Linear(16, 3)
|
||||
model = GNO(lifting_operator=lifting_operator,
|
||||
projection_operator=projection_operator,
|
||||
edge_features=3,
|
||||
internal_layers=[16, 16],
|
||||
shared_weights=shared_weights)
|
||||
model = GraphNeuralOperator(lifting_operator=lifting_operator,
|
||||
projection_operator=projection_operator,
|
||||
edge_features=3,
|
||||
internal_layers=[16, 16],
|
||||
shared_weights=shared_weights)
|
||||
input_.x.requires_grad = True
|
||||
output_ = model(input_)
|
||||
l = torch.mean(output_)
|
||||
l.backward()
|
||||
assert input_.x.grad.shape == torch.Size([1000, 6])
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"shared_weights",
|
||||
[
|
||||
@@ -114,14 +116,14 @@ def test_backward(shared_weights):
|
||||
def test_backward_2(shared_weights):
|
||||
lifting_operator = torch.nn.Linear(6, 16)
|
||||
projection_operator = torch.nn.Linear(16, 3)
|
||||
model = GNO(lifting_operator=lifting_operator,
|
||||
projection_operator=projection_operator,
|
||||
edge_features=3,
|
||||
inner_size=32,
|
||||
internal_n_layers=2,
|
||||
shared_weights=shared_weights)
|
||||
model = GraphNeuralOperator(lifting_operator=lifting_operator,
|
||||
projection_operator=projection_operator,
|
||||
edge_features=3,
|
||||
inner_size=32,
|
||||
internal_n_layers=2,
|
||||
shared_weights=shared_weights)
|
||||
input_.x.requires_grad = True
|
||||
output_ = model(input_)
|
||||
l = torch.mean(output_)
|
||||
l.backward()
|
||||
assert input_.x.grad.shape == torch.Size([1000, 6])
|
||||
assert input_.x.grad.shape == torch.Size([1000, 6])
|
||||
|
||||
Reference in New Issue
Block a user