Simplify Graph class (#459)
* Simplifying Graph class and adjust tests

Co-authored-by: Dario Coscia <dariocos99@gmail.com>

Committed by: Nicola Demo
Parent commit: 4c3e305b09
Commit: ab6ca78d85
@@ -15,12 +15,18 @@ def test_supervised_tensor_collector():
class SupervisedProblem(AbstractProblem):
output_variables = None
conditions = {
'data1': Condition(input_points=torch.rand((10, 2)),
output_points=torch.rand((10, 2))),
'data2': Condition(input_points=torch.rand((20, 2)),
output_points=torch.rand((20, 2))),
'data3': Condition(input_points=torch.rand((30, 2)),
output_points=torch.rand((30, 2))),
"data1": Condition(
input_points=torch.rand((10, 2)),
output_points=torch.rand((10, 2)),
),
"data2": Condition(
input_points=torch.rand((20, 2)),
output_points=torch.rand((20, 2)),
),
"data3": Condition(
input_points=torch.rand((30, 2)),
output_points=torch.rand((30, 2)),
),
}

problem = SupervisedProblem()
@@ -31,65 +37,58 @@ def test_supervised_tensor_collector():

def test_pinn_collector():
def laplace_equation(input_, output_):
force_term = (torch.sin(input_.extract(['x']) * torch.pi) *
torch.sin(input_.extract(['y']) * torch.pi))
delta_u = laplacian(output_.extract(['u']), input_)
force_term = torch.sin(input_.extract(["x"]) * torch.pi) * torch.sin(
input_.extract(["y"]) * torch.pi
)
delta_u = laplacian(output_.extract(["u"]), input_)
return delta_u - force_term

my_laplace = Equation(laplace_equation)
in_ = LabelTensor(torch.tensor([[0., 1.]], requires_grad=True), ['x', 'y'])
out_ = LabelTensor(torch.tensor([[0.]], requires_grad=True), ['u'])
in_ = LabelTensor(
torch.tensor([[0.0, 1.0]], requires_grad=True), ["x", "y"]
)
out_ = LabelTensor(torch.tensor([[0.0]], requires_grad=True), ["u"])

class Poisson(SpatialProblem):
output_variables = ['u']
spatial_domain = CartesianDomain({'x': [0, 1], 'y': [0, 1]})
output_variables = ["u"]
spatial_domain = CartesianDomain({"x": [0, 1], "y": [0, 1]})

conditions = {
'gamma1':
Condition(domain=CartesianDomain({
'x': [0, 1],
'y': 1
}),
equation=FixedValue(0.0)),
'gamma2':
Condition(domain=CartesianDomain({
'x': [0, 1],
'y': 0
}),
equation=FixedValue(0.0)),
'gamma3':
Condition(domain=CartesianDomain({
'x': 1,
'y': [0, 1]
}),
equation=FixedValue(0.0)),
'gamma4':
Condition(domain=CartesianDomain({
'x': 0,
'y': [0, 1]
}),
equation=FixedValue(0.0)),
'D':
Condition(domain=CartesianDomain({
'x': [0, 1],
'y': [0, 1]
}),
equation=my_laplace),
'data':
Condition(input_points=in_, output_points=out_)
"gamma1": Condition(
domain=CartesianDomain({"x": [0, 1], "y": 1}),
equation=FixedValue(0.0),
),
"gamma2": Condition(
domain=CartesianDomain({"x": [0, 1], "y": 0}),
equation=FixedValue(0.0),
),
"gamma3": Condition(
domain=CartesianDomain({"x": 1, "y": [0, 1]}),
equation=FixedValue(0.0),
),
"gamma4": Condition(
domain=CartesianDomain({"x": 0, "y": [0, 1]}),
equation=FixedValue(0.0),
),
"D": Condition(
domain=CartesianDomain({"x": [0, 1], "y": [0, 1]}),
equation=my_laplace,
),
"data": Condition(input_points=in_, output_points=out_),
}

def poisson_sol(self, pts):
return -(torch.sin(pts.extract(['x']) * torch.pi) *
torch.sin(pts.extract(['y']) * torch.pi)) / (
2 * torch.pi ** 2)
return -(
torch.sin(pts.extract(["x"]) * torch.pi)
* torch.sin(pts.extract(["y"]) * torch.pi)
) / (2 * torch.pi**2)

truth_solution = poisson_sol

problem = Poisson()
boundaries = ['gamma1', 'gamma2', 'gamma3', 'gamma4']
problem.discretise_domain(10, 'grid', domains=boundaries)
problem.discretise_domain(10, 'grid', domains='D')
boundaries = ["gamma1", "gamma2", "gamma3", "gamma4"]
problem.discretise_domain(10, "grid", domains=boundaries)
problem.discretise_domain(10, "grid", domains="D")

collector = Collector(problem)
collector.store_fixed_data()
@@ -98,31 +97,34 @@ def test_pinn_collector():
for k, v in problem.conditions.items():
if isinstance(v, InputOutputPointsCondition):
assert list(collector.data_collections[k].keys()) == [
'input_points', 'output_points']
"input_points",
"output_points",
]

for k, v in problem.conditions.items():
if isinstance(v, DomainEquationCondition):
assert list(collector.data_collections[k].keys()) == [
'input_points', 'equation']
"input_points",
"equation",
]


def test_supervised_graph_collector():
pos = torch.rand((100, 3))
x = [torch.rand((100, 3)) for _ in range(10)]
graph_list_1 = RadiusGraph(pos=pos, x=x, build_edge_attr=True, r=.4)
graph_list_1 = [RadiusGraph(pos=pos, radius=0.4, x=x_) for x_ in x]
out_1 = torch.rand((10, 100, 3))

pos = torch.rand((50, 3))
x = [torch.rand((50, 3)) for _ in range(10)]
graph_list_2 = RadiusGraph(pos=pos, x=x, build_edge_attr=True, r=.4)
graph_list_2 = [RadiusGraph(pos=pos, radius=0.4, x=x_) for x_ in x]
out_2 = torch.rand((10, 50, 3))

class SupervisedProblem(AbstractProblem):
output_variables = None
conditions = {
'data1': Condition(input_points=graph_list_1,
output_points=out_1),
'data2': Condition(input_points=graph_list_2,
output_points=out_2),
"data1": Condition(input_points=graph_list_1, output_points=out_1),
"data2": Condition(input_points=graph_list_2, output_points=out_2),
}

problem = SupervisedProblem()
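Note: as the hunk above shows, graph inputs are no longer wrapped in a single RadiusGraph collection but passed as a plain Python list with one graph per sample. A minimal sketch of that pattern (import paths assumed from the test modules, shapes illustrative):

    import torch
    from pina import Condition
    from pina.graph import RadiusGraph
    from pina.problem import AbstractProblem

    # one RadiusGraph per sample: shared node positions, per-sample features
    pos = torch.rand((100, 3))
    x = [torch.rand((100, 3)) for _ in range(10)]
    graphs = [RadiusGraph(pos=pos, radius=0.4, x=x_) for x_ in x]
    out = torch.rand((10, 100, 3))

    class GraphProblem(AbstractProblem):
        output_variables = None
        conditions = {"data": Condition(input_points=graphs, output_points=out)}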
@@ -15,16 +15,15 @@ output_tensor = torch.rand((100, 2))

x = torch.rand((100, 50, 10))
pos = torch.rand((100, 50, 2))
input_graph = RadiusGraph(x, pos, r=.1, build_edge_attr=True)
input_graph = [
RadiusGraph(x=x_, pos=pos_, radius=0.2) for x_, pos_, in zip(x, pos)
]
output_graph = torch.rand((100, 50, 10))


@pytest.mark.parametrize(
"input_, output_",
[
(input_tensor, output_tensor),
(input_graph, output_graph)
]
[(input_tensor, output_tensor), (input_graph, output_graph)],
)
def test_constructor(input_, output_):
problem = SupervisedProblem(input_=input_, output_=output_)
@@ -33,22 +32,16 @@ def test_constructor(input_, output_):

@pytest.mark.parametrize(
"input_, output_",
[
(input_tensor, output_tensor),
(input_graph, output_graph)
]
[(input_tensor, output_tensor), (input_graph, output_graph)],
)
@pytest.mark.parametrize(
"train_size, val_size, test_size",
[
(.7, .2, .1),
(.7, .3, 0)
]
"train_size, val_size, test_size", [(0.7, 0.2, 0.1), (0.7, 0.3, 0)]
)
def test_setup_train(input_, output_, train_size, val_size, test_size):
problem = SupervisedProblem(input_=input_, output_=output_)
dm = PinaDataModule(problem, train_size=train_size,
val_size=val_size, test_size=test_size)
dm = PinaDataModule(
problem, train_size=train_size, val_size=val_size, test_size=test_size
)
dm.setup()
assert hasattr(dm, "train_dataset")
if isinstance(input_, torch.Tensor):
@@ -71,23 +64,17 @@ def test_setup_train(input_, output_, train_size, val_size, test_size):

@pytest.mark.parametrize(
"input_, output_",
[
(input_tensor, output_tensor),
(input_graph, output_graph)
]
[(input_tensor, output_tensor), (input_graph, output_graph)],
)
@pytest.mark.parametrize(
"train_size, val_size, test_size",
[
(.7, .2, .1),
(0., 0., 1.)
]
"train_size, val_size, test_size", [(0.7, 0.2, 0.1), (0.0, 0.0, 1.0)]
)
def test_setup_test(input_, output_, train_size, val_size, test_size):
problem = SupervisedProblem(input_=input_, output_=output_)
dm = PinaDataModule(problem, train_size=train_size,
val_size=val_size, test_size=test_size)
dm.setup(stage='test')
dm = PinaDataModule(
problem, train_size=train_size, val_size=val_size, test_size=test_size
)
dm.setup(stage="test")
if train_size > 0:
assert hasattr(dm, "train_dataset")
assert dm.train_dataset is None
@@ -109,16 +96,14 @@ def test_setup_test(input_, output_, train_size, val_size, test_size):

@pytest.mark.parametrize(
"input_, output_",
[
(input_tensor, output_tensor),
(input_graph, output_graph)
]
[(input_tensor, output_tensor), (input_graph, output_graph)],
)
def test_dummy_dataloader(input_, output_):
problem = SupervisedProblem(input_=input_, output_=output_)
solver = SupervisedSolver(problem=problem, model=torch.nn.Linear(10, 10))
trainer = Trainer(solver, batch_size=None, train_size=.7,
val_size=.3, test_size=0.)
trainer = Trainer(
solver, batch_size=None, train_size=0.7, val_size=0.3, test_size=0.0
)
dm = trainer.data_module
dm.setup()
dm.trainer = trainer
@@ -128,11 +113,11 @@ def test_dummy_dataloader(input_, output_):
data = next(dataloader)
assert isinstance(data, list)
assert isinstance(data[0], tuple)
if isinstance(input_, RadiusGraph):
assert isinstance(data[0][1]['input_points'], Batch)
if isinstance(input_, list):
assert isinstance(data[0][1]["input_points"], Batch)
else:
assert isinstance(data[0][1]['input_points'], torch.Tensor)
assert isinstance(data[0][1]['output_points'], torch.Tensor)
assert isinstance(data[0][1]["input_points"], torch.Tensor)
assert isinstance(data[0][1]["output_points"], torch.Tensor)

dataloader = dm.val_dataloader()
assert isinstance(dataloader, DummyDataloader)
@@ -140,31 +125,29 @@ def test_dummy_dataloader(input_, output_):
data = next(dataloader)
assert isinstance(data, list)
assert isinstance(data[0], tuple)
if isinstance(input_, RadiusGraph):
assert isinstance(data[0][1]['input_points'], Batch)
if isinstance(input_, list):
assert isinstance(data[0][1]["input_points"], Batch)
else:
assert isinstance(data[0][1]['input_points'], torch.Tensor)
assert isinstance(data[0][1]['output_points'], torch.Tensor)
assert isinstance(data[0][1]["input_points"], torch.Tensor)
assert isinstance(data[0][1]["output_points"], torch.Tensor)


@pytest.mark.parametrize(
"input_, output_",
[
(input_tensor, output_tensor),
(input_graph, output_graph)
]
)
@pytest.mark.parametrize(
"automatic_batching",
[
True, False
]
[(input_tensor, output_tensor), (input_graph, output_graph)],
)
@pytest.mark.parametrize("automatic_batching", [True, False])
def test_dataloader(input_, output_, automatic_batching):
problem = SupervisedProblem(input_=input_, output_=output_)
solver = SupervisedSolver(problem=problem, model=torch.nn.Linear(10, 10))
trainer = Trainer(solver, batch_size=10, train_size=.7, val_size=.3,
test_size=0., automatic_batching=automatic_batching)
trainer = Trainer(
solver,
batch_size=10,
train_size=0.7,
val_size=0.3,
test_size=0.0,
automatic_batching=automatic_batching,
)
dm = trainer.data_module
dm.setup()
dm.trainer = trainer
@@ -173,51 +156,53 @@ def test_dataloader(input_, output_, automatic_batching):
assert len(dataloader) == 7
data = next(iter(dataloader))
assert isinstance(data, dict)
if isinstance(input_, RadiusGraph):
assert isinstance(data['data']['input_points'], Batch)
if isinstance(input_, list):
assert isinstance(data["data"]["input_points"], Batch)
else:
assert isinstance(data['data']['input_points'], torch.Tensor)
assert isinstance(data['data']['output_points'], torch.Tensor)
assert isinstance(data["data"]["input_points"], torch.Tensor)
assert isinstance(data["data"]["output_points"], torch.Tensor)

dataloader = dm.val_dataloader()
assert isinstance(dataloader, DataLoader)
assert len(dataloader) == 3
data = next(iter(dataloader))
assert isinstance(data, dict)
if isinstance(input_, RadiusGraph):
assert isinstance(data['data']['input_points'], Batch)
if isinstance(input_, list):
assert isinstance(data["data"]["input_points"], Batch)
else:
assert isinstance(data['data']['input_points'], torch.Tensor)
assert isinstance(data['data']['output_points'], torch.Tensor)
assert isinstance(data["data"]["input_points"], torch.Tensor)
assert isinstance(data["data"]["output_points"], torch.Tensor)


from pina import LabelTensor

input_tensor = LabelTensor(torch.rand((100, 3)), ['u', 'v', 'w'])
output_tensor = LabelTensor(torch.rand((100, 3)), ['u', 'v', 'w'])
input_tensor = LabelTensor(torch.rand((100, 3)), ["u", "v", "w"])
output_tensor = LabelTensor(torch.rand((100, 3)), ["u", "v", "w"])

x = LabelTensor(torch.rand((100, 50, 3)), ["u", "v", "w"])
pos = LabelTensor(torch.rand((100, 50, 2)), ["x", "y"])
input_graph = [
RadiusGraph(x=x[i], pos=pos[i], radius=0.1) for i in range(len(x))
]
output_graph = LabelTensor(torch.rand((100, 50, 3)), ["u", "v", "w"])

x = LabelTensor(torch.rand((100, 50, 3)), ['u', 'v', 'w'])
pos = LabelTensor(torch.rand((100, 50, 2)), ['x', 'y'])
input_graph = RadiusGraph(x, pos, r=.1, build_edge_attr=True)
output_graph = LabelTensor(torch.rand((100, 50, 3)), ['u', 'v', 'w'])

@pytest.mark.parametrize(
"input_, output_",
[
(input_tensor, output_tensor),
(input_graph, output_graph)
]
)
@pytest.mark.parametrize(
"automatic_batching",
[
True, False
]
[(input_tensor, output_tensor), (input_graph, output_graph)],
)
@pytest.mark.parametrize("automatic_batching", [True, False])
def test_dataloader_labels(input_, output_, automatic_batching):
problem = SupervisedProblem(input_=input_, output_=output_)
solver = SupervisedSolver(problem=problem, model=torch.nn.Linear(10, 10))
trainer = Trainer(solver, batch_size=10, train_size=.7, val_size=.3,
test_size=0., automatic_batching=automatic_batching)
trainer = Trainer(
solver,
batch_size=10,
train_size=0.7,
val_size=0.3,
test_size=0.0,
automatic_batching=automatic_batching,
)
dm = trainer.data_module
dm.setup()
dm.trainer = trainer
@@ -226,31 +211,30 @@ def test_dataloader_labels(input_, output_, automatic_batching):
assert len(dataloader) == 7
data = next(iter(dataloader))
assert isinstance(data, dict)
if isinstance(input_, RadiusGraph):
assert isinstance(data['data']['input_points'], Batch)
assert isinstance(data['data']['input_points'].x, LabelTensor)
assert data['data']['input_points'].x.labels == ['u', 'v', 'w']
assert data['data']['input_points'].pos.labels == ['x', 'y']
else:
assert isinstance(data['data']['input_points'], LabelTensor)
assert data['data']['input_points'].labels == ['u', 'v', 'w']
assert isinstance(data['data']['output_points'], LabelTensor)
assert data['data']['output_points'].labels == ['u', 'v', 'w']
if isinstance(input_, list):
assert isinstance(data["data"]["input_points"], Batch)
assert isinstance(data["data"]["input_points"].x, LabelTensor)
assert data["data"]["input_points"].x.labels == ["u", "v", "w"]
assert data["data"]["input_points"].pos.labels == ["x", "y"]
else:
assert isinstance(data["data"]["input_points"], LabelTensor)
assert data["data"]["input_points"].labels == ["u", "v", "w"]
assert isinstance(data["data"]["output_points"], LabelTensor)
assert data["data"]["output_points"].labels == ["u", "v", "w"]

dataloader = dm.val_dataloader()
assert isinstance(dataloader, DataLoader)
assert len(dataloader) == 3
data = next(iter(dataloader))
assert isinstance(data, dict)
if isinstance(input_, RadiusGraph):
assert isinstance(data['data']['input_points'], Batch)
assert isinstance(data['data']['input_points'].x, LabelTensor)
assert data['data']['input_points'].x.labels == ['u', 'v', 'w']
assert data['data']['input_points'].pos.labels == ['x', 'y']
if isinstance(input_, list):
assert isinstance(data["data"]["input_points"], Batch)
assert isinstance(data["data"]["input_points"].x, LabelTensor)
assert data["data"]["input_points"].x.labels == ["u", "v", "w"]
assert data["data"]["input_points"].pos.labels == ["x", "y"]
else:
assert isinstance(data['data']['input_points'], torch.Tensor)
assert isinstance(data['data']['input_points'], LabelTensor)
assert data['data']['input_points'].labels == ['u', 'v', 'w']
assert isinstance(data['data']['output_points'], torch.Tensor)
assert data['data']['output_points'].labels == ['u', 'v', 'w']
test_dataloader_labels(input_graph, output_graph, True)
assert isinstance(data["data"]["input_points"], torch.Tensor)
assert isinstance(data["data"]["input_points"], LabelTensor)
assert data["data"]["input_points"].labels == ["u", "v", "w"]
assert isinstance(data["data"]["output_points"], torch.Tensor)
assert data["data"]["output_points"].labels == ["u", "v", "w"]
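Note: the datamodule flow exercised above can be summarised as follows. This is a sketch, not the canonical usage; the SupervisedSolver and Trainer import paths are assumptions, while the rest mirrors the tests.

    import torch
    from pina import Trainer                      # path assumed
    from pina.solver import SupervisedSolver      # path assumed
    from pina.problem.zoo.supervised_problem import SupervisedProblem

    input_ = torch.rand((100, 10))                # graphs (a list of Graph objects) work the same way
    output_ = torch.rand((100, 10))
    problem = SupervisedProblem(input_=input_, output_=output_)
    solver = SupervisedSolver(problem=problem, model=torch.nn.Linear(10, 10))
    trainer = Trainer(solver, batch_size=10, train_size=0.7, val_size=0.3, test_size=0.0)
    dm = trainer.data_module
    dm.setup()
    dm.trainer = trainer
    batch = next(iter(dm.val_dataloader()))       # dict keyed by condition name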
@@ -6,55 +6,58 @@ from torch_geometric.data import Data

x = torch.rand((100, 20, 10))
pos = torch.rand((100, 20, 2))
input_ = KNNGraph(x=x, pos=pos, k=3, build_edge_attr=True)
input_ = [
KNNGraph(x=x_, pos=pos_, neighbours=3, edge_attr=True)
for x_, pos_ in zip(x, pos)
]
output_ = torch.rand((100, 20, 10))

x_2 = torch.rand((50, 20, 10))
pos_2 = torch.rand((50, 20, 2))
input_2_ = KNNGraph(x=x_2, pos=pos_2, k=3, build_edge_attr=True)
input_2_ = [
KNNGraph(x=x_, pos=pos_, neighbours=3, edge_attr=True)
for x_, pos_ in zip(x_2, pos_2)
]
output_2_ = torch.rand((50, 20, 10))


# Problem with a single condition
conditions_dict_single = {
'data': {
'input_points': input_.data,
'output_points': output_,
"data": {
"input_points": input_,
"output_points": output_,
}
}
max_conditions_lengths_single = {
'data': 100
}
max_conditions_lengths_single = {"data": 100}

# Problem with multiple conditions
conditions_dict_single_multi = {
'data_1': {
'input_points': input_.data,
'output_points': output_,
"data_1": {
"input_points": input_,
"output_points": output_,
},
"data_2": {
"input_points": input_2_,
"output_points": output_2_,
},
'data_2': {
'input_points': input_2_.data,
'output_points': output_2_,
}
}

max_conditions_lengths_multi = {
'data_1': 100,
'data_2': 50
}
max_conditions_lengths_multi = {"data_1": 100, "data_2": 50}


@pytest.mark.parametrize(
"conditions_dict, max_conditions_lengths",
[
(conditions_dict_single, max_conditions_lengths_single),
(conditions_dict_single_multi, max_conditions_lengths_multi)
]
(conditions_dict_single_multi, max_conditions_lengths_multi),
],
)
def test_constructor(conditions_dict, max_conditions_lengths):
dataset = PinaDatasetFactory(conditions_dict,
max_conditions_lengths=max_conditions_lengths,
automatic_batching=True)
dataset = PinaDatasetFactory(
conditions_dict,
max_conditions_lengths=max_conditions_lengths,
automatic_batching=True,
)
assert isinstance(dataset, PinaGraphDataset)
assert len(dataset) == 100

@@ -63,39 +66,67 @@ def test_constructor(conditions_dict, max_conditions_lengths):
"conditions_dict, max_conditions_lengths",
[
(conditions_dict_single, max_conditions_lengths_single),
(conditions_dict_single_multi, max_conditions_lengths_multi)
]
(conditions_dict_single_multi, max_conditions_lengths_multi),
],
)
def test_getitem(conditions_dict, max_conditions_lengths):
dataset = PinaDatasetFactory(conditions_dict,
max_conditions_lengths=max_conditions_lengths,
automatic_batching=True)
dataset = PinaDatasetFactory(
conditions_dict,
max_conditions_lengths=max_conditions_lengths,
automatic_batching=True,
)
data = dataset[50]
assert isinstance(data, dict)
assert all([isinstance(d['input_points'], Data)
for d in data.values()])
assert all([isinstance(d['output_points'], torch.Tensor)
for d in data.values()])
assert all([d['input_points'].x.shape == torch.Size((20, 10))
for d in data.values()])
assert all([d['output_points'].shape == torch.Size((20, 10))
for d in data.values()])
assert all([d['input_points'].edge_index.shape ==
torch.Size((2, 60)) for d in data.values()])
assert all([d['input_points'].edge_attr.shape[0]
== 60 for d in data.values()])
assert all([isinstance(d["input_points"], Data) for d in data.values()])
assert all(
[isinstance(d["output_points"], torch.Tensor) for d in data.values()]
)
assert all(
[
d["input_points"].x.shape == torch.Size((20, 10))
for d in data.values()
]
)
assert all(
[
d["output_points"].shape == torch.Size((20, 10))
for d in data.values()
]
)
assert all(
[
d["input_points"].edge_index.shape == torch.Size((2, 60))
for d in data.values()
]
)
assert all(
[d["input_points"].edge_attr.shape[0] == 60 for d in data.values()]
)

data = dataset.fetch_from_idx_list([i for i in range(20)])
assert isinstance(data, dict)
assert all([isinstance(d['input_points'], Data)
for d in data.values()])
assert all([isinstance(d['output_points'], torch.Tensor)
for d in data.values()])
assert all([d['input_points'].x.shape == torch.Size((400, 10))
for d in data.values()])
assert all([d['output_points'].shape == torch.Size((400, 10))
for d in data.values()])
assert all([d['input_points'].edge_index.shape ==
torch.Size((2, 1200)) for d in data.values()])
assert all([d['input_points'].edge_attr.shape[0]
== 1200 for d in data.values()])
assert all([isinstance(d["input_points"], Data) for d in data.values()])
assert all(
[isinstance(d["output_points"], torch.Tensor) for d in data.values()]
)
assert all(
[
d["input_points"].x.shape == torch.Size((400, 10))
for d in data.values()
]
)
assert all(
[
d["output_points"].shape == torch.Size((400, 10))
for d in data.values()
]
)
assert all(
[
d["input_points"].edge_index.shape == torch.Size((2, 1200))
for d in data.values()
]
)
assert all(
[d["input_points"].edge_attr.shape[0] == 1200 for d in data.values()]
)
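Note: the dataset hunk above builds its conditions from per-sample graph lists rather than a graph collection's .data attribute. A sketch of that usage (the PinaDatasetFactory import path is an assumption; the call signature mirrors the tests):

    import torch
    from pina.data.dataset import PinaDatasetFactory   # path assumed
    from pina.graph import KNNGraph

    x = torch.rand((100, 20, 10))
    pos = torch.rand((100, 20, 2))
    graphs = [
        KNNGraph(x=x_, pos=pos_, neighbours=3, edge_attr=True)
        for x_, pos_ in zip(x, pos)
    ]
    conditions = {"data": {"input_points": graphs, "output_points": torch.rand((100, 20, 10))}}
    dataset = PinaDatasetFactory(
        conditions, max_conditions_lengths={"data": 100}, automatic_batching=True
    )
    sample = dataset[50]                         # dict: condition name -> Data / tensor
    batch = dataset.fetch_from_idx_list(list(range(20)))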
@@ -1,163 +1,346 @@
import pytest
import torch
from pina.graph import RadiusGraph, KNNGraph
from pina import LabelTensor
from pina.graph import RadiusGraph, KNNGraph, Graph
from torch_geometric.data import Data


def build_edge_attr(pos, edge_index):
return torch.cat([pos[edge_index[0]], pos[edge_index[1]]], dim=-1)


@pytest.mark.parametrize(
"x, pos",
[
([torch.rand(10, 2) for _ in range(3)],
[torch.rand(10, 3) for _ in range(3)]),
([torch.rand(10, 2) for _ in range(3)],
[torch.rand(10, 3) for _ in range(3)]),
(torch.rand(3, 10, 2), torch.rand(3, 10, 3)),
(torch.rand(3, 10, 2), torch.rand(3, 10, 3)),
]
(torch.rand(10, 2), torch.rand(10, 3)),
(
LabelTensor(torch.rand(10, 2), ["u", "v"]),
LabelTensor(torch.rand(10, 3), ["x", "y", "z"]),
),
],
)
def test_build_multiple_graph_multiple_val(x, pos):
graph = RadiusGraph(x=x, pos=pos, build_edge_attr=False, r=.3)
assert len(graph.data) == 3
data = graph.data
assert all(torch.isclose(d_.x, x_).all() for (d_, x_) in zip(data, x))
assert all(torch.isclose(d_.pos, pos_).all() for d_, pos_ in zip(data, pos))
assert all(len(d.edge_index) == 2 for d in data)
graph = RadiusGraph(x=x, pos=pos, build_edge_attr=True, r=.3)
data = graph.data
assert all(torch.isclose(d_.x, x_).all() for (d_, x_) in zip(data, x))
assert all(torch.isclose(d_.pos, pos_).all() for d_, pos_ in zip(data, pos))
assert all(len(d.edge_index) == 2 for d in data)
assert all(d.edge_attr is not None for d in data)
assert all([d.edge_index.shape[1] == d.edge_attr.shape[0]] for d in data)
def test_build_graph(x, pos):
edge_index = torch.tensor(
[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]],
dtype=torch.int64,
)
graph = Graph(x=x, pos=pos, edge_index=edge_index)
assert hasattr(graph, "x")
assert hasattr(graph, "pos")
assert hasattr(graph, "edge_index")
assert torch.isclose(graph.x, x).all()
if isinstance(x, LabelTensor):
assert isinstance(graph.x, LabelTensor)
assert graph.x.labels == x.labels
else:
assert isinstance(graph.pos, torch.Tensor)
assert torch.isclose(graph.pos, pos).all()
if isinstance(pos, LabelTensor):
assert isinstance(graph.pos, LabelTensor)
assert graph.pos.labels == pos.labels
else:
assert isinstance(graph.pos, torch.Tensor)

graph = KNNGraph(x=x, pos=pos, build_edge_attr=True, k=3)
data = graph.data
assert all(torch.isclose(d_.x, x_).all() for (d_, x_) in zip(data, x))
assert all(torch.isclose(d_.pos, pos_).all() for d_, pos_ in zip(data, pos))
assert all(len(d.edge_index) == 2 for d in data)
assert all(d.edge_attr is not None for d in data)
assert all([d.edge_index.shape[1] == d.edge_attr.shape[0]] for d in data)


def test_build_single_graph_multiple_val():
x = torch.rand(10, 2)
pos = torch.rand(10, 3)
graph = RadiusGraph(x=x, pos=pos, build_edge_attr=False, r=.3)
assert len(graph.data) == 1
data = graph.data
assert all(torch.isclose(d.x, x).all() for d in data)
assert all(torch.isclose(d_.pos, pos).all() for d_ in data)
assert all(len(d.edge_index) == 2 for d in data)
graph = RadiusGraph(x=x, pos=pos, build_edge_attr=True, r=.3)
data = graph.data
assert len(graph.data) == 1
assert all(torch.isclose(d.x, x).all() for d in data)
assert all(torch.isclose(d_.pos, pos).all() for d_ in data)
assert all(len(d.edge_index) == 2 for d in data)
assert all(d.edge_attr is not None for d in data)
assert all([d.edge_index.shape[1] == d.edge_attr.shape[0]] for d in data)

x = torch.rand(10, 2)
pos = torch.rand(10, 3)
graph = KNNGraph(x=x, pos=pos, build_edge_attr=True, k=3)
assert len(graph.data) == 1
data = graph.data
assert all(torch.isclose(d.x, x).all() for d in data)
assert all(torch.isclose(d_.pos, pos).all() for d_ in data)
assert all(len(d.edge_index) == 2 for d in data)
graph = KNNGraph(x=x, pos=pos, build_edge_attr=True, k=3)
data = graph.data
assert len(graph.data) == 1
assert all(torch.isclose(d.x, x).all() for d in data)
assert all(torch.isclose(d_.pos, pos).all() for d_ in data)
assert all(len(d.edge_index) == 2 for d in data)
assert all(d.edge_attr is not None for d in data)
assert all([d.edge_index.shape[1] == d.edge_attr.shape[0]] for d in data)
edge_index = torch.tensor(
[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]],
dtype=torch.int64,
)
graph = Graph(x=x, edge_index=edge_index)
assert hasattr(graph, "x")
assert hasattr(graph, "pos")
assert hasattr(graph, "edge_index")
assert torch.isclose(graph.x, x).all()
if isinstance(x, LabelTensor):
assert isinstance(graph.x, LabelTensor)
assert graph.x.labels == x.labels
else:
assert isinstance(graph.x, torch.Tensor)


@pytest.mark.parametrize(
"pos",
"x, pos",
[
([torch.rand(10, 3) for _ in range(3)]),
([torch.rand(10, 3) for _ in range(3)]),
(torch.rand(3, 10, 3)),
(torch.rand(3, 10, 3))
]
(torch.rand(10, 2), torch.rand(10, 3)),
(
LabelTensor(torch.rand(10, 2), ["u", "v"]),
LabelTensor(torch.rand(10, 3), ["x", "y", "z"]),
),
],
)
def test_build_single_graph_single_val(pos):
x = torch.rand(10, 2)
graph = RadiusGraph(x=x, pos=pos, build_edge_attr=False, r=.3)
assert len(graph.data) == 3
data = graph.data
assert all(torch.isclose(d.x, x).all() for d in data)
assert all(torch.isclose(d_.pos, pos_).all() for d_, pos_ in zip(data, pos))
assert all(len(d.edge_index) == 2 for d in data)
graph = RadiusGraph(x=x, pos=pos, build_edge_attr=True, r=.3)
data = graph.data
assert all(torch.isclose(d.x, x).all() for d in data)
assert all(torch.isclose(d_.pos, pos_).all() for d_, pos_ in zip(data, pos))
assert all(len(d.edge_index) == 2 for d in data)
assert all(d.edge_attr is not None for d in data)
assert all([d.edge_index.shape[1] == d.edge_attr.shape[0]] for d in data)
x = torch.rand(10, 2)
graph = KNNGraph(x=x, pos=pos, build_edge_attr=False, k=3)
assert len(graph.data) == 3
data = graph.data
assert all(torch.isclose(d.x, x).all() for d in data)
assert all(torch.isclose(d_.pos, pos_).all() for d_, pos_ in zip(data, pos))
assert all(len(d.edge_index) == 2 for d in data)
graph = KNNGraph(x=x, pos=pos, build_edge_attr=True, k=3)
data = graph.data
assert all(torch.isclose(d.x, x).all() for d in data)
assert all(torch.isclose(d_.pos, pos_).all() for d_, pos_ in zip(data, pos))
assert all(len(d.edge_index) == 2 for d in data)
assert all(d.edge_attr is not None for d in data)
assert all([d.edge_index.shape[1] == d.edge_attr.shape[0]] for d in data)


def test_additional_parameters_1():
x = torch.rand(3, 10, 2)
pos = torch.rand(3, 10, 2)
additional_parameters = {'y': torch.ones(3)}
graph = RadiusGraph(x=x, pos=pos, build_edge_attr=True, r=.3,
additional_params=additional_parameters)
assert len(graph.data) == 3
data = graph.data
assert all(torch.isclose(d_.x, x_).all() for (d_, x_) in zip(data, x))
assert all(hasattr(d, 'y') for d in data)
assert all(d_.y == 1 for d_ in data)
def test_build_radius_graph(x, pos):
graph = RadiusGraph(x=x, pos=pos, radius=0.5)
assert hasattr(graph, "x")
assert hasattr(graph, "pos")
assert hasattr(graph, "edge_index")
assert torch.isclose(graph.x, x).all()
if isinstance(x, LabelTensor):
assert isinstance(graph.x, LabelTensor)
assert graph.x.labels == x.labels
else:
assert isinstance(graph.pos, torch.Tensor)
assert torch.isclose(graph.pos, pos).all()
if isinstance(pos, LabelTensor):
assert isinstance(graph.pos, LabelTensor)
assert graph.pos.labels == pos.labels
else:
assert isinstance(graph.pos, torch.Tensor)


@pytest.mark.parametrize(
"additional_parameters",
"x, pos",
[
({'y': torch.rand(3, 10, 1)}),
({'y': [torch.rand(10, 1) for _ in range(3)]}),
]
(torch.rand(10, 2), torch.rand(10, 3)),
(
LabelTensor(torch.rand(10, 2), ["u", "v"]),
LabelTensor(torch.rand(10, 3), ["x", "y", "z"]),
),
],
)
def test_additional_parameters_2(additional_parameters):
x = torch.rand(3, 10, 2)
pos = torch.rand(3, 10, 2)
graph = RadiusGraph(x=x, pos=pos, build_edge_attr=True, r=.3,
additional_params=additional_parameters)
assert len(graph.data) == 3
data = graph.data
assert all(torch.isclose(d_.x, x_).all() for (d_, x_) in zip(data, x))
assert all(hasattr(d, 'y') for d in data)
assert all(torch.isclose(d_.x, x_).all() for (d_, x_) in zip(data, x))
def test_build_radius_graph_edge_attr(x, pos):
graph = RadiusGraph(x=x, pos=pos, radius=0.5, edge_attr=True)
assert hasattr(graph, "x")
assert hasattr(graph, "pos")
assert hasattr(graph, "edge_index")
assert torch.isclose(graph.x, x).all()
if isinstance(x, LabelTensor):
assert isinstance(graph.x, LabelTensor)
assert graph.x.labels == x.labels
else:
assert isinstance(graph.pos, torch.Tensor)
assert torch.isclose(graph.pos, pos).all()
if isinstance(pos, LabelTensor):
assert isinstance(graph.pos, LabelTensor)
assert graph.pos.labels == pos.labels
else:
assert isinstance(graph.pos, torch.Tensor)
assert hasattr(graph, "edge_attr")
assert isinstance(graph.edge_attr, torch.Tensor)
assert graph.edge_attr.shape[-1] == 3
assert graph.edge_attr.shape[0] == graph.edge_index.shape[1]

def test_custom_build_edge_attr_func():
x = torch.rand(3, 10, 2)
pos = torch.rand(3, 10, 2)

def build_edge_attr(x, pos, edge_index):
return torch.cat([pos[edge_index[0]], pos[edge_index[1]]], dim=-1)
@pytest.mark.parametrize(
"x, pos",
[
(torch.rand(10, 2), torch.rand(10, 3)),
(
LabelTensor(torch.rand(10, 2), ["u", "v"]),
LabelTensor(torch.rand(10, 3), ["x", "y", "z"]),
),
],
)
def test_build_radius_graph_custom_edge_attr(x, pos):
graph = RadiusGraph(
x=x,
pos=pos,
radius=0.5,
edge_attr=True,
custom_edge_func=build_edge_attr,
)
assert hasattr(graph, "x")
assert hasattr(graph, "pos")
assert hasattr(graph, "edge_index")
assert torch.isclose(graph.x, x).all()
if isinstance(x, LabelTensor):
assert isinstance(graph.x, LabelTensor)
assert graph.x.labels == x.labels
else:
assert isinstance(graph.pos, torch.Tensor)
assert torch.isclose(graph.pos, pos).all()
if isinstance(pos, LabelTensor):
assert isinstance(graph.pos, LabelTensor)
assert graph.pos.labels == pos.labels
else:
assert isinstance(graph.pos, torch.Tensor)
assert hasattr(graph, "edge_attr")
assert isinstance(graph.edge_attr, torch.Tensor)
assert graph.edge_attr.shape[-1] == 6
assert graph.edge_attr.shape[0] == graph.edge_index.shape[1]

graph = RadiusGraph(x=x, pos=pos, build_edge_attr=True, r=.3,
custom_build_edge_attr=build_edge_attr)
assert len(graph.data) == 3
data = graph.data
assert all(hasattr(d, 'edge_attr') for d in data)
assert all(d.edge_attr.shape[1] == 4 for d in data)
assert all(torch.isclose(d.edge_attr,
build_edge_attr(d.x, d.pos, d.edge_index)).all()
for d in data)

@pytest.mark.parametrize(
"x, pos",
[
(torch.rand(10, 2), torch.rand(10, 3)),
(
LabelTensor(torch.rand(10, 2), ["u", "v"]),
LabelTensor(torch.rand(10, 3), ["x", "y", "z"]),
),
],
)
def test_build_knn_graph(x, pos):
graph = KNNGraph(x=x, pos=pos, neighbours=2)
assert hasattr(graph, "x")
assert hasattr(graph, "pos")
assert hasattr(graph, "edge_index")
assert torch.isclose(graph.x, x).all()
if isinstance(x, LabelTensor):
assert isinstance(graph.x, LabelTensor)
assert graph.x.labels == x.labels
else:
assert isinstance(graph.pos, torch.Tensor)
assert torch.isclose(graph.pos, pos).all()
if isinstance(pos, LabelTensor):
assert isinstance(graph.pos, LabelTensor)
assert graph.pos.labels == pos.labels
else:
assert isinstance(graph.pos, torch.Tensor)
assert graph.edge_attr is None


@pytest.mark.parametrize(
"x, pos",
[
(torch.rand(10, 2), torch.rand(10, 3)),
(
LabelTensor(torch.rand(10, 2), ["u", "v"]),
LabelTensor(torch.rand(10, 3), ["x", "y", "z"]),
),
],
)
def test_build_knn_graph_edge_attr(x, pos):
graph = KNNGraph(x=x, pos=pos, neighbours=2, edge_attr=True)
assert hasattr(graph, "x")
assert hasattr(graph, "pos")
assert hasattr(graph, "edge_index")
assert torch.isclose(graph.x, x).all()
if isinstance(x, LabelTensor):
assert isinstance(graph.x, LabelTensor)
assert graph.x.labels == x.labels
else:
assert isinstance(graph.pos, torch.Tensor)
assert torch.isclose(graph.pos, pos).all()
if isinstance(pos, LabelTensor):
assert isinstance(graph.pos, LabelTensor)
assert graph.pos.labels == pos.labels
else:
assert isinstance(graph.pos, torch.Tensor)
assert isinstance(graph.edge_attr, torch.Tensor)
assert graph.edge_attr.shape[-1] == 3
assert graph.edge_attr.shape[0] == graph.edge_index.shape[1]


@pytest.mark.parametrize(
"x, pos",
[
(torch.rand(10, 2), torch.rand(10, 3)),
(
LabelTensor(torch.rand(10, 2), ["u", "v"]),
LabelTensor(torch.rand(10, 3), ["x", "y", "z"]),
),
],
)
def test_build_knn_graph_custom_edge_attr(x, pos):
graph = KNNGraph(
x=x,
pos=pos,
neighbours=2,
edge_attr=True,
custom_edge_func=build_edge_attr,
)
assert hasattr(graph, "x")
assert hasattr(graph, "pos")
assert hasattr(graph, "edge_index")
assert torch.isclose(graph.x, x).all()
if isinstance(x, LabelTensor):
assert isinstance(graph.x, LabelTensor)
assert graph.x.labels == x.labels
else:
assert isinstance(graph.pos, torch.Tensor)
assert torch.isclose(graph.pos, pos).all()
if isinstance(pos, LabelTensor):
assert isinstance(graph.pos, LabelTensor)
assert graph.pos.labels == pos.labels
else:
assert isinstance(graph.pos, torch.Tensor)
assert isinstance(graph.edge_attr, torch.Tensor)
assert graph.edge_attr.shape[-1] == 6
assert graph.edge_attr.shape[0] == graph.edge_index.shape[1]


@pytest.mark.parametrize(
"x, pos, y",
[
(torch.rand(10, 2), torch.rand(10, 3), torch.rand(10, 4)),
(
LabelTensor(torch.rand(10, 2), ["u", "v"]),
LabelTensor(torch.rand(10, 3), ["x", "y", "z"]),
LabelTensor(torch.rand(10, 4), ["a", "b", "c", "d"]),
),
],
)
def test_additional_params(x, pos, y):
edge_index = torch.tensor(
[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]],
dtype=torch.int64,
)
graph = Graph(x=x, pos=pos, edge_index=edge_index, y=y)
assert hasattr(graph, "y")
assert torch.isclose(graph.y, y).all()
if isinstance(y, LabelTensor):
assert isinstance(graph.y, LabelTensor)
assert graph.y.labels == y.labels
else:
assert isinstance(graph.y, torch.Tensor)
assert torch.isclose(graph.y, y).all()
if isinstance(y, LabelTensor):
assert isinstance(graph.y, LabelTensor)
assert graph.y.labels == y.labels
else:
assert isinstance(graph.y, torch.Tensor)


@pytest.mark.parametrize(
"x, pos, y",
[
(torch.rand(10, 2), torch.rand(10, 3), torch.rand(10, 4)),
(
LabelTensor(torch.rand(10, 2), ["u", "v"]),
LabelTensor(torch.rand(10, 3), ["x", "y", "z"]),
LabelTensor(torch.rand(10, 4), ["a", "b", "c", "d"]),
),
],
)
def test_additional_params_radius_graph(x, pos, y):
graph = RadiusGraph(x=x, pos=pos, radius=0.5, y=y)
assert hasattr(graph, "y")
assert torch.isclose(graph.y, y).all()
if isinstance(y, LabelTensor):
assert isinstance(graph.y, LabelTensor)
assert graph.y.labels == y.labels
else:
assert isinstance(graph.y, torch.Tensor)
assert torch.isclose(graph.y, y).all()
if isinstance(y, LabelTensor):
assert isinstance(graph.y, LabelTensor)
assert graph.y.labels == y.labels
else:
assert isinstance(graph.y, torch.Tensor)


@pytest.mark.parametrize(
"x, pos, y",
[
(torch.rand(10, 2), torch.rand(10, 3), torch.rand(10, 4)),
(
LabelTensor(torch.rand(10, 2), ["u", "v"]),
LabelTensor(torch.rand(10, 3), ["x", "y", "z"]),
LabelTensor(torch.rand(10, 4), ["a", "b", "c", "d"]),
),
],
)
def test_additional_params_knn_graph(x, pos, y):
graph = KNNGraph(x=x, pos=pos, neighbours=3, y=y)
assert hasattr(graph, "y")
assert torch.isclose(graph.y, y).all()
if isinstance(y, LabelTensor):
assert isinstance(graph.y, LabelTensor)
assert graph.y.labels == y.labels
else:
assert isinstance(graph.y, torch.Tensor)
assert torch.isclose(graph.y, y).all()
if isinstance(y, LabelTensor):
assert isinstance(graph.y, LabelTensor)
assert graph.y.labels == y.labels
else:
assert isinstance(graph.y, torch.Tensor)
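Note: the rewritten graph tests above exercise the simplified API, where Graph, RadiusGraph, and KNNGraph each return a single graph object instead of a collection with a .data list. A quick sketch assembled from the calls shown in the diff (a sketch under those assumptions, not reference documentation):

    import torch
    from pina import LabelTensor
    from pina.graph import Graph, RadiusGraph, KNNGraph

    x = LabelTensor(torch.rand(10, 2), ["u", "v"])
    pos = LabelTensor(torch.rand(10, 3), ["x", "y", "z"])

    # explicit connectivity
    edge_index = torch.tensor([[0, 1, 2], [1, 2, 0]], dtype=torch.int64)
    g = Graph(x=x, pos=pos, edge_index=edge_index)

    # connectivity built from positions; extra keyword args (e.g. y=...) are
    # attached as attributes, and LabelTensor inputs keep their labels
    rg = RadiusGraph(x=x, pos=pos, radius=0.5, edge_attr=True, y=torch.rand(10, 4))

    # same helper signature as the test's build_edge_attr function
    def my_edge_attr(pos, edge_index):
        return torch.cat([pos[edge_index[0]], pos[edge_index[1]]], dim=-1)

    kg = KNNGraph(x=x, pos=pos, neighbours=2, edge_attr=True, custom_edge_func=my_edge_attr)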
@@ -6,99 +6,90 @@ from torch_geometric.data import Batch

x = [torch.rand(100, 6) for _ in range(10)]
pos = [torch.rand(100, 3) for _ in range(10)]
graph = KNNGraph(x=x, pos=pos, build_edge_attr=True, k=6)
input_ = Batch.from_data_list(graph.data)
graph = [
KNNGraph(x=x_, pos=pos_, neighbours=6, edge_attr=True)
for x_, pos_ in zip(x, pos)
]
input_ = Batch.from_data_list(graph)


@pytest.mark.parametrize(
"shared_weights",
[
True,
False
]
)
@pytest.mark.parametrize("shared_weights", [True, False])
def test_constructor(shared_weights):
lifting_operator = torch.nn.Linear(6, 16)
projection_operator = torch.nn.Linear(16, 3)
GraphNeuralOperator(lifting_operator=lifting_operator,
projection_operator=projection_operator,
edge_features=3,
internal_layers=[16, 16],
shared_weights=shared_weights)
GraphNeuralOperator(
lifting_operator=lifting_operator,
projection_operator=projection_operator,
edge_features=3,
internal_layers=[16, 16],
shared_weights=shared_weights,
)

GraphNeuralOperator(lifting_operator=lifting_operator,
projection_operator=projection_operator,
edge_features=3,
inner_size=16,
internal_n_layers=10,
shared_weights=shared_weights)
GraphNeuralOperator(
lifting_operator=lifting_operator,
projection_operator=projection_operator,
edge_features=3,
inner_size=16,
internal_n_layers=10,
shared_weights=shared_weights,
)

int_func = torch.nn.Softplus
ext_func = torch.nn.ReLU

GraphNeuralOperator(lifting_operator=lifting_operator,
projection_operator=projection_operator,
edge_features=3,
internal_n_layers=10,
shared_weights=shared_weights,
internal_func=int_func,
external_func=ext_func)
GraphNeuralOperator(
lifting_operator=lifting_operator,
projection_operator=projection_operator,
edge_features=3,
internal_n_layers=10,
shared_weights=shared_weights,
internal_func=int_func,
external_func=ext_func,
)


@pytest.mark.parametrize(
"shared_weights",
[
True,
False
]
)
@pytest.mark.parametrize("shared_weights", [True, False])
def test_forward_1(shared_weights):
lifting_operator = torch.nn.Linear(6, 16)
projection_operator = torch.nn.Linear(16, 3)
model = GraphNeuralOperator(lifting_operator=lifting_operator,
projection_operator=projection_operator,
edge_features=3,
internal_layers=[16, 16],
shared_weights=shared_weights)
model = GraphNeuralOperator(
lifting_operator=lifting_operator,
projection_operator=projection_operator,
edge_features=3,
internal_layers=[16, 16],
shared_weights=shared_weights,
)
output_ = model(input_)
assert output_.shape == torch.Size([1000, 3])


@pytest.mark.parametrize(
"shared_weights",
[
True,
False
]
)
@pytest.mark.parametrize("shared_weights", [True, False])
def test_forward_2(shared_weights):
lifting_operator = torch.nn.Linear(6, 16)
projection_operator = torch.nn.Linear(16, 3)
model = GraphNeuralOperator(lifting_operator=lifting_operator,
projection_operator=projection_operator,
edge_features=3,
inner_size=32,
internal_n_layers=2,
shared_weights=shared_weights)
model = GraphNeuralOperator(
lifting_operator=lifting_operator,
projection_operator=projection_operator,
edge_features=3,
inner_size=32,
internal_n_layers=2,
shared_weights=shared_weights,
)
output_ = model(input_)
assert output_.shape == torch.Size([1000, 3])


@pytest.mark.parametrize(
"shared_weights",
[
True,
False
]
)
@pytest.mark.parametrize("shared_weights", [True, False])
def test_backward(shared_weights):
lifting_operator = torch.nn.Linear(6, 16)
projection_operator = torch.nn.Linear(16, 3)
model = GraphNeuralOperator(lifting_operator=lifting_operator,
projection_operator=projection_operator,
edge_features=3,
internal_layers=[16, 16],
shared_weights=shared_weights)
model = GraphNeuralOperator(
lifting_operator=lifting_operator,
projection_operator=projection_operator,
edge_features=3,
internal_layers=[16, 16],
shared_weights=shared_weights,
)
input_.x.requires_grad = True
output_ = model(input_)
l = torch.mean(output_)
@@ -106,22 +97,18 @@ def test_backward(shared_weights):
assert input_.x.grad.shape == torch.Size([1000, 6])


@pytest.mark.parametrize(
"shared_weights",
[
True,
False
]
)
@pytest.mark.parametrize("shared_weights", [True, False])
def test_backward_2(shared_weights):
lifting_operator = torch.nn.Linear(6, 16)
projection_operator = torch.nn.Linear(16, 3)
model = GraphNeuralOperator(lifting_operator=lifting_operator,
projection_operator=projection_operator,
edge_features=3,
inner_size=32,
internal_n_layers=2,
shared_weights=shared_weights)
model = GraphNeuralOperator(
lifting_operator=lifting_operator,
projection_operator=projection_operator,
edge_features=3,
inner_size=32,
internal_n_layers=2,
shared_weights=shared_weights,
)
input_.x.requires_grad = True
output_ = model(input_)
l = torch.mean(output_)
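Note: the operator tests above batch the per-sample graphs with torch_geometric before the forward pass. A sketch of that flow (the GraphNeuralOperator import path is an assumption; shapes follow the tests, 10 graphs x 100 nodes):

    import torch
    from torch_geometric.data import Batch
    from pina.graph import KNNGraph
    from pina.model import GraphNeuralOperator   # path assumed

    x = [torch.rand(100, 6) for _ in range(10)]
    pos = [torch.rand(100, 3) for _ in range(10)]
    batch = Batch.from_data_list(
        [KNNGraph(x=x_, pos=pos_, neighbours=6, edge_attr=True) for x_, pos_ in zip(x, pos)]
    )
    model = GraphNeuralOperator(
        lifting_operator=torch.nn.Linear(6, 16),
        projection_operator=torch.nn.Linear(16, 3),
        edge_features=3,
        inner_size=32,
        internal_n_layers=2,
    )
    out = model(batch)   # expected shape: (1000, 3)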
@@ -4,28 +4,31 @@ from pina.condition import InputOutputPointsCondition
from pina.problem.zoo.supervised_problem import SupervisedProblem
from pina.graph import RadiusGraph


def test_constructor():
input_ = torch.rand((100,10))
output_ = torch.rand((100,10))
input_ = torch.rand((100, 10))
output_ = torch.rand((100, 10))
problem = SupervisedProblem(input_=input_, output_=output_)
assert isinstance(problem, AbstractProblem)
assert hasattr(problem, "conditions")
assert isinstance(problem.conditions, dict)
assert list(problem.conditions.keys()) == ['data']
assert isinstance(problem.conditions['data'], InputOutputPointsCondition)
assert list(problem.conditions.keys()) == ["data"]
assert isinstance(problem.conditions["data"], InputOutputPointsCondition)


def test_constructor_graph():
x = torch.rand((20,100,10))
pos = torch.rand((20,100,2))
input_ = RadiusGraph(
x=x, pos=pos, r=.2, build_edge_attr=True
)
output_ = torch.rand((100,10))
x = torch.rand((20, 100, 10))
pos = torch.rand((20, 100, 2))
input_ = [
RadiusGraph(x=x_, pos=pos_, radius=0.2, edge_attr=True)
for x_, pos_ in zip(x, pos)
]
output_ = torch.rand((100, 10))
problem = SupervisedProblem(input_=input_, output_=output_)
assert isinstance(problem, AbstractProblem)
assert hasattr(problem, "conditions")
assert isinstance(problem.conditions, dict)
assert list(problem.conditions.keys()) == ['data']
assert isinstance(problem.conditions['data'], InputOutputPointsCondition)
assert isinstance(problem.conditions['data'].input_points, list)
assert isinstance(problem.conditions['data'].output_points, torch.Tensor)
assert list(problem.conditions.keys()) == ["data"]
assert isinstance(problem.conditions["data"], InputOutputPointsCondition)
assert isinstance(problem.conditions["data"].input_points, list)
assert isinstance(problem.conditions["data"].output_points, torch.Tensor)