edited utils to take list (#115)

* enhanced difference domain
* refactored utils
* fixed typo
* added tests

---------

Co-authored-by: Dario Coscia <93731561+dario-coscia@users.noreply.github.com>
@@ -3,16 +3,17 @@
 from .location import Location
 from ..label_tensor import LabelTensor


 class Difference(Location):
     """
     """

     def __init__(self, first, second):

         self.first = first
         self.second = second

-    def sample(self, n, mode ='random', variables='all'):
+    def sample(self, n, mode='random', variables='all'):
         """
         """
         assert mode is 'random', 'Only random mode is implemented'
@@ -24,4 +25,4 @@ class Difference(Location):
             samples.append(sample.tolist()[0])

-        import torch
-        return LabelTensor(torch.tensor(samples), labels=['x', 'y'])
+        return LabelTensor(torch.tensor(samples), labels=['x', 'y'])
@@ -25,9 +25,9 @@ class Union(Location):
         super().__init__()

         # union checks
-        self._check_union_inheritance(geometries)
-        self._check_union_consistency(geometries)
+        check_consistency(geometries, Location)
+        self._check_union_dimensions(geometries)

         # assign geometries
         self._geometries = geometries
@@ -36,7 +36,7 @@ class Union(Location):
-        """
+        The geometries."""
         return self._geometries


     @property
     def variables(self):
         """
@@ -116,7 +116,7 @@ class Union(Location):

         return LabelTensor(torch.cat(sampled_points), labels=[f'{i}' for i in self.variables])

-    def _check_union_consistency(self, geometries):
+    def _check_union_dimensions(self, geometries):
         """Check if the dimensions of the geometries are consistent.

         :param geometries: Geometries to be checked.
@@ -126,12 +126,3 @@ class Union(Location):
             if geometry.variables != geometries[0].variables:
                 raise NotImplementedError(
                     f'The geometries need to be the same dimensions. {geometry.variables} is not equal to {geometries[0].variables}')
-
-    def _check_union_inheritance(self, geometries):
-        """Check if the geometries are inherited from 'pina.geometry.Location'.
-
-        param geometries: Geometries to be checked.
-        :type geometries: list[Location]
-        """
-        for idx, geometry in enumerate(geometries):
-            check_consistency(geometry, Location, f'geometry[{idx}]')
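
Taken together, the two Union hunks swap a hand-rolled per-element loop for one list-aware call; only the per-index name in the old error message is lost. A minimal sketch of the new pattern, with import paths assumed from the diff context (pina.utils per the "Utils module" hunk below, pina.geometry per the removed docstring's reference to 'pina.geometry.Location'):

    from pina.utils import check_consistency   # assumed import path
    from pina.geometry import Location         # assumed import path

    # hypothetical list of Location subclass instances; an empty list
    # passes trivially, real code passes the geometries to merge
    geometries = []
    # one call now validates every element: the helper unpacks iterables itself
    check_consistency(geometries, Location)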
@@ -108,9 +108,9 @@ class LpLoss(LossInterface):
         super().__init__(reduction=reduction)

         # check consistency
-        check_consistency(p, (str,int,float), 'degree p')
+        check_consistency(p, (str,int,float))
         self.p = p
-        check_consistency(relative, bool, 'relative')
+        check_consistency(relative, bool)
         self.relative = relative

     def forward(self, input, target):
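
The same object_name removal appears here; a tuple of types keeps behaving exactly as with plain isinstance. A hedged usage sketch (import path assumed as above, example values illustrative):

    from pina.utils import check_consistency   # assumed import path

    # the degree p may be a string, an int, or a float:
    check_consistency(2, (str, int, float))      # passes
    check_consistency('inf', (str, int, float))  # passes
    # the relative flag is a plain bool check:
    check_consistency(True, bool)                # passes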
@@ -4,20 +4,20 @@ from ..utils import check_consistency


 class Network(torch.nn.Module):

     def __init__(self, model, extra_features=None):
         super().__init__()

         # check model consistency
-        check_consistency(model, nn.Module, 'torch model')
+        check_consistency(model, nn.Module)
         self._model = model

         # check consistency and assign extra fatures
         if extra_features is None:
             self._extra_features = []
         else:
             for feat in extra_features:
-                check_consistency(feat, nn.Module, 'extra features')
+                check_consistency(feat, nn.Module)
             self._extra_features = nn.Sequential(*extra_features)

         # check model works with inputs
@@ -44,4 +44,4 @@ class Network(torch.nn.Module):

     @property
     def extra_features(self):
         return self._extra_features
pina/pinn.py
@@ -48,11 +48,11 @@ class PINN(SolverInterface):
         super().__init__(model=model, problem=problem, extra_features=extra_features)

         # check consistency
-        check_consistency(optimizer, torch.optim.Optimizer, 'optimizer', subclass=True)
-        check_consistency(optimizer_kwargs, dict, 'optimizer_kwargs')
-        check_consistency(scheduler, LRScheduler, 'scheduler', subclass=True)
-        check_consistency(scheduler_kwargs, dict, 'scheduler_kwargs')
-        check_consistency(loss, (LossInterface, _Loss), 'loss', subclass=False)
+        check_consistency(optimizer, torch.optim.Optimizer, subclass=True)
+        check_consistency(optimizer_kwargs, dict)
+        check_consistency(scheduler, LRScheduler, subclass=True)
+        check_consistency(scheduler_kwargs, dict)
+        check_consistency(loss, (LossInterface, _Loss), subclass=False)

         # assign variables
         self._optimizer = optimizer(self.model.parameters(), **optimizer_kwargs)
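
Here subclass=True matters: the optimizer and scheduler arrive as classes and are only instantiated later (see the assignment below the checks), so the helper validates them with issubclass rather than isinstance. A hedged sketch, with torch.optim.Adam chosen purely as an illustration:

    import torch
    from pina.utils import check_consistency   # assumed import path

    # a class object: validated with issubclass because subclass=True
    check_consistency(torch.optim.Adam, torch.optim.Optimizer, subclass=True)
    # a plain instance: validated with isinstance (subclass defaults to False)
    check_consistency({'lr': 1e-3}, dict)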
@@ -20,7 +20,7 @@ class SolverInterface(pl.LightningModule, metaclass=ABCMeta):
         super().__init__()

         # check inheritance for pina problem
-        check_consistency(problem, AbstractProblem, 'pina problem')
+        check_consistency(problem, AbstractProblem)

         # assigning class variables (check consistency inside Network class)
         self._pina_model = Network(model=model, extra_features=extra_features)
@@ -11,7 +11,7 @@ class Trainer(pl.Trainer):
         super().__init__(**kwargs)

         # check inheritance consistency for solver
-        check_consistency(solver, SolverInterface, 'Solver model')
+        check_consistency(solver, SolverInterface)
         self._model = solver

         # create dataloader
@@ -1,4 +1,5 @@
 """Utils module"""
+from torch.utils.data import Dataset, DataLoader
 from functools import reduce
 import types
@@ -10,14 +11,14 @@ from .label_tensor import LabelTensor
 import torch


-def check_consistency(object, object_instance, object_name, subclass=False):
+def check_consistency(object, object_instance, subclass=False):
     """Helper function to check object inheritance consistency.
     Given a specific ``'object'`` we check if the object is
     instance of a specific ``'object_instance'``, or in case
     ``'subclass=True'`` we check if the object is subclass
     if the ``'object_instance'``.

-    :param Object object: The object to check the inheritance
+    :param (iterable or class object) object: The object to check the inheritance
     :param Object object_instance: The parent class from where the object
         is expected to inherit
-    :param str object_name: The name of the object
@@ -25,12 +26,17 @@ def check_consistency(object, object_instance, object_name, subclass=False):
     :raises ValueError: If the object does not inherit from the
         specified class
     """
-    if not subclass:
-        if not isinstance(object, object_instance):
-            raise ValueError(f"{object_name} must be {object_instance}")
-    else:
-        if not issubclass(object, object_instance):
-            raise ValueError(f"{object_name} must be {object_instance}")
+    if not isinstance(object, (list, set, tuple)):
+        object = [object]
+
+    for obj in object:
+        try:
+            if not subclass:
+                assert isinstance(obj, object_instance)
+            else:
+                assert issubclass(obj, object_instance)
+        except AssertionError:
+            raise ValueError(f"{type(obj).__name__} must be {object_instance}.")


 def number_parameters(model, aggregate=True, only_trainable=True): # TODO: check
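
Since this hunk is the heart of the PR, a self-contained sketch of the new behaviour may help. The function body below simply mirrors the added lines, so it runs without pina installed (names come from the diff, not an import of the real module):

    # mirror of the refactored helper, copied from the added lines above
    def check_consistency(object, object_instance, subclass=False):
        if not isinstance(object, (list, set, tuple)):
            object = [object]
        for obj in object:
            try:
                if not subclass:
                    assert isinstance(obj, object_instance)
                else:
                    assert issubclass(obj, object_instance)
            except AssertionError:
                raise ValueError(f"{type(obj).__name__} must be {object_instance}.")

    # a scalar works as before:
    check_consistency(2.0, (str, int, float))
    # an iterable is now checked element by element:
    check_consistency([1, 2.5, 'pi'], (str, int, float))
    # failures now report the offending element's type, where the old
    # version named the argument via the dropped object_name parameter:
    try:
        check_consistency([1, None], (str, int, float))
    except ValueError as err:
        print(err)  # NoneType must be (<class 'str'>, <class 'int'>, <class 'float'>).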
@@ -180,13 +186,13 @@ def chebyshev_roots(n):
     # def __len__(self):
     #     return self._len

-from torch.utils.data import Dataset, DataLoader

 class LabelTensorDataset(Dataset):
     def __init__(self, d):
         for k, v in d.items():
             setattr(self, k, v)
         self.labels = list(d.keys())

     def __getitem__(self, index):
         print(index)
         result = {}
@@ -201,7 +207,7 @@ class LabelTensorDataset(Dataset):
                 result[label] = sample_tensor[index]
             except IndexError:
                 result[label] = torch.tensor([])


         print(result)
         return result
@@ -229,13 +235,13 @@ class LabelTensorDataLoader(DataLoader):
     # def __len__(self):
     #     return self._len

-from torch.utils.data import Dataset, DataLoader

 class LabelTensorDataset(Dataset):
     def __init__(self, d):
         for k, v in d.items():
             setattr(self, k, v)
         self.labels = list(d.keys())

     def __getitem__(self, index):
         print(index)
         result = {}
@@ -250,7 +256,7 @@ class LabelTensorDataset(Dataset):
                 result[label] = sample_tensor[index]
             except IndexError:
                 result[label] = torch.tensor([])


         print(result)
         return result
@@ -261,4 +267,4 @@ class LabelTensorDataset(Dataset):
 class LabelTensorDataLoader(DataLoader):

     def collate_fn(self, data):
         pass
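
The trailing LabelTensorDataset/LabelTensorDataLoader code is clearly still work in progress (debug print calls, a pass-only collate_fn, and the same class defined twice in the file, so the second definition wins at import). The intended use appears to be wrapping a dict of tensors; a hedged sketch, with keys and shapes invented for illustration:

    import torch
    from pina.utils import LabelTensorDataset  # class defined in pina/utils.py per the diff

    # each dict entry becomes an attribute; the keys are stored as labels
    ds = LabelTensorDataset({'x': torch.rand(4, 2), 'y': torch.rand(4, 1)})
    print(ds.labels)  # ['x', 'y']
    # __getitem__ seems intended to return a dict mapping each label to its
    # index-th slice (falling back to an empty tensor on IndexError); it also
    # prints debug output in its current state
    sample = ds[0]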