diff --git a/docs/source/_rst/_code.rst b/docs/source/_rst/_code.rst
index d954920..68ee796 100644
--- a/docs/source/_rst/_code.rst
+++ b/docs/source/_rst/_code.rst
@@ -79,7 +79,8 @@ Layers
    Low Rank layer
    Continuous convolution
    Proper Orthogonal Decomposition
-   Periodic Boundary Condition embeddings
+   Periodic Boundary Condition Embedding
+   Fourier Feature Embedding
 
 Adaptive Activation Functions
 -------------------------------
diff --git a/docs/source/_rst/layers/fourier_embedding.rst b/docs/source/_rst/layers/fourier_embedding.rst
new file mode 100644
index 0000000..f48cef1
--- /dev/null
+++ b/docs/source/_rst/layers/fourier_embedding.rst
@@ -0,0 +1,8 @@
+Fourier Feature Embedding
+=======================================
+.. currentmodule:: pina.model.layers.embedding
+
+.. autoclass:: FourierFeatureEmbedding
+   :members:
+   :show-inheritance:
+
diff --git a/docs/source/_rst/layers/embedding.rst b/docs/source/_rst/layers/pbc_embedding.rst
similarity index 81%
rename from docs/source/_rst/layers/embedding.rst
rename to docs/source/_rst/layers/pbc_embedding.rst
index 1a40177..d4d2023 100644
--- a/docs/source/_rst/layers/embedding.rst
+++ b/docs/source/_rst/layers/pbc_embedding.rst
@@ -1,4 +1,4 @@
-Periodic Boundary Condition embeddings
+Periodic Boundary Condition Embedding
 =======================================
 .. currentmodule:: pina.model.layers.embedding
 
diff --git a/pina/model/layers/__init__.py b/pina/model/layers/__init__.py
index 5d20340..f0162bd 100644
--- a/pina/model/layers/__init__.py
+++ b/pina/model/layers/__init__.py
@@ -10,6 +10,7 @@ __all__ = [
     "FourierBlock3D",
     "PODBlock",
     "PeriodicBoundaryEmbedding",
+    "FourierFeatureEmbedding",
     "AVNOBlock",
     "LowRankBlock",
 ]
@@ -23,6 +24,6 @@ from .spectral import (
 )
 from .fourier import FourierBlock1D, FourierBlock2D, FourierBlock3D
 from .pod import PODBlock
-from .embedding import PeriodicBoundaryEmbedding
+from .embedding import PeriodicBoundaryEmbedding, FourierFeatureEmbedding
 from .avno_layer import AVNOBlock
 from .lowrank_layer import LowRankBlock
diff --git a/pina/model/layers/embedding.py b/pina/model/layers/embedding.py
index 8e623df..4f4c371 100644
--- a/pina/model/layers/embedding.py
+++ b/pina/model/layers/embedding.py
@@ -1,7 +1,8 @@
-""" Periodic Boundary Embedding modulus. """
+""" Embedding module. """
 
 import torch
 from pina.utils import check_consistency
+from typing import Union, Sequence
 
 
 class PeriodicBoundaryEmbedding(torch.nn.Module):
@@ -100,7 +101,7 @@ class PeriodicBoundaryEmbedding(torch.nn.Module):
         Forward pass to compute the periodic boundary conditions embedding.
 
         :param torch.Tensor x: Input tensor.
-        :return: Fourier embeddings of the input.
+        :return: Periodic embedding of the input.
         :rtype: torch.Tensor
         """
         omega = torch.stack(
@@ -155,3 +156,112 @@ class PeriodicBoundaryEmbedding(torch.nn.Module):
         The period of the periodic function to approximate.
         """
         return self._period
+
+
+class FourierFeatureEmbedding(torch.nn.Module):
+    def __init__(self,
+                 input_dimension: int,
+                 output_dimension: int,
+                 sigmas: Union[float, int, Sequence[float], Sequence[int]],
+                 embedding_output_dimension: int = None):
+        r"""
+        Fourier Feature Embedding class for encoding input features
+        using random Fourier features. This class applies a Fourier
+        transformation to the input features, which can help in
+        learning high-frequency variations in data. If multiple
+        sigmas are provided, the class supports multiscale feature
+        embedding, creating one embedding for each scale specified
+        by the sigmas.
+
+        The :obj:`FourierFeatureEmbedding` augments the input
+        by the following formula (Eq. (3.10) of the original paper):
+
+        .. math::
+            \mathbf{x} \rightarrow \tilde{\mathbf{x}} = \left[
+            \cos\left( \mathbf{B} \mathbf{x} \right),
+            \sin\left( \mathbf{B} \mathbf{x} \right)\right],
+
+        where :math:`\mathbf{B}_{ij} \sim \mathcal{N}(0, \sigma^2)`.
+
+        In case multiple ``sigmas`` are passed, the resulting embeddings
+        are concatenated:
+
+        .. math::
+            \mathbf{x} \rightarrow \tilde{\mathbf{x}} = \left[
+            \cos\left( \mathbf{B}^1 \mathbf{x} \right),
+            \sin\left( \mathbf{B}^1 \mathbf{x} \right),
+            \cos\left( \mathbf{B}^2 \mathbf{x} \right),
+            \sin\left( \mathbf{B}^2 \mathbf{x} \right),
+            \dots,
+            \cos\left( \mathbf{B}^M \mathbf{x} \right),
+            \sin\left( \mathbf{B}^M \mathbf{x} \right)\right],
+
+        where :math:`\mathbf{B}^k_{ij} \sim \mathcal{N}(0, \sigma_k^2) \quad
+        k \in (1, \dots, M)`.
+
+        .. seealso::
+            **Original reference**:
+            Wang, Sifan, Hanwen Wang, and Paris Perdikaris. *On the eigenvector
+            bias of Fourier feature networks: From regression to solving
+            multi-scale PDEs with physics-informed neural networks.*
+            Computer Methods in Applied Mechanics and
+            Engineering 384 (2021): 113938.
+            DOI: `10.1016/j.cma.2021.113938
+            <https://doi.org/10.1016/j.cma.2021.113938>`_
+
+        :param int input_dimension: The input vector dimension of the layer.
+        :param int output_dimension: The output dimension of the layer.
+        :param sigmas: The standard deviation(s) used for the Fourier embedding.
+            This can be a single float or integer, or a sequence of floats
+            or integers. If a sequence is provided, the embedding is computed
+            for each sigma separately, enabling multiscale embeddings.
+        :type sigmas: Union[float, int, Sequence[float], Sequence[int]]
+        :param int embedding_output_dimension: The embedding output dimension
+            of the random matrices used to compute the Fourier features. If
+            ``None``, it is set equal to ``output_dimension``. Default is
+            ``None``.
+        """
+        super().__init__()
+
+        # check consistency
+        check_consistency(sigmas, (int, float))
+        if isinstance(sigmas, (int, float)):
+            sigmas = [sigmas]
+        check_consistency(output_dimension, int)
+        check_consistency(input_dimension, int)
+
+        if embedding_output_dimension is None:
+            embedding_output_dimension = output_dimension
+        check_consistency(embedding_output_dimension, int)
+
+        # assign
+        self.sigmas = sigmas
+
+        # create non-trainable random matrices, one per sigma, with
+        # entries drawn from N(0, sigma^2)
+        self._matrices = [
+            torch.randn(
+                size=(input_dimension,
+                      embedding_output_dimension),
+                requires_grad=False) * sigma for sigma in sigmas
+        ]
+
+        # create linear layer to map to the output dimension
+        self._linear = torch.nn.Linear(
+            in_features=2 * len(sigmas) * embedding_output_dimension,
+            out_features=output_dimension)
+
+    def forward(self, x):
+        """
+        Forward pass to compute the Fourier embedding.
+
+        :param torch.Tensor x: Input tensor.
+        :return: Fourier embedding of the input.
+        :rtype: torch.Tensor
+        """
+        # compute random matrix multiplication
+        out = torch.cat([torch.mm(x, m) for m in self._matrices], dim=-1)
+        # compute cos/sin embedding
+        out = torch.cat([torch.cos(out), torch.sin(out)], dim=-1)
+        # return linear layer mapping
+        return self._linear(out)
\ No newline at end of file
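For reference, a minimal usage sketch of the layer added above, assuming the public API introduced in this patch (`FourierFeatureEmbedding` exported from `pina.model.layers`); the dimensions and sigma values are illustrative only:

    import torch
    from pina.model.layers import FourierFeatureEmbedding

    # 100 points sampled in 2 spatial dimensions
    x = torch.rand(100, 2)

    # single-scale embedding: one random matrix B of shape (2, 64);
    # the 2 * 64 cos/sin features are mapped back to 64 outputs
    single = FourierFeatureEmbedding(input_dimension=2,
                                     output_dimension=64,
                                     sigmas=1)
    assert single(x).shape == (100, 64)

    # multiscale embedding: one random matrix per sigma; the cos/sin
    # features (2 * 3 * 64) are concatenated before the linear layer
    multi = FourierFeatureEmbedding(input_dimension=2,
                                    output_dimension=64,
                                    sigmas=[0.01, 0.1, 1])
    assert multi(x).shape == (100, 64)

    # the embedding typically feeds a standard fully connected trunk
    model = torch.nn.Sequential(multi, torch.nn.Tanh(),
                                torch.nn.Linear(64, 1))

Since the final linear layer always maps to ``output_dimension``, the downstream network is unchanged when more scales are added; only the embedding's internal width grows.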
diff --git a/tests/test_layers/test_embedding.py b/tests/test_layers/test_embedding.py
index 5e90dd0..e501efd 100644
--- a/tests/test_layers/test_embedding.py
+++ b/tests/test_layers/test_embedding.py
@@ -1,8 +1,7 @@
 import torch
 import pytest
 
-from pina.model.layers import PeriodicBoundaryEmbedding
-from pina import LabelTensor
+from pina.model.layers import PeriodicBoundaryEmbedding, FourierFeatureEmbedding
 
 # test tolerance
 tol = 1e-6
@@ -23,7 +22,7 @@ def grad(u, x):
                                create_graph=True, allow_unused=True,
                                retain_graph=True)[0]
 
 
-def test_constructor():
+def test_constructor_PeriodicBoundaryEmbedding():
     PeriodicBoundaryEmbedding(input_dimension=1, periods=2)
     PeriodicBoundaryEmbedding(input_dimension=1, periods={'x': 3, 'y' : 4})
     PeriodicBoundaryEmbedding(input_dimension=1, periods={0: 3, 1 : 4})
@@ -32,14 +31,16 @@
         PeriodicBoundaryEmbedding()
     with pytest.raises(ValueError):
         PeriodicBoundaryEmbedding(input_dimension=1., periods=1)
-        PeriodicBoundaryEmbedding(input_dimension=1, periods=1, output_dimension=1.)
+        PeriodicBoundaryEmbedding(input_dimension=1, periods=1,
+                                  output_dimension=1.)
         PeriodicBoundaryEmbedding(input_dimension=1, periods={'x':'x'})
         PeriodicBoundaryEmbedding(input_dimension=1, periods={0:'x'})
 
 
 @pytest.mark.parametrize("period", [1, 4, 10])
 @pytest.mark.parametrize("input_dimension", [1, 2, 3])
-def test_forward_same_period(input_dimension, period):
+def test_forward_backward_same_period_PeriodicBoundaryEmbedding(input_dimension,
+                                                                period):
     func = torch.nn.Sequential(
         PeriodicBoundaryEmbedding(input_dimension=input_dimension,
                                   output_dimension=60, periods=period),
@@ -58,46 +59,46 @@
     # output
     f = func(x)
     assert check_same_columns(f)
+    # compute backward
+    loss = f.mean()
+    loss.backward()
 
 
+def test_constructor_FourierFeatureEmbedding():
+    FourierFeatureEmbedding(input_dimension=1, output_dimension=20,
+                            sigmas=1)
+    FourierFeatureEmbedding(input_dimension=1, output_dimension=20,
+                            sigmas=[0.01, 0.1, 1])
+    FourierFeatureEmbedding(input_dimension=1, output_dimension=20,
+                            sigmas=1, embedding_output_dimension=20)
+    with pytest.raises(TypeError):
+        FourierFeatureEmbedding()
+    with pytest.raises(ValueError):
+        FourierFeatureEmbedding(input_dimension='x', output_dimension=20,
+                                sigmas=1)
+        FourierFeatureEmbedding(input_dimension=1, output_dimension='x',
+                                sigmas=1)
+        FourierFeatureEmbedding(input_dimension=1, output_dimension=20,
+                                sigmas='x')
+        FourierFeatureEmbedding(input_dimension=1, output_dimension=20,
+                                sigmas=1, embedding_output_dimension='x')
-
-# def test_forward_same_period_labels():
-#     func = torch.nn.Sequential(
-#         PeriodicBoundaryEmbedding(input_dimension=2,
-#                                   output_dimension=60, periods={'x':1, 'y':2}),
-#         torch.nn.Tanh(),
-#         torch.nn.Linear(60, 60),
-#         torch.nn.Tanh(),
-#         torch.nn.Linear(60, 1)
-#     )
-#     # coordinates
-#     tensor = torch.tensor([[0., 0.], [0., 2.], [1., 0.], [1., 2.]])
-#     with pytest.raises(RuntimeError):
-#         func(tensor)
-#     tensor = tensor.as_subclass(LabelTensor)
-#     tensor.labels = ['x', 'y']
-#     tensor.requires_grad = True
-#     # output
-#     f = func(tensor)
-#     assert check_same_columns(f)
-
-# def test_forward_same_period_index():
-#     func = torch.nn.Sequential(
-#         PeriodicBoundaryEmbedding(input_dimension=2,
-#                                   output_dimension=60, periods={0:1, 1:2}),
-#         torch.nn.Tanh(),
-#         torch.nn.Linear(60, 60),
-#         torch.nn.Tanh(),
-#         torch.nn.Linear(60, 1)
-#     )
-#     # coordinates
-#     tensor = torch.tensor([[0., 0.], [0., 2.], [1., 0.], [1., 2.]])
-#     tensor.requires_grad = True
-#     # output
-#     f = func(tensor)
-#     assert check_same_columns(f)
-#     tensor = tensor.as_subclass(LabelTensor)
-#     tensor.labels = ['x', 'y']
-#     # output
-#     f = func(tensor)
-#     assert check_same_columns(f)
\ No newline at end of file
+@pytest.mark.parametrize("output_dimension", [1, 2, 3])
+@pytest.mark.parametrize("input_dimension", [1, 2, 3])
+@pytest.mark.parametrize("sigmas", [1, [0.01, 0.1, 1]])
+@pytest.mark.parametrize("embedding_output_dimension", [1, 2, 3])
+def test_forward_backward_FourierFeatureEmbedding(input_dimension,
+                                                  output_dimension,
+                                                  sigmas,
+                                                  embedding_output_dimension):
+    func = FourierFeatureEmbedding(input_dimension, output_dimension,
+                                   sigmas, embedding_output_dimension)
+    # coordinates
+    x = torch.rand((10, input_dimension), requires_grad=True)
+    # output
+    f = func(x)
+    assert f.shape[-1] == output_dimension
+    # compute backward
+    loss = f.mean()
+    loss.backward()
\ No newline at end of file
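A design consequence mirrored by the forward/backward test above: the random matrices are fixed draws (created with ``requires_grad=False``), so training adapts only the embedding's final linear layer and whatever network follows it. A minimal check, under the same assumptions as the earlier sketch (note it inspects the private ``_matrices`` attribute defined in this patch):

    import torch
    from pina.model.layers import FourierFeatureEmbedding

    emb = FourierFeatureEmbedding(input_dimension=3, output_dimension=16,
                                  sigmas=[0.1, 1])
    x = torch.rand(10, 3, requires_grad=True)
    emb(x).mean().backward()

    # the only trainable parameters are the linear head's weight and bias
    assert all(p.grad is not None for p in emb.parameters())
    # the frozen random matrices accumulate no gradient
    assert all(m.grad is None for m in emb._matrices)

This matches Eq. (3.10) of the referenced paper, where each matrix B is sampled once at construction and kept fixed throughout training.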