# thermal-conduction-ml/ThermalSolver/module.py
from typing import Optional

import torch
from lightning import LightningModule
from torch_geometric.data import Batch


class GraphSolver(LightningModule):
    """LightningModule that trains a graph-network surrogate by unrolling it over multiple steps."""

    def __init__(
        self,
        model: torch.nn.Module,
        loss: Optional[torch.nn.Module] = None,
        unrolling_steps: int = 10,
    ):
        super().__init__()
        self.model = model
        # Fall back to mean squared error when no loss module is supplied.
        self.loss = loss if loss is not None else torch.nn.MSELoss()
        # Number of times the model is applied to its own output per batch.
        self.unrolling_steps = unrolling_steps

    def forward(
        self,
        x: torch.Tensor,
        c: torch.Tensor,
        edge_index: torch.Tensor,
        edge_attr: torch.Tensor,
    ):
        # One model step: predict the next node state from the current state x,
        # the node conditioning features c, and the graph connectivity.
        return self.model(x, c, edge_index, edge_attr)

    def _compute_loss_train(self, x, x_prev, y):
        # Target error plus a penalty on the change between unrolling steps
        # (currently unused by training_step).
        return self.loss(x, y) + self.loss(x, x_prev)

    def _compute_loss(self, x, y):
        return self.loss(x, y)

    def _preprocess_batch(self, batch: Batch):
        # Unpack node state, target, conditioning features, and graph structure.
        return batch.x, batch.y, batch.c, batch.edge_index, batch.edge_attr

    def _log_loss(self, loss, batch, stage: str):
        self.log(
            f"{stage}_loss",
            loss,
            on_step=False,
            on_epoch=True,
            prog_bar=True,
            batch_size=int(batch.num_graphs),
        )
        return loss

    def training_step(self, batch: Batch, _):
        x, y, c, edge_index, edge_attr = self._preprocess_batch(batch)
        loss = 0.0
        # Unroll the model for a fixed number of steps, detaching the input at
        # each step so gradients do not flow through the whole rollout, and
        # accumulate the loss against the target at every step.
        for _ in range(self.unrolling_steps):
            x_prev = x.detach()
            x = self(x_prev, c, edge_index=edge_index, edge_attr=edge_attr)
            loss += self.loss(x, y)
        self._log_loss(loss, batch, "train")
        return loss

    def validation_step(self, batch: Batch, _):
        x, y, c, edge_index, edge_attr = self._preprocess_batch(batch)
        for _ in range(self.unrolling_steps):
            x_prev = x.detach()
            x = self(x_prev, c, edge_index=edge_index, edge_attr=edge_attr)
            # Stop early once the prediction has effectively stopped changing.
            step_change = self.loss(x, x_prev)
            if step_change < 1e-5:
                break
        loss = self._compute_loss(x, y)
        self._log_loss(loss, batch, "val")
        return loss

    def test_step(self, batch: Batch, _):
        x, y, c, edge_index, edge_attr = self._preprocess_batch(batch)
        # Roll the model out for the full number of steps before scoring.
        for _ in range(self.unrolling_steps):
            x_prev = x.detach()
            x = self(x_prev, c, edge_index=edge_index, edge_attr=edge_attr)
        loss = self._compute_loss(x, y)
        self._log_loss(loss, batch, "test")
        return loss

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=1e-3)
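

# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative, not part of the original module). It
# assumes a placeholder network and random graph data purely to show how
# GraphSolver plugs into a Lightning Trainer; the _ToyNet class, feature
# sizes, and dataset below are invented for the example, and a real setup
# would use the project's own GNN and thermal-conduction dataset.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from lightning import Trainer
    from torch_geometric.data import Data
    from torch_geometric.loader import DataLoader

    class _ToyNet(torch.nn.Module):
        """Placeholder matching GraphSolver's forward(x, c, edge_index, edge_attr).

        A real model would use edge_index/edge_attr in a message-passing layer;
        this one only mixes the node state x with the conditioning features c.
        """

        def __init__(self, in_dim: int = 3, out_dim: int = 1, hidden: int = 32):
            super().__init__()
            self.net = torch.nn.Sequential(
                torch.nn.Linear(in_dim, hidden),
                torch.nn.ReLU(),
                torch.nn.Linear(hidden, out_dim),
            )

        def forward(self, x, c, edge_index, edge_attr):
            return self.net(torch.cat([x, c], dim=-1))

    # Random graphs standing in for meshed conduction samples:
    # x = current field, y = target field, c = per-node conditioning features.
    def _random_graph(num_nodes: int = 16, num_edges: int = 32) -> Data:
        return Data(
            x=torch.randn(num_nodes, 1),
            y=torch.randn(num_nodes, 1),
            c=torch.randn(num_nodes, 2),
            edge_index=torch.randint(0, num_nodes, (2, num_edges)),
            edge_attr=torch.randn(num_edges, 1),
        )

    loader = DataLoader([_random_graph() for _ in range(8)], batch_size=4)
    solver = GraphSolver(model=_ToyNet(), unrolling_steps=3)
    trainer = Trainer(
        max_epochs=1,
        accelerator="cpu",
        logger=False,
        enable_checkpointing=False,
    )
    trainer.fit(solver, train_dataloaders=loader, val_dataloaders=loader)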