thermal-conduction-ml/ThermalSolver/graph_module.py
import importlib

import torch
from lightning import LightningModule
from matplotlib import pyplot as plt
from matplotlib.tri import Triangulation
from torch_geometric.data import Batch


def import_class(class_path: str):
    module_path, class_name = class_path.rsplit(".", 1)  # split at the last dot
    module = importlib.import_module(module_path)  # import the module
    cls = getattr(module, class_name)  # get the class
    return cls
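
# Illustrative example (not part of the original file):
# import_class("torch.nn.Linear") returns the torch.nn.Linear class object.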


def _plot_mesh(pos, y, y_pred, batch):
    # Plot the first graph in the batch: true vs. predicted temperature and
    # their absolute error on the triangulated mesh.
    idx = batch == 0
    y = y[idx].detach().cpu()
    y_pred = y_pred[idx].detach().cpu()
    pos = pos[idx].detach().cpu()
    tria = Triangulation(pos[:, 0], pos[:, 1])
    plt.figure(figsize=(18, 5))
    plt.subplot(1, 3, 1)
    plt.tricontourf(tria, y.squeeze().numpy(), levels=14)
    plt.colorbar()
    plt.title("True temperature")
    plt.subplot(1, 3, 2)
    plt.tricontourf(tria, y_pred.squeeze().numpy(), levels=14)
    plt.colorbar()
    plt.title("Predicted temperature")
    plt.subplot(1, 3, 3)
    plt.tricontourf(tria, torch.abs(y_pred - y).squeeze().numpy(), levels=14)
    plt.colorbar()
    plt.title("Error")
    plt.suptitle("GNO", fontsize=16)
    plt.savefig("gno.png", dpi=300)
    plt.close()  # avoid accumulating open figures across test steps


class GraphSolver(LightningModule):
    """LightningModule wrapping a graph-based thermal solver.

    The underlying model class is resolved dynamically from ``model_class_path``
    and instantiated with ``model_init_args``.
    """

    def __init__(
        self,
        model_class_path: str,
        model_init_args: dict,
        loss: torch.nn.Module = None,
        unrolling_steps: int = 48,
    ):
        super().__init__()
        self.model = import_class(model_class_path)(**model_init_args)
        self.loss = loss if loss is not None else torch.nn.MSELoss()
        self.unrolling_steps = unrolling_steps

    def forward(
        self,
        x: torch.Tensor,
        c: torch.Tensor,
        edge_index: torch.Tensor,
        edge_attr: torch.Tensor,
        unrolling_steps: int = None,
        boundary_mask: torch.Tensor = None,
        boundary_values: torch.Tensor = None,
    ):
        return self.model(
            x=x,
            c=c,
            edge_index=edge_index,
            edge_attr=edge_attr,
            unrolling_steps=unrolling_steps,
            boundary_mask=boundary_mask,
            boundary_values=boundary_values,
        )

    def _compute_loss(self, x, y):
        return self.loss(x, y)

    def _preprocess_batch(self, batch: Batch):
        return batch.x, batch.y, batch.c, batch.edge_index, batch.edge_attr

    def _log_loss(self, loss, batch, stage: str):
        self.log(
            f"{stage}/loss",
            loss,
            on_step=False,
            on_epoch=True,
            prog_bar=True,
            batch_size=int(batch.num_graphs),
        )
        return loss

    def training_step(self, batch: Batch, _):
        x, y, c, edge_index, edge_attr = self._preprocess_batch(batch)
        y_pred, it = self(
            x,
            c,
            edge_index=edge_index,
            edge_attr=edge_attr,
            unrolling_steps=self.unrolling_steps,
            boundary_mask=batch.boundary_mask,
            boundary_values=batch.boundary_values,
        )
        loss = self.loss(y_pred, y)
        boundary_loss = self.loss(
            y_pred[batch.boundary_mask], y[batch.boundary_mask]
        )
        self._log_loss(loss, batch, "train")
        # self._log_loss(boundary_loss, batch, "train_boundary")
        self.log(
            "train/iterations",
            it,
            on_step=False,
            on_epoch=True,
            prog_bar=True,
            batch_size=int(batch.num_graphs),
        )
        self.log(
            "train/param_p",
            self.model.fd_step.p,
            on_step=False,
            on_epoch=True,
            prog_bar=True,
            batch_size=int(batch.num_graphs),
        )
        # self.log("train/param_a", self.model.fd_step.a, on_step=False, on_epoch=True, prog_bar=True, batch_size=int(batch.num_graphs))
        return loss

    def validation_step(self, batch: Batch, _):
        x, y, c, edge_index, edge_attr = self._preprocess_batch(batch)
        y_pred, it = self(
            x,
            c,
            edge_index=edge_index,
            edge_attr=edge_attr,
            unrolling_steps=self.unrolling_steps,
            boundary_mask=batch.boundary_mask,
            boundary_values=batch.boundary_values,
        )
        loss = self.loss(y_pred, y)
        boundary_loss = self.loss(
            y_pred[batch.boundary_mask], y[batch.boundary_mask]
        )
        self._log_loss(loss, batch, "val")
        self.log(
            "val/iterations",
            it,
            on_step=False,
            on_epoch=True,
            prog_bar=True,
            batch_size=int(batch.num_graphs),
        )
        return loss

    def test_step(self, batch: Batch, _):
        x, y, c, edge_index, edge_attr = self._preprocess_batch(batch)
        y_pred, _ = self.model(
            x=x,
            c=c,
            edge_index=edge_index,
            edge_attr=edge_attr,
            unrolling_steps=self.unrolling_steps,
            batch=batch.batch,
            pos=batch.pos,
            boundary_mask=batch.boundary_mask,
            boundary_values=batch.boundary_values,
            plot_results=False,
        )
        loss = self._compute_loss(y_pred, y)
        _plot_mesh(batch.pos, y, y_pred, batch.batch)
        self._log_loss(loss, batch, "test")
        return loss

    def configure_optimizers(self):
        optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
        return optimizer

    def _impose_bc(self, x: torch.Tensor, data: Batch):
        x[data.boundary_mask] = data.boundary_values
        return x
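

# Usage sketch (illustrative only; the class path, init args, dataset names and
# batch size below are hypothetical, not part of this repository's API). The
# model referenced by ``model_class_path`` is assumed to accept the keyword
# arguments used in forward()/training_step() and to return a
# (prediction, iterations) pair; datasets are assumed to yield torch_geometric
# ``Data`` objects with x, y, c, edge_index, edge_attr, pos, boundary_mask and
# boundary_values attributes.
#
#     from lightning import Trainer
#     from torch_geometric.loader import DataLoader
#
#     solver = GraphSolver(
#         model_class_path="ThermalSolver.models.GNO",  # hypothetical path
#         model_init_args={"hidden_dim": 64},           # hypothetical args
#         unrolling_steps=48,
#     )
#     trainer = Trainer(max_epochs=100)
#     trainer.fit(
#         solver,
#         DataLoader(train_dataset, batch_size=4),
#         DataLoader(val_dataset, batch_size=4),
#     )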