import torch
from tqdm import tqdm
from lightning import LightningDataModule
from datasets import load_dataset
from torch_geometric.data import Data
from torch_geometric.loader import DataLoader
from torch_geometric.utils import to_undirected


class GraphDataModule(LightningDataModule):
    """Loads mesh snapshots from a Hugging Face dataset and builds PyG graphs."""

    def __init__(
        self,
        hf_repo: str,
        split_name: str,
        train_size: float = 0.8,
        val_size: float = 0.1,
        test_size: float = 0.1,
        batch_size: int = 32,
    ):
        super().__init__()
        self.hf_repo = hf_repo
        self.split_name = split_name
        self.data = None
        self.geometry = None
        self.train_size = train_size
        self.val_size = val_size
        self.test_size = test_size
        self.batch_size = batch_size

    def prepare_data(self):
        # Download the per-snapshot fields and the shared mesh geometry.
        hf_dataset = load_dataset(self.hf_repo, name="snapshots")[self.split_name]
        self.geometry = load_dataset(self.hf_repo, name="geometry")[self.split_name]

        # The geometry config stores a single mesh shared by all snapshots.
        edge_index = torch.tensor(
            self.geometry["edge_index"][0], dtype=torch.int64
        )
        pos = torch.tensor(self.geometry["points"][0], dtype=torch.float32)[:, :2]
        bottom_boundary_ids = torch.tensor(
            self.geometry["bottom_boundary_ids"][0], dtype=torch.int64
        )

        # Build one PyG `Data` object per snapshot.
        self.data = [
            self._build_dataset(
                torch.tensor(snapshot["conductivity"], dtype=torch.float32),
                torch.tensor(snapshot["boundary_values"], dtype=torch.float32),
                torch.tensor(snapshot["temperature"], dtype=torch.float32),
                edge_index.T,
                pos,
                bottom_boundary_ids,
            )
            for snapshot in tqdm(hf_dataset, desc="Building graphs")
        ]

    def _build_dataset(
        self,
        conductivity: torch.Tensor,
        boundary_values: torch.Tensor,
        temperature: torch.Tensor,
        edge_index: torch.Tensor,
        pos: torch.Tensor,
        bottom_boundary_ids: torch.Tensor,
    ) -> Data:
        # Make the mesh connectivity undirected and attach relative-position
        # edge features: (dx, dy, distance).
        edge_index = to_undirected(edge_index, num_nodes=pos.size(0))
        edge_attr = pos[edge_index[0]] - pos[edge_index[1]]
        edge_attr = torch.cat(
            [edge_attr, torch.norm(edge_attr, dim=1).unsqueeze(-1)], dim=1
        )

        # Record the prescribed boundary temperature as a graph-level scalar,
        # then mark the bottom boundary nodes with a constant indicator value.
        boundary_temperature = boundary_values[bottom_boundary_ids].max()
        boundary_values[bottom_boundary_ids] = 1.0

        return Data(
            x=boundary_values.unsqueeze(-1),
            c=conductivity.unsqueeze(-1),
            edge_index=edge_index,
            pos=pos,
            edge_attr=edge_attr,
            y=temperature.unsqueeze(-1),
            boundary_temperature=boundary_temperature,
        )

    def setup(self, stage: str | None = None):
        # Split the graph list sequentially into train / val / test subsets.
        n = len(self.data)
        train_end = int(n * self.train_size)
        val_end = train_end + int(n * self.val_size)
        if stage == "fit" or stage is None:
            self.train_data = self.data[:train_end]
            self.val_data = self.data[train_end:val_end]
        if stage == "test" or stage is None:
            self.test_data = self.data[val_end:]

    def train_dataloader(self) -> DataLoader:
        return DataLoader(
            self.train_data, batch_size=self.batch_size, shuffle=True
        )

    def val_dataloader(self) -> DataLoader:
        return DataLoader(self.val_data, batch_size=self.batch_size)

    def test_dataloader(self) -> DataLoader:
        return DataLoader(self.test_data, batch_size=self.batch_size)
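

# Example usage (illustrative sketch): the repo id and split name below are
# placeholders, not values taken from this module. Assumes the Hugging Face
# dataset exposes "snapshots" and "geometry" configs as expected by
# GraphDataModule; in a training script the module would normally be passed
# to a Lightning Trainer instead of being driven by hand.
if __name__ == "__main__":
    datamodule = GraphDataModule(
        hf_repo="user/heat-conduction-graphs",  # hypothetical dataset id
        split_name="train",
        batch_size=16,
    )
    datamodule.prepare_data()
    datamodule.setup("fit")

    # Inspect one mini-batch of graphs produced by the train loader.
    batch = next(iter(datamodule.train_dataloader()))
    print(batch)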