Add pointnet
ThermalSolver/graph_datamodule.py (new file, 191 lines)
@@ -0,0 +1,191 @@
import torch
from tqdm import tqdm
from lightning import LightningDataModule
from datasets import load_dataset
from torch_geometric.data import Data
from torch_geometric.loader import DataLoader
from torch_geometric.utils import to_undirected

from .mesh_data import MeshData


class GraphDataModule(LightningDataModule):
    """Builds torch_geometric graphs from a Hugging Face thermal dataset.

    Snapshots (conductivity/temperature fields) and the mesh geometry are
    loaded from `hf_repo`, split into train/val/test, and converted into
    MeshData graphs.
    """

    def __init__(
        self,
        hf_repo: str,
        split_name: str,
        train_size: float = 0.2,
        val_size: float = 0.1,
        test_size: float = 0.1,
        batch_size: int = 32,
        remove_boundary_edges: bool = True,
    ):
        super().__init__()
        self.hf_repo = hf_repo
        self.split_name = split_name
        self.dataset_dict = {}
        self.geometry_dict = {}
        self.train_size = train_size
        self.val_size = val_size
        self.test_size = test_size
        self.batch_size = batch_size
        self.remove_boundary_edges = remove_boundary_edges

    def prepare_data(self):
        # Note: under DDP, prepare_data runs on a single process per node, so
        # the splits assigned here are not shared with other ranks; for
        # multi-GPU runs this slicing would normally live in setup().
        dataset = load_dataset(self.hf_repo, name="snapshots")[self.split_name]
        geometry = load_dataset(self.hf_repo, name="geometry")[self.split_name]

        # Contiguous train/val/test slices; the test split takes the
        # remainder, so `test_size` is effectively unused.
        total_len = len(dataset)
        train_len = int(self.train_size * total_len)
        valid_len = int(self.val_size * total_len)
        self.dataset_dict = {
            "train": dataset.select(range(0, train_len)),
            "val": dataset.select(range(train_len, train_len + valid_len)),
            "test": dataset.select(range(train_len + valid_len, total_len)),
        }
        self.geometry_dict = {
            "train": geometry.select(range(0, train_len)),
            "val": geometry.select(range(train_len, train_len + valid_len)),
            "test": geometry.select(range(train_len + valid_len, total_len)),
        }

    def _compute_boundary_mask(
        self, bottom_ids, right_ids, top_ids, left_ids, temperature
    ):
        # Drop corner nodes from the left/right sets so nodes shared with the
        # bottom/top boundaries are not assigned twice.
        left_ids = left_ids[~torch.isin(left_ids, bottom_ids)]
        right_ids = right_ids[~torch.isin(right_ids, bottom_ids)]
        left_ids = left_ids[~torch.isin(left_ids, top_ids)]
        right_ids = right_ids[~torch.isin(right_ids, top_ids)]

        # Each constrained side gets a constant Dirichlet value: the median
        # temperature over its nodes. The top boundary is left unconstrained.
        bottom_bc = temperature[bottom_ids].median()
        bottom_bc_mask = torch.ones(len(bottom_ids)) * bottom_bc
        left_bc = temperature[left_ids].median()
        left_bc_mask = torch.ones(len(left_ids)) * left_bc
        right_bc = temperature[right_ids].median()
        right_bc_mask = torch.ones(len(right_ids)) * right_bc

        # boundary_mask holds node indices (not a boolean mask); values and
        # indices are concatenated in the same side order.
        boundary_values = torch.cat(
            [bottom_bc_mask, right_bc_mask, left_bc_mask], dim=0
        )
        boundary_mask = torch.cat([bottom_ids, right_ids, left_ids], dim=0)

        return boundary_mask, boundary_values

    def _build_dataset(
        self,
        snapshot: dict,
        geometry: dict,
    ) -> Data:
        conductivity = torch.tensor(
            snapshot["conductivity"], dtype=torch.float32
        )
        temperature = torch.tensor(snapshot["temperature"], dtype=torch.float32)

        # Geometry: the edge list comes as (num_edges, 2), so transpose to
        # (2, E); keep only the x/y coordinates of the mesh points.
        edge_index = torch.tensor(geometry["edge_index"], dtype=torch.int64).T
        pos = torch.tensor(geometry["points"], dtype=torch.float32)[:, :2]
        bottom_ids = torch.tensor(
            geometry["bottom_boundary_ids"], dtype=torch.long
        )
        top_ids = torch.tensor(geometry["top_boundary_ids"], dtype=torch.long)
        left_ids = torch.tensor(geometry["left_boundary_ids"], dtype=torch.long)
        right_ids = torch.tensor(
            geometry["right_boundary_ids"], dtype=torch.long
        )

        edge_index = to_undirected(edge_index, num_nodes=pos.size(0))

        boundary_mask, boundary_values = self._compute_boundary_mask(
            bottom_ids, right_ids, top_ids, left_ids, temperature
        )

        if self.remove_boundary_edges:
            # Drop edges whose target is a boundary node, so boundary nodes
            # only send messages and their prescribed values stay fixed.
            boundary_idx = torch.unique(boundary_mask)
            edge_index_mask = ~torch.isin(edge_index[1], boundary_idx)
            edge_index = edge_index[:, edge_index_mask]

        # Edge features: relative position and Euclidean distance.
        edge_attr = pos[edge_index[0]] - pos[edge_index[1]]
        edge_attr = torch.cat(
            [edge_attr, torch.norm(edge_attr, dim=1).unsqueeze(-1)], dim=1
        )

        # Node features: zeros everywhere; when boundary edges are removed,
        # the Dirichlet values are written directly into x.
        x = torch.zeros_like(temperature, dtype=torch.float32).unsqueeze(-1)
        if self.remove_boundary_edges:
            x[boundary_mask] = boundary_values.unsqueeze(-1)
            return MeshData(
                x=x,
                c=conductivity.unsqueeze(-1),
                edge_index=edge_index,
                pos=pos,
                edge_attr=edge_attr,
                y=temperature.unsqueeze(-1),
                boundary_mask=boundary_mask,
                # The values are already baked into x; keep them here as well
                # so both branches expose the same fields.
                boundary_values=boundary_values.unsqueeze(-1),
            )

        return MeshData(
            x=torch.rand_like(temperature).unsqueeze(-1),
            c=conductivity.unsqueeze(-1),
            edge_index=edge_index,
            pos=pos,
            edge_attr=edge_attr,
            boundary_mask=boundary_mask,
            boundary_values=boundary_values.unsqueeze(-1),
            y=temperature.unsqueeze(-1),
        )

    def setup(self, stage: str = None):
        if stage == "fit" or stage is None:
            self.train_data = [
                self._build_dataset(snap, geom)
                for snap, geom in tqdm(
                    zip(
                        self.dataset_dict["train"], self.geometry_dict["train"]
                    ),
                    desc="Building train graphs",
                    total=len(self.dataset_dict["train"]),
                )
            ]
            self.val_data = [
                self._build_dataset(snap, geom)
                for snap, geom in tqdm(
                    zip(self.dataset_dict["val"], self.geometry_dict["val"]),
                    desc="Building val graphs",
                    total=len(self.dataset_dict["val"]),
                )
            ]
        if stage == "test" or stage is None:
            self.test_data = [
                self._build_dataset(snap, geom)
                for snap, geom in tqdm(
                    zip(self.dataset_dict["test"], self.geometry_dict["test"]),
                    desc="Building test graphs",
                    total=len(self.dataset_dict["test"]),
                )
            ]

    def train_dataloader(self):
        return DataLoader(
            self.train_data,
            batch_size=self.batch_size,
            shuffle=True,
            num_workers=8,
            pin_memory=True,
        )

    def val_dataloader(self):
        return DataLoader(
            self.val_data,
            batch_size=self.batch_size,
            shuffle=False,
            num_workers=8,
            pin_memory=True,
        )

    def test_dataloader(self):
        return DataLoader(
            self.test_data,
            batch_size=self.batch_size,
            shuffle=False,
            num_workers=8,
            pin_memory=True,
        )
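
For reference, a minimal usage sketch (not part of the commit): the repo id and split name below are placeholders, and it assumes MeshData is a torch_geometric Data subclass that handles batching of the index-valued boundary_mask (e.g. via an __inc__ override in ThermalSolver/mesh_data.py, which is not shown here).

# Illustrative only: the HF repo id and split name are hypothetical.
from ThermalSolver.graph_datamodule import GraphDataModule

dm = GraphDataModule(
    hf_repo="some-user/thermal-plate",  # hypothetical dataset repo
    split_name="train",                 # hypothetical split name
    batch_size=16,
)
dm.prepare_data()  # download and slice the snapshot/geometry splits
dm.setup("fit")    # build the train/val graph lists
batch = next(iter(dm.train_dataloader()))
print(batch.x.shape, batch.edge_index.shape, batch.y.shape)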