add update_data and input functions

FilippoOlivo
2025-11-13 10:48:47 +01:00
parent c0cbb13a92
commit 6bb44052b0


@@ -54,6 +54,7 @@ class PinaDataset(Dataset):
             automatic_batching if automatic_batching is not None else True
         )
         self.stack_fn = {}
+        self.is_graph_dataset = False
         # Determine stacking functions for each data type (used in collate_fn)
         for k, v in data_dict.items():
             if isinstance(v, LabelTensor):
@@ -64,6 +65,7 @@ class PinaDataset(Dataset):
                 isinstance(item, (Data, Graph)) for item in v
             ):
                 self.stack_fn[k] = LabelBatch.from_data_list
+                self.is_graph_dataset = True
             else:
                 raise ValueError(
                     f"Unsupported data type for stacking: {type(v)}"
@@ -104,55 +106,34 @@ class PinaDataset(Dataset):
                     [data[i] for i in idx_list]
                 )
             else:
-                print(data)
                 to_return[field_name] = data[idx_list]
         return to_return
 
-
-class PinaGraphDataset(Dataset):
-    def __init__(self, data_dict, automatic_batching=None):
-        """
-        Initialize the instance by storing the conditions dictionary.
-
-        :param dict conditions_dict: A dictionary mapping condition names to
-            their respective data. Each key represents a condition name, and the
-            corresponding value is a dictionary containing the associated data.
-        """
-        # Store the conditions dictionary
-        self.data = data_dict
-        self.automatic_batching = (
-            automatic_batching if automatic_batching is not None else True
-        )
-
-    def __len__(self):
-        return len(next(iter(self.data.values())))
-
-    def __getitem__(self, idx):
-        """
-        Return the data at the given index in the dataset.
-
-        :param int idx: Index.
-
-        :return: A dictionary containing the data at the given index.
-        :rtype: dict
-        """
-        if self.automatic_batching:
-            # Return the data at the given index
-            return {
-                field_name: data[idx] for field_name, data in self.data.items()
-            }
-        return idx
-
-    def _getitem_from_list(self, idx_list):
-        """
-        Return data from the dataset given a list of indices.
-
-        :param list[int] idx_list: List of indices.
-
-        :return: A dictionary containing the data at the given indices.
-        :rtype: dict
-        """
-        return {
-            field_name: [data[i] for i in idx_list]
-            for field_name, data in self.data.items()
-        }
+    def update_data(self, update_dict):
+        """
+        Update the dataset's data in-place.
+
+        :param dict update_dict: A dictionary where keys are condition names
+            and values are dictionaries with updated data for those conditions.
+        """
+        for field_name, updates in update_dict.items():
+            if field_name not in self.data:
+                raise KeyError(
+                    f"Condition '{field_name}' not found in dataset."
+                )
+            if not isinstance(updates, (LabelTensor, torch.Tensor)):
+                raise ValueError(
+                    f"Updates for condition '{field_name}' must be of type "
+                    f"LabelTensor or torch.Tensor."
+                )
+            self.data[field_name] = updates
+
+    @property
+    def input(self):
+        """
+        Get the input data from the dataset.
+
+        :return: The input data.
+        :rtype: torch.Tensor | LabelTensor | Data | Graph
+        """
+        return self.data["input"]
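
A short usage sketch of the two new members added in the last hunk; the import paths are again assumptions, while update_data, the input property, and their error behaviour follow the diff:

# Hypothetical usage sketch for the new update_data() and input members.
import torch

from pina import LabelTensor
from pina.data.dataset import PinaDataset  # assumed module path

data = {"input": LabelTensor(torch.rand(10, 2), ["x", "y"])}
dataset = PinaDataset(data, automatic_batching=True)

# The new `input` property is a shortcut for dataset.data["input"].
print(dataset.input.shape)  # torch.Size([10, 2])

# update_data() replaces stored fields in-place; values must be LabelTensor
# or torch.Tensor (ValueError otherwise), and unknown keys raise a KeyError.
dataset.update_data({"input": torch.rand(10, 2)})
print(type(dataset.input))  # <class 'torch.Tensor'>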