fix doc loss and codacy

Author: giovanni
Date: 2025-03-12 18:05:42 +01:00
Committed by: FilippoOlivo
parent 2c9e980c7f
commit da1ac90b99
15 changed files with 114 additions and 108 deletions


@@ -1,14 +1,18 @@
"""Module for Loss Interface"""
"""Module for the Weighting Interface"""
from abc import ABCMeta, abstractmethod
class WeightingInterface(metaclass=ABCMeta):
"""
The ``weightingInterface`` class. TODO
Abstract base class for all loss weighting schemas. All weighting schemas
should inherit from this class.
"""
def __init__(self):
"""
Initialization of the :class:`WeightingInterface` class.
"""
self.condition_names = None
@abstractmethod
@@ -16,7 +20,5 @@ class WeightingInterface(metaclass=ABCMeta):
"""
Aggregate the losses.
:param dict(torch.Tensor) input: The dictionary of losses.
:return: The losses aggregation. It should be a scalar Tensor.
:rtype: torch.Tensor
:param dict losses: The dictionary of losses.
"""