fix doc loss and codacy
This commit is contained in:
@@ -1,14 +1,18 @@
|
||||
"""Module for Loss Interface"""
|
||||
"""Module for the Weighting Interface"""
|
||||
|
||||
from abc import ABCMeta, abstractmethod
|
||||
|
||||
|
||||
class WeightingInterface(metaclass=ABCMeta):
|
||||
"""
|
||||
The ``WeightingInterface`` class.
|
||||
Abstract base class for all loss weighting schemas. All weighting schemas
|
||||
should inherit from this class.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
"""
|
||||
Initialization of the :class:`WeightingInterface` class.
|
||||
"""
|
||||
self.condition_names = None
|
||||
|
||||
@abstractmethod
|
||||
@@ -16,7 +20,5 @@ class WeightingInterface(metaclass=ABCMeta):
|
||||
"""
|
||||
Aggregate the losses.
|
||||
|
||||
:param dict(torch.Tensor) losses: The dictionary of losses.
:return: The losses aggregation. It should be a scalar Tensor.
:rtype: torch.Tensor
|
||||
"""
|
||||
|
||||
Reference in New Issue
Block a user