Fix Codacy Warnings (#477)

---------

Co-authored-by: Dario Coscia <dariocos99@gmail.com>
This commit is contained in:
Filippo Olivo
2025-03-10 15:38:45 +01:00
committed by Nicola Demo
parent e3790e049a
commit 4177bfbb50
157 changed files with 3473 additions and 3839 deletions

View File

@@ -1,3 +1,5 @@
"""Module for Optimizer class."""
__all__ = [
"Optimizer",
"TorchOptimizer",

View File

@@ -1,15 +1,24 @@
"""Module for PINA Optimizer"""
"""Module for PINA Optimizer."""
from abc import ABCMeta, abstractmethod
class Optimizer(metaclass=ABCMeta):
    """
    Abstract base class for PINA optimizers.

    Concrete subclasses wrap a backend optimizer object and must expose it
    through :attr:`instance`, and implement :meth:`hook` to build/bind that
    object once the values to optimize are available.
    """

    @property
    @abstractmethod
    def instance(self):
        """
        The wrapped backend optimizer object.

        :return: The underlying optimizer instance, or ``None`` if
            :meth:`hook` has not been called yet.
        """

    @abstractmethod
    def hook(self):
        """
        Instantiate/bind the underlying optimizer.

        Subclasses implement this to create the backend optimizer instance
        (e.g. from the parameters to optimize) and store it so that it is
        retrievable via :attr:`instance`.
        """

View File

@@ -1,15 +1,25 @@
"""Module for PINA Optimizer"""
"""Module for PINA Scheduler."""
from abc import ABCMeta, abstractmethod
class Scheduler(metaclass=ABCMeta):
    """
    Abstract base class for PINA learning-rate schedulers.

    Concrete subclasses wrap a backend scheduler object and must expose it
    through :attr:`instance`, and implement :meth:`hook` to build/bind that
    object once the optimizer it drives is available.
    """

    @property
    @abstractmethod
    def instance(self):
        """
        The wrapped backend scheduler object.

        :return: The underlying scheduler instance, or ``None`` if
            :meth:`hook` has not been called yet.
        """

    @abstractmethod
    def hook(self):
        """
        Instantiate/bind the underlying scheduler.

        Subclasses implement this to create the backend scheduler instance
        (e.g. from an optimizer) and store it so that it is retrievable
        via :attr:`instance`.
        """

View File

@@ -7,8 +7,20 @@ from .optimizer_interface import Optimizer
class TorchOptimizer(Optimizer):
"""
TODO
:param Optimizer: _description_
:type Optimizer: _type_
"""
def __init__(self, optimizer_class, **kwargs):
"""
TODO
:param optimizer_class: _description_
:type optimizer_class: _type_
"""
check_consistency(optimizer_class, torch.optim.Optimizer, subclass=True)
self.optimizer_class = optimizer_class
@@ -16,6 +28,12 @@ class TorchOptimizer(Optimizer):
self._optimizer_instance = None
def hook(self, parameters):
"""
TODO
:param parameters: _description_
:type parameters: _type_
"""
self._optimizer_instance = self.optimizer_class(
parameters, **self.kwargs
)

View File

@@ -1,7 +1,5 @@
"""Module for PINA Torch Optimizer"""
import torch
try:
from torch.optim.lr_scheduler import LRScheduler # torch >= 2.0
except ImportError:
@@ -15,8 +13,20 @@ from .scheduler_interface import Scheduler
class TorchScheduler(Scheduler):
"""
TODO
:param Scheduler: _description_
:type Scheduler: _type_
"""
def __init__(self, scheduler_class, **kwargs):
"""
TODO
:param scheduler_class: _description_
:type scheduler_class: _type_
"""
check_consistency(scheduler_class, LRScheduler, subclass=True)
self.scheduler_class = scheduler_class
@@ -24,6 +34,12 @@ class TorchScheduler(Scheduler):
self._scheduler_instance = None
def hook(self, optimizer):
"""
TODO
:param optimizer: _description_
:type optimizer: _type_
"""
check_consistency(optimizer, Optimizer)
self._scheduler_instance = self.scheduler_class(
optimizer.instance, **self.kwargs