Correct codacy warnings

FilippoOlivo
2024-10-22 14:26:39 +02:00
committed by Nicola Demo
parent c9304fb9bb
commit 1bc1b3a580
15 changed files with 252 additions and 210 deletions

View File

@@ -5,6 +5,7 @@ import torch
 from ..utils import check_consistency
 from .optimizer_interface import Optimizer
 
 
 class TorchOptimizer(Optimizer):
 
     def __init__(self, optimizer_class, **kwargs):
@@ -14,6 +15,5 @@ class TorchOptimizer(Optimizer):
         self.kwargs = kwargs
 
     def hook(self, parameters):
-        self.optimizer_instance = self.optimizer_class(
-            parameters, **self.kwargs
-        )
+        self.optimizer_instance = self.optimizer_class(parameters,
+                                                        **self.kwargs)
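
The reflowed call does not change behaviour: TorchOptimizer stores the optimizer class and its keyword arguments, and hook() builds the concrete torch optimizer once it receives the parameters. A minimal usage sketch, assuming the class is importable as pina.optim.TorchOptimizer (the import path and the toy model are illustrative, not part of this commit):

import torch

from pina.optim import TorchOptimizer  # assumed import path

# Toy model whose parameters the optimizer will manage.
model = torch.nn.Linear(10, 1)

# The wrapper only records the optimizer class and kwargs here;
# no torch optimizer is built yet.
optimizer = TorchOptimizer(torch.optim.Adam, lr=1e-3)

# hook() instantiates the wrapped class with the given parameters.
optimizer.hook(model.parameters())
assert isinstance(optimizer.optimizer_instance, torch.optim.Adam)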

View File

@@ -5,13 +5,13 @@ try:
     from torch.optim.lr_scheduler import LRScheduler  # torch >= 2.0
 except ImportError:
     from torch.optim.lr_scheduler import (
-        _LRScheduler as LRScheduler,
-    )  # torch < 2.0
+        _LRScheduler as LRScheduler, )  # torch < 2.0
 
 from ..utils import check_consistency
 from .optimizer_interface import Optimizer
 from .scheduler_interface import Scheduler
 
 
 class TorchScheduler(Scheduler):
 
     def __init__(self, scheduler_class, **kwargs):
@@ -23,5 +23,4 @@ class TorchScheduler(Scheduler):
     def hook(self, optimizer):
         check_consistency(optimizer, Optimizer)
         self.scheduler_instance = self.scheduler_class(
-            optimizer.optimizer_instance, **self.kwargs
-        )
+            optimizer.optimizer_instance, **self.kwargs)
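
Again a formatting-only change: TorchScheduler.hook() still validates that it received an Optimizer wrapper via check_consistency and then builds the torch scheduler around the wrapped optimizer instance. A short sketch of the intended call order, under the same assumed import path as above (the scheduler choice and hyperparameters are illustrative):

import torch

from pina.optim import TorchOptimizer, TorchScheduler  # assumed import path

model = torch.nn.Linear(10, 1)

# Hook the optimizer first, because the scheduler wraps its
# optimizer_instance attribute.
optimizer = TorchOptimizer(torch.optim.Adam, lr=1e-3)
optimizer.hook(model.parameters())

# hook() checks the argument is an Optimizer, then builds the torch
# scheduler around optimizer.optimizer_instance.
scheduler = TorchScheduler(torch.optim.lr_scheduler.StepLR,
                           step_size=10, gamma=0.5)
scheduler.hook(optimizer)
assert isinstance(scheduler.scheduler_instance,
                  torch.optim.lr_scheduler.StepLR)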