fix optim doc
@@ -1,4 +1,4 @@
-"""Module for Optimizer class."""
+"""Module for the Optimizers and Schedulers."""
 
 __all__ = [
     "Optimizer",
@@ -1,24 +1,23 @@
-"""Module for PINA Optimizer."""
+"""Module for the PINA Optimizer."""
 
 from abc import ABCMeta, abstractmethod
 
 
 class Optimizer(metaclass=ABCMeta):
     """
-    TODO
-    :param metaclass: _description_, defaults to ABCMeta
-    :type metaclass: _type_, optional
+    Abstract base class for defining an optimizer. All specific optimizers
+    should inherit from this class and implement the required methods.
     """
 
     @property
     @abstractmethod
     def instance(self):
         """
-        TODO
+        Abstract property to retrieve the optimizer instance.
         """
 
     @abstractmethod
     def hook(self):
         """
-        TODO
+        Abstract method to define the hook logic for the optimizer.
         """
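
The interface above only fixes a contract: an instance property and a hook method. A minimal concrete subclass might look like the sketch below; the class name MySGDOptimizer, the import path pina.optim, and the use of torch.optim.SGD are illustrative assumptions, not part of this commit. The Scheduler interface in the next file follows the same pattern.

import torch

from pina.optim import Optimizer  # assumed import path for the interface above


class MySGDOptimizer(Optimizer):
    """Hypothetical optimizer implementing the abstract interface."""

    def __init__(self, lr=1e-3):
        self.lr = lr
        self._instance = None

    def hook(self, parameters):
        # Build the wrapped torch optimizer once the model parameters exist.
        self._instance = torch.optim.SGD(parameters, lr=self.lr)

    @property
    def instance(self):
        # Return the wrapped torch optimizer (None before hook() is called).
        return self._instance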
@@ -1,25 +1,23 @@
-"""Module for PINA Scheduler."""
+"""Module for the PINA Scheduler."""
 
 from abc import ABCMeta, abstractmethod
 
 
 class Scheduler(metaclass=ABCMeta):
     """
-    TODO
-
-    :param metaclass: _description_, defaults to ABCMeta
-    :type metaclass: _type_, optional
+    Abstract base class for defining a scheduler. All specific schedulers should
+    inherit from this class and implement the required methods.
     """
 
     @property
     @abstractmethod
     def instance(self):
         """
-        TODO
+        Abstract property to retrieve the scheduler instance.
         """
 
     @abstractmethod
     def hook(self):
         """
-        TODO
+        Abstract method to define the hook logic for the scheduler.
         """
@@ -1,4 +1,4 @@
-"""Module for PINA Torch Optimizer"""
+"""Module for the PINA Torch Optimizer"""
 
 import torch
 
@@ -8,18 +8,17 @@ from .optimizer_interface import Optimizer
 
 
 class TorchOptimizer(Optimizer):
     """
-    TODO
-
-    :param Optimizer: _description_
-    :type Optimizer: _type_
+    A wrapper class for using PyTorch optimizers.
     """
 
     def __init__(self, optimizer_class, **kwargs):
         """
-        TODO
-
-        :param optimizer_class: _description_
-        :type optimizer_class: _type_
+        Initialization of the :class:`TorchOptimizer` class.
+
+        :param torch.optim.Optimizer optimizer_class: The PyTorch optimizer
+            class.
+        :param dict kwargs: Additional parameters passed to `optimizer_class`,
+            see more: https://pytorch.org/docs/stable/optim.html#algorithms.
         """
         check_consistency(optimizer_class, torch.optim.Optimizer, subclass=True)
@@ -29,10 +28,9 @@ class TorchOptimizer(Optimizer):
 
     def hook(self, parameters):
         """
-        TODO
-
-        :param parameters: _description_
-        :type parameters: _type_
+        Initialize the optimizer instance with the given parameters.
+
+        :param dict parameters: The parameters of the model to be optimized.
         """
         self._optimizer_instance = self.optimizer_class(
             parameters, **self.kwargs
@@ -41,6 +39,9 @@ class TorchOptimizer(Optimizer):
     @property
     def instance(self):
         """
-        Optimizer instance.
+        Get the optimizer instance.
+
+        :return: The optimizer instance.
+        :rtype: torch.optim.Optimizer
         """
         return self._optimizer_instance
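
As a usage sketch for the wrapper above (illustrative only; the import path pina.optim is assumed): the constructor stores the optimizer class and its keyword arguments, hook() instantiates it on the model parameters, and instance exposes the resulting torch.optim object.

import torch

from pina.optim import TorchOptimizer  # assumed import path

model = torch.nn.Linear(2, 1)

# Wrap torch.optim.Adam; keyword arguments are forwarded unchanged.
optimizer = TorchOptimizer(torch.optim.Adam, lr=1e-3)

# hook() creates the underlying torch optimizer from the model parameters.
optimizer.hook(model.parameters())

# instance is now a plain torch.optim.Adam object.
assert isinstance(optimizer.instance, torch.optim.Adam)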
@@ -1,4 +1,4 @@
-"""Module for PINA Torch Optimizer"""
+"""Module for the PINA Torch Scheduler"""
 
 try:
     from torch.optim.lr_scheduler import LRScheduler  # torch >= 2.0
@@ -14,18 +14,17 @@ from .scheduler_interface import Scheduler
 
 
 class TorchScheduler(Scheduler):
     """
-    TODO
-
-    :param Scheduler: _description_
-    :type Scheduler: _type_
+    A wrapper class for using PyTorch schedulers.
     """
 
     def __init__(self, scheduler_class, **kwargs):
         """
-        TODO
-
-        :param scheduler_class: _description_
-        :type scheduler_class: _type_
+        Initialization of the :class:`TorchScheduler` class.
+
+        :param torch.optim.LRScheduler scheduler_class: The PyTorch scheduler
+            class.
+        :param dict kwargs: Additional parameters passed to `scheduler_class`,
+            see more: https://pytorch.org/docs/stable/optim.html#algorithms.
         """
         check_consistency(scheduler_class, LRScheduler, subclass=True)
@@ -35,10 +34,9 @@ class TorchScheduler(Scheduler):
 
     def hook(self, optimizer):
         """
-        TODO
-
-        :param optimizer: _description_
-        :type optimizer: _type_
+        Initialize the scheduler instance with the given optimizer.
+
+        :param Optimizer optimizer: The optimizer to which the scheduler is attached.
         """
         check_consistency(optimizer, Optimizer)
         self._scheduler_instance = self.scheduler_class(
@@ -48,6 +46,9 @@ class TorchScheduler(Scheduler):
     @property
     def instance(self):
         """
-        Scheduler instance.
+        Get the scheduler instance.
+
+        :return: The scheduler instance.
+        :rtype: torch.optim.LRScheduler
         """
         return self._scheduler_instance
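
A TorchScheduler is used the same way, except that its hook() receives an already-hooked optimizer rather than model parameters; the sketch below is again illustrative, with the same assumed import path.

import torch

from pina.optim import TorchOptimizer, TorchScheduler  # assumed import path

model = torch.nn.Linear(2, 1)

optimizer = TorchOptimizer(torch.optim.Adam, lr=1e-3)
optimizer.hook(model.parameters())

# Wrap a torch learning-rate scheduler; kwargs are forwarded to scheduler_class.
scheduler = TorchScheduler(torch.optim.lr_scheduler.StepLR, step_size=10, gamma=0.5)

# hook() builds the torch scheduler on top of the hooked optimizer.
scheduler.hook(optimizer)

assert isinstance(scheduler.instance, torch.optim.lr_scheduler.StepLR)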