New Residual Model and fix relative import
* Adding Residual MLP
* Adding test for Residual MLP
* Modified relative import in Continuous Conv
committed by Nicola Demo
parent ba7371f350
commit 17464ceca9
@@ -92,4 +92,39 @@ class ResidualBlock(nn.Module):
     @property
     def activation(self):
         return self._activation
+
+
+class EnhancedLinear(torch.nn.Module):
+    """
+    Wrapper that composes a layer with an optional activation function
+    and an optional dropout into a single module.
+    """
+
+    def __init__(self, layer, activation=None, dropout=None):
+        super().__init__()
+
+        # check consistency of the arguments
+        check_consistency(layer, nn.Module)
+        if activation is not None:
+            check_consistency(activation, nn.Module)
+        if dropout is not None:
+            check_consistency(dropout, float)
+
+        # assign forward: build the Sequential matching the given options
+        if (dropout is None) and (activation is None):
+            self._model = torch.nn.Sequential(layer)
+
+        elif (dropout is None) and (activation is not None):
+            self._model = torch.nn.Sequential(layer, activation)
+
+        elif (dropout is not None) and (activation is None):
+            self._model = torch.nn.Sequential(layer, self._drop(dropout))
+
+        elif (dropout is not None) and (activation is not None):
+            self._model = torch.nn.Sequential(layer, activation, self._drop(dropout))
+
+    def forward(self, x):
+        return self._model(x)
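Note: `check_consistency` and the `nn` alias are imported earlier in the file, above this hunk, and `self._drop` is defined outside the lines shown. A minimal sketch of what such a `_drop` helper presumably looks like, assuming it simply wraps `torch.nn.Dropout` (only the name and call signature come from the hunk; the body below is an assumption):

    def _drop(self, p):
        # Assumed implementation: build a standard dropout layer with
        # probability p. torch.nn.Dropout is a guess consistent with the
        # float consistency check on `dropout` in __init__ above.
        return torch.nn.Dropout(p)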
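A short, hypothetical usage sketch of the new class. The import path is an assumption based on the package this diff touches; everything else uses standard PyTorch modules:

    import torch
    from pina.model.layers import EnhancedLinear  # assumed import path

    # Wrap a linear layer with a Tanh activation and 25% dropout.
    enhanced = EnhancedLinear(
        layer=torch.nn.Linear(10, 20),
        activation=torch.nn.Tanh(),
        dropout=0.25,
    )

    x = torch.rand(8, 10)  # batch of 8 samples, 10 input features
    y = enhanced(x)        # forward pass: Linear -> Tanh -> Dropout
    print(y.shape)         # torch.Size([8, 20])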