Gradient accumulation in BPTT (#2)
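The title says the change is about gradient accumulation in BPTT, but the diff below only touches a message-passing layer. For context, here is a minimal sketch of what accumulating gradients across truncated-BPTT windows typically looks like in PyTorch; the model, data, window length, and accumulation count are hypothetical stand-ins, not code from this repository:

import torch
import torch.nn as nn

# Hypothetical recurrent model standing in for the graph model in this repo.
model = nn.RNN(input_size=8, hidden_size=16, batch_first=True)
readout = nn.Linear(16, 1)
params = list(model.parameters()) + list(readout.parameters())
opt = torch.optim.SGD(params, lr=1e-2)

seq = torch.randn(4, 100, 8)        # (batch, time, features), dummy data
target = torch.randn(4, 100, 1)
window = 20                         # truncation length for BPTT
accum_steps = 5                     # windows to accumulate before one update

opt.zero_grad()
h = None
for i, t0 in enumerate(range(0, seq.size(1), window)):
    chunk = seq[:, t0:t0 + window]
    out, h = model(chunk, h)
    h = h.detach()                  # cut the graph between truncation windows
    loss = nn.functional.mse_loss(readout(out), target[:, t0:t0 + window])
    (loss / accum_steps).backward() # gradients accumulate in .grad across windows
    if (i + 1) % accum_steps == 0:
        opt.step()                  # one update built from several windows
        opt.zero_grad()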
@@ -14,7 +14,7 @@ class FiniteDifferenceStep(MessagePassing):
             aggr == "add"
         ), "For weighted sums, the aggregation must be 'add'."
         # self.root_weight = float(root_weight)
-        self.p = torch.nn.Parameter(torch.tensor(0.8))
+        self.p = torch.nn.Parameter(torch.tensor(1.0))
         self.a = root_weight
 
     def forward(self, x, edge_index, edge_attr, deg):
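The hunk above shows only a fragment of the constructor: the aggregation is asserted to be "add", the learnable scalar self.p is now initialised at 1.0 instead of 0.8, and root_weight is stored as self.a. A minimal sketch of how these pieces could fit together in a torch_geometric MessagePassing subclass; the forward/message bodies are assumptions for illustration, not the repository's actual code:

import torch
from torch_geometric.nn import MessagePassing

class FiniteDifferenceStep(MessagePassing):
    # Constructor implied by the hunk above; the signature and the
    # message/forward bodies are assumptions, not this repo's actual code.
    def __init__(self, aggr: str = "add", root_weight: float = 1.0):
        assert aggr == "add", "For weighted sums, the aggregation must be 'add'."
        super().__init__(aggr=aggr)
        # Learnable scale on the edge weights, initialised at 1.0 after this
        # commit (was 0.8 before).
        self.p = torch.nn.Parameter(torch.tensor(1.0))
        # Kept even though update() no longer uses it after this commit.
        self.a = root_weight

    def forward(self, x, edge_index, edge_attr, deg):
        # Hypothetical: normalise per-edge weights by target-node degree,
        # then run message passing.
        w = edge_attr / deg[edge_index[1]].clamp(min=1)
        return self.propagate(edge_index, x=x, edge_attr=w)

    def message(self, x_j, edge_attr):
        # Hypothetical weighted sum: scale neighbour features by the
        # (learnably rescaled) edge weights; aggr="add" sums them per node.
        return self.p * edge_attr.view(-1, 1) * x_j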
@@ -43,9 +43,7 @@ class FiniteDifferenceStep(MessagePassing):
         """
         TODO: add docstring.
         """
-        a = torch.clamp(self.a, 0.0, 1.0)
-        return a * aggr_out + (1 - a) * x
         # return self.a * aggr_out + (1 - self.a) * x
         return aggr_out
 
 
 class GraphFiniteDifference(nn.Module):
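The second hunk removes the clamped root-weight mixing from update: previously the layer returned a convex combination a * aggr_out + (1 - a) * x with a clamped to [0, 1] (which also made the trailing return aggr_out unreachable); after this commit the aggregated messages are returned unchanged. A small self-contained sketch of the two behaviours (the function names are hypothetical):

import torch

def update_old(aggr_out, x, root_weight):
    # Behaviour removed by this commit: clamp the root weight into [0, 1]
    # and blend aggregated neighbour messages with the node's own features.
    a = torch.clamp(torch.as_tensor(root_weight), 0.0, 1.0)
    return a * aggr_out + (1 - a) * x

def update_new(aggr_out, x):
    # Behaviour after this commit: the root/self mixing is bypassed and the
    # aggregated messages are returned unchanged.
    return aggr_out

x = torch.ones(3, 4)
aggr_out = torch.zeros(3, 4)
print(update_old(aggr_out, x, root_weight=0.25))  # 0.75 * x survives
print(update_new(aggr_out, x))                    # all zeros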