Fix Codacy Warnings (#477)

---------

Co-authored-by: Dario Coscia <dariocos99@gmail.com>
Author: Filippo Olivo
Date: 2025-03-10 15:38:45 +01:00
Committed by: Nicola Demo
Parent: e3790e049a
Commit: 4177bfbb50
157 changed files with 3473 additions and 3839 deletions


@@ -2,9 +2,9 @@ import torch
 from pina.loss import LpLoss
-input = torch.tensor([[3.], [1.], [-8.]])
-target = torch.tensor([[6.], [4.], [2.]])
-available_reductions = ['str', 'mean', 'none']
+input = torch.tensor([[3.0], [1.0], [-8.0]])
+target = torch.tensor([[6.0], [4.0], [2.0]])
+available_reductions = ["str", "mean", "none"]
 def test_LpLoss_constructor():
@@ -12,17 +12,17 @@ def test_LpLoss_constructor():
     for reduction in available_reductions:
         LpLoss(reduction=reduction)
     # test p
-    for p in [float('inf'), -float('inf'), 1, 10, -8]:
+    for p in [float("inf"), -float("inf"), 1, 10, -8]:
         LpLoss(p=p)
 def test_LpLoss_forward():
     # l2 loss
-    loss = LpLoss(p=2, reduction='mean')
+    loss = LpLoss(p=2, reduction="mean")
     l2_loss = torch.mean(torch.sqrt((input - target).pow(2)))
     assert loss(input, target) == l2_loss
     # l1 loss
-    loss = LpLoss(p=1, reduction='sum')
+    loss = LpLoss(p=1, reduction="sum")
     l1_loss = torch.sum(torch.abs(input - target))
     assert loss(input, target) == l1_loss
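
Note: the hunk above exercises the forward pass of LpLoss in the non-relative case. Below is a minimal standalone sketch of the behaviour these tests assert; the expected values are lifted directly from the assertions in the diff, and nothing beyond the LpLoss(p=..., reduction=...) signature shown there is assumed.

import torch
from pina.loss import LpLoss

input = torch.tensor([[3.0], [1.0], [-8.0]])
target = torch.tensor([[6.0], [4.0], [2.0]])

# p=2 with mean reduction: elementwise |input - target|, then averaged
loss = LpLoss(p=2, reduction="mean")
assert loss(input, target) == torch.mean(torch.sqrt((input - target).pow(2)))

# p=1 with sum reduction: elementwise |input - target|, then summed
loss = LpLoss(p=1, reduction="sum")
assert loss(input, target) == torch.sum(torch.abs(input - target))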
@@ -32,16 +32,16 @@ def test_LpRelativeLoss_constructor():
     for reduction in available_reductions:
         LpLoss(reduction=reduction, relative=True)
     # test p
-    for p in [float('inf'), -float('inf'), 1, 10, -8]:
+    for p in [float("inf"), -float("inf"), 1, 10, -8]:
         LpLoss(p=p, relative=True)
 def test_LpRelativeLoss_forward():
     # l2 relative loss
-    loss = LpLoss(p=2, reduction='mean', relative=True)
+    loss = LpLoss(p=2, reduction="mean", relative=True)
     l2_loss = torch.sqrt((input - target).pow(2)) / torch.sqrt(input.pow(2))
     assert loss(input, target) == torch.mean(l2_loss)
     # l1 relative loss
-    loss = LpLoss(p=1, reduction='sum', relative=True)
+    loss = LpLoss(p=1, reduction="sum", relative=True)
     l1_loss = torch.abs(input - target) / torch.abs(input)
     assert loss(input, target) == torch.sum(l1_loss)
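
Note: the relative variant asserted above divides the elementwise error |input - target| by |input| before applying the reduction. A minimal sketch of the same check, again using only the LpLoss(p=..., reduction=..., relative=True) signature that appears in the diff:

import torch
from pina.loss import LpLoss

input = torch.tensor([[3.0], [1.0], [-8.0]])
target = torch.tensor([[6.0], [4.0], [2.0]])

# relative L2 loss: elementwise |input - target| / |input|, then mean reduction
loss = LpLoss(p=2, reduction="mean", relative=True)
expected = torch.sqrt((input - target).pow(2)) / torch.sqrt(input.pow(2))
assert loss(input, target) == torch.mean(expected)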