
Commit ea82e5d

removed unnecessary torch.abs
1 parent 9ae9e73 commit ea82e5d


loss_functions.py

Lines changed: 5 additions & 5 deletions
@@ -142,15 +142,15 @@ def hyper_loss(model_output, gt_sdf, ks, ki, kg, gt_normals=None, kn=None):
     pred_sdf = model_output["model_out"].float()
 
     sdf_constraint = torch.where(
-        torch.abs(gt_sdf) == 0, torch.abs(pred_sdf), torch.zeros_like(pred_sdf)
+        gt_sdf == 0, torch.abs(pred_sdf), torch.zeros_like(pred_sdf)
     )
     losses['sdf'] = torch.abs(sdf_constraint).mean() * ks
 
     pred_sdf_c = torch.clip(pred_sdf, -0.3, 0.3).float()
     gt_sdf_c = torch.clip(gt_sdf, -0.3, 0.3).float()
 
     inter_constraint = torch.where(
-        torch.abs(gt_sdf) == 0, torch.zeros_like(pred_sdf), abs(gt_sdf_c - pred_sdf_c)
+        gt_sdf == 0, torch.zeros_like(pred_sdf), abs(gt_sdf_c - pred_sdf_c)
     )
     losses['inter'] = inter_constraint.mean() * ki

@@ -159,7 +159,7 @@ def hyper_loss(model_output, gt_sdf, ks, ki, kg, gt_normals=None, kn=None):
     if gt_normals is not None:
         norm = (1 - F.cosine_similarity(gradient, gt_normals, dim=-1))[..., None]
         normal_constraint = torch.where(
-            torch.abs(gt_sdf) == 0, norm, torch.zeros_like(gradient[..., :1])
+            gt_sdf == 0, norm, torch.zeros_like(gradient[..., :1])
         )
         losses['normal_constraint'] = normal_constraint.mean() * kn

@@ -176,7 +176,7 @@ def hyper_loss_deform(model_output, gt, kl, fw, ks, ki, kn, kg):
     pred_sdf = model_output["model_out"]
 
     sdf_constraint = torch.where(
-        torch.abs(gt_sdf) == 0, torch.abs(pred_sdf), torch.zeros_like(pred_sdf)
+        gt_sdf == 0, torch.abs(pred_sdf), torch.zeros_like(pred_sdf)
     )
     pred_sdf_c = torch.clip(pred_sdf, -0.3, 0.3)
     gt_sdf_c = torch.clip(gt_sdf, -0.3, 0.3)
@@ -190,7 +190,7 @@ def hyper_loss_deform(model_output, gt, kl, fw, ks, ki, kn, kg):
         norm = (1 - F.cosine_similarity(gradient, gt_normals, dim=-1))[..., None]
 
         normal_constraint = torch.where(
-            torch.abs(gt_sdf) == 0, norm, torch.zeros_like(gradient[..., :1])
+            gt_sdf == 0, norm, torch.zeros_like(gradient[..., :1])
         )
         grad_constraint = abs(1 - torch.linalg.norm(gradient, dim=-1))
     else:
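
The edit is a pure simplification: torch.abs(x) == 0 is true exactly when x == 0, so the torch.where masks, and therefore the loss values, are unchanged. A minimal standalone sketch, using hypothetical tensors that are not taken from the repository, checks this equivalence:

# Sketch only: gt_sdf/pred_sdf below are made-up sample values, not repository data.
# |x| == 0 holds exactly when x == 0, so both masks select the same on-surface points.
import torch

gt_sdf = torch.tensor([[-0.2], [0.0], [0.15], [0.0]])      # ground-truth SDF samples
pred_sdf = torch.tensor([[-0.1], [0.05], [0.2], [-0.02]])  # predicted SDF samples

old_constraint = torch.where(
    torch.abs(gt_sdf) == 0, torch.abs(pred_sdf), torch.zeros_like(pred_sdf)
)
new_constraint = torch.where(
    gt_sdf == 0, torch.abs(pred_sdf), torch.zeros_like(pred_sdf)
)
assert torch.equal(old_constraint, new_constraint)  # identical loss contribution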
