fix ddp unused param bug in distill
gengyifei committed Nov 9, 2022
1 parent 5738ccf commit 39996fd
Showing 1 changed file with 5 additions and 5 deletions.
10 changes: 5 additions & 5 deletions yolov6/models/loss_distill.py
@@ -324,14 +324,14 @@ def forward(self, pred_dist, pred_bboxes, t_pred_dist, t_pred_bboxes, temperature
                 loss_dfl = loss_dfl.sum() / target_scores_sum
                 d_loss_dfl = d_loss_dfl.sum() / target_scores_sum
             else:
-                loss_dfl = torch.tensor(0.).to(pred_dist.device)
-                d_loss_dfl = torch.tensor(0.).to(pred_dist.device)
+                loss_dfl = pred_dist.sum() * 0.
+                d_loss_dfl = pred_dist.sum() * 0.

         else:

-            loss_iou = torch.tensor(0.).to(pred_dist.device)
-            loss_dfl = torch.tensor(0.).to(pred_dist.device)
-            d_loss_dfl = torch.tensor(0.).to(pred_dist.device)
+            loss_iou = pred_dist.sum() * 0.
+            loss_dfl = pred_dist.sum() * 0.
+            d_loss_dfl = pred_dist.sum() * 0.

         return loss_iou, loss_dfl, d_loss_dfl

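Why this fixes the DDP bug: torch.nn.parallel.DistributedDataParallel expects every parameter that took part in the forward pass to receive a gradient during backward; if a rank hits the "no positive samples" branch, losses built with torch.tensor(0.) carry no autograd history, the parameters feeding pred_dist get no gradient on that rank, and DDP's bucketed all-reduce errors out (or requires find_unused_parameters=True). pred_dist.sum() * 0. is the same zero numerically, but it stays attached to the computation graph, so backward still delivers an all-zero gradient to those parameters on every rank.

A minimal sketch (not part of the commit; pred_dist's shape here is made up) contrasting the two zero-loss constructions:

import torch

# Stand-in for a network output that feeds the loss.
pred_dist = torch.randn(8, 17, requires_grad=True)

# Detached zero: a fresh constant with no autograd history.
zero_detached = torch.tensor(0.).to(pred_dist.device)
print(zero_detached.requires_grad)        # False -> backward() never reaches pred_dist

# Graph-connected zero: same value, but built from pred_dist,
# so backward() still visits it and populates its gradient.
zero_connected = pred_dist.sum() * 0.
print(zero_connected.requires_grad)       # True
zero_connected.backward()
print(pred_dist.grad.abs().sum().item())  # 0.0 -- a gradient exists, just all zeros

In the empty-branch case this guarantees each rank contributes a (zero) gradient for the distillation head, keeping the all-reduce consistent across processes.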
