import torch

from ..utils import _log_api_usage_once
from ._utils import _loss_inter_union, _upcast_non_float


def generalized_box_iou_loss(
    boxes1: torch.Tensor,
    boxes2: torch.Tensor,
    reduction: str = "none",
    eps: float = 1e-7,
) -> torch.Tensor:
    """
    Gradient-friendly IoU loss with an additional penalty that is non-zero when the
    boxes do not overlap and scales with the size of their smallest enclosing box.
    This loss is symmetric, so the boxes1 and boxes2 arguments are interchangeable.

    Both sets of boxes are expected to be in ``(x1, y1, x2, y2)`` format with
    ``0 <= x1 < x2`` and ``0 <= y1 < y2``, and the two boxes should have the
    same dimensions.

    Args:
        boxes1 (Tensor[N, 4] or Tensor[4]): first set of boxes
        boxes2 (Tensor[N, 4] or Tensor[4]): second set of boxes
        reduction (string, optional): Specifies the reduction to apply to the output:
            ``'none'`` | ``'mean'`` | ``'sum'``. ``'none'``: No reduction will be
            applied to the output. ``'mean'``: The output will be averaged.
            ``'sum'``: The output will be summed. Default: ``'none'``
        eps (float): small number to prevent division by zero. Default: 1e-7

    Returns:
        Tensor: Loss tensor with the reduction option applied.

    Reference:
        Hamid Rezatofighi et al.: Generalized Intersection over Union:
        A Metric and A Loss for Bounding Box Regression:
        https://arxiv.org/abs/1902.09630
    """
    # Original implementation from https://github.com/facebookresearch/fvcore/blob/bfff2ef/fvcore/nn/giou_loss.py
    if not torch.jit.is_scripting() and not torch.jit.is_tracing():
        _log_api_usage_once(generalized_box_iou_loss)

    boxes1 = _upcast_non_float(boxes1)
    boxes2 = _upcast_non_float(boxes2)
    intsctk, unionk = _loss_inter_union(boxes1, boxes2)
    iouk = intsctk / (unionk + eps)

    x1, y1, x2, y2 = boxes1.unbind(dim=-1)
    x1g, y1g, x2g, y2g = boxes2.unbind(dim=-1)

    # smallest enclosing box
    xc1 = torch.min(x1, x1g)
    yc1 = torch.min(y1, y1g)
    xc2 = torch.max(x2, x2g)
    yc2 = torch.max(y2, y2g)

    area_c = (xc2 - xc1) * (yc2 - yc1)
    # GIoU = IoU - (area of the enclosing box not covered by the union) / (enclosing box area)
    miouk = iouk - ((area_c - unionk) / (area_c + eps))

    loss = 1 - miouk

    # Check reduction option and return loss accordingly
    if reduction == "none":
        pass
    elif reduction == "mean":
        loss = loss.mean() if loss.numel() > 0 else 0.0 * loss.sum()
    elif reduction == "sum":
        loss = loss.sum()
    else:
        raise ValueError(
            f"Invalid value for arg 'reduction': '{reduction}'\n Supported reduction modes: 'none', 'mean', 'sum'"
        )
    return loss
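

# Minimal usage sketch (illustrative only, not part of the module). It assumes this
# function is exposed as ``torchvision.ops.generalized_box_iou_loss``; the box
# coordinates below are arbitrary example values.
#
#   import torch
#   from torchvision.ops import generalized_box_iou_loss
#
#   preds = torch.tensor([[10.0, 10.0, 50.0, 50.0],
#                         [20.0, 20.0, 60.0, 60.0]], requires_grad=True)
#   targets = torch.tensor([[12.0, 8.0, 48.0, 52.0],
#                           [80.0, 80.0, 120.0, 120.0]])  # second pair does not overlap
#
#   loss = generalized_box_iou_loss(preds, targets, reduction="mean")
#   loss.backward()  # gradients flow back to `preds`, even for the non-overlapping pair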