@@ -17,13 +17,15 @@ class PowerLoss(LossInterface):
 
     .. math::
         \ell(x, y) = L = \{l_1,\dots,l_N\}^\top, \quad
-        l_n = \frac{1}{D}\left[\sum_{i=1}^{D} \left| x_n^i - y_n^i \right|^p \right],
+        l_n = \frac{1}{D}\left[\sum_{i=1}^{D}
+        \left| x_n^i - y_n^i \right|^p\right],
 
     If ``'relative'`` is set to true:
 
     .. math::
         \ell(x, y) = L = \{l_1,\dots,l_N\}^\top, \quad
-        l_n = \frac{ \sum_{i=1}^{D} | x_n^i - y_n^i|^p }{\sum_{i=1}^{D}|y_n^i|^p},
+        l_n = \frac{ \sum_{i=1}^{D} | x_n^i - y_n^i|^p }
+        {\sum_{i=1}^{D}|y_n^i|^p},
 
     where :math:`N` is the batch size. If ``reduction`` is not ``none``
     (default ``mean``), then:
@@ -38,16 +40,19 @@ class PowerLoss(LossInterface):
     :math:`x` and :math:`y` are tensors of arbitrary shapes with a total
     of :math:`n` elements each.
 
-    The sum operation still operates over all the elements, and divides by :math:`n`.
+    The sum operation still operates over all the elements, and divides by
+    :math:`n`.
 
-    The division by :math:`n` can be avoided if one sets ``reduction`` to ``sum``.
+    The division by :math:`n` can be avoided if one sets ``reduction`` to
+    ``sum``.
     """
 
     def __init__(self, p=2, reduction="mean", relative=False):
         """
         :param int p: Degree of Lp norm. It specifies the type of norm to
             be calculated. See `list of possible orders in torch linalg
-            <https://pytorch.org/docs/stable/generated/torch.linalg.norm.html#torch.linalg.norm>`_ to
+            <https://pytorch.org/docs/stable/generated/
+            torch.linalg.norm.html#torch.linalg.norm>`_ to
             see the possible degrees. Default 2 (euclidean norm).
         :param str reduction: Specifies the reduction to apply to the output:
             ``none`` | ``mean`` | ``sum``. When ``none``: no reduction
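The formulas being reflowed in this docstring map directly to a few lines of PyTorch. Below is a minimal, self-contained sketch of that mapping, assuming batched inputs of shape ``(N, D)``; the function name ``power_loss`` is illustrative and is not the class's actual implementation, which additionally handles reduction via ``LossInterface``.

```python
import torch

def power_loss(x, y, p=2, reduction="mean", relative=False):
    """Hypothetical sketch of the documented formula, not the real class."""
    # Element-wise |x_n^i - y_n^i|^p
    diff = torch.abs(x - y) ** p
    if relative:
        # l_n = sum_i |x_n^i - y_n^i|^p / sum_i |y_n^i|^p
        losses = diff.flatten(1).sum(dim=1) / (torch.abs(y) ** p).flatten(1).sum(dim=1)
    else:
        # l_n = (1/D) * sum_i |x_n^i - y_n^i|^p
        losses = diff.flatten(1).mean(dim=1)
    # Reduction over the batch dimension
    if reduction == "mean":
        return losses.mean()
    if reduction == "sum":
        return losses.sum()
    return losses  # reduction == "none"
```

For example, ``power_loss(torch.rand(8, 3), torch.rand(8, 3), p=2, relative=True)`` returns a scalar, while passing ``reduction="none"`` returns the per-sample vector :math:`\{l_1,\dots,l_N\}`.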