@@ -12556,8 +12556,8 @@ def merge_dicts(*dicts):
12556 12556 add_docstr(
12557 12557     torch.hamming_window,
1255812558 """
12559- hamming_window(window_length, periodic=True, alpha=0.54, beta=0.46, *, dtype=None, \
12560- layout=torch.strided, device=None, requires_grad=False) -> Tensor
12559+ hamming_window(window_length, *, dtype=None, layout=None, device=None, pin_memory=False, \
12560+ requires_grad=False) -> Tensor
1256112561"""
1256212562 + r"""
1256312563Hamming window function.
@@ -12585,16 +12585,82 @@ def merge_dicts(*dicts):
1258512585 + r"""
1258612586Arguments:
1258712587 window_length (int): the size of returned window
12588- periodic (bool, optional): If True, returns a window to be used as periodic
12588+
12589+ Keyword args:
12590+ {dtype} Only floating point types are supported.
12591+ layout (:class:`torch.layout`, optional): the desired layout of returned window tensor. Only
12592+ ``torch.strided`` (dense layout) is supported.
12593+ {device}
12594+ {pin_memory}
12595+ {requires_grad}
12596+
12597+ Returns:
12598+ Tensor: A 1-D tensor of size :math:`(\text{{window\_length}},)` containing the window.
12599+
12600+ .. function:: hamming_window(window_length, periodic, *, dtype=None, layout=None, device=None, \
12601+ pin_memory=False, requires_grad=False) -> Tensor
12602+ :noindex:
12603+
12604+ Hamming window function with periodic specified.
12605+
12606+ Arguments:
12607+ window_length (int): the size of returned window
12608+ periodic (bool): If True, returns a window to be used as periodic
12609+ function. If False, return a symmetric window.
12610+
12611+ Keyword args:
12612+ {dtype} Only floating point types are supported.
12613+ layout (:class:`torch.layout`, optional): the desired layout of returned window tensor. Only
12614+ ``torch.strided`` (dense layout) is supported.
12615+ {device}
12616+ {pin_memory}
12617+ {requires_grad}
12618+
12619+ Returns:
12620+ Tensor: A 1-D tensor of size :math:`(\text{{window\_length}},)` containing the window.
12621+
12622+ .. function:: hamming_window(window_length, periodic, float alpha, *, dtype=None, layout=None, device=None, \
12623+ pin_memory=False, requires_grad=False) -> Tensor
12624+ :noindex:
12625+
12626+ Hamming window function with periodic and alpha specified.
12627+
12628+ Arguments:
12629+ window_length (int): the size of returned window
12630+ periodic (bool): If True, returns a window to be used as periodic
12631+ function. If False, return a symmetric window.
12632+ alpha (float): The coefficient :math:`\alpha` in the equation above
12633+
12634+ Keyword args:
12635+ {dtype} Only floating point types are supported.
12636+ layout (:class:`torch.layout`, optional): the desired layout of returned window tensor. Only
12637+ ``torch.strided`` (dense layout) is supported.
12638+ {device}
12639+ {pin_memory}
12640+ {requires_grad}
12641+
12642+ Returns:
12643+ Tensor: A 1-D tensor of size :math:`(\text{{window\_length}},)` containing the window.
12644+
12645+ .. function:: hamming_window(window_length, periodic, float alpha, float beta, *, dtype=None, layout=None, \
12646+ device=None, pin_memory=False, requires_grad=False) -> Tensor
12647+ :noindex:
12648+
12649+ Hamming window function with periodic, alpha and beta specified.
12650+
12651+ Arguments:
12652+ window_length (int): the size of returned window
12653+ periodic (bool): If True, returns a window to be used as periodic
1258912654 function. If False, return a symmetric window.
12590- alpha (float, optional): The coefficient :math:`\alpha` in the equation above
12591- beta (float, optional): The coefficient :math:`\beta` in the equation above
12655+ alpha (float): The coefficient :math:`\alpha` in the equation above
12656+ beta (float): The coefficient :math:`\beta` in the equation above
1259212657
1259312658Keyword args:
1259412659 {dtype} Only floating point types are supported.
1259512660 layout (:class:`torch.layout`, optional): the desired layout of returned window tensor. Only
1259612661 ``torch.strided`` (dense layout) is supported.
1259712662 {device}
12663+ {pin_memory}
1259812664 {requires_grad}
1259912665
1260012666Returns:
0 commit comments