from itertools import product

import torch
from torch import nn

from i6_models.parts.frontend.common import apply_same_padding, get_same_padding


def test_output_shape():
    # test with one even and one odd input dim
    last_dim = 101
    pre_last_dim = 100

    iff = lambda x, y: x and y or not x and not y  # x <=> y
    strided_dim = lambda d, s: (d - 1) // s + 1  # expected out dimension for a strided conv

    # `get_same_padding` may happen to work for some strides > 1, but here we only test
    # the stride=1 case it is designed for
    for kernel in product(range(1, 21), repeat=2):
        conv = nn.Conv2d(1, 1, kernel_size=kernel, stride=(1, 1), padding=get_same_padding(kernel))

        x = torch.randn(1, 1, pre_last_dim, last_dim)

        out = conv(x)

        # we expect `get_same_padding` to preserve the input size exactly iff the kernel size is odd
        assert all(
            iff(out_dim == in_dim, k % 2 == 1) for in_dim, out_dim, k in zip(x.shape[2:], out.shape[2:], kernel)
        ), f"Failed for {x.shape=}, {out.shape=}, {kernel=} and stride=1"
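    # Why odd kernels only: assuming `get_same_padding` returns a symmetric padding of
    # k // 2 per dim (an assumption about the helper, not taken from its source), the
    # output size is in_dim + 2 * (k // 2) - k + 1, which equals in_dim exactly when k
    # is odd and in_dim + 1 when k is even.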

    for kernel, stride in product(product(range(1, 21), repeat=2), range(1, 7)):
        conv = nn.Conv2d(1, 1, kernel_size=kernel, stride=(1, stride))

        x = torch.randn(1, 1, pre_last_dim, last_dim)
        x_padded = apply_same_padding(x, kernel)

        out = conv(x_padded)

        # `apply_same_padding` should yield the expected out dimensions for all kernel sizes and strides
        assert all(
            out_dim == strided_dim(in_dim, s)
            for in_dim, out_dim, s in zip(x.shape[2:], out.shape[2:], (1, stride))
        ), f"Failed for {x.shape=}, {out.shape=}, {kernel=} and {stride=}"
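

# For reference, a minimal sketch of the "same"-padding arithmetic the test relies on.
# This is an assumption about `apply_same_padding`, not its actual implementation: if each
# spatial dim is padded by a total of kernel - 1 elements, a conv with that kernel and any
# stride produces ceil(in_dim / stride) == (in_dim - 1) // stride + 1 output frames,
# matching `strided_dim` above. The helper name below is hypothetical.
def _assumed_same_padded_out_dim(in_dim: int, kernel: int, stride: int) -> int:
    padded = in_dim + (kernel - 1)  # hypothetical total "same" padding of kernel - 1
    return (padded - kernel) // stride + 1  # simplifies to (in_dim - 1) // stride + 1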