Commit 77ffb1e (1 parent: 1860894)
vector_quantize_pytorch/finite_scalar_quantization.py
@@ -80,6 +80,8 @@ def __init__(
     ):
         super().__init__()
 
+        assert not (any([l == 2 for l in levels]) and not preserve_symmetry), 'turn on `preserve_symmetry` for using any levels == 2, or use a greater level'
+
         if isinstance(levels, tuple):
             levels = list(levels)
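
The added guard makes FSQ refuse construction when any entry in `levels` is 2 unless symmetry preservation is enabled. A minimal usage sketch, assuming `preserve_symmetry` is a keyword argument of the `FSQ` constructor (as the assertion implies) and following the library's usual calling convention; the level values below are illustrative:

import torch
from vector_quantize_pytorch import FSQ

# a level of 2 is present, so preserve_symmetry must be True,
# otherwise the new assertion raises at construction time
quantizer = FSQ(
    levels = [8, 5, 5, 2],
    preserve_symmetry = True,
)

x = torch.randn(1, 1024, 4)    # (batch, seq, len(levels))
xhat, indices = quantizer(x)   # quantized output and codebook indices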