We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 6251fb2 · commit c037f92 · Copy full SHA for c037f92
tests/test_optimizer_parameters.py
@@ -22,7 +22,7 @@ def test_learning_rate(optimizer_name):
22
23
@pytest.mark.parametrize('optimizer_name', VALID_OPTIMIZER_NAMES)
24
def test_epsilon(optimizer_name):
25
- if optimizer_name in ('nero', 'shampoo', 'scalableshampoo', 'dadaptsgd', 'adafactor'):
+ if optimizer_name in ('nero', 'shampoo', 'scalableshampoo', 'dadaptsgd', 'adafactor', 'lion'):
26
pytest.skip(f'skip {optimizer_name} optimizer')
27
28
optimizer = load_optimizer(optimizer_name)
0 commit comments