Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Test] Add more test cases #45

Merged
merged 16 commits into from
Jan 29, 2022
Prev Previous commit
Next Next commit
update: split tests
  • Loading branch information
kozistr committed Jan 29, 2022
commit f3274055069e733adfb5a91eba7989c77c4012b9
21 changes: 0 additions & 21 deletions tests/test_load_optimizers.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,24 +38,3 @@ def test_load_optimizers_valid(valid_optimizer_names):
def test_load_optimizers_invalid(invalid_optimizer_names):
    """Unknown optimizer names must raise NotImplementedError from the loader."""
    with pytest.raises(NotImplementedError):
        load_optimizers(invalid_optimizer_names)


@pytest.mark.parametrize('optimizer_names', VALID_OPTIMIZER_NAMES)
def test_learning_rate(optimizer_names):
    """A negative learning rate must be rejected with ValueError.

    The optimizer class is loaded *outside* the ``raises`` block so that an
    unexpected ValueError from ``load_optimizers`` itself cannot masquerade
    as the constructor-side validation this test is meant to check.
    """
    optimizer = load_optimizers(optimizer_names)
    with pytest.raises(ValueError):
        optimizer(None, lr=-1e-2)


@pytest.mark.parametrize('optimizer_names', VALID_OPTIMIZER_NAMES)
def test_epsilon(optimizer_names):
    """A negative epsilon must be rejected with ValueError.

    Only the optimizer construction belongs inside ``pytest.raises``; loading
    the class first keeps the assertion scoped to the ``eps`` validation.
    """
    optimizer = load_optimizers(optimizer_names)
    with pytest.raises(ValueError):
        optimizer(None, eps=-1e-6)


@pytest.mark.parametrize('optimizer_names', VALID_OPTIMIZER_NAMES)
def test_weight_decay(optimizer_names):
    """A negative weight decay must be rejected with ValueError.

    Only the optimizer construction belongs inside ``pytest.raises``; loading
    the class first keeps the assertion scoped to the ``weight_decay`` check.
    """
    optimizer = load_optimizers(optimizer_names)
    with pytest.raises(ValueError):
        optimizer(None, weight_decay=-1e-3)
41 changes: 41 additions & 0 deletions tests/test_optimizer_parameters.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
from typing import List

import pytest

from pytorch_optimizer import load_optimizers

# Registry keys accepted by ``load_optimizers`` — one entry per supported
# optimizer; used to parametrize the hyper-parameter validation tests below.
VALID_OPTIMIZER_NAMES: List[str] = [
    'adamp',
    'sgdp',
    'madgrad',
    'ranger',
    'ranger21',
    'radam',
    'adabound',
    'adahessian',
    'adabelief',
    'diffgrad',
    'diffrgrad',
    'lamb',
]


@pytest.mark.parametrize('optimizer_names', VALID_OPTIMIZER_NAMES)
def test_learning_rate(optimizer_names):
    """A negative learning rate must be rejected with ValueError.

    The optimizer class is loaded *outside* the ``raises`` block so that an
    unexpected ValueError from ``load_optimizers`` itself cannot masquerade
    as the constructor-side validation this test is meant to check.
    """
    optimizer = load_optimizers(optimizer_names)
    with pytest.raises(ValueError):
        optimizer(None, lr=-1e-2)


@pytest.mark.parametrize('optimizer_names', VALID_OPTIMIZER_NAMES)
def test_epsilon(optimizer_names):
    """A negative epsilon must be rejected with ValueError.

    Only the optimizer construction belongs inside ``pytest.raises``; loading
    the class first keeps the assertion scoped to the ``eps`` validation.
    """
    optimizer = load_optimizers(optimizer_names)
    with pytest.raises(ValueError):
        optimizer(None, eps=-1e-6)


@pytest.mark.parametrize('optimizer_names', VALID_OPTIMIZER_NAMES)
def test_weight_decay(optimizer_names):
    """A negative weight decay must be rejected with ValueError.

    Only the optimizer construction belongs inside ``pytest.raises``; loading
    the class first keeps the assertion scoped to the ``weight_decay`` check.
    """
    optimizer = load_optimizers(optimizer_names)
    with pytest.raises(ValueError):
        optimizer(None, weight_decay=-1e-3)