Skip to content

Commit

Permalink
Fix RandomState in the tests of linear model. (scikit-learn#7881)
Browse files — browse the repository at this point in the history
  • Loading branch information
tguillemot authored and agramfort committed Nov 15, 2016
1 parent 680a5b8 commit 915458b
Show file tree
Hide file tree
Showing 4 changed files with 13 additions and 9 deletions.
5 changes: 3 additions & 2 deletions sklearn/linear_model/tests/test_coordinate_descent.py
Original file line number Diff line number Diff line change
Expand Up @@ -437,8 +437,9 @@ def test_enet_multitarget():


def test_multioutput_enetcv_error():
    """Check that ElasticNetCV rejects multi-output targets.

    ElasticNetCV does not support 2-D ``y``; fitting with a ``(10, 2)``
    target must raise ``ValueError``.
    """
    # Seeded RandomState (not the global np.random) so the test data is
    # reproducible across runs — the point of scikit-learn#7881.
    rng = np.random.RandomState(0)
    X = rng.randn(10, 2)
    y = rng.randn(10, 2)
    clf = ElasticNetCV()
    assert_raises(ValueError, clf.fit, X, y)

Expand Down
3 changes: 2 additions & 1 deletion sklearn/linear_model/tests/test_least_angle.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,7 @@ def test_collinearity():
[2., 2., 0.],
[1., 1., 0]])
y = np.array([1., 0., 0])
rng = np.random.RandomState(0)

f = ignore_warnings
_, _, coef_path_ = f(linear_model.lars_path)(X, y, alpha_min=0.01)
Expand All @@ -115,7 +116,7 @@ def test_collinearity():
assert_less((residual ** 2).sum(), 1.) # just make sure it's bounded

n_samples = 10
X = np.random.rand(n_samples, 5)
X = rng.rand(n_samples, 5)
y = np.zeros(n_samples)
_, _, coef_path_ = linear_model.lars_path(X, y, Gram='auto', copy_X=False,
copy_Gram=False, alpha_min=0.,
Expand Down
5 changes: 3 additions & 2 deletions sklearn/linear_model/tests/test_ransac.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,8 +54,9 @@ def is_data_valid(X, y):
assert_equal(y.shape[0], 2)
return False

X = np.random.rand(10, 2)
y = np.random.rand(10, 1)
rng = np.random.RandomState(0)
X = rng.rand(10, 2)
y = rng.rand(10, 1)

base_estimator = LinearRegression()
ransac_estimator = RANSACRegressor(base_estimator, min_samples=2,
Expand Down
9 changes: 5 additions & 4 deletions sklearn/linear_model/tests/test_sgd.py
Original file line number Diff line number Diff line change
Expand Up @@ -611,7 +611,8 @@ def test_wrong_class_weight_format(self):
def test_weights_multiplied(self):
# Tests that class_weight and sample_weight are multiplicative
class_weights = {1: .6, 2: .3}
sample_weights = np.random.random(Y4.shape[0])
rng = np.random.RandomState(0)
sample_weights = rng.random_sample(Y4.shape[0])
multiplied_together = np.copy(sample_weights)
multiplied_together[Y4 == 1] *= class_weights[1]
multiplied_together[Y4 == 2] *= class_weights[2]
Expand Down Expand Up @@ -960,6 +961,7 @@ def test_sgd_least_squares_fit(self):
def test_sgd_epsilon_insensitive(self):
xmin, xmax = -5, 5
n_samples = 100
rng = np.random.RandomState(0)
X = np.linspace(xmin, xmax, n_samples).reshape(n_samples, 1)

# simple linear function without noise
Expand All @@ -973,8 +975,7 @@ def test_sgd_epsilon_insensitive(self):
assert_true(score > 0.99)

# simple linear function with noise
y = 0.5 * X.ravel() \
+ np.random.randn(n_samples, 1).ravel()
y = 0.5 * X.ravel() + rng.randn(n_samples, 1).ravel()

clf = self.factory(loss='epsilon_insensitive', epsilon=0.01,
alpha=0.1, n_iter=20,
Expand Down Expand Up @@ -1012,7 +1013,7 @@ def test_elasticnet_convergence(self):

n_samples, n_features = 1000, 5
rng = np.random.RandomState(0)
X = np.random.randn(n_samples, n_features)
X = rng.randn(n_samples, n_features)
# ground_truth linear model that generate y from X and to which the
# models should converge if the regularizer would be set to 0.0
ground_truth_coef = rng.randn(n_features)
Expand Down

0 comments on commit 915458b

Please sign in to comment.