 from ..preprocessing import LabelEncoder, LabelBinarizer
 from ..svm.base import _fit_liblinear
 from ..utils import check_array, check_consistent_length, compute_class_weight
+from ..utils import check_random_state
 from ..utils.extmath import (logsumexp, log_logistic, safe_sparse_dot,
                              squared_norm)
 from ..utils.optimize import newton_cg
@@ -417,7 +418,8 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
                              max_iter=100, tol=1e-4, verbose=0,
                              solver='lbfgs', coef=None, copy=True,
                              class_weight=None, dual=False, penalty='l2',
-                             intercept_scaling=1., multi_class='ovr'):
+                             intercept_scaling=1., multi_class='ovr',
+                             random_state=None):
     """Compute a Logistic Regression model for a list of regularization
     parameters.

@@ -502,8 +504,12 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
         Multiclass option can be either 'ovr' or 'multinomial'. If the option
         chosen is 'ovr', then a binary problem is fit for each label. Else
         the loss minimised is the multinomial loss fit across
-        the entire probability distribution. Works only for the 'lbfgs'
-        solver.
+        the entire probability distribution. Works only for the 'lbfgs' and
+        'newton-cg' solvers.
+
+    random_state : int seed, RandomState instance, or None (default)
+        The seed of the pseudo random number generator to use when
+        shuffling the data.

     Returns
     -------
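To make the docstring's 'ovr' vs. 'multinomial' distinction concrete, here is a small NumPy sketch (illustrative only, not scikit-learn internals) comparing the two losses for a single three-class sample: 'ovr' sums independent sigmoid log-losses, one per class, while 'multinomial' takes a single softmax log-loss over the whole class-probability distribution.

```python
import numpy as np

scores = np.array([2.0, 0.5, -1.0])   # decision scores for one sample, 3 classes
true_class = 0

# 'multinomial': one softmax log-loss over the full class distribution.
softmax = np.exp(scores - scores.max())
softmax /= softmax.sum()
multinomial_loss = -np.log(softmax[true_class])

# 'ovr': one independent binary (sigmoid) log-loss per class, then summed.
targets = np.array([1.0, 0.0, 0.0])   # one-vs-rest targets for true_class == 0
sigmoid = 1.0 / (1.0 + np.exp(-scores))
ovr_loss = -np.sum(targets * np.log(sigmoid) + (1 - targets) * np.log(1 - sigmoid))

print(multinomial_loss, ovr_loss)
```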
@@ -531,6 +537,7 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
     _, n_features = X.shape
     check_consistent_length(X, y)
     classes = np.unique(y)
+    random_state = check_random_state(random_state)

     if pos_class is None and multi_class != 'multinomial':
         if (classes.size > 2):
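The random_state docstring above lists three accepted forms, and this hunk normalizes them with check_random_state before the value is used anywhere else. A quick sketch of that utility's behaviour (sklearn.utils.check_random_state is the real helper; the calls below only illustrate its normalization rules):

```python
import numpy as np
from sklearn.utils import check_random_state

print(check_random_state(None))        # -> the global numpy RandomState singleton
print(check_random_state(0))           # -> a fresh RandomState seeded with 0
rng = np.random.RandomState(42)
print(check_random_state(rng) is rng)  # -> True: an existing instance passes through
```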
@@ -659,7 +666,7 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
         elif solver == 'liblinear':
             coef_, intercept_, _, = _fit_liblinear(
                 X, y, C, fit_intercept, intercept_scaling, class_weight,
-                penalty, dual, verbose, max_iter, tol,
+                penalty, dual, verbose, max_iter, tol, random_state
                 )
             if fit_intercept:
                 w0 = np.concatenate([coef_.ravel(), intercept_])
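A minimal sketch of the pattern this hunk relies on: the seed is normalized once (previous hunk) and the same RandomState instance is then handed to _fit_liblinear for every C on the regularization path, so a single seed governs the whole path. toy_path and fit_one below are hypothetical stand-ins for the real path function and liblinear fit, not scikit-learn code.

```python
import numpy as np
from sklearn.utils import check_random_state

def toy_path(X, y, Cs, fit_one, random_state=None):
    # Normalize the seed once, then reuse the same RandomState for every C,
    # mirroring how the patched path forwards `random_state` to each fit.
    rng = check_random_state(random_state)
    return [fit_one(X, y, C, rng) for C in Cs]

# Stand-in for _fit_liblinear: it just draws from the shared RandomState so
# the effect of one seed across the whole path is visible and repeatable.
fit_one = lambda X, y, C, rng: rng.rand()

X, y = np.zeros((5, 2)), np.zeros(5)
print(toy_path(X, y, [0.1, 1.0, 10.0], fit_one, random_state=0))
print(toy_path(X, y, [0.1, 1.0, 10.0], fit_one, random_state=0))  # identical output
```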
@@ -1029,7 +1036,7 @@ def fit(self, X, y):
         self.coef_, self.intercept_, self.n_iter_ = _fit_liblinear(
             X, y, self.C, self.fit_intercept, self.intercept_scaling,
             self.class_weight, self.penalty, self.dual, self.verbose,
-            self.max_iter, self.tol
+            self.max_iter, self.tol, self.random_state
             )
         return self

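Finally, a hedged usage sketch of the user-facing effect of forwarding self.random_state in fit(): assuming LogisticRegression exposes the matching random_state constructor parameter that this attribute comes from, fixing it makes repeated fits reproducible.

```python
import numpy as np
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression

X, y = make_classification(n_samples=200, n_features=20, random_state=0)

# Same seed for both estimators, so the fitted coefficients should match.
clf_a = LogisticRegression(random_state=0).fit(X, y)
clf_b = LogisticRegression(random_state=0).fit(X, y)
print(np.allclose(clf_a.coef_, clf_b.coef_))  # expected: True
```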