

Python linear_model.LassoCV Code Examples

This article collects typical usage examples of the Python method sklearn.linear_model.LassoCV. If you are wondering what linear_model.LassoCV does, how to use it, or are looking for concrete examples, the curated code samples below may help. You can also explore further usage examples from its containing module, sklearn.linear_model.


A total of 15 code examples of linear_model.LassoCV are shown below, sorted by popularity by default.
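
Before turning to the project examples, here is a minimal, self-contained sketch of typical LassoCV usage on synthetic data; the dataset and parameter values are illustrative assumptions rather than something taken from the projects below.

# Minimal LassoCV sketch on synthetic data (illustrative values only).
from sklearn.datasets import make_regression
from sklearn.linear_model import LassoCV
from sklearn.model_selection import train_test_split

X, y = make_regression(n_samples=200, n_features=20, noise=1.0, random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# LassoCV selects the regularization strength alpha_ by cross-validation.
reg = LassoCV(cv=5, n_alphas=100, max_iter=10000).fit(X_train, y_train)
print("chosen alpha:", reg.alpha_)
print("test R^2:", reg.score(X_test, y_test))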

Example 1: test_lasso_cv

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LassoCV [as alias]
def test_lasso_cv():
    X, y, X_test, y_test = build_dataset()
    max_iter = 150
    clf = LassoCV(n_alphas=10, eps=1e-3, max_iter=max_iter).fit(X, y)
    assert_almost_equal(clf.alpha_, 0.056, 2)

    clf = LassoCV(n_alphas=10, eps=1e-3, max_iter=max_iter, precompute=True)
    clf.fit(X, y)
    assert_almost_equal(clf.alpha_, 0.056, 2)

    # Check that the lars and the coordinate descent implementation
    # select a similar alpha
    lars = LassoLarsCV(normalize=False, max_iter=30).fit(X, y)
    # for this we check that they don't fall in the grid of
    # clf.alphas further than 1
    assert np.abs(np.searchsorted(clf.alphas_[::-1], lars.alpha_) -
                  np.searchsorted(clf.alphas_[::-1], clf.alpha_)) <= 1
    # check that they also give a similar MSE
    mse_lars = interpolate.interp1d(lars.cv_alphas_, lars.mse_path_.T)
    np.testing.assert_approx_equal(mse_lars(clf.alphas_[5]).mean(),
                                   clf.mse_path_[5].mean(), significant=2)

    # test set
    assert_greater(clf.score(X_test, y_test), 0.99) 
Developer: PacktPublishing, Project: Mastering-Elasticsearch-7.0, Lines: 26, Source: test_coordinate_descent.py

Example 2: test_lasso_cv_with_some_model_selection

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LassoCV [as alias]
def test_lasso_cv_with_some_model_selection():
    from sklearn.pipeline import make_pipeline
    from sklearn.preprocessing import StandardScaler
    from sklearn.model_selection import StratifiedKFold
    from sklearn import datasets
    from sklearn.linear_model import LassoCV

    diabetes = datasets.load_diabetes()
    X = diabetes.data
    y = diabetes.target

    pipe = make_pipeline(
        StandardScaler(),
        LassoCV(cv=StratifiedKFold(n_splits=5))
    )
    pipe.fit(X, y) 
Developer: PacktPublishing, Project: Mastering-Elasticsearch-7.0, Lines: 18, Source: test_coordinate_descent.py

Example 3: feature_inspection

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LassoCV [as alias]
def feature_inspection(self, lower=0, upper=1, interval=10**2,
                           alpha_list=None):
        """Generate interval used to search for the alpha.

        Parameters
        ----------
        lower : int
            Lower bound for the interval search.
        upper : int
            Upper bound for the interval search.
        interval: int
            Number of alphas in interval inspected.
        """
        feat_vec, alpha_vec = [], []
        if alpha_list is None:
            alpha_list = np.linspace(float(upper), float(lower), int(interval))
        for alpha in alpha_list:
            model = LassoCV(alphas=[alpha], cv=3).fit(X=self.train_features,
                                                      y=self.train_targets)
            feat_vec.append(np.shape(np.nonzero(model.coef_))[1])
            alpha_vec.append(alpha)
        return feat_vec, alpha_vec, np.nonzero(model.coef_) 
Developer: SUNCAT-Center, Project: CatLearn, Lines: 24, Source: feature_selection.py

Example 4: learn_model

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LassoCV [as alias]
def learn_model(self, x, y, clf, lam=None):
        if lam is None and self.initlam != -1:  # hack for first training
            lam = self.initlam
        if clf is None:
            if lam is None:
                clf = linear_model.LassoCV(max_iter=10000)
                clf.fit(x, y)
                lam = clf.alpha_
            clf = linear_model.Lasso(alpha=lam,
                                     max_iter=10000,
                                     warm_start=True)
        clf.fit(x, y)
        return clf, lam


############################################################################################
# Implements GD Poisoning for Ridge Linear Regression
############################################################################################ 
Developer: jagielski, Project: manip-ml, Lines: 20, Source: gd_poisoners.py

Example 5: load_default

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LassoCV [as alias]
def load_default(self, machine_list=['lasso', 'tree', 'ridge', 'random_forest', 'svm']):
        """
        Loads a default set of scikit-learn regressors.

        Parameters
        ----------
        machine_list: optional, list of strings
            List of default machine names to be loaded.

        """
        for machine in machine_list:
            try:
                if machine == 'lasso':
                    self.estimators_['lasso'] = linear_model.LassoCV(random_state=self.random_state).fit(self.X_k_, self.y_k_)
                if machine == 'tree':
                    self.estimators_['tree'] = DecisionTreeRegressor(random_state=self.random_state).fit(self.X_k_, self.y_k_)
                if machine == 'ridge':
                    self.estimators_['ridge'] = linear_model.RidgeCV().fit(self.X_k_, self.y_k_)
                if machine == 'random_forest':
                    self.estimators_['random_forest'] = RandomForestRegressor(random_state=self.random_state).fit(self.X_k_, self.y_k_)
                if machine == 'svm':
                    self.estimators_['svm'] = SVR().fit(self.X_k_, self.y_k_)
            except ValueError:
                continue 
Developer: bhargavvader, Project: pycobra, Lines: 26, Source: ewa.py

Example 6: test_lasso_cv

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LassoCV [as alias]
def test_lasso_cv():
    X, y, X_test, y_test = build_dataset()
    max_iter = 150
    clf = LassoCV(n_alphas=10, eps=1e-3, max_iter=max_iter).fit(X, y)
    assert_almost_equal(clf.alpha_, 0.056, 2)

    clf = LassoCV(n_alphas=10, eps=1e-3, max_iter=max_iter, precompute=True)
    clf.fit(X, y)
    assert_almost_equal(clf.alpha_, 0.056, 2)

    # Check that the lars and the coordinate descent implementation
    # select a similar alpha
    lars = LassoLarsCV(normalize=False, max_iter=30).fit(X, y)
    # for this we check that they don't fall in the grid of
    # clf.alphas further than 1
    assert_true(np.abs(
        np.searchsorted(clf.alphas_[::-1], lars.alpha_) -
        np.searchsorted(clf.alphas_[::-1], clf.alpha_)) <= 1)
    # check that they also give a similar MSE
    mse_lars = interpolate.interp1d(lars.cv_alphas_, lars.mse_path_.T)
    np.testing.assert_approx_equal(mse_lars(clf.alphas_[5]).mean(),
                                   clf.mse_path_[5].mean(), significant=2)

    # test set
    assert_greater(clf.score(X_test, y_test), 0.99) 
Developer: alvarobartt, Project: twitter-stock-recommendation, Lines: 27, Source: test_coordinate_descent.py

Example 7: get_logistic_regression_coefs_l1

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LassoCV [as alias]
def get_logistic_regression_coefs_l1(self, category,
                                         clf=LassoCV(alphas=[0.1, 0.001],
                                                     max_iter=10000,
                                                     n_jobs=-1)):
        ''' Computes coefficients of an l1-penalized (Lasso) regression for the category.

        Parameters
        ----------
        category : str
            category name to score

        Returns
        -------
            (coefficient array, accuracy, majority class baseline accuracy)
        '''
        try:
            from sklearn.cross_validation import cross_val_predict
        except ImportError:
            from sklearn.model_selection import cross_val_predict
        y = self._get_mask_from_category(category)
        y_continuous = self._get_continuous_version_boolean_y(y)
        # X = TfidfTransformer().fit_transform(self._X)
        X = self._X

        clf.fit(X, y_continuous)
        y_hat = (cross_val_predict(clf, X, y_continuous) > 0)
        acc, baseline = self._get_accuracy_and_baseline_accuracy(y, y_hat)
        clf.fit(X, y_continuous)
        return clf.coef_, acc, baseline 
Developer: JasonKessler, Project: scattertext, Lines: 30, Source: TermDocMatrix.py

Example 8: _D_LassoCV_MatchSpace

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LassoCV [as alias]
def _D_LassoCV_MatchSpace(
    X, Y, X_full, D_full, v_pens=None, n_v_cv=5, sample_frac=1, y_V_share=0.5, **kwargs
):  # pylint: disable=missing-param-doc, unused-argument
    if sample_frac < 1:
        N_y = X.shape[0]
        sample_y = np.random.choice(N_y, int(sample_frac * N_y), replace=False)
        X = X[sample_y, :]
        Y = Y[sample_y, :]
        N_d = D_full.shape[0]
        sample_d = np.random.choice(N_d, int(sample_frac * N_d), replace=False)
        X_full = X_full[sample_d, :]
        D_full = D_full[sample_d]
    y_varselectorfit = MultiTaskLassoCV(normalize=True, cv=n_v_cv, alphas=v_pens).fit(
        X, Y
    )
    y_V = np.sqrt(
        np.sum(np.square(y_varselectorfit.coef_), axis=0)
    )  # n_tasks x n_features -> n_feature
    best_y_v_pen = y_varselectorfit.alpha_

    d_varselectorfit = LassoCV(normalize=True, cv=n_v_cv, alphas=v_pens).fit(
        X_full, D_full
    )
    d_V = np.abs(d_varselectorfit.coef_)
    best_d_v_pen = d_varselectorfit.alpha_

    m_sel = (y_V + d_V) != 0
    transformer = SelMatchSpace(m_sel)
    if y_V.sum() == 0:
        V = d_V
    elif d_V.sum() == 0:
        V = y_V
    else:
        V = y_V_share * y_V / (y_V.sum()) + (1 - y_V_share) * d_V / (2 * d_V.sum())
    return transformer, V[m_sel], (best_y_v_pen, best_d_v_pen), V 
Developer: microsoft, Project: SparseSC, Lines: 37, Source: match_space.py

Example 9: test_lasso_cv_positive_constraint

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LassoCV [as alias]
def test_lasso_cv_positive_constraint():
    X, y, X_test, y_test = build_dataset()
    max_iter = 500

    # Ensure the unconstrained fit has a negative coefficient
    clf_unconstrained = LassoCV(n_alphas=3, eps=1e-1, max_iter=max_iter, cv=2,
                                n_jobs=1)
    clf_unconstrained.fit(X, y)
    assert min(clf_unconstrained.coef_) < 0

    # On same data, constrained fit has non-negative coefficients
    clf_constrained = LassoCV(n_alphas=3, eps=1e-1, max_iter=max_iter,
                              positive=True, cv=2, n_jobs=1)
    clf_constrained.fit(X, y)
    assert min(clf_constrained.coef_) >= 0 
Developer: PacktPublishing, Project: Mastering-Elasticsearch-7.0, Lines: 17, Source: test_coordinate_descent.py

Example 10: test_uniform_targets

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LassoCV [as alias]
def test_uniform_targets():
    enet = ElasticNetCV(fit_intercept=True, n_alphas=3)
    m_enet = MultiTaskElasticNetCV(fit_intercept=True, n_alphas=3)
    lasso = LassoCV(fit_intercept=True, n_alphas=3)
    m_lasso = MultiTaskLassoCV(fit_intercept=True, n_alphas=3)

    models_single_task = (enet, lasso)
    models_multi_task = (m_enet, m_lasso)

    rng = np.random.RandomState(0)

    X_train = rng.random_sample(size=(10, 3))
    X_test = rng.random_sample(size=(10, 3))

    y1 = np.empty(10)
    y2 = np.empty((10, 2))

    for model in models_single_task:
        for y_values in (0, 5):
            y1.fill(y_values)
            assert_array_equal(model.fit(X_train, y1).predict(X_test), y1)
            assert_array_equal(model.alphas_, [np.finfo(float).resolution]*3)

    for model in models_multi_task:
        for y_values in (0, 5):
            y2[:, 0].fill(y_values)
            y2[:, 1].fill(2 * y_values)
            assert_array_equal(model.fit(X_train, y2).predict(X_test), y2)
            assert_array_equal(model.alphas_, [np.finfo(float).resolution]*3) 
Developer: PacktPublishing, Project: Mastering-Elasticsearch-7.0, Lines: 31, Source: test_coordinate_descent.py

Example 11: test_1d_multioutput_lasso_and_multitask_lasso_cv

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LassoCV [as alias]
def test_1d_multioutput_lasso_and_multitask_lasso_cv():
    X, y, _, _ = build_dataset(n_features=10)
    y = y[:, np.newaxis]
    clf = LassoCV(n_alphas=5, eps=2e-3)
    clf.fit(X, y[:, 0])
    clf1 = MultiTaskLassoCV(n_alphas=5, eps=2e-3)
    clf1.fit(X, y)
    assert_almost_equal(clf.alpha_, clf1.alpha_)
    assert_almost_equal(clf.coef_, clf1.coef_[0])
    assert_almost_equal(clf.intercept_, clf1.intercept_[0]) 
Developer: PacktPublishing, Project: Mastering-Elasticsearch-7.0, Lines: 12, Source: test_coordinate_descent.py

Example 12: test_precompute_invalid_argument

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LassoCV [as alias]
def test_precompute_invalid_argument():
    X, y, _, _ = build_dataset()
    for clf in [ElasticNetCV(precompute="invalid"),
                LassoCV(precompute="invalid")]:
        assert_raises_regex(ValueError, ".*should be.*True.*False.*auto.*"
                            "array-like.*Got 'invalid'", clf.fit, X, y)

    # Precompute = 'auto' is not supported for ElasticNet
    assert_raises_regex(ValueError, ".*should be.*True.*False.*array-like.*"
                        "Got 'auto'", ElasticNet(precompute='auto').fit, X, y) 
Developer: PacktPublishing, Project: Mastering-Elasticsearch-7.0, Lines: 12, Source: test_coordinate_descent.py

Example 13: get_new_clf

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LassoCV [as alias]
def get_new_clf(solver, folds=3, alphas=100):
    kf = KFold(n_splits=folds, shuffle=False)
    if "linear" == solver:
        clf = linear_model.LinearRegression(fit_intercept=False)
    elif "ridge" == solver:
        alphas = np.arange(1 / alphas, 10 + 1 / alphas, 10 / alphas)
        clf = linear_model.RidgeCV(alphas=alphas, fit_intercept=False, cv=kf)
    elif "lasso" == solver:
        clf = linear_model.LassoCV(n_alphas=alphas, fit_intercept=False, cv=kf)
    elif "elastic" == solver:
        clf = linear_model.ElasticNetCV(n_alphas=alphas, fit_intercept=False, cv=kf)
    return clf
Developer: ibramjub, Project: Fast-and-Accurate-Least-Mean-Squares-Solvers, Lines: 14, Source: Booster.py

Example 14: _lasso

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LassoCV [as alias]
def _lasso(self):
        """Order features according to their corresponding coefficients."""
        if self.line_search:
            pred = None
            try:
                alpha_list = np.geomspace(self.max_alpha, self.min_alpha,
                                          self.steps)
            except AttributeError:
                alpha_list = np.exp(np.linspace(np.log(self.max_alpha),
                                                np.log(self.min_alpha),
                                                self.steps))
            for alpha in alpha_list:
                regr = Lasso(alpha=alpha, max_iter=self.iter,
                             fit_intercept=True, normalize=True,
                             selection='random')
                model = regr.fit(self.train_matrix, self.train_target)
                nz = len(model.coef_) - (model.coef_ == 0.).sum()
                if nz >= self.size:
                    coeff = model.coef_
                    break
        else:
            regr = LassoCV(fit_intercept=True, normalize=True,
                           n_alphas=self.steps, max_iter=self.iter,
                           eps=self.eps, cv=None)
            model = regr.fit(X=self.train_matrix, y=self.train_target)
            coeff = model.coef_

            # Make the linear prediction.
            pred = None
            if self.predict:
                data = model.predict(self.test_matrix)
                pred = get_error(prediction=data,
                                 target=self.test_target)['average']

        return coeff, pred 
Developer: SUNCAT-Center, Project: CatLearn, Lines: 37, Source: scikit_wrapper.py

Example 15: load_default

# Required import: from sklearn import linear_model [as alias]
# Or: from sklearn.linear_model import LassoCV [as alias]
def load_default(self, machine_list='basic'):
        """
        Loads 4 different scikit-learn regressors by default. The advanced list adds more machines. 

        Parameters
        ----------
        machine_list : {'basic', 'advanced'} or list of strings, optional
            Preset name or explicit list of machine names to be loaded.
        Returns
        -------
        self : returns an instance of self.
        """

        if machine_list == 'basic':
            machine_list = ['tree', 'ridge', 'random_forest', 'svm']
        if machine_list == 'advanced':
            machine_list=['lasso', 'tree', 'ridge', 'random_forest', 'svm', 'bayesian_ridge', 'sgd']

        self.estimators_ = {}
        for machine in machine_list:
            try:
                if machine == 'lasso':
                    self.estimators_['lasso'] = linear_model.LassoCV(random_state=self.random_state).fit(self.X_k_, self.y_k_)
                if machine == 'tree':
                    self.estimators_['tree'] = DecisionTreeRegressor(random_state=self.random_state).fit(self.X_k_, self.y_k_)
                if machine == 'ridge':
                    self.estimators_['ridge'] = linear_model.RidgeCV().fit(self.X_k_, self.y_k_)
                if machine == 'random_forest':
                    self.estimators_['random_forest'] = RandomForestRegressor(random_state=self.random_state).fit(self.X_k_, self.y_k_)
                if machine == 'svm':
                    self.estimators_['svm'] = LinearSVR(random_state=self.random_state).fit(self.X_k_, self.y_k_)
                if machine == 'sgd':
                    self.estimators_['sgd'] = linear_model.SGDRegressor(random_state=self.random_state).fit(self.X_k_, self.y_k_)
                if machine == 'bayesian_ridge':
                    self.estimators_['bayesian_ridge'] = linear_model.BayesianRidge().fit(self.X_k_, self.y_k_)
            except ValueError:
                continue
        return self 
Developer: bhargavvader, Project: pycobra, Lines: 40, Source: cobra.py


Note: The sklearn.linear_model.LassoCV examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets come from open-source projects contributed by their respective developers, and copyright remains with the original authors; please follow each project's license when distributing or using the code, and do not reproduce this page without permission.