本文整理汇总了Python中sklearn.linear_model.base.LinearRegression.coef_方法的典型用法代码示例。如果您正苦于以下问题:Python LinearRegression.coef_方法的具体用法?Python LinearRegression.coef_怎么用?Python LinearRegression.coef_使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类sklearn.linear_model.base.LinearRegression
的用法示例。
在下文中一共展示了LinearRegression.coef_方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: fit
# 需要导入模块: from sklearn.linear_model.base import LinearRegression [as 别名]
# 或者: from sklearn.linear_model.base.LinearRegression import coef_ [as 别名]
#.........这里部分代码省略.........
# NOTE(review): this is a TRUNCATED fragment of a `fit` method — the leading
# part of the definition was omitted by the example scraper ("部分代码省略"
# above) and the original indentation was lost in extraction. Names such as
# `w0`, `w_right2`, `n_inliers_right2`, `score_right2`, `X1/y1/X2/y2` and
# `residual_threshold` are defined in the omitted prefix. Do not expect this
# text to parse as-is; it documents the visible tail of the method only.
#
# Candidate 2: re-select the best left-lane fit after prepending the extra
# candidate `w0` to the pre-fit pool (presumably a fallback/previous fit —
# TODO confirm against the omitted prefix).
w_left_prefits = np.vstack((w0, w_left_prefits))
(w_left2, in_mask_left2, score_left2) = \
lanes_ransac_select_best(X1, y1,
w_left_prefits, residual_threshold,
self.w_refs_left, score_lambdas)
n_inliers_left2 = np.sum(in_mask_left2)
# Total inliers for candidate 2 = left + right inlier counts.
n_inliers2 = n_inliers_right2 + n_inliers_left2
self.w_fits.append((w_left2, w_right2))
self.n_inliers.append(n_inliers2)
self.inliers_masks.append((in_mask_left2, in_mask_right2))
self.score_fits.append((score_left2, score_right2))
# === Previous frame??? === #
# Candidate 3: reuse the reference fits (apparently the previous frame's
# lane polynomials — TODO confirm) when references exist for both lanes,
# scoring them against the current points.
if self.w_refs_left.size > 0 and self.w_refs_right.size > 0:
in_mask_left3 = lanes_inliers(X1, y1, self.w_refs_left[0], residual_threshold)
in_mask_right3 = lanes_inliers(X2, y2, self.w_refs_right[0], residual_threshold)
n_inliers3 = np.sum(in_mask_left3) + np.sum(in_mask_right3)
score_left3 = lane_score(np.sum(in_mask_left3),
self.w_refs_left[0],
self.w_refs_left,
score_lambdas)
score_right3 = lane_score(np.sum(in_mask_right3),
self.w_refs_right[0],
self.w_refs_right,
score_lambdas)
self.w_fits.append((self.w_refs_left[0], self.w_refs_right[0]))
self.n_inliers.append(n_inliers3)
self.inliers_masks.append((in_mask_left3, in_mask_right3))
self.score_fits.append((score_left3, score_right3))
# L2 regression regularisation of fits.
# Refine every candidate pair with an L2-regularised regression; keep the
# refined copies in `w_fits_l2` so the raw candidates stay untouched.
self.w_fits_l2 = copy.deepcopy(self.w_fits)
if self.l2_scales is not None:
for i in range(len(self.w_fits)):
w1, w2 = self.w_fits[i]
# Some regression: ignored when inversed matrix error.
# i.e. fall back to the unrefined coefficients if the solver raises
# (e.g. a singular matrix inside m_regression_exp).
try:
w_left = m_regression_exp(X1, y1, w1, self.l2_scales)
except Exception:
w_left = w1
try:
w_right = m_regression_exp(X2, y2, w2, self.l2_scales)
except Exception:
w_right = w2
# Recompute inlier masks and scores for the refined coefficients,
# overwriting the candidate's bookkeeping in place.
in_mask_left = lanes_inliers(X1, y1, w_left, residual_threshold)
in_mask_right = lanes_inliers(X2, y2, w_right, residual_threshold)
n_inliers = np.sum(in_mask_left) + np.sum(in_mask_right)
score_left = lane_score(np.sum(in_mask_left),
w_left,
self.w_refs_left,
score_lambdas)
score_right = lane_score(np.sum(in_mask_right),
w_right,
self.w_refs_right,
score_lambdas)
self.w_fits_l2[i] = (w_left, w_right)
self.n_inliers[i] = n_inliers
self.inliers_masks[i] = (in_mask_left, in_mask_right)
self.score_fits[i] = (score_left, score_right)
# Best fit?
# Pick the candidate pair with the highest combined (left + right) score.
scores = [s1+s2 for (s1, s2) in self.score_fits]
idx = np.argmax(scores)
w_left, w_right = self.w_fits_l2[idx]
in_mask_left, in_mask_right = self.inliers_masks[idx]
# Smoothing.
# Exponential blend with the reference (previous-frame?) coefficients:
# smoothing == 1.0 keeps only the new fit, 0.0 keeps only the reference.
smoothing = self.smoothing
if self.w_refs_left.size > 0 and self.w_refs_right.size > 0:
w_left = smoothing * w_left + (1. - smoothing) * self.w_refs_left[0]
w_right = smoothing * w_right + (1. - smoothing) * self.w_refs_right[0]
self.w1_ = w_left
self.w2_ = w_right
# Set regression parameters.
# Wrap the chosen coefficients in LinearRegression objects by assigning
# coef_/intercept_ directly (no .fit() call) so they expose .predict().
base_estimator1 = LinearRegression(fit_intercept=False)
base_estimator1.coef_ = w_left
base_estimator1.intercept_ = 0.0
base_estimator2 = LinearRegression(fit_intercept=False)
base_estimator2.coef_ = w_right
base_estimator2.intercept_ = 0.0
# Save final model parameters.
self.estimator1_ = base_estimator1
self.estimator2_ = base_estimator2
self.inlier_mask1_ = in_mask_left
self.inlier_mask2_ = in_mask_right
# # Estimate final model using all inliers
# # base_estimator1.fit(X1_inlier_best, y1_inlier_best)
# # base_estimator2.fit(X2_inlier_best, y2_inlier_best)
return self
示例2: fit
# 需要导入模块: from sklearn.linear_model.base import LinearRegression [as 别名]
# 或者: from sklearn.linear_model.base.LinearRegression import coef_ [as 别名]
def fit(self, X1, y1, X2, y2):
    """Fit the two-lane estimator using a RANSAC-style algorithm.

    The fit is done in two main steps:
      - pre-fitting: quickly select ``n_prefits`` coefficient candidates
        from small random subsets, subject to topological constraints;
      - best fit: among the pre-fits, select the pair with the maximum
        number of inliers over the full datasets.

    Parameters
    ----------
    X1, y1 : Left lane points (supposedly).
    X2, y2 : Right lane points (supposedly).

    Returns
    -------
    self : object, with ``w1_``, ``w2_``, ``estimator1_``, ``estimator2_``,
        ``inlier_mask1_`` and ``inlier_mask2_`` set.

    Raises
    ------
    ValueError : if either dataset has fewer samples than the minimal
        linear model requires, or ``stop_probability`` is out of [0, 1].
    """
    check_consistent_length(X1, y1)
    check_consistent_length(X2, y2)
    # Assume a linear model by default: need n_features + 1 samples.
    min_samples = X1.shape[1] + 1
    if min_samples > X1.shape[0] or min_samples > X2.shape[0]:
        raise ValueError("`min_samples` may not be larger than number "
                         "of samples ``X1-2.shape[0]``.")
    # Check additional parameters...
    if self.stop_probability < 0 or self.stop_probability > 1:
        raise ValueError("`stop_probability` must be in range [0, 1].")
    if self.residual_threshold is None:
        # BUG FIX: the original referenced an undefined name `y` here,
        # raising NameError whenever no threshold was supplied. Use the
        # MAD (median absolute deviation) of the combined targets of both
        # lanes, mirroring sklearn's RANSACRegressor default.
        y_all = np.concatenate([np.ravel(y1), np.ravel(y2)])
        residual_threshold = np.median(np.abs(y_all - np.median(y_all)))
    else:
        residual_threshold = self.residual_threshold
    # random_state = check_random_state(self.random_state)

    # === Pre-fit with small subsets (4 points) === #
    # Allows to quickly pre-select some good configurations.
    w1_prefits, w2_prefits = lanes_ransac_prefit(X1, y1, X2, y2,
                                                 self.n_prefits,
                                                 self.max_trials,
                                                 self.is_valid_diffs,
                                                 self.is_valid_bounds)
    # === Select best pre-fit, using the full dataset === #
    post_fit = 0
    (w1,
     w2,
     inlier_mask1,
     inlier_mask2) = lanes_ransac_select_best(X1, y1, X2, y2,
                                              w1_prefits, w2_prefits,
                                              residual_threshold,
                                              post_fit)
    self.w1_ = w1
    self.w2_ = w2
    # Set regression parameters: wrap the selected coefficients into
    # LinearRegression objects (coef_/intercept_ assigned directly, no
    # .fit() call) so downstream code can use their .predict().
    base_estimator1 = LinearRegression(fit_intercept=False)
    base_estimator1.coef_ = w1
    base_estimator1.intercept_ = 0.0
    base_estimator2 = LinearRegression(fit_intercept=False)
    base_estimator2.coef_ = w2
    base_estimator2.intercept_ = 0.0
    # Save final model parameters.
    self.estimator1_ = base_estimator1
    self.estimator2_ = base_estimator2
    self.inlier_mask1_ = inlier_mask1
    self.inlier_mask2_ = inlier_mask2
    # # Estimate final model using all inliers
    # # base_estimator1.fit(X1_inlier_best, y1_inlier_best)
    # # base_estimator2.fit(X2_inlier_best, y2_inlier_best)
    return self