This article collects typical usage examples of the Python method imblearn.combine.SMOTEENN.fit_resample. If you are unsure what SMOTEENN.fit_resample does, how to call it, or what real-world usage looks like, the curated examples below may help. You can also read further about the class it belongs to, imblearn.combine.SMOTEENN.
The following shows 7 code examples of SMOTEENN.fit_resample, sorted by popularity by default.
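For orientation, here is a minimal, self-contained sketch of calling fit_resample on an imbalanced dataset. The dataset and class counts are illustrative only and are not taken from the examples that follow.

from collections import Counter
from sklearn.datasets import make_classification
from imblearn.combine import SMOTEENN

# Build a small, deliberately imbalanced two-class dataset (illustrative values).
X, y = make_classification(n_samples=200, weights=[0.9, 0.1], random_state=0)
print(Counter(y))  # e.g. roughly Counter({0: 180, 1: 20})

# SMOTE over-samples the minority class, then ENN removes noisy samples.
sampler = SMOTEENN(random_state=0)
X_res, y_res = sampler.fit_resample(X, y)
print(Counter(y_res))  # the classes are now roughly balanced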
Example 1: test_error_wrong_object
# Required import: from imblearn.combine import SMOTEENN [as alias]
# Or: from imblearn.combine.SMOTEENN import fit_resample [as alias]
def test_error_wrong_object():
    # X, Y and RND_SEED are module-level fixtures defined elsewhere in the test module.
    smote = 'rnd'
    enn = 'rnd'
    smt = SMOTEENN(smote=smote, random_state=RND_SEED)
    with raises(ValueError, match="smote needs to be a SMOTE"):
        smt.fit_resample(X, Y)
    smt = SMOTEENN(enn=enn, random_state=RND_SEED)
    with raises(ValueError, match="enn needs to be an "):
        smt.fit_resample(X, Y)
Example 2: test_validate_estimator_default
# Required import: from imblearn.combine import SMOTEENN [as alias]
# Or: from imblearn.combine.SMOTEENN import fit_resample [as alias]
def test_validate_estimator_default():
    smt = SMOTEENN(random_state=RND_SEED)
    X_resampled, y_resampled = smt.fit_resample(X, Y)
    X_gt = np.array([[1.52091956, -0.49283504],
                     [0.84976473, -0.15570176],
                     [0.61319159, -0.11571667],
                     [0.66052536, -0.28246518],
                     [-0.28162401, -2.10400981],
                     [0.83680821, 1.72827342],
                     [0.08711622, 0.93259929]])
    y_gt = np.array([0, 0, 0, 0, 1, 1, 1])
    assert_allclose(X_resampled, X_gt, rtol=R_TOL)
    assert_array_equal(y_resampled, y_gt)
Example 3: test_sample_regular_half
# Required import: from imblearn.combine import SMOTEENN [as alias]
# Or: from imblearn.combine.SMOTEENN import fit_resample [as alias]
def test_sample_regular_half():
    # The dict requests a per-class target count for the SMOTE step;
    # the ENN cleaning step may still remove samples afterwards.
    sampling_strategy = {0: 10, 1: 12}
    smote = SMOTEENN(
        sampling_strategy=sampling_strategy, random_state=RND_SEED)
    X_resampled, y_resampled = smote.fit_resample(X, Y)
    X_gt = np.array([[1.52091956, -0.49283504],
                     [-0.28162401, -2.10400981],
                     [0.83680821, 1.72827342],
                     [0.08711622, 0.93259929]])
    y_gt = np.array([0, 1, 1, 1])
    assert_allclose(X_resampled, X_gt)
    assert_array_equal(y_resampled, y_gt)
Example 4: test_validate_estimator_init
# Required import: from imblearn.combine import SMOTEENN [as alias]
# Or: from imblearn.combine.SMOTEENN import fit_resample [as alias]
def test_validate_estimator_init():
    smote = SMOTE(random_state=RND_SEED)
    enn = EditedNearestNeighbours(sampling_strategy='all')
    smt = SMOTEENN(smote=smote, enn=enn, random_state=RND_SEED)
    X_resampled, y_resampled = smt.fit_resample(X, Y)
    X_gt = np.array([[1.52091956, -0.49283504],
                     [0.84976473, -0.15570176],
                     [0.61319159, -0.11571667],
                     [0.66052536, -0.28246518],
                     [-0.28162401, -2.10400981],
                     [0.83680821, 1.72827342],
                     [0.08711622, 0.93259929]])
    y_gt = np.array([0, 0, 0, 0, 1, 1, 1])
    assert_allclose(X_resampled, X_gt, rtol=R_TOL)
    assert_array_equal(y_resampled, y_gt)
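Example 4 shows that the smote and enn sub-estimators can be configured explicitly. As a sketch of the same idea with tuned neighbourhood sizes (the parameter values here are illustrative, not taken from the test suite):

from imblearn.combine import SMOTEENN
from imblearn.over_sampling import SMOTE
from imblearn.under_sampling import EditedNearestNeighbours

# Illustrative settings: a smaller SMOTE neighbourhood, and an ENN pass
# that is allowed to clean samples from every class.
sampler = SMOTEENN(
    smote=SMOTE(k_neighbors=3, random_state=0),
    enn=EditedNearestNeighbours(sampling_strategy='all', n_neighbors=3),
)
X_res, y_res = sampler.fit_resample(X, y)  # X, y: your imbalanced dataset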
Example 5: train_decisiontree_with
# Required import: from imblearn.combine import SMOTEENN [as alias]
# Or: from imblearn.combine.SMOTEENN import fit_resample [as alias]
def train_decisiontree_with(configurationname, train_data, k, score_function, undersam=False, oversam=False,
                            export=False):
    assert k > 0
    print("Training with configuration " + configurationname)
    X_train, y_train, id_to_a_train = train_data
    dtc = DecisionTreeClassifier(random_state=0)

    print("Feature Selection")
    # selector = SelectFpr(score_function)
    selector = SelectKBest(score_function, k=k)
    result = selector.fit(X_train, y_train)
    X_train = selector.transform(X_train)
    fitted_ids = [i for i in result.get_support(indices=True)]

    print("Apply Resampling")
    print(Counter(y_train))
    if undersam and not oversam:
        renn = RepeatedEditedNearestNeighbours()
        X_train, y_train = renn.fit_resample(X_train, y_train)
    if oversam and not undersam:
        # feature_indices_array = list(range(len(f_to_id)))
        # smote_nc = SMOTENC(categorical_features=feature_indices_array, random_state=0)
        # X_train, y_train = smote_nc.fit_resample(X_train, y_train)
        sm = SMOTE(random_state=42)
        X_train, y_train = sm.fit_resample(X_train, y_train)
    if oversam and undersam:
        # Combined over- and under-sampling: SMOTE followed by ENN cleaning.
        smote_enn = SMOTEENN(random_state=0)
        X_train, y_train = smote_enn.fit_resample(X_train, y_train)
    print(Counter(y_train))

    print("Train Classifier")
    dtc = dtc.fit(X_train, y_train, check_input=True)
    if export:
        export_graphviz(dtc, out_file=DATAP + "/temp/trees/sltree_" + configurationname + ".dot", filled=True)
        transform(fitted_ids, configurationname)
    print("Self Accuracy: " + str(dtc.score(X_train, y_train)))
    return selector, dtc
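In the function above, resampling is applied to the whole training set before the classifier is fitted. When cross-validation is involved, a common alternative is to chain the sampler and the estimator in an imblearn Pipeline so that resampling happens only on each training fold. The sketch below is my own illustration of that pattern, not the author's code:

from imblearn.combine import SMOTEENN
from imblearn.pipeline import Pipeline
from sklearn.model_selection import cross_val_score
from sklearn.tree import DecisionTreeClassifier

# The sampler step runs only during fit, so validation folds stay untouched.
model = Pipeline([
    ('resample', SMOTEENN(random_state=0)),
    ('tree', DecisionTreeClassifier(random_state=0)),
])
scores = cross_val_score(model, X, y, cv=5)  # X, y: your imbalanced dataset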
Example 6: print
# Required import: from imblearn.combine import SMOTEENN [as alias]
# Or: from imblearn.combine.SMOTEENN import fit_resample [as alias]
# Imports needed to make this snippet self-contained.
import matplotlib.pyplot as plt
from sklearn.datasets import make_classification
from sklearn.decomposition import PCA
from imblearn.combine import SMOTEENN

print(__doc__)

# Generate the dataset
X, y = make_classification(n_classes=2, class_sep=2, weights=[0.1, 0.9],
                           n_informative=3, n_redundant=1, flip_y=0,
                           n_features=20, n_clusters_per_class=1,
                           n_samples=100, random_state=10)

# Instantiate a PCA object for the sake of easy visualisation
pca = PCA(n_components=2)
# Fit and transform X to visualise inside a 2D feature space
X_vis = pca.fit_transform(X)

# Apply SMOTE + ENN
sm = SMOTEENN()
X_resampled, y_resampled = sm.fit_resample(X, y)
X_res_vis = pca.transform(X_resampled)

# Two subplots, unpack the axes array immediately
f, (ax1, ax2) = plt.subplots(1, 2)
c0 = ax1.scatter(X_vis[y == 0, 0], X_vis[y == 0, 1], label="Class #0",
                 alpha=0.5)
c1 = ax1.scatter(X_vis[y == 1, 0], X_vis[y == 1, 1], label="Class #1",
                 alpha=0.5)
ax1.set_title('Original set')
ax2.scatter(X_res_vis[y_resampled == 0, 0], X_res_vis[y_resampled == 0, 1],
            label="Class #0", alpha=0.5)
ax2.scatter(X_res_vis[y_resampled == 1, 0], X_res_vis[y_resampled == 1, 1],
            label="Class #1", alpha=0.5)
Example 7: test_error_wrong_object
# Required import: from imblearn.combine import SMOTEENN [as alias]
# Or: from imblearn.combine.SMOTEENN import fit_resample [as alias]
def test_error_wrong_object(smote_params, err_msg):
    # smote_params and err_msg are supplied by a @pytest.mark.parametrize
    # decorator that is not shown in this snippet.
    smt = SMOTEENN(**smote_params)
    with pytest.raises(ValueError, match=err_msg):
        smt.fit_resample(X, Y)
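For context, a plausible parametrization for this test is sketched below. It is an illustrative guess based on the error messages in Example 1, not necessarily the exact decorator used upstream; X and Y are again assumed to be module-level fixtures.

import pytest

# Hypothetical parametrization: each case pairs constructor kwargs with the
# expected error message.
@pytest.mark.parametrize(
    "smote_params, err_msg",
    [
        ({"smote": "rnd"}, "smote needs to be a SMOTE"),
        ({"enn": "rnd"}, "enn needs to be an "),
    ],
)
def test_error_wrong_object(smote_params, err_msg):
    smt = SMOTEENN(**smote_params)
    with pytest.raises(ValueError, match=err_msg):
        smt.fit_resample(X, Y)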