This page collects typical usage examples of TreeNode.TreeNode.column in Python. If you are unsure what TreeNode.column is, how to use it, or want to see it in real code, the hand-picked examples below may help. You can also read more about its containing class, TreeNode.TreeNode.
Four code examples of TreeNode.column are shown below, ordered by popularity by default.
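The TreeNode class itself is not shown on this page. From the way the examples below use it, it carries a handful of plain attributes plus a predict_one method (exercised in Example 2). A minimal sketch of such a class is given here; the attribute list is inferred from usage and the defaults are assumptions, not the original source.

# Minimal sketch of the TreeNode class assumed by the examples below.
# The attribute set is inferred from how the examples use it; defaults are assumptions.
class TreeNode(object):
    def __init__(self):
        self.column = None       # index of the feature this node splits on
        self.value = None        # value the split compares against
        self.categorical = True  # whether the split feature is categorical
        self.name = None         # feature name, or predicted class for a leaf
        self.left = None         # subtree where the split condition holds
        self.right = None        # subtree where it does not
        self.leaf = False        # True for terminal nodes
        self.classes = None      # Counter of the labels that reach this leaf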
Example 1: _build_tree
# Required import: from TreeNode import TreeNode [as alias]
# Or: from TreeNode.TreeNode import column [as alias]
# Also needed by this example: import numpy as np; from collections import Counter
def _build_tree(self, X, y):
    '''
    INPUT:
        - X: 2d numpy array
        - y: 1d numpy array
    OUTPUT:
        - TreeNode

    Recursively build the decision tree. Return the root node.
    '''
    node = TreeNode()
    index, value, splits = self._choose_split_index(X, y)
    if index is None or len(np.unique(y)) == 1:
        node.leaf = True
        node.classes = Counter(y)
        node.name = node.classes.most_common(1)[0][0]
    else:
        X1, y1, X2, y2 = splits
        node.column = index
        node.name = self.feature_names[index]
        node.value = value
        node.categorical = self.categorical[index]
        node.left = self._build_tree(X1, y1)
        node.right = self._build_tree(X2, y2)
    return node
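_build_tree is a private method of a larger decision-tree class, and it relies on self.feature_names, self.categorical and self._choose_split_index being set up before the recursion starts. A hypothetical fit wrapper is sketched below, only to show how the recursion is typically kicked off; the DecisionTree class name and the way categorical columns are detected are assumptions, not part of the original code.

import numpy as np

class DecisionTree(object):
    # _build_tree and _choose_split_index omitted; see the examples on this page.

    def fit(self, X, y, feature_names=None):
        '''Hypothetical entry point: prepare the per-column metadata that
        _build_tree reads, then build the tree from the root.'''
        if feature_names is None:
            feature_names = np.arange(X.shape[1])   # fall back to column indices
        self.feature_names = feature_names
        # Assume string-valued columns are categorical, everything else continuous.
        self.categorical = np.array([isinstance(value, str) for value in X[0]])
        self.root = self._build_tree(X, y)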
Example 2: test_predict
# Required import: from TreeNode import TreeNode [as alias]
# Or: from TreeNode.TreeNode import column [as alias]
# In this example the class is aliased as TN (from TreeNode import TreeNode as TN);
# the assertion helper n.eq_ presumably comes from nose (import nose.tools as n).
def test_predict():
    root = TN()
    root.column = 1
    root.name = 'column 1'
    root.value = 'bat'
    root.left = TN()
    root.left.leaf = True
    root.left.name = "one"
    root.right = TN()
    root.right.leaf = True
    root.right.name = "two"
    data = [10, 'cat']
    result = root.predict_one(data)
    actual = "two"
    message = 'Predicted %r. Should be %r.\nTree:\n%r\ndata:\n%r' \
              % (result, actual, root, data)
    n.eq_(result, actual, message)
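predict_one is not shown on this page, but the test pins down its contract: walk the tree by comparing data[node.column] with node.value at each internal node and return the name of the leaf that is reached. Here 'cat' neither equals nor sorts before 'bat', so either comparison routes the row to the right child and the expected prediction is "two". A sketch consistent with that behaviour, written as a TreeNode method; the categorical/continuous distinction is an assumption:

def predict_one(self, data):
    '''Return the predicted label for a single row (illustrative sketch).'''
    if self.leaf:
        return self.name
    if self.categorical:
        go_left = data[self.column] == self.value   # categorical: test equality
    else:
        go_left = data[self.column] < self.value    # continuous: test ordering
    return self.left.predict_one(data) if go_left else self.right.predict_one(data)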
Example 3: _build_tree
# Required import: from TreeNode import TreeNode [as alias]
# Or: from TreeNode.TreeNode import column [as alias]
# Also needed by this example: import numpy as np; from collections import Counter
def _build_tree(self, X, y):
    '''
    INPUT:
        - X: 2d numpy array
        - y: 1d numpy array
    OUTPUT:
        - TreeNode

    Recursively build the decision tree. Return the root node.
    '''
    # * initialize a root TreeNode
    node = TreeNode()
    # * set index, value, splits to the output of self._choose_split_index(X, y)
    index, value, splits = self._choose_split_index(X, y)
    # if no index is returned from the split search or we cannot split
    if index is None or len(np.unique(y)) == 1:
        # * set the node to be a leaf
        node.leaf = True
        # * set the classes attribute to the counts of the classes
        #   we have in this leaf, with Counter()
        node.classes = Counter(y)
        # * set the name of the node to be the most common class in it
        node.name = node.classes.most_common(1)[0][0]
    else:  # otherwise we can split (again this comes out of _choose_split_index)
        # * set X1, y1, X2, y2 to be the splits
        X1, y1, X2, y2 = splits
        # * the node column should be set to the index coming from _choose_split_index
        node.column = index
        # * the node name is the feature name as determined by
        #   the index (column name)
        node.name = self.feature_names[index]
        # * set the node value to be the value of the split
        node.value = value
        # * set the categorical flag of the node to whether the column is categorical
        node.categorical = self.categorical[index]
        # * now continue recursing down both branches of the split
        node.left = self._build_tree(X1, y1)
        node.right = self._build_tree(X2, y2)
    return node
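Every _build_tree variant on this page leans on self._choose_split_index, which is not shown here. Its contract can be read off the call sites: it returns a (column index, split value, (X1, y1, X2, y2)) tuple, or (None, None, None) when no useful split exists. A simple Gini-based version that honours that contract is sketched below; the splitting criterion itself is an assumption, not the original implementation.

import numpy as np
from collections import Counter

class DecisionTree(object):
    # Other methods (fit, _build_tree, ...) omitted; see the examples above.

    def _choose_split_index(self, X, y):
        '''Return (index, value, (X1, y1, X2, y2)) for the best split found,
        or (None, None, None) when no split reduces impurity.
        A simple Gini-based search, written as an illustrative assumption.'''

        def gini(labels):
            counts = np.array(list(Counter(labels).values()), dtype=float)
            proportions = counts / counts.sum()
            return 1.0 - np.sum(proportions ** 2)

        best_index, best_value, best_splits = None, None, None
        best_gain = 0.0
        for index in range(X.shape[1]):
            for value in np.unique(X[:, index]):
                # Categorical columns split on equality, continuous ones on "<".
                if self.categorical[index]:
                    mask = X[:, index] == value
                else:
                    mask = X[:, index] < value
                X1, y1, X2, y2 = X[mask], y[mask], X[~mask], y[~mask]
                if len(y1) == 0 or len(y2) == 0:
                    continue
                gain = gini(y) - (len(y1) * gini(y1) + len(y2) * gini(y2)) / len(y)
                if gain > best_gain:
                    best_gain = gain
                    best_index, best_value = index, value
                    best_splits = (X1, y1, X2, y2)
        return best_index, best_value, best_splits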
Example 4: _build_tree
# Required import: from TreeNode import TreeNode [as alias]
# Or: from TreeNode.TreeNode import column [as alias]
# Also needed by this example: import numpy as np; from collections import Counter
def _build_tree(self, X, y, pre_prune_type, pre_prune_size):
    '''
    INPUT:
        - X: 2d numpy array
        - y: 1d numpy array
    OUTPUT:
        - TreeNode

    Recursively build the decision tree. Return the root node.
    '''
    if pre_prune_type == 'leaf_size':
        leaf_size = pre_prune_size
    else:
        leaf_size = 1
    if pre_prune_type == 'depth':
        tree_depth = pre_prune_size
    else:
        tree_depth = X.shape[0] * X.shape[1]
    node = TreeNode()
    index, value, splits = self._choose_split_index(X, y)
    if index is None or len(np.unique(y)) == 1 or len(y) < leaf_size or \
            self.depth > tree_depth:
        node.leaf = True
        node.classes = Counter(y)
        node.name = node.classes.most_common(1)[0][0]
    else:
        self.depth += 1
        X1, y1, X2, y2 = splits
        node.column = index
        node.name = self.feature_names[index]
        node.value = value
        node.categorical = self.categorical[index]
        node.left = self._build_tree(X1, y1, pre_prune_type, pre_prune_size)
        node.right = self._build_tree(X2, y2, pre_prune_type, pre_prune_size)
    return node
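This variant additionally reads self.depth and takes two pre-pruning arguments, so its caller must initialize the depth counter and pass the pruning settings through. A hypothetical fit wrapper for this version, mirroring the sketch after Example 1; the signature and defaults are assumptions:

import numpy as np

class DecisionTree(object):
    # _build_tree (Example 4) and _choose_split_index omitted.

    def fit(self, X, y, pre_prune_type='leaf_size', pre_prune_size=1):
        '''Hypothetical entry point for the pre-pruned tree.'''
        self.depth = 0  # Example 4 compares self.depth against the depth limit
        self.feature_names = np.arange(X.shape[1])
        self.categorical = np.array([isinstance(value, str) for value in X[0]])
        self.root = self._build_tree(X, y, pre_prune_type, pre_prune_size)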