This page collects typical usage examples of the FixedDataGrid.GetAttribute method from the Golang package github.com/sjwhitworth/golearn/base. If you are wondering what FixedDataGrid.GetAttribute does or how to call it, the curated examples below should help. You can also look further into usage examples for the containing type, github.com/sjwhitworth/golearn/base.FixedDataGrid.
Five code examples of the FixedDataGrid.GetAttribute method are shown below, sorted by popularity by default.
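All five examples follow the same basic pattern: resolve an Attribute to an AttributeSpec with GetAttribute, then read the packed values through that spec. Here is a minimal sketch of that pattern, assuming a local "iris.csv" file (any base.FixedDataGrid, such as one returned by base.ParseCSVToInstances, behaves the same way):

inst, err := base.ParseCSVToInstances("iris.csv", true)
if err != nil {
	panic(err)
}
attr := inst.AllAttributes()[0]
spec, err := inst.GetAttribute(attr) // resolve Attribute -> AttributeSpec
if err != nil {
	panic(err)
}
_, rows := inst.Size()
for i := 0; i < rows; i++ {
	raw := inst.Get(spec, i) // packed []byte value for this row and column
	fmt.Println(attr.GetStringFromSysVal(raw))
}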
Example 1: ChiMBuildFrequencyTable
func ChiMBuildFrequencyTable(attr base.Attribute, inst base.FixedDataGrid) []*FrequencyTableEntry {
	ret := make([]*FrequencyTableEntry, 0)
	attribute := attr.(*base.FloatAttribute)
	attrSpec, err := inst.GetAttribute(attr)
	if err != nil {
		panic(err)
	}
	attrSpecs := []base.AttributeSpec{attrSpec}
	err = inst.MapOverRows(attrSpecs, func(row [][]byte, rowNo int) (bool, error) {
		value := row[0]
		valueConv := attribute.GetFloatFromSysVal(value)
		class := base.GetClass(inst, rowNo)
		// Search the frequency table for the value
		found := false
		for _, entry := range ret {
			if entry.Value == valueConv {
				found = true
				entry.Frequency[class] += 1
			}
		}
		if !found {
			newEntry := &FrequencyTableEntry{
				valueConv,
				make(map[string]int),
			}
			newEntry.Frequency[class] = 1
			ret = append(ret, newEntry)
		}
		return true, nil
	})
	return ret
}
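A hypothetical call site for the function above, assuming a local "iris.csv" file whose first Attribute is a *base.FloatAttribute (the function type-asserts this):

inst, err := base.ParseCSVToInstances("iris.csv", true)
if err != nil {
	panic(err)
}
table := ChiMBuildFrequencyTable(inst.AllAttributes()[0], inst)
for _, entry := range table {
	fmt.Println(entry.Value, entry.Frequency) // the value and its per-class counts
}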
Example 2: findBestSplit
func findBestSplit(partition base.FixedDataGrid) {
	var delta float64
	delta = math.MinInt64
	attrs := partition.AllAttributes()
	classAttrs := partition.AllClassAttributes()
	candidates := base.AttributeDifferenceReferences(attrs, classAttrs)
	fmt.Println(delta)
	fmt.Println(classAttrs)
	fmt.Println(reflect.TypeOf(partition))
	fmt.Println(reflect.TypeOf(candidates))
	for i, n := range attrs {
		fmt.Println(i)
		//fmt.Println(partition)
		fmt.Println(reflect.TypeOf(n))
		attributeSpec, _ := partition.GetAttribute(n)
		fmt.Println(partition.GetAttribute(n))
		_, rows := partition.Size()
		for j := 0; j < rows; j++ {
			data := partition.Get(attributeSpec, j)
			fmt.Println(base.UnpackBytesToFloat(data))
		}
	}
}
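The loop above resolves each Attribute to a spec with GetAttribute and unpacks every cell with base.UnpackBytesToFloat, printing the raw values. A more compact variant of that inner loop, restricted to the non-class candidates and assuming they are all numeric (a sketch, not part of the original function; partition and candidates are the variables defined above):

for _, n := range candidates {
	spec, err := partition.GetAttribute(n)
	if err != nil {
		continue // skip Attributes the grid cannot resolve
	}
	_, rows := partition.Size()
	sum := 0.0
	for j := 0; j < rows; j++ {
		sum += base.UnpackBytesToFloat(partition.Get(spec, j))
	}
	fmt.Printf("%s: mean %.3f\n", n.GetName(), sum/float64(rows))
}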
Example 3: Predict
// Predict outputs a base.Instances containing predictions from this tree
func (d *DecisionTreeNode) Predict(what base.FixedDataGrid) (base.FixedDataGrid, error) {
	predictions := base.GeneratePredictionVector(what)
	classAttr := getClassAttr(predictions)
	classAttrSpec, err := predictions.GetAttribute(classAttr)
	if err != nil {
		panic(err)
	}
	predAttrs := base.AttributeDifferenceReferences(what.AllAttributes(), predictions.AllClassAttributes())
	predAttrSpecs := base.ResolveAttributes(what, predAttrs)
	what.MapOverRows(predAttrSpecs, func(row [][]byte, rowNo int) (bool, error) {
		cur := d
		for {
			if cur.Children == nil {
				predictions.Set(classAttrSpec, rowNo, classAttr.GetSysValFromString(cur.Class))
				break
			} else {
				splitVal := cur.SplitRule.SplitVal
				at := cur.SplitRule.SplitAttr
				ats, err := what.GetAttribute(at)
				if err != nil {
					//predictions.Set(classAttrSpec, rowNo, classAttr.GetSysValFromString(cur.Class))
					//break
					panic(err)
				}
				var classVar string
				if _, ok := ats.GetAttribute().(*base.FloatAttribute); ok {
					// For numeric Attributes (e.g. FloatAttribute), compare this
					// row's value against the node's split value
					classVal := base.UnpackBytesToFloat(what.Get(ats, rowNo))
					if classVal > splitVal {
						classVar = "1"
					} else {
						classVar = "0"
					}
				} else {
					classVar = ats.GetAttribute().GetStringFromSysVal(what.Get(ats, rowNo))
				}
				if next, ok := cur.Children[classVar]; ok {
					cur = next
				} else {
					// Suspicious of this
					var bestChild string
					for c := range cur.Children {
						bestChild = c
						if c > classVar {
							break
						}
					}
					cur = cur.Children[bestChild]
				}
			}
		}
		return true, nil
	})
	return predictions, nil
}
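A rough end-to-end usage sketch for this Predict variant. buildTree is hypothetical here (any routine that yields a trained *DecisionTreeNode root, such as an ID3 trainer, would do), and a local "iris.csv" file is assumed:

data, err := base.ParseCSVToInstances("iris.csv", true)
if err != nil {
	panic(err)
}
trainData, testData := base.InstancesTrainTestSplit(data, 0.3)
root := buildTree(trainData) // hypothetical: returns the trained *DecisionTreeNode
predictions, err := root.Predict(testData)
if err != nil {
	panic(err)
}
fmt.Println(predictions)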
Example 4: getNumericAttributeEntropy
func getNumericAttributeEntropy(f base.FixedDataGrid, attr *base.FloatAttribute) (float64, float64) {
	// Resolve Attribute
	attrSpec, err := f.GetAttribute(attr)
	if err != nil {
		panic(err)
	}
	// Build sortable vector
	_, rows := f.Size()
	refs := make([]numericSplitRef, rows)
	f.MapOverRows([]base.AttributeSpec{attrSpec}, func(val [][]byte, row int) (bool, error) {
		cls := base.GetClass(f, row)
		v := base.UnpackBytesToFloat(val[0])
		refs[row] = numericSplitRef{v, cls}
		return true, nil
	})
	// Sort
	sort.Sort(splitVec(refs))
	generateCandidateSplitDistribution := func(val float64) map[string]map[string]int {
		presplit := make(map[string]int)
		postsplit := make(map[string]int)
		for _, i := range refs {
			if i.val < val {
				presplit[i.class]++
			} else {
				postsplit[i.class]++
			}
		}
		ret := make(map[string]map[string]int)
		ret["0"] = presplit
		ret["1"] = postsplit
		return ret
	}
	minSplitEntropy := math.Inf(1)
	minSplitVal := math.Inf(1)
	// Consider each candidate split point (the midpoint between adjacent sorted values)
	for i := 0; i < len(refs)-1; i++ {
		val := refs[i].val + refs[i+1].val
		val /= 2
		splitDist := generateCandidateSplitDistribution(val)
		splitEntropy := getSplitEntropy(splitDist)
		if splitEntropy < minSplitEntropy {
			minSplitEntropy = splitEntropy
			minSplitVal = val
		}
	}
	return minSplitEntropy, minSplitVal
}
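getSplitEntropy is not shown on this page. A minimal sketch of what it could compute, assuming the conventional definition (the class entropy of each partition, weighted by partition size); the actual helper in golearn may differ:

func getSplitEntropySketch(split map[string]map[string]int) float64 {
	// Count all instances across both partitions
	total := 0
	for _, classCounts := range split {
		for _, n := range classCounts {
			total += n
		}
	}
	entropy := 0.0
	for _, classCounts := range split {
		size := 0
		for _, n := range classCounts {
			size += n
		}
		if size == 0 {
			continue
		}
		// Class entropy within this partition
		partEntropy := 0.0
		for _, n := range classCounts {
			p := float64(n) / float64(size)
			partEntropy -= p * math.Log2(p)
		}
		// Weight by the partition's share of all instances
		entropy += float64(size) / float64(total) * partEntropy
	}
	return entropy
}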
Example 5: Predict
// Predict outputs a base.Instances containing predictions from this tree
func (d *DecisionTreeNode) Predict(what base.FixedDataGrid) base.FixedDataGrid {
	predictions := base.GeneratePredictionVector(what)
	classAttr := getClassAttr(predictions)
	classAttrSpec, err := predictions.GetAttribute(classAttr)
	if err != nil {
		panic(err)
	}
	predAttrs := base.AttributeDifferenceReferences(what.AllAttributes(), predictions.AllClassAttributes())
	predAttrSpecs := base.ResolveAttributes(what, predAttrs)
	what.MapOverRows(predAttrSpecs, func(row [][]byte, rowNo int) (bool, error) {
		cur := d
		for {
			if cur.Children == nil {
				predictions.Set(classAttrSpec, rowNo, classAttr.GetSysValFromString(cur.Class))
				break
			} else {
				at := cur.SplitAttr
				ats, err := what.GetAttribute(at)
				if err != nil {
					predictions.Set(classAttrSpec, rowNo, classAttr.GetSysValFromString(cur.Class))
					break
				}
				classVar := ats.GetAttribute().GetStringFromSysVal(what.Get(ats, rowNo))
				if next, ok := cur.Children[classVar]; ok {
					cur = next
				} else {
					var bestChild string
					for c := range cur.Children {
						bestChild = c
						if c > classVar {
							break
						}
					}
					cur = cur.Children[bestChild]
				}
			}
		}
		return true, nil
	})
	return predictions
}
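Both Predict variants call a getClassAttr helper that this page does not show. Based on how it is used (it must return the class Attribute of the prediction vector), a plausible minimal definition might be:

func getClassAttr(from base.FixedDataGrid) base.Attribute {
	// Assumption: the prediction vector carries exactly one class Attribute.
	return from.AllClassAttributes()[0]
}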