

C# DecisionTrees.DecisionTree Class Code Examples

This article collects typical usage examples of the Accord.MachineLearning.DecisionTrees.DecisionTree class in C#. If you are wondering what the DecisionTree class is for, how to use it, or what working code looks like, the selected examples below should help.


The DecisionTree class belongs to the Accord.MachineLearning.DecisionTrees namespace. Fifteen code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the site recommend better C# examples.
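
Before diving into the examples, here is a minimal end-to-end sketch (not taken from any of the projects below) of the typical DecisionTree lifecycle with the Accord 2.x-style API used throughout this page: declare the input variables, induce the tree with a learning algorithm such as C45Learning, and query it with Compute. The data values are made up for illustration.

using Accord.MachineLearning.DecisionTrees;
using Accord.MachineLearning.DecisionTrees.Learning;

class DecisionTreeQuickStart
{
    static void Main()
    {
        // Toy dataset: class 1 whenever X > 0.5 (values are made up).
        double[][] inputs =
        {
            new double[] { 0.1, 0.3 },
            new double[] { 0.2, 0.8 },
            new double[] { 0.7, 0.2 },
            new double[] { 0.9, 0.9 },
        };
        int[] outputs = { 0, 0, 1, 1 };

        // Two continuous input variables and two output classes.
        var tree = new DecisionTree(new[]
        {
            DecisionVariable.Continuous("X"),
            DecisionVariable.Continuous("Y")
        }, classes: 2);

        // Induce the tree with C4.5 and query it for a new point.
        var teacher = new C45Learning(tree);
        double error = teacher.Run(inputs, outputs);             // training error
        int predicted = tree.Compute(new double[] { 0.8, 0.1 }); // expected: 1
    }
}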

Example 1: Write

        /// <summary>
        ///   Creates C# code for the tree.
        /// </summary>
        /// 
        public void Write(DecisionTree tree, string className)
        {
            writer.WriteLine("// This file has been automatically generated by the");
            writer.WriteLine("//");
            writer.WriteLine("// Accord Machine Learning Library");
            writer.WriteLine("// The Accord.NET Framework");
            writer.WriteLine("// http://accord-framework.net");
            writer.WriteLine("//");
            writer.WriteLine();
            writer.WriteLine("namespace DecisionTrees");
            writer.WriteLine("{");
            writer.WriteLine("    using System.CodeDom.Compiler;");
            writer.WriteLine("    using System.Collections.Generic;");
            writer.WriteLine();
            writer.WriteLine("    /// <summary>");
            writer.WriteLine("    ///   Automatically generated decision tree.");
            writer.WriteLine("    /// </summary>");
            writer.WriteLine("    /// ");
            writer.WriteLine("    [GeneratedCode(\"Accord.NET DecisionTree\", \"2.8\")]");
            writer.WriteLine("    public static class {0}", className);
            writer.WriteLine("    {");
            writer.WriteLine();
            writer.WriteLine("        /// <summary>");
            writer.WriteLine("        ///   Assigns a class label to a given input.");
            writer.WriteLine("        /// </summary>");
            writer.WriteLine("        /// ");
            writer.WriteLine("        public static int Function(double[] input)");
            writer.WriteLine("        {");

            create(tree.Root, 3);

            writer.WriteLine("        }");
            writer.WriteLine("    }");
            writer.WriteLine("}");
        }
Author: BiYiTuan | Project: framework | Lines: 39 | Source: DecisionTreeWriter.cs
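
The writer field and the recursive create helper are not part of the snippet above, so the exact construction is unknown. Assuming the class simply wraps a TextWriter supplied to its constructor, usage might look roughly like the sketch below; the constructor signature and the variable names are guesses, not the project's actual API.

// Hypothetical usage sketch (requires System.IO): assumes DecisionTreeWriter
// wraps a TextWriter passed to its constructor, which is not shown above.
using (var output = new StringWriter())
{
    var codeWriter = new DecisionTreeWriter(output);   // assumed constructor
    codeWriter.Write(trainedTree, "IrisTree");          // emits a static class named IrisTree
    File.WriteAllText("IrisTree.cs", output.ToString());
}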

Example 2: decisionTree

        private static void decisionTree(double[][] inputs, int[] outputs)
        {
            // In our problem, we have 2 classes (samples can be either
            // positive or negative), and 2 continuous-valued inputs.
            DecisionTree tree = new DecisionTree(inputs: new[] 
            {
                DecisionVariable.Continuous("X"),
                DecisionVariable.Continuous("Y")
            }, classes: 2);

            C45Learning teacher = new C45Learning(tree);

            // The C4.5 algorithm expects class labels in the range
            // 0..(classes - 1), so we map the -1 labels to 0:
            outputs = outputs.Apply(x => x < 0 ? 0 : x);

            double error = teacher.Run(inputs, outputs);

            // Classify the samples using the model
            int[] answers = inputs.Apply(tree.Compute);

            // Plot the results
            ScatterplotBox.Show("Expected results", inputs, outputs);
            ScatterplotBox.Show("Decision Tree results", inputs, answers)
                .Hold();
        }
Author: stefan-j | Project: framework | Lines: 27 | Source: Program.cs
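
Because the labels were remapped to 0/1 before training, callers that still think in terms of -1/+1 need to undo the mapping when reading predictions. A small follow-up sketch for a single new point (the coordinates are made up):

            // Classify one new point with the tree trained above and map the
            // 0/1 class index back to the -1/+1 labels of the original data.
            double[] newPoint = { 0.25, -1.5 };            // made-up coordinates
            int classIndex = tree.Compute(newPoint);        // returns 0 or 1
            int originalLabel = classIndex == 0 ? -1 : +1;  // undo the earlier relabeling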

Example 3: GenerateTrainingSets

        public static TrainingSet[] GenerateTrainingSets(IEnumerable<KeyValuePair<User, double[]>> studentsAndMarks, string[] normalRecords, string[] anomalies)
        {
            var countOfEntries = normalRecords.Length + anomalies.Length;
            var inputData = new double[countOfEntries][];
            var outputData = new int[countOfEntries];
            var counter = 0;

            foreach (var studentAndMarks in studentsAndMarks)
            {
                if (normalRecords.Contains(studentAndMarks.Key.OpenId))
                {
                    inputData[counter] = studentAndMarks.Value;
                    outputData[counter++] = 1;
                }

                if (!anomalies.Contains(studentAndMarks.Key.OpenId))
                {
                    continue;
                }

                inputData[counter] = studentAndMarks.Value;
                outputData[counter++] = 0;
            }

            var countOfFeatures = studentsAndMarks.ElementAt(0).Value.Length;
            var features = new DecisionVariable[countOfFeatures];
            features[0] = new DecisionVariable("0", DecisionAttributeKind.Continuous, new AForge.DoubleRange(80, 1200));
            
            for (var i = 1; i < countOfFeatures; i++)
            {
                features[i] = new DecisionVariable(i.ToString(), DecisionAttributeKind.Continuous, new AForge.DoubleRange(0, 10));
            }

            // Create the decision tree with only 2 output classes
            var tree = new DecisionTree(features, 2);

            // Creates a new instance of the C4.5 learning algorithm
            var c45 = new C45Learning(tree);

            // Learn the decision tree
            var error = c45.Run(inputData, outputData);

            // Split all data into normal and anomalies
            var setOfNormalRecords = studentsAndMarks.Where(x => tree.Compute(x.Value) == 1);
            var setOfAnomalies = studentsAndMarks.Where(x => tree.Compute(x.Value) == 0);
                        
            // Split the normal records into 2 groups (one for the training set and one for anomaly-occurrence detection)
            var setOfNormalRecordsList = setOfNormalRecords.ToList();
            var splitCount = setOfNormalRecordsList.Count * 2 / 3;
            var setOfNormalRecordsTr1 = setOfNormalRecordsList.GetRange(0, splitCount);
            var setOfNormalRecordsTr2 = setOfNormalRecordsList.GetRange(splitCount, setOfNormalRecordsList.Count - splitCount);
            // Create Training Sets
            var trSetNormalFirst = CreateTrainingSetFromResources(setOfNormalRecordsTr1);
            var trSetNormalSecond = CreateTrainingSetFromResources(setOfNormalRecordsTr2);
            var trSetAnomalies = CreateTrainingSetFromResources(setOfAnomalies);

            return new[] { trSetNormalFirst, trSetNormalSecond, trSetAnomalies };
        }
Author: supermuk | Project: iudico | Lines: 58 | Source: TrainingSetsCreator.cs

Example 4: FormTreeRule

        public FormTreeRule(DecisionTree tree, Codification codification, string rule)
            : this()
        {
            this.codification = codification;

            // Show the learned tree in the view            
            decisionTreeView.SetTree(tree, codification);
            decisionTreeView.viewRule(rule);
            
        }
Author: hpbaotho | Project: benhvien | Lines: 10 | Source: FormTreeRule.cs

Example 5: SetTree

        // Display the learned tree in the tree view control
        public void SetTree(DecisionTree tree, Codification codification)
        {
            this.treeSource = tree;            
            this.codification = codification;

            treeView1.Nodes.Clear();

            if (treeSource != null && treeSource.Root != null)
                treeView1.Nodes.Add(convert(treeSource.Root));
            
        }
Author: hpbaotho | Project: benhvien | Lines: 12 | Source: DecisionTreeViewDiabetes.cs

Example 6: FormTreeView

        public FormTreeView(DecisionTree tree, Codification codification)
            : this()
        {
            this.codification = codification;

            // Show the learned tree in the view            
            decisionTreeView.SetTree(tree, codification);
            
            if (tree != null && tree.Root != null)
                CreateRuleList(tree.Root, "");
        }
Author: hpbaotho | Project: benhvien | Lines: 11 | Source: FormTreeView.cs

Example 7: c45

        /// <summary>
        ///   Creates a new C4.5 learning algorithm.
        /// </summary>
        /// 
        /// <param name="tree">The decision tree to be generated.</param>
        /// 
        public c45(DecisionTree tree)
        {
            this.tree = tree;
            this.attributes = new bool[tree.InputCount];
            this.inputRanges = new IntRange[tree.InputCount];
            this.outputClasses = tree.OutputClasses;
            this.maxHeight = attributes.Length;

            for (int i = 0; i < inputRanges.Length; i++)
                inputRanges[i] = tree.Attributes[i].Range.ToIntRange(false);
        }
Author: ibrhmckc | Project: kanserli-h-cre-veri-madencili-i | Lines: 17 | Source: c45.cs

Example 8: Classification

        /***************************** Constructor *********************************/
        public Classification(SortedList columnList, int classLabelCount)
        {
            // Initialize DecisionTree
            decisionAttributes = new DecisionVariable[columnList.Count];
            for (int i = 0; i < decisionAttributes.Length; i++)
            {
                decisionAttributes[i] = new DecisionVariable((string)columnList.GetByIndex(i), DecisionVariableKind.Continuous);
            }

            int classCount = classLabelCount;
            descisionTree = new DecisionTree(decisionAttributes, classCount);
        }
Author: MPPECS | Project: Twitter_RWR_Recommender | Lines: 13 | Source: Classification.cs

Example 9: CreateMitchellExample

        public static void CreateMitchellExample(out DecisionTree tree, out double[][] inputs, out int[] outputs)
        {
            DataTable data = new DataTable("Mitchell's Tennis Example");

            data.Columns.Add("Day", typeof(string));
            data.Columns.Add("Outlook", typeof(string));
            data.Columns.Add("Temperature", typeof(double));
            data.Columns.Add("Humidity", typeof(double));
            data.Columns.Add("Wind", typeof(string));
            data.Columns.Add("PlayTennis", typeof(string));

            data.Rows.Add("D1", "Sunny", 85, 85, "Weak", "No");
            data.Rows.Add("D2", "Sunny", 80, 90, "Strong", "No");
            data.Rows.Add("D3", "Overcast", 83, 78, "Weak", "Yes");
            data.Rows.Add("D4", "Rain", 70, 96, "Weak", "Yes");
            data.Rows.Add("D5", "Rain", 68, 80, "Weak", "Yes");
            data.Rows.Add("D6", "Rain", 65, 70, "Strong", "No");
            data.Rows.Add("D7", "Overcast", 64, 65, "Strong", "Yes");
            data.Rows.Add("D8", "Sunny", 72, 95, "Weak", "No");
            data.Rows.Add("D9", "Sunny", 69, 70, "Weak", "Yes");
            data.Rows.Add("D10", "Rain", 75, 80, "Weak", "Yes");
            data.Rows.Add("D11", "Sunny", 75, 70, "Strong", "Yes");
            data.Rows.Add("D12", "Overcast", 72, 90, "Strong", "Yes");
            data.Rows.Add("D13", "Overcast", 81, 75, "Weak", "Yes");
            data.Rows.Add("D14", "Rain", 71, 80, "Strong", "No");

            // Create a new codification codebook to
            // convert strings into integer symbols
            Codification codebook = new Codification(data);

            DecisionVariable[] attributes =
            {
               new DecisionVariable("Outlook",     codebook["Outlook"].Symbols),      // 3 possible values (Sunny, overcast, rain)
               new DecisionVariable("Temperature", DecisionVariableKind.Continuous), // continuous values
               new DecisionVariable("Humidity",    DecisionVariableKind.Continuous), // continuous values
               new DecisionVariable("Wind",        codebook["Wind"].Symbols)          // 2 possible values (Weak, strong)
            };

            int classCount = codebook["PlayTennis"].Symbols; // 2 possible values (yes, no)

            tree = new DecisionTree(attributes, classCount);
            C45Learning c45 = new C45Learning(tree);

            // Extract symbols from data and train the classifier
            DataTable symbols = codebook.Apply(data);
            inputs = symbols.ToArray("Outlook", "Temperature", "Humidity", "Wind");
            outputs = symbols.ToArray<int>("PlayTennis");

            double error = c45.Run(inputs, outputs);
        }
Author: accord-net | Project: framework | Lines: 50 | Source: C45LearningTest.cs
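
To query the trained tree for a day that is not in the table, the string-valued columns (Outlook and Wind) must be encoded through the same codebook, while Temperature and Humidity stay numeric. A hedged sketch of such a query, assuming the usual Codification.Translate(column, value) lookup:

            // Encode the discrete columns with the codebook; keep the continuous ones as-is.
            double outlook = codebook.Translate("Outlook", "Sunny");
            double wind    = codebook.Translate("Wind", "Strong");
            double[] query = { outlook, 75, 70, wind };   // Outlook, Temperature, Humidity, Wind

            int predicted = tree.Compute(query);          // index into the PlayTennis symbols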

Example 10: ID3Learning

            /// <summary>
            ///   Creates a new ID3 learning algorithm.
            /// </summary>
            /// 
            /// <param name="tree">The decision tree to be generated.</param>
            /// 
            public ID3Learning(DecisionTree tree)
            {
                this.tree = tree;
                this.inputRanges = new IntRange[tree.InputCount];
                this.outputClasses = tree.OutputClasses;
                this.attributes = new bool[tree.InputCount];
                this.maxHeight = attributes.Length;

                for (int i = 0; i < tree.Attributes.Count; i++)
                    if (tree.Attributes[i].Nature != DecisionVariableKind.Discrete)
                        throw new ArgumentException("ID3 can only handle discrete (categorical) inputs.");

                for (int i = 0; i < inputRanges.Length; i++)
                    inputRanges[i] = tree.Attributes[i].Range.ToIntRange(false);
            }
Author: ibrhmckc | Project: kanserli-h-cre-veri-madencili-i | Lines: 21 | Source: DecssionsTreee.cs

Example 11: CreateMitchellExample

        public static void CreateMitchellExample(out DecisionTree tree, out int[][] inputs, out int[] outputs)
        {
            DataTable data = new DataTable("Mitchell's Tennis Example");

            data.Columns.Add("Day", "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");

            data.Rows.Add("D1", "Sunny",     "Hot",  "High",   "Weak",   "No");
            data.Rows.Add("D2", "Sunny",     "Hot",  "High",   "Strong", "No");
            data.Rows.Add("D3", "Overcast",  "Hot",  "High",   "Weak",   "Yes");
            data.Rows.Add("D4", "Rain",      "Mild", "High",   "Weak",   "Yes");
            data.Rows.Add("D5", "Rain",      "Cool", "Normal", "Weak",   "Yes");
            data.Rows.Add("D6", "Rain",      "Cool", "Normal", "Strong", "No");
            data.Rows.Add("D7", "Overcast",  "Cool", "Normal", "Strong", "Yes");
            data.Rows.Add("D8", "Sunny",     "Mild", "High",   "Weak",   "No");
            data.Rows.Add("D9", "Sunny",     "Cool", "Normal", "Weak",   "Yes");
            data.Rows.Add("D10", "Rain",     "Mild", "Normal", "Weak",   "Yes");
            data.Rows.Add("D11", "Sunny",    "Mild", "Normal", "Strong", "Yes");
            data.Rows.Add("D12", "Overcast", "Mild", "High",   "Strong", "Yes");
            data.Rows.Add("D13", "Overcast", "Hot",  "Normal", "Weak",   "Yes");
            data.Rows.Add("D14", "Rain",     "Mild", "High",   "Strong", "No");

            // Create a new codification codebook to
            // convert strings into integer symbols
            Codification codebook = new Codification(data);

            DecisionVariable[] attributes =
            {
               new DecisionVariable("Outlook",     codebook["Outlook"].Symbols),     // 3 possible values (Sunny, overcast, rain)
               new DecisionVariable("Temperature", codebook["Temperature"].Symbols), // 3 possible values (Hot, mild, cool)
               new DecisionVariable("Humidity",    codebook["Humidity"].Symbols),    // 2 possible values (High, normal)
               new DecisionVariable("Wind",        codebook["Wind"].Symbols)         // 2 possible values (Weak, strong)
            };

            int classCount = codebook["PlayTennis"].Symbols; // 2 possible values (yes, no)

            tree = new DecisionTree(attributes, classCount);
            ID3Learning id3 = new ID3Learning(tree);

            // Extract symbols from data and train the classifier
            DataTable symbols = codebook.Apply(data);
            inputs = symbols.ToIntArray("Outlook", "Temperature", "Humidity", "Wind");
            outputs = symbols.ToIntArray("PlayTennis").GetColumn(0);

            id3.Run(inputs, outputs);
        }
Author: xyicheng | Project: Accord | Lines: 75 | Source: ID3LearningTest.cs
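
Since all four inputs of the ID3 tree are discrete, a new query is built entirely from codebook translations. A hedged follow-up sketch (again assuming Codification.Translate(column, value)):

            // Ask the ID3 tree about a new day; every attribute value goes through the codebook.
            int[] query =
            {
                codebook.Translate("Outlook",     "Sunny"),
                codebook.Translate("Temperature", "Hot"),
                codebook.Translate("Humidity",    "High"),
                codebook.Translate("Wind",        "Strong")
            };

            int predicted = tree.Compute(query);   // index into the PlayTennis symbols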

Example 12: TrainningModel

        //public static C45Model CreateC45Model(Codification codification)
        //{
        //    int lastIndex = codification.Columns.Count - 1;

        //    List<DecisionVariable> attributes = new List<DecisionVariable>();

        //    for (int indexColumn = 0; indexColumn < lastIndex; indexColumn++)
        //    {
        //        attributes.Add(new DecisionVariable(codification.Columns[indexColumn].ColumnName,
        //            codification[indexColumn].Symbols));
        //    }

        //    C45Model model = new C45Model(new DecisionTree(attributes.ToArray(), 2));

        //    return model;
        //}

        //public C45Model(DecisionTree tree)
        //{
        //    this.Tree = tree;
        //}

        // Train the decision tree with the C4.5 algorithm
        public override void TrainningModel(TrainningData trainningData)
        {
            // Get the data needed to train the tree
            Codification codification = trainningData.CodificationData;
            double[][] inputs = trainningData.TrainningAttributes;
            int[] outputs = trainningData.ClassificationAttribute;

            // Create tree
            this.Tree = this.CreateDecisionTree(codification); 
            //var attributes = DecisionVariable.FromCodebook(codification, inputColumns);
            //DecisionTree tree = new DecisionTree(attributes, outputClasses: 5);

           
            // Creates a new instance of the C4.5 learning algorithm
            C45Learning c45 = new C45Learning(this.Tree);

            // Learn the decision tree
            double error = c45.Run(inputs, outputs);        
        }
Author: hpbaotho | Project: benhvien | Lines: 42 | Source: C45Model.cs
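
CreateDecisionTree is not included in the snippet above. Based on the commented-out CreateC45Model code at the top of the example, one plausible implementation might look like the sketch below; the "last column is the class column" assumption and the method body are guesses, not the project's actual code.

        // Hedged sketch of a possible CreateDecisionTree, mirroring the
        // commented-out CreateC45Model above; not the project's actual code.
        private DecisionTree CreateDecisionTree(Codification codification)
        {
            int lastIndex = codification.Columns.Count - 1;   // assume last column = class

            var attributes = new List<DecisionVariable>();
            for (int i = 0; i < lastIndex; i++)
                attributes.Add(new DecisionVariable(
                    codification.Columns[i].ColumnName, codification[i].Symbols));

            int classCount = codification[lastIndex].Symbols;
            return new DecisionTree(attributes.ToArray(), classCount);
        }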

Example 13: LargeRunTest2

        public void LargeRunTest2()
        {
            Accord.Math.Random.Generator.Seed = 0;

            int[,] random = Matrix.Random(1000, 10, 0.0, 10.0).ToInt32();

            int[][] samples = random.ToJagged();
            int[] outputs = new int[1000];

            for (int i = 0; i < samples.Length; i++)
            {
                if (samples[i][0] > 5 || Tools.Random.NextDouble() > 0.85)
                    outputs[i] = 1;
            }

            DecisionVariable[] vars = new DecisionVariable[10];
            for (int i = 0; i < vars.Length; i++)
                vars[i] = new DecisionVariable("x" + i, 10);

            DecisionTree tree = new DecisionTree(vars, 2);

            var teacher = new ID3Learning(tree);

            double error = teacher.Run(samples, outputs);

            Assert.AreEqual(0, error);

            var rules = DecisionSet.FromDecisionTree(tree);

            Simplification simpl = new Simplification(rules)
            {
                Alpha = 0.05
            };

            error = simpl.ComputeError(samples.ToDouble(), outputs);
            Assert.AreEqual(0, error);

            double newError = simpl.Compute(samples.ToDouble(), outputs);

            Assert.AreEqual(0.097, newError);
        }
Author: accord-net | Project: framework | Lines: 41 | Source: SimplificationTest.cs

Example 14: IncompleteDiscreteVariableTest

        public void IncompleteDiscreteVariableTest()
        {
            DecisionTree tree;
            int[][] inputs;
            int[] outputs;

            DataTable data = new DataTable("Degenerated Tennis Example");

            data.Columns.Add("Day", "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");

            data.Rows.Add("D1", "Sunny", "Hot", "High", "Weak", "No");
            data.Rows.Add("D2", "Sunny", "Hot", "High", "Strong", "No");
            data.Rows.Add("D3", "Overcast", "Hot", "High", "Weak", "Yes");
            data.Rows.Add("D4", "Rain", "Mild", "High", "Weak", "Yes");
            data.Rows.Add("D5", "Rain", "Cool", "Normal", "Weak", "Yes");
            data.Rows.Add("D6", "Rain", "Cool", "Normal", "Strong", "No");
            data.Rows.Add("D7", "Overcast", "Cool", "Normal", "Strong", "Yes");
            data.Rows.Add("D8", "Sunny", "Mild", "High", "Weak", "No");
            data.Rows.Add("D9", "Sunny", "Cool", "Normal", "Weak", "Yes");
            data.Rows.Add("D10", "Rain", "Mild", "Normal", "Weak", "Yes");
            data.Rows.Add("D11", "Sunny", "Mild", "Normal", "Strong", "Yes");
            data.Rows.Add("D12", "Overcast", "Mild", "High", "Strong", "Yes");
            data.Rows.Add("D13", "Overcast", "Hot", "Normal", "Weak", "Yes");
            data.Rows.Add("D14", "Rain", "Mild", "High", "Strong", "No");

            // Create a new codification codebook to
            // convert strings into integer symbols
            Codification codebook = new Codification(data);

            DecisionVariable[] attributes =
            {
               new DecisionVariable("Outlook",     codebook["Outlook"].Symbols+200), // 203 possible values, 200 undefined
               new DecisionVariable("Temperature", codebook["Temperature"].Symbols), // 3 possible values (Hot, mild, cool)
               new DecisionVariable("Humidity",    codebook["Humidity"].Symbols),    // 2 possible values (High, normal)
               new DecisionVariable("Wind",        codebook["Wind"].Symbols)         // 2 possible values (Weak, strong)
            };

            int classCount = codebook["PlayTennis"].Symbols; // 2 possible values (yes, no)

            tree = new DecisionTree(attributes, classCount);
            ID3Learning id3 = new ID3Learning(tree);

            // Extract symbols from data and train the classifier
            DataTable symbols = codebook.Apply(data);
            inputs = symbols.ToArray<int>("Outlook", "Temperature", "Humidity", "Wind");
            outputs = symbols.ToArray<int>("PlayTennis");

            double error = id3.Run(inputs, outputs);

            Assert.AreEqual(203, tree.Root.Branches.Count);
            Assert.IsTrue(tree.Root.Branches[100].IsLeaf);
            Assert.IsNull(tree.Root.Branches[100].Output);

            for (int i = 0; i < inputs.Length; i++)
            {
                int y = tree.Compute(inputs[i]);
                Assert.AreEqual(outputs[i], y);
            }
        }
Author: natepan | Project: framework | Lines: 59 | Source: ID3LearningTest.cs

Example 15: ConsistencyTest1

        public void ConsistencyTest1()
        {
            int[,] random = Matrix.Random(1000, 10, 0, 10).ToInt32();

            int[][] samples = random.ToArray();
            int[] outputs = new int[1000];

            for (int i = 0; i < samples.Length; i++)
            {
                if (samples[i][0] > 8)
                    outputs[i] = 1;
            }

            DecisionVariable[] vars = new DecisionVariable[10];
            for (int i = 0; i < vars.Length; i++)
                vars[i] = new DecisionVariable(i.ToString(), new IntRange(0,10));

            DecisionTree tree = new DecisionTree(vars, 2);

            ID3Learning teacher = new ID3Learning(tree);

            double error = teacher.Run(samples, outputs);

            Assert.AreEqual(0, error);

            Assert.AreEqual(11, tree.Root.Branches.Count);
            for (int i = 0; i < tree.Root.Branches.Count; i++)
                Assert.IsTrue(tree.Root.Branches[i].IsLeaf);    
        }
Author: natepan | Project: framework | Lines: 29 | Source: ID3LearningTest.cs


Note: The Accord.MachineLearning.DecisionTrees.DecisionTree examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets come from open-source projects contributed by their respective developers; copyright remains with the original authors. Refer to each project's license before distributing or using the code. Do not reproduce without permission.