This article collects typical usage examples of the DecisionTree.Compute method from Accord.MachineLearning.DecisionTrees in C#. If you have been wondering what DecisionTree.Compute does, how to call it, or what real code that uses it looks like, the hand-picked examples below should help. You can also explore further usage examples of the containing class, Accord.MachineLearning.DecisionTrees.DecisionTree.
The following presents 9 code examples of the DecisionTree.Compute method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code examples.
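Before the individual examples, here is a minimal sketch of the workflow they all share: declare the decision variables, build the tree, train it with a learning algorithm, and then call Compute on a new input vector. The API calls mirror those used in the examples below; the two-feature data set, the variable names, and the expected results are illustrative assumptions rather than code taken from any particular example.
// Minimal sketch (assumed, illustrative data) of training a DecisionTree
// and calling Compute; the calls mirror the examples that follow.
using Accord.MachineLearning.DecisionTrees;
using Accord.MachineLearning.DecisionTrees.Learning;

// Two discrete attributes, each with 2 possible symbol values
DecisionVariable[] attributes =
{
    new DecisionVariable("Feature1", 2),
    new DecisionVariable("Feature2", 2)
};

// Training data: inputs are integer symbols, outputs are class labels
// (here the label is simply the logical AND of the two features)
int[][] inputs =
{
    new[] { 0, 0 }, new[] { 0, 1 }, new[] { 1, 0 }, new[] { 1, 1 }
};
int[] outputs = { 0, 0, 0, 1 };

// Create a tree with 2 output classes and learn it with ID3
var tree = new DecisionTree(attributes, classes: 2);
var id3 = new ID3Learning(tree);
double error = id3.Run(inputs, outputs);        // should be 0 for this tiny, separable set

// Compute the decision for a new input vector
int predicted = tree.Compute(new[] { 1, 1 });   // expected: 1
For continuous features the pattern is the same, except that the variables are declared as continuous (for example with DecisionVariableKind.Continuous or a DoubleRange) and the tree is trained with C45Learning instead of ID3Learning, as Examples 1, 5, 6 and 8 below show.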
Example 1: GenerateTrainingSets
public static TrainingSet[] GenerateTrainingSets(IEnumerable<KeyValuePair<User, double[]>> studentsAndMarks, string[] normalRecords, string[] anomalies)
{
var countOfEntries = normalRecords.Length + anomalies.Length;
var inputData = new double[countOfEntries][];
var outputData = new int[countOfEntries];
var counter = 0;
foreach (var studentAndMarks in studentsAndMarks)
{
if (normalRecords.Contains(studentAndMarks.Key.OpenId))
{
inputData[counter] = studentAndMarks.Value;
outputData[counter++] = 1;
}
if (!anomalies.Contains(studentAndMarks.Key.OpenId))
{
continue;
}
inputData[counter] = studentAndMarks.Value;
outputData[counter++] = 0;
}
var countOfFeatures = studentsAndMarks.ElementAt(0).Value.Length;
var features = new DecisionVariable[countOfFeatures];
features[0] = new DecisionVariable("0", DecisionAttributeKind.Continuous, new AForge.DoubleRange(80, 1200));
for (var i = 1; i < countOfFeatures; i++)
{
features[i] = new DecisionVariable(i.ToString(), DecisionAttributeKind.Continuous, new AForge.DoubleRange(0, 10));
}
// Create the Decision tree with only 2 result values
var tree = new DecisionTree(features, 2);
// Creates a new instance of the C4.5 learning algorithm
var c45 = new C45Learning(tree);
// Learn the decision tree
var error = c45.Run(inputData, outputData);
// Split all data into normal and anomalies
var setOfNormalRecords = studentsAndMarks.Where(x => tree.Compute(x.Value) == 1);
var setOfAnomalies = studentsAndMarks.Where(x => tree.Compute(x.Value) == 0);
// Split normal records into 2 groups (one for the training set and one for anomaly occurrence detection)
var setOfNormalRecordsList = setOfNormalRecords.ToList();
var splitCount = setOfNormalRecordsList.Count * 2 / 3;
var setOfNormalRecordsTr1 = setOfNormalRecordsList.GetRange(0, splitCount);
var setOfNormalRecordsTr2 = setOfNormalRecordsList.GetRange(splitCount, setOfNormalRecordsList.Count - splitCount);
// Create Training Sets
var trSetNormalFirst = CreateTrainingSetFromResources(setOfNormalRecordsTr1);
var trSetNormalSecond = CreateTrainingSetFromResources(setOfNormalRecordsTr2);
var trSetAnomalies = CreateTrainingSetFromResources(setOfAnomalies);
return new[] { trSetNormalFirst, trSetNormalSecond, trSetAnomalies };
}
Example 2: CreateMitchellExample
public static void CreateMitchellExample(out DecisionTree tree, out int[][] inputs, out int[] outputs)
{
DataTable data = new DataTable("Mitchell's Tennis Example");
data.Columns.Add("Day", "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");
data.Rows.Add("D1", "Sunny", "Hot", "High", "Weak", "No");
data.Rows.Add("D2", "Sunny", "Hot", "High", "Strong", "No");
data.Rows.Add("D3", "Overcast", "Hot", "High", "Weak", "Yes");
data.Rows.Add("D4", "Rain", "Mild", "High", "Weak", "Yes");
data.Rows.Add("D5", "Rain", "Cool", "Normal", "Weak", "Yes");
data.Rows.Add("D6", "Rain", "Cool", "Normal", "Strong", "No");
data.Rows.Add("D7", "Overcast", "Cool", "Normal", "Strong", "Yes");
data.Rows.Add("D8", "Sunny", "Mild", "High", "Weak", "No");
data.Rows.Add("D9", "Sunny", "Cool", "Normal", "Weak", "Yes");
data.Rows.Add("D10", "Rain", "Mild", "Normal", "Weak", "Yes");
data.Rows.Add("D11", "Sunny", "Mild", "Normal", "Strong", "Yes");
data.Rows.Add("D12", "Overcast", "Mild", "High", "Strong", "Yes");
data.Rows.Add("D13", "Overcast", "Hot", "Normal", "Weak", "Yes");
data.Rows.Add("D14", "Rain", "Mild", "High", "Strong", "No");
// Create a new codification codebook to
// convert strings into integer symbols
Codification codebook = new Codification(data);
DecisionVariable[] attributes =
{
new DecisionVariable("Outlook", codebook["Outlook"].Symbols), // 3 possible values (Sunny, overcast, rain)
new DecisionVariable("Temperature", codebook["Temperature"].Symbols), // 3 possible values (Hot, mild, cool)
new DecisionVariable("Humidity", codebook["Humidity"].Symbols), // 2 possible values (High, normal)
new DecisionVariable("Wind", codebook["Wind"].Symbols) // 2 possible values (Weak, strong)
};
int classCount = codebook["PlayTennis"].Symbols; // 2 possible values (yes, no)
tree = new DecisionTree(attributes, classCount);
ID3Learning id3 = new ID3Learning(tree);
// Extract symbols from data and train the classifier
DataTable symbols = codebook.Apply(data);
inputs = symbols.ToArray<int>("Outlook", "Temperature", "Humidity", "Wind");
outputs = symbols.ToArray<int>("PlayTennis");
double error = id3.Run(inputs, outputs);
Assert.AreEqual(0, error);
foreach (DataRow row in data.Rows)
{
var x = codebook.Translate(row, "Outlook", "Temperature", "Humidity", "Wind");
int y = tree.Compute(x);
string actual = codebook.Translate("PlayTennis", y);
string expected = row["PlayTennis"] as string;
Assert.AreEqual(expected, actual);
}
{
string answer = codebook.Translate("PlayTennis",
tree.Compute(codebook.Translate("Sunny", "Hot", "High", "Strong")));
Assert.AreEqual("No", answer);
}
}
Example 3: IncompleteDiscreteVariableTest
public void IncompleteDiscreteVariableTest()
{
DecisionTree tree;
int[][] inputs;
int[] outputs;
DataTable data = new DataTable("Degenerated Tennis Example");
data.Columns.Add("Day", "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");
data.Rows.Add("D1", "Sunny", "Hot", "High", "Weak", "No");
data.Rows.Add("D2", "Sunny", "Hot", "High", "Strong", "No");
data.Rows.Add("D3", "Overcast", "Hot", "High", "Weak", "Yes");
data.Rows.Add("D4", "Rain", "Mild", "High", "Weak", "Yes");
data.Rows.Add("D5", "Rain", "Cool", "Normal", "Weak", "Yes");
data.Rows.Add("D6", "Rain", "Cool", "Normal", "Strong", "No");
data.Rows.Add("D7", "Overcast", "Cool", "Normal", "Strong", "Yes");
data.Rows.Add("D8", "Sunny", "Mild", "High", "Weak", "No");
data.Rows.Add("D9", "Sunny", "Cool", "Normal", "Weak", "Yes");
data.Rows.Add("D10", "Rain", "Mild", "Normal", "Weak", "Yes");
data.Rows.Add("D11", "Sunny", "Mild", "Normal", "Strong", "Yes");
data.Rows.Add("D12", "Overcast", "Mild", "High", "Strong", "Yes");
data.Rows.Add("D13", "Overcast", "Hot", "Normal", "Weak", "Yes");
data.Rows.Add("D14", "Rain", "Mild", "High", "Strong", "No");
// Create a new codification codebook to
// convert strings into integer symbols
Codification codebook = new Codification(data);
DecisionVariable[] attributes =
{
new DecisionVariable("Outlook", codebook["Outlook"].Symbols+200), // 203 possible values, 200 undefined
new DecisionVariable("Temperature", codebook["Temperature"].Symbols), // 3 possible values (Hot, mild, cool)
new DecisionVariable("Humidity", codebook["Humidity"].Symbols), // 2 possible values (High, normal)
new DecisionVariable("Wind", codebook["Wind"].Symbols) // 2 possible values (Weak, strong)
};
int classCount = codebook["PlayTennis"].Symbols; // 2 possible values (yes, no)
tree = new DecisionTree(attributes, classCount);
ID3Learning id3 = new ID3Learning(tree);
// Extract symbols from data and train the classifier
DataTable symbols = codebook.Apply(data);
inputs = symbols.ToArray<int>("Outlook", "Temperature", "Humidity", "Wind");
outputs = symbols.ToArray<int>("PlayTennis");
double error = id3.Run(inputs, outputs);
Assert.AreEqual(203, tree.Root.Branches.Count);
Assert.IsTrue(tree.Root.Branches[100].IsLeaf);
Assert.IsNull(tree.Root.Branches[100].Output);
for (int i = 0; i < inputs.Length; i++)
{
int y = tree.Compute(inputs[i]);
Assert.AreEqual(outputs[i], y);
}
}
Example 4: ConstantDiscreteVariableTest
public void ConstantDiscreteVariableTest()
{
DecisionTree tree;
int[][] inputs;
int[] outputs;
DataTable data = new DataTable("Degenerated Tennis Example");
data.Columns.Add("Day", "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");
data.Rows.Add("D1", "Sunny", "Hot", "High", "Weak", "No");
data.Rows.Add("D2", "Sunny", "Hot", "High", "Strong", "No");
data.Rows.Add("D3", "Overcast", "Hot", "High", "Weak", "Yes");
data.Rows.Add("D4", "Rain", "Hot", "High", "Weak", "Yes");
data.Rows.Add("D5", "Rain", "Hot", "Normal", "Weak", "Yes");
data.Rows.Add("D6", "Rain", "Hot", "Normal", "Strong", "No");
data.Rows.Add("D7", "Overcast", "Hot", "Normal", "Strong", "Yes");
data.Rows.Add("D8", "Sunny", "Hot", "High", "Weak", "No");
data.Rows.Add("D9", "Sunny", "Hot", "Normal", "Weak", "Yes");
data.Rows.Add("D10", "Rain", "Hot", "Normal", "Weak", "Yes");
data.Rows.Add("D11", "Sunny", "Hot", "Normal", "Strong", "Yes");
data.Rows.Add("D12", "Overcast", "Hot", "High", "Strong", "Yes");
data.Rows.Add("D13", "Overcast", "Hot", "Normal", "Weak", "Yes");
data.Rows.Add("D14", "Rain", "Hot", "High", "Strong", "No");
// Create a new codification codebook to
// convert strings into integer symbols
Codification codebook = new Codification(data);
DecisionVariable[] attributes =
{
new DecisionVariable("Outlook", codebook["Outlook"].Symbols), // 3 possible values (Sunny, overcast, rain)
new DecisionVariable("Temperature", codebook["Temperature"].Symbols), // 1 constant value (Hot)
new DecisionVariable("Humidity", codebook["Humidity"].Symbols), // 2 possible values (High, normal)
new DecisionVariable("Wind", codebook["Wind"].Symbols) // 2 possible values (Weak, strong)
};
int classCount = codebook["PlayTennis"].Symbols; // 2 possible values (yes, no)
bool thrown = false;
try
{
tree = new DecisionTree(attributes, classCount);
}
catch
{
thrown = true;
}
Assert.IsTrue(thrown);
attributes[1] = new DecisionVariable("Temperature", 2);
tree = new DecisionTree(attributes, classCount);
ID3Learning id3 = new ID3Learning(tree);
// Extract symbols from data and train the classifier
DataTable symbols = codebook.Apply(data);
inputs = symbols.ToArray<int>("Outlook", "Temperature", "Humidity", "Wind");
outputs = symbols.ToArray<int>("PlayTennis");
double error = id3.Run(inputs, outputs);
for (int i = 0; i < inputs.Length; i++)
{
int y = tree.Compute(inputs[i]);
Assert.AreEqual(outputs[i], y);
}
}
Example 5: ConstantDiscreteVariableTest
public void ConstantDiscreteVariableTest()
{
DecisionTree tree;
double[][] inputs;
int[] outputs;
DataTable data = new DataTable("Degenerated Tennis Example");
data.Columns.Add("Day", typeof(string));
data.Columns.Add("Outlook", typeof(string));
data.Columns.Add("Temperature", typeof(double));
data.Columns.Add("Humidity", typeof(double));
data.Columns.Add("Wind", typeof(string));
data.Columns.Add("PlayTennis", typeof(string));
data.Rows.Add("D1", "Sunny", 50, 85, "Weak", "No");
data.Rows.Add("D2", "Sunny", 50, 90, "Weak", "No");
data.Rows.Add("D3", "Overcast", 83, 78, "Weak", "Yes");
data.Rows.Add("D4", "Rain", 70, 96, "Weak", "Yes");
data.Rows.Add("D5", "Rain", 68, 80, "Weak", "Yes");
data.Rows.Add("D6", "Rain", 65, 70, "Weak", "No");
data.Rows.Add("D7", "Overcast", 64, 65, "Weak", "Yes");
data.Rows.Add("D8", "Sunny", 50, 95, "Weak", "No");
data.Rows.Add("D9", "Sunny", 69, 70, "Weak", "Yes");
data.Rows.Add("D10", "Rain", 75, 80, "Weak", "Yes");
data.Rows.Add("D11", "Sunny", 75, 70, "Weak", "Yes");
data.Rows.Add("D12", "Overcast", 72, 90, "Weak", "Yes");
data.Rows.Add("D13", "Overcast", 81, 75, "Weak", "Yes");
data.Rows.Add("D14", "Rain", 50, 80, "Weak", "No");
// Create a new codification codebook to
// convert strings into integer symbols
Codification codebook = new Codification(data);
DecisionVariable[] attributes =
{
new DecisionVariable("Outlook", codebook["Outlook"].Symbols), // 3 possible values (Sunny, overcast, rain)
new DecisionVariable("Temperature", DecisionVariableKind.Continuous), // continuous values
new DecisionVariable("Humidity", DecisionVariableKind.Continuous), // continuous values
new DecisionVariable("Wind", codebook["Wind"].Symbols + 1) // 1 possible value (Weak)
};
int classCount = codebook["PlayTennis"].Symbols; // 2 possible values (yes, no)
tree = new DecisionTree(attributes, classCount);
C45Learning c45 = new C45Learning(tree);
// Extract symbols from data and train the classifier
DataTable symbols = codebook.Apply(data);
inputs = symbols.ToArray("Outlook", "Temperature", "Humidity", "Wind");
outputs = symbols.ToArray<int>("PlayTennis");
double error = c45.Run(inputs, outputs);
for (int i = 0; i < inputs.Length; i++)
{
int y = tree.Compute(inputs[i]);
Assert.AreEqual(outputs[i], y);
}
}
Example 6: LargeRunTest
public void LargeRunTest()
{
#region doc_nursery
// This example uses the Nursery Database available from the University of
// California Irvine repository of machine learning databases, available at
//
// http://archive.ics.uci.edu/ml/machine-learning-databases/nursery/nursery.names
//
// The description paragraph is listed as follows.
//
// Nursery Database was derived from a hierarchical decision model
// originally developed to rank applications for nursery schools. It
// was used during several years in 1980's when there was excessive
// enrollment to these schools in Ljubljana, Slovenia, and the
// rejected applications frequently needed an objective
// explanation. The final decision depended on three subproblems:
// occupation of parents and child's nursery, family structure and
// financial standing, and social and health picture of the family.
// The model was developed within expert system shell for decision
// making DEX (M. Bohanec, V. Rajkovic: Expert system for decision
// making. Sistemica 1(1), pp. 145-157, 1990.).
//
// Let's begin by loading the raw data. This string variable contains
// the contents of the nursery.data file as a single, continuous text.
//
string nurseryData = Resources.nursery;
// Those are the input columns available in the data
//
string[] inputColumns =
{
"parents", "has_nurs", "form", "children",
"housing", "finance", "social", "health"
};
// And this is the output, the last column of the data.
//
string outputColumn = "output";
// Let's populate a data table with this information.
//
DataTable table = new DataTable("Nursery");
table.Columns.Add(inputColumns);
table.Columns.Add(outputColumn);
string[] lines = nurseryData.Split(
new[] { Environment.NewLine }, StringSplitOptions.None);
foreach (var line in lines)
table.Rows.Add(line.Split(','));
// Now, we have to convert the textual, categorical data found
// in the table to a more manageable discrete representation.
//
// For this, we will create a codebook to translate text to
// discrete integer symbols:
//
Codification codebook = new Codification(table);
// And then convert all data into symbols
//
DataTable symbols = codebook.Apply(table);
double[][] inputs = symbols.ToArray(inputColumns);
int[] outputs = symbols.ToArray<int>(outputColumn);
// From now on, we can start creating the decision tree.
//
var attributes = DecisionVariable.FromCodebook(codebook, inputColumns);
DecisionTree tree = new DecisionTree(attributes, classes: 5);
// Now, let's create the C4.5 algorithm
C45Learning c45 = new C45Learning(tree);
// and learn a decision tree. The value of
// the error variable below should be 0.
//
double error = c45.Run(inputs, outputs);
// To compute a decision for one of the input points,
// such as the 25-th example in the set, we can use
//
int y = tree.Compute(inputs[25]);
#endregion
Assert.AreEqual(0, error);
for (int i = 0; i < inputs.Length; i++)
{
int expected = outputs[i];
int actual = tree.Compute(inputs[i]);
Assert.AreEqual(expected, actual);
}
//......... (the rest of this example has been omitted) .........
Example 7: kararAgaci
public string kararAgaci(DataTable tbl)
{
int classCount = 2;
Codification codebook = new Codification(tbl);
DecisionVariable[] attributes =
{
new DecisionVariable("Clump Thickness", 10),
new DecisionVariable("Uniformity of Cell Size", 10),
new DecisionVariable("Uniformity of Cell Shape", 10),
new DecisionVariable("Marginal Adhesion", 10),
new DecisionVariable("Single Epithelial Cell Size", 10),
new DecisionVariable("Bare Nuclei", 10),
new DecisionVariable("Bland Chromatin", 10),
new DecisionVariable("Normal Nucleoli", 10),
new DecisionVariable("Mitoses", 10)
};
DecisionTree tree = new DecisionTree(attributes, classCount);
ID3Learning id3learning = new ID3Learning(tree);
// Translate our training data into integer symbols using our codebook:
DataTable symbols = codebook.Apply(tbl);
int[][] inputs = symbols.ToIntArray("Clump Thickness", "Uniformity of Cell Size", "Uniformity of Cell Shape", "Marginal Adhesion", "Single Epithelial Cell Size", "Bare Nuclei", "Bland Chromatin", "Normal Nucleoli", "Mitoses");
int[] outputs = symbols.ToIntArray("Class").GetColumn(0);
// symbols.
id3learning.Run(inputs, outputs);
int[] query = codebook.Translate(inputlar[0], inputlar[1], inputlar[2], inputlar[3],
inputlar[4], inputlar[5], inputlar[6], inputlar[7], inputlar[8]);
int output = tree.Compute(query);
string answer = codebook.Translate("Class", output);
return answer;
}
Example 8: C45
private string C45(DataTable tbl)
{
int classCount = 2;
Codification codebook = new Codification(tbl);
DecisionVariable[] attributes =
{
new DecisionVariable("Clump Thickness", 10),
new DecisionVariable("Uniformity of Cell Size", 10),
new DecisionVariable("Uniformity of Cell Shape", 10),
new DecisionVariable("Marginal Adhesion", 10),
new DecisionVariable("Single Epithelial Cell Size", 10),
new DecisionVariable("Bare Nuclei", 10),
new DecisionVariable("Bland Chromatin", 10),
new DecisionVariable("Normal Nucleoli", 10),
new DecisionVariable("Mitoses", 10)
};
DecisionTree tree = new DecisionTree(attributes, classCount);
// ID3Learning id3learning = new ID3Learning(tree);
// Translate our training data into integer symbols using our codebook:
DataTable symbols = codebook.Apply(tbl);
double[][] inputs = symbols.ToIntArray("Clump Thickness", "Uniformity of Cell Size", "Uniformity of Cell Shape", "Marginal Adhesion", "Single Epithelial Cell Size", "Bare Nuclei", "Bland Chromatin", "Normal Nucleoli", "Mitoses").ToDouble();
int[] outputs = symbols.ToIntArray("Class").GetColumn(0);
// symbols.
// id3learning.Run(inputs, outputs);
// Now, let's create the C4.5 algorithm
C45Learning c45 = new C45Learning(tree);
// and learn a decision tree. The value of
// the error variable below should be 0.
//
double error = c45.Run(inputs, outputs);
// To compute a decision for one of the input points,
// such as the 6th example in the set (index 5), we can use
//
int y = tree.Compute(inputs[5]);
// Finally, we can also convert our tree to a native
// function, improving efficiency considerably, with
//
Func<double[], int> func = tree.ToExpression().Compile();
// Again, to compute a new decision, we can just use
//
int z = func(inputs[5]);
int[] query = codebook.Translate(inputlar[0], inputlar[1], inputlar[2], inputlar[3],
inputlar[4], inputlar[5], inputlar[6], inputlar[7], inputlar[8]);
int output = tree.Compute(query);
string answer = codebook.Translate("Class", output);
return answer;
// throw new NotImplementedException();
}
Example 9: Run
public void Run()
{
DataTable data = new DataTable("Mitchell's Tennis Example");
data.Columns.Add("Day");
data.Columns.Add("Outlook");
data.Columns.Add("Temperature");
data.Columns.Add("Humidity");
data.Columns.Add("Wind");
data.Columns.Add("PlayTennis");
data.Rows.Add("D1", "Sunny", "Hot", "High", "Weak", "No");
data.Rows.Add("D2", "Sunny", "Hot", "High", "Strong", "No");
data.Rows.Add("D3", "Overcast", "Hot", "High", "Weak", "Yes");
data.Rows.Add("D4", "Rain", "Mild", "High", "Weak", "Yes");
data.Rows.Add("D5", "Rain", "Cool", "Normal", "Weak", "Yes");
data.Rows.Add("D6", "Rain", "Cool", "Normal", "Strong", "No");
data.Rows.Add("D7", "Overcast", "Cool", "Normal", "Strong", "Yes");
data.Rows.Add("D8", "Sunny", "Mild", "High", "Weak", "No");
data.Rows.Add("D9", "Sunny", "Cool", "Normal", "Weak", "Yes");
data.Rows.Add("D10", "Rain", "Mild", "Normal", "Weak", "Yes");
data.Rows.Add("D11", "Sunny", "Mild", "Normal", "Strong", "Yes");
data.Rows.Add("D12", "Overcast", "Mild", "High", "Strong", "Yes");
data.Rows.Add("D13", "Overcast", "Hot", "Normal", "Weak", "Yes");
data.Rows.Add("D14", "Rain", "Mild", "High", "Strong", "No");
// Create a new codification codebook to
// convert strings into integer symbols
Codification codebook = new Codification(data, "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");
// Translate our training data into integer symbols using our codebook:
DataTable symbols = codebook.Apply(data);
CreateDic("Outlook", symbols);
CreateDic("Temperature", symbols);
CreateDic("Humidity", symbols);
CreateDic("Wind", symbols);
CreateDic("PlayTennis", symbols);
int[][] inputs = (from p in symbols.AsEnumerable()
select new int[]
{
GetIndex("Outlook", p["Outlook"].ToString()),
GetIndex("Temperature", p["Temperature"].ToString()),
GetIndex("Humidity", p["Humidity"].ToString()),
GetIndex("Wind", p["Wind"].ToString())
}).Cast<int[]>().ToArray();
int[] outputs = (from p in symbols.AsEnumerable()
select GetIndex("PlayTennis", p["PlayTennis"].ToString())).Cast<int>().ToArray();
/*
// Gather information about decision variables
DecisionVariable[] attributes =
{
new DecisionVariable("Outlook", 3), // 3 possible values (Sunny, overcast, rain)
new DecisionVariable("Temperature", 3), // 3 possible values (Hot, mild, cool)
new DecisionVariable("Humidity", 2), // 2 possible values (High, normal)
new DecisionVariable("Wind", 2) // 2 possible values (Weak, strong)
};
*/
DecisionVariable[] attributes =
{
new DecisionVariable("Outlook", GetCount("Outlook")), // 3 possible values (Sunny, overcast, rain)
new DecisionVariable("Temperature", GetCount("Temperature")), // 3 possible values (Hot, mild, cool)
new DecisionVariable("Humidity", GetCount("Humidity")), // 2 possible values (High, normal)
new DecisionVariable("Wind", GetCount("Wind")) // 2 possible values (Weak, strong)
};
int classCount = GetCount("PlayTennis"); // 2 possible output values for playing tennis: yes or no
//Create the decision tree using the attributes and classes
DecisionTree tree = new DecisionTree(attributes, classCount);
// Create a new instance of the ID3 algorithm
ID3Learning id3learning = new ID3Learning(tree);
// Learn the training instances!
id3learning.Run(inputs, outputs);
string answer = codebook.Translate("PlayTennis",
tree.Compute(codebook.Translate("Sunny", "Hot", "High", "Strong")));
Console.WriteLine("Calculate for: Sunny, Hot, High, Strong");
Console.WriteLine("Answer: " + answer);
var expression = tree.ToExpression();
Console.WriteLine(tree.ToCode("ClassTest"));
DecisionSet s = tree.ToRules();
Console.WriteLine(s.ToString());
// Compiles the expression to IL
var func = expression.Compile();
}