本文整理汇总了C#中Codification.Translate方法的典型用法代码示例。如果您正苦于以下问题:C# Codification.Translate方法的具体用法?C# Codification.Translate怎么用?C# Codification.Translate使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Codification
的用法示例。
在下文中一共展示了Codification.Translate方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: HMMGenerator
/// <summary>
///   Builds a hidden Markov model over a small corpus of example phrases
///   and samples a new word sequence from the learned distributions.
/// </summary>
/// <param name="instrument">The MIDI patch (instrument) this generator will use.</param>
public HMMGenerator(PatchNames instrument)
{
    this.book = new Codebook<Note>();
    this.instrument = instrument;

    // Fix the framework-wide random seed so generation is reproducible.
    Accord.Math.Tools.SetupGenerator(10);

    // Consider some phrases:
    //
    string[][] phrases =
    {
        "The Big Brown Fox Jumps Over the Ugly Dog".Split(new char[]{' '}, StringSplitOptions.RemoveEmptyEntries),
        "This is too hot to handle".Split(new char[]{' '}, StringSplitOptions.RemoveEmptyEntries),
        "I am flying away like a gold eagle".Split(new char[]{' '}, StringSplitOptions.RemoveEmptyEntries),
        "Onamae wa nan desu ka".Split(new char[]{' '}, StringSplitOptions.RemoveEmptyEntries),
        "And then she asked, why is it so small?".Split(new char[]{' '}, StringSplitOptions.RemoveEmptyEntries),
        "Great stuff John! Now you will surely be promoted".Split(new char[]{' '}, StringSplitOptions.RemoveEmptyEntries),
        "Jayne was taken aback when she found out her son was gay".Split(new char[]{' '}, StringSplitOptions.RemoveEmptyEntries),
    };

    // Let's begin by transforming them to sequence of
    // integer labels using a codification codebook:
    var codebook = new Codification("Words", phrases);

    // Now we can create the training data for the models:
    int[][] sequence = codebook.Translate("Words", phrases);

    // To create the models, we will specify a forward topology,
    // as the sequences have definite start and ending points.
    //
    var topology = new Forward(states: codebook["Words"].Symbols);
    int symbols = codebook["Words"].Symbols; // one state/symbol per distinct word in the corpus

    // Create the hidden Markov model
    HiddenMarkovModel hmm = new HiddenMarkovModel(topology, symbols);

    // Create the learning algorithm
    var teacher = new ViterbiLearning(hmm);

    // Teach the model about the phrases
    double error = teacher.Run(sequence);

    // Now, we can ask the model to generate new samples
    // from the word distributions it has just learned:
    //
    List<int> sample = new List<int>();
    int count = 10;
    // Seed with one generated symbol, then extend by repeated one-step prediction.
    sample.Add(hmm.Generate(1)[0]);
    while (sample.Count < count)
    {
        var k = hmm.Predict(sample.ToArray(), 1);
        sample.AddRange(k);
    }

    // And the result will be: "those", "are", "words".
    string[] result = codebook.Translate("Words", sample.ToArray());
}
示例2: ComputeTest2
/// <summary>
///   Tests a Naive Bayes classifier on Mitchell's tennis example using a mix of
///   discrete (Outlook, Wind) and continuous (Temperature, Humidity) variables.
/// </summary>
public void ComputeTest2()
{
    DataTable data = new DataTable("Mitchell's Tennis Example");
    data.Columns.Add("Day", "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");

    // We will set Temperature and Humidity to be continuous
    data.Columns["Temperature"].DataType = typeof(double);
    data.Columns["Humidity"].DataType = typeof(double);

    data.Rows.Add("D1", "Sunny", 38.0, 96.0, "Weak", "No");
    data.Rows.Add("D2", "Sunny", 39.0, 90.0, "Strong", "No");
    data.Rows.Add("D3", "Overcast", 38.0, 75.0, "Weak", "Yes");
    data.Rows.Add("D4", "Rain", 25.0, 87.0, "Weak", "Yes");
    data.Rows.Add("D5", "Rain", 12.0, 30.0, "Weak", "Yes");
    data.Rows.Add("D6", "Rain", 11.0, 35.0, "Strong", "No");
    data.Rows.Add("D7", "Overcast", 10.0, 40.0, "Strong", "Yes");
    data.Rows.Add("D8", "Sunny", 24.0, 90.0, "Weak", "No");
    data.Rows.Add("D9", "Sunny", 12.0, 26.0, "Weak", "Yes");
    data.Rows.Add("D10", "Rain", 25.0, 30.0, "Weak", "Yes"); // was the int literal `25`; use a double for consistency with the column type
    data.Rows.Add("D11", "Sunny", 26.0, 40.0, "Strong", "Yes");
    data.Rows.Add("D12", "Overcast", 27.0, 97.0, "Strong", "Yes");
    data.Rows.Add("D13", "Overcast", 39.0, 41.0, "Weak", "Yes");
    data.Rows.Add("D14", "Rain", 23.0, 98.0, "Strong", "No");

    // Create a new codification codebook to
    // convert strings into discrete symbols
    Codification codebook = new Codification(data);

    int classCount = codebook["PlayTennis"].Symbols; // 2 possible values (yes, no)
    int inputCount = 4; // 4 variables (Outlook, Temperature, Humidity, Wind)

    // Per-variable priors: discrete distributions for the categorical columns,
    // normal distributions for the continuous ones.
    IUnivariateDistribution[] priors =
    {
        new GeneralDiscreteDistribution(codebook["Outlook"].Symbols), // 3 possible values (Sunny, overcast, rain)
        new NormalDistribution(),                                     // Continuous value (celsius)
        new NormalDistribution(),                                     // Continuous value (percentage)
        new GeneralDiscreteDistribution(codebook["Wind"].Symbols)     // 2 possible values (Weak, strong)
    };

    // Create a new Naive Bayes classifiers for the two classes
    var target = new NaiveBayes<IUnivariateDistribution>(classCount, inputCount, priors);

    // Extract symbols from data and train the classifier
    DataTable symbols = codebook.Apply(data);
    double[][] inputs = symbols.ToArray("Outlook", "Temperature", "Humidity", "Wind");
    int[] outputs = symbols.ToArray<int>("PlayTennis");

    // Compute the Naive Bayes model
    target.Estimate(inputs, outputs);

    double logLikelihood;
    double[] responses;

    // Compute the result for a sunny, cool, humid and windy day:
    double[] instance = new double[]
    {
        codebook.Translate(columnName: "Outlook", value: "Sunny"),
        12.0,
        90.0,
        codebook.Translate(columnName: "Wind", value: "Strong")
    };

    int c = target.Compute(instance, out logLikelihood, out responses);

    string result = codebook.Translate("PlayTennis", c);

    Assert.AreEqual("No", result);
    Assert.AreEqual(0, c);
    Assert.AreEqual(0.840, responses[0], 1e-3);
    Assert.AreEqual(1, responses.Sum(), 1e-10);
    Assert.IsFalse(double.IsNaN(responses[0]));
    Assert.AreEqual(2, responses.Length);
}
示例3: ComputeTest
/// <summary>
///   Tests a fully-discrete Naive Bayes classifier on Mitchell's tennis example.
/// </summary>
public void ComputeTest()
{
    DataTable dataset = new DataTable("Mitchell's Tennis Example");
    dataset.Columns.Add("Day", "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");

    dataset.Rows.Add("D1", "Sunny", "Hot", "High", "Weak", "No");
    dataset.Rows.Add("D2", "Sunny", "Hot", "High", "Strong", "No");
    dataset.Rows.Add("D3", "Overcast", "Hot", "High", "Weak", "Yes");
    dataset.Rows.Add("D4", "Rain", "Mild", "High", "Weak", "Yes");
    dataset.Rows.Add("D5", "Rain", "Cool", "Normal", "Weak", "Yes");
    dataset.Rows.Add("D6", "Rain", "Cool", "Normal", "Strong", "No");
    dataset.Rows.Add("D7", "Overcast", "Cool", "Normal", "Strong", "Yes");
    dataset.Rows.Add("D8", "Sunny", "Mild", "High", "Weak", "No");
    dataset.Rows.Add("D9", "Sunny", "Cool", "Normal", "Weak", "Yes");
    dataset.Rows.Add("D10", "Rain", "Mild", "Normal", "Weak", "Yes");
    dataset.Rows.Add("D11", "Sunny", "Mild", "Normal", "Strong", "Yes");
    dataset.Rows.Add("D12", "Overcast", "Mild", "High", "Strong", "Yes");
    dataset.Rows.Add("D13", "Overcast", "Hot", "Normal", "Weak", "Yes");
    dataset.Rows.Add("D14", "Rain", "Mild", "High", "Strong", "No");

    // Encode the string columns as integer symbols.
    Codification codes = new Codification(dataset);

    int outputClasses = codes["PlayTennis"].Symbols; // "Yes" / "No"
    int variableCount = 4;                           // Outlook, Temperature, Humidity, Wind

    // One discrete prior per input variable, sized by its symbol count.
    GeneralDiscreteDistribution[] priors =
    {
        new GeneralDiscreteDistribution(codes["Outlook"].Symbols),     // Sunny, Overcast, Rain
        new GeneralDiscreteDistribution(codes["Temperature"].Symbols), // Hot, Mild, Cool
        new GeneralDiscreteDistribution(codes["Humidity"].Symbols),    // High, Normal
        new GeneralDiscreteDistribution(codes["Wind"].Symbols)         // Weak, Strong
    };

    var bayes = new NaiveBayes<GeneralDiscreteDistribution>(outputClasses, variableCount, priors);

    // Translate the training table and estimate the model from it.
    DataTable encoded = codes.Apply(dataset);
    double[][] trainInputs = encoded.ToArray("Outlook", "Temperature", "Humidity", "Wind");
    int[] trainOutputs = encoded.ToArray<int>("PlayTennis");
    bayes.Estimate(trainInputs, trainOutputs);

    double logLik;
    double[] likelihoods;

    // Query: a sunny, cool, humid and windy day.
    double[] observation = codes.Translate("Sunny", "Cool", "High", "Strong").ToDouble();
    int predicted = bayes.Compute(observation, out logLik, out likelihoods);
    string label = codes.Translate("PlayTennis", predicted);

    Assert.AreEqual("No", label);
    Assert.AreEqual(0, predicted);
    Assert.AreEqual(0.795, likelihoods[0], 1e-3);
    Assert.AreEqual(1, likelihoods.Sum(), 1e-10);
    Assert.IsFalse(double.IsNaN(likelihoods[0]));
    Assert.AreEqual(2, likelihoods.Length);
}
示例4: toString
/// <summary>
///   Formats this node's branching condition, optionally using a codebook
///   to translate the numeric value back into its symbolic label.
/// </summary>
private string toString(Codification codebook)
{
    // The root node carries no branching decision.
    if (IsRoot)
        return "Root";

    int attributeIndex = Parent.Branches.AttributeIndex;
    String attributeName = Owner.Attributes[attributeIndex].Name;
    if (String.IsNullOrEmpty(attributeName))
        attributeName = "x" + attributeIndex;

    String comparison = ComparisonExtensions.ToString(Comparison);

    // Prefer the codebook's symbolic label when one is available for this column.
    String valueText = (codebook != null && Value.HasValue && codebook.Columns.Contains(attributeName))
        ? codebook.Translate(attributeName, (int)Value.Value)
        : Value.ToString();

    return String.Format("{0} {1} {2}", attributeName, comparison, valueText);
}
示例5: AttributeReuseTest1
/// <summary>
///   Trains a C4.5 decision tree on the iris dataset (with Join = 3) and
///   checks that the extracted rule set reproduces the tree's error rate.
/// </summary>
public void AttributeReuseTest1()
{
    // Parse the raw iris data: four numeric features plus a class label per row.
    string[][] rows = Resources.iris_data.Split(
        new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries)
        .Apply(x => x.Split(','));

    double[][] samples = new double[rows.Length][];
    for (int row = 0; row < samples.Length; row++)
        samples[row] = rows[row].First(4).Convert(s => Double.Parse(s, System.Globalization.CultureInfo.InvariantCulture));

    string[] classLabels = rows.GetColumn(4);

    Codification codes = new Codification("Label", classLabels);
    int[] classes = codes.Translate("Label", classLabels);

    DecisionVariable[] features =
    {
        new DecisionVariable("sepal length", DecisionVariableKind.Continuous),
        new DecisionVariable("sepal width", DecisionVariableKind.Continuous),
        new DecisionVariable("petal length", DecisionVariableKind.Continuous),
        new DecisionVariable("petal width", DecisionVariableKind.Continuous),
    };

    DecisionTree tree = new DecisionTree(features, codes.Columns[0].Symbols);

    C45Learning teacher = new C45Learning(tree);
    teacher.Join = 3;

    double error = teacher.Run(samples, classes);
    Assert.AreEqual(0.02, error, 1e-10);

    // The rule set distilled from the tree must classify just as well.
    DecisionSet rules = tree.ToRules();
    double ruleError = ComputeError(rules, samples, classes);
    Assert.AreEqual(0.02, ruleError, 1e-10);

    string ruleText = rules.ToString(codes,
        System.Globalization.CultureInfo.InvariantCulture);

    // TODO: implement this assertion properly, actually checking
    // the text contents once the feature is completely finished.
    Assert.AreEqual(600, ruleText.Length);
}
示例6: toString
/// <summary>
///   Formats this node's branching condition ("attribute op value"), mapping the
///   comparison kind to its operator text and translating the value through the
///   codebook when possible.
/// </summary>
private string toString(Codification codebook)
{
    // The root node carries no branching decision.
    if (IsRoot)
        return "Root";

    int index = Parent.Branches.AttributeIndex;
    String attribute = Owner.Attributes[index].Name;
    if (String.IsNullOrEmpty(attribute))
        attribute = "x" + index;

    // Map the comparison kind to its operator symbol; bail out on anything unknown.
    String symbol;
    switch (Comparison)
    {
        case ComparisonKind.Equal:              symbol = "=="; break;
        case ComparisonKind.GreaterThan:        symbol = ">";  break;
        case ComparisonKind.GreaterThanOrEqual: symbol = ">="; break;
        case ComparisonKind.LessThan:           symbol = "<";  break;
        case ComparisonKind.LessThanOrEqual:    symbol = "<="; break;
        case ComparisonKind.NotEqual:           symbol = "!="; break;
        default:
            return "Unexpected comparison type.";
    }

    // Prefer the codebook's symbolic label when one is available for this column.
    String text = (codebook != null && Value.HasValue && codebook.Columns.Contains(attribute))
        ? codebook.Translate(attribute, (int)Value.Value)
        : Value.ToString();

    return String.Format("{0} {1} {2}", attribute, symbol, text);
}
示例7: TranslateTest3
/// <summary>
///   Verifies that Codification.Translate maps row values to their per-column
///   symbol indices, supports partial rows, and rejects over-long inputs.
/// </summary>
public void TranslateTest3()
{
    string[] colNames = { "col1", "col2", "col3" };
    DataTable table = new DataTable("TranslateTest1 Table");
    table.Columns.Add(colNames);

    table.Rows.Add(1, 2, 3);
    table.Rows.Add(1, 3, 5);
    table.Rows.Add(1, 4, 7);
    table.Rows.Add(2, 4, 6);
    table.Rows.Add(2, 5, 8);
    table.Rows.Add(2, 6, 10);
    table.Rows.Add(3, 4, 5);
    table.Rows.Add(3, 5, 7);
    table.Rows.Add(3, 6, 9);

    // ok, so values 1,2,3 are in column 1
    // values 2,3,4,5,6 in column 2
    // values 3,5,6,7,8,9,10 in column 3
    var codeBook = new Codification(table);

    // BUG FIX: Matrix.IsEqual returns a bool which the original code discarded,
    // so the test was asserting nothing. Each comparison is now wrapped in
    // Assert.IsTrue so a wrong translation actually fails the test.
    Assert.IsTrue(Matrix.IsEqual(new int[] { 0, 0, 0 }, codeBook.Translate(new[] { "1", "2", "3" })));
    Assert.IsTrue(Matrix.IsEqual(new int[] { 0, 1, 1 }, codeBook.Translate(new[] { "1", "3", "5" })));
    Assert.IsTrue(Matrix.IsEqual(new int[] { 0, 2, 2 }, codeBook.Translate(new[] { "1", "4", "7" })));
    Assert.IsTrue(Matrix.IsEqual(new int[] { 1, 2, 3 }, codeBook.Translate(new[] { "2", "4", "6" })));
    Assert.IsTrue(Matrix.IsEqual(new int[] { 1, 3, 4 }, codeBook.Translate(new[] { "2", "5", "8" })));
    Assert.IsTrue(Matrix.IsEqual(new int[] { 1, 4, 5 }, codeBook.Translate(new[] { "2", "6", "10" })));
    Assert.IsTrue(Matrix.IsEqual(new int[] { 2, 2, 1 }, codeBook.Translate(new[] { "3", "4", "5" })));
    Assert.IsTrue(Matrix.IsEqual(new int[] { 2, 3, 2 }, codeBook.Translate(new[] { "3", "5", "7" })));
    Assert.IsTrue(Matrix.IsEqual(new int[] { 2, 4, 6 }, codeBook.Translate(new[] { "3", "6", "9" })));

    // Partial rows translate against the leading columns only.
    Assert.IsTrue(Matrix.IsEqual(new int[] { 2 }, codeBook.Translate(new[] { "3" })));
    Assert.IsTrue(Matrix.IsEqual(new int[] { 2, 4 }, codeBook.Translate(new[] { "3", "6" })));
    Assert.IsTrue(Matrix.IsEqual(new int[] { 2, 4, 6 }, codeBook.Translate(new[] { "3", "6", "9" })));

    // A row longer than the number of columns must throw.
    bool thrown = false;
    try { codeBook.Translate(new[] { "3", "6", "9", "10" }); }
    catch (Exception) { thrown = true; }
    Assert.IsTrue(thrown);
}
示例8: ApplyTest3
/// <summary>
///   Checks that a single-column codebook translates labels to symbols
///   and symbols back to labels, round-tripping exactly.
/// </summary>
public void ApplyTest3()
{
    string[] names = { "child", "adult", "elder" };

    // Build a codebook over one "Label" column.
    Codification codebook = new Codification("Label", names);

    // Forward translation: text label -> discrete symbol.
    int childCode = codebook.Translate("Label", "child"); // 0
    int adultCode = codebook.Translate("Label", "adult"); // 1
    int elderCode = codebook.Translate("Label", "elder"); // 2

    // Reverse translation: discrete symbol -> text label.
    string childLabel = codebook.Translate("Label", 0); // "child"
    string adultLabel = codebook.Translate("Label", 1); // "adult"
    string elderLabel = codebook.Translate("Label", 2); // "elder"

    Assert.AreEqual(0, childCode);
    Assert.AreEqual(1, adultCode);
    Assert.AreEqual(2, elderCode);
    Assert.AreEqual("child", childLabel);
    Assert.AreEqual("adult", adultLabel);
    Assert.AreEqual("elder", elderLabel);
}
示例9: kararAgaci
/// <summary>
///   Trains an ID3 decision tree on the supplied (breast-cancer style) table and
///   classifies the instance held in the `inputlar` field, returning the decoded
///   class label. ("kararAgaci" is Turkish for "decision tree".)
/// </summary>
public string kararAgaci(DataTable tbl)
{
    int classCount = 2;
    Codification codes = new Codification(tbl);

    // Nine categorical attributes, each with up to 10 distinct values.
    DecisionVariable[] variables =
    {
        new DecisionVariable("Clump Thickness", 10),
        new DecisionVariable("Uniformity of Cell Size", 10),
        new DecisionVariable("Uniformity of Cell Shape", 10),
        new DecisionVariable("Marginal Adhesion", 10),
        new DecisionVariable("Single Epithelial Cell Size", 10),
        new DecisionVariable("Bare Nuclei", 10),
        new DecisionVariable("Bland Chromatin", 10),
        new DecisionVariable("Normal Nucleoli", 10),
        new DecisionVariable("Mitoses", 10),
    };

    DecisionTree tree = new DecisionTree(variables, classCount);
    ID3Learning id3 = new ID3Learning(tree);

    // Translate the training table into integer symbols and learn the tree.
    DataTable encoded = codes.Apply(tbl);
    int[][] trainInputs = encoded.ToIntArray("Clump Thickness", "Uniformity of Cell Size", "Uniformity of Cell Shape", "Marginal Adhesion", "Single Epithelial Cell Size", "Bare Nuclei", "Bland Chromatin", "Normal Nucleoli", "Mitoses");
    int[] trainOutputs = encoded.ToIntArray("Class").GetColumn(0);
    id3.Run(trainInputs, trainOutputs);

    // Encode the user-supplied instance, classify it, and decode the answer.
    // NOTE(review): assumes `inputlar` holds nine attribute values in column order — confirm against caller.
    int[] query = codes.Translate(inputlar[0], inputlar[1], inputlar[2], inputlar[3],
        inputlar[4], inputlar[5], inputlar[6], inputlar[7], inputlar[8]);
    int predicted = tree.Compute(query);
    string answer = codes.Translate("Class", predicted);
    return answer;
}
示例10: C45
/// <summary>
///   Trains a C4.5 decision tree on the supplied table and classifies the
///   instance held in the `inputlar` field, returning the decoded class label.
/// </summary>
private string C45(DataTable tbl)
{
    int classCount = 2;
    Codification codebook = new Codification(tbl);

    // Nine categorical attributes, each with up to 10 distinct values.
    DecisionVariable[] attributes =
    {
        new DecisionVariable("Clump Thickness", 10),
        new DecisionVariable("Uniformity of Cell Size", 10),
        new DecisionVariable("Uniformity of Cell Shape", 10),
        new DecisionVariable("Marginal Adhesion", 10),
        new DecisionVariable("Single Epithelial Cell Size", 10),
        new DecisionVariable("Bare Nuclei", 10),
        new DecisionVariable("Bland Chromatin", 10),
        new DecisionVariable("Normal Nucleoli", 10),
        new DecisionVariable("Mitoses", 10),
    };

    DecisionTree tree = new DecisionTree(attributes, classCount);

    // Translate our training data into integer symbols using our codebook:
    DataTable symbols = codebook.Apply(tbl);
    double[][] inputs = symbols.ToIntArray("Clump Thickness", "Uniformity of Cell Size", "Uniformity of Cell Shape", "Marginal Adhesion", "Single Epithelial Cell Size", "Bare Nuclei", "Bland Chromatin", "Normal Nucleoli", "Mitoses").ToDouble();
    int[] outputs = symbols.ToIntArray("Class").GetColumn(0);

    // Learn the tree with C4.5. (Run performs the actual training; the dead
    // commented-out ID3 path and the unused demonstration calls to
    // tree.Compute / tree.ToExpression that followed have been removed.)
    C45Learning c45 = new C45Learning(tree);
    c45.Run(inputs, outputs);

    // Encode the user-supplied instance, classify it, and decode the answer.
    // NOTE(review): assumes `inputlar` holds nine attribute values in column order — confirm against caller.
    int[] query = codebook.Translate(inputlar[0], inputlar[1], inputlar[2], inputlar[3],
        inputlar[4], inputlar[5], inputlar[6], inputlar[7], inputlar[8]);
    int output = tree.Compute(query);
    string answer = codebook.Translate("Class", output);
    return answer;
}
示例11: bayes
/// <summary>
///   Trains a discrete Naive Bayes classifier on the supplied table and
///   classifies the instance held in the `inputlar` field, returning the
///   decoded class label.
/// </summary>
private string bayes(DataTable tbl)
{
    Codification codes = new Codification(tbl,
        "Clump Thickness", "Uniformity of Cell Size", "Uniformity of Cell Shape", "Marginal Adhesion", "Single Epithelial Cell Size", "Bare Nuclei", "Bland Chromatin", "Normal Nucleoli", "Mitoses", "Class");

    // Translate the training table into integer symbols.
    DataTable encoded = codes.Apply(tbl);
    int[][] trainInputs = encoded.ToIntArray("Clump Thickness", "Uniformity of Cell Size", "Uniformity of Cell Shape", "Marginal Adhesion", "Single Epithelial Cell Size", "Bare Nuclei", "Bland Chromatin", "Normal Nucleoli", "Mitoses");
    int[] trainOutputs = encoded.ToIntArray("Class").GetColumn(0);

    // Number of distinct symbols in each input column.
    int[] arity =
    {
        codes["Clump Thickness"].Symbols,
        codes["Uniformity of Cell Size"].Symbols,
        codes["Uniformity of Cell Shape"].Symbols,
        codes["Marginal Adhesion"].Symbols,
        codes["Single Epithelial Cell Size"].Symbols,
        codes["Bare Nuclei"].Symbols,
        codes["Bland Chromatin"].Symbols,
        codes["Normal Nucleoli"].Symbols,
        codes["Mitoses"].Symbols
    };

    int classCount = codes["Class"].Symbols; // two classes

    // Build and estimate the Naive Bayes model.
    NaiveBayes classifier = new NaiveBayes(classCount, arity);
    classifier.Estimate(trainInputs, trainOutputs);

    // Encode the user-supplied instance, classify it, and decode the answer.
    int[] query = codes.Translate(inputlar[0], inputlar[1], inputlar[2], inputlar[3],
        inputlar[4], inputlar[5], inputlar[6], inputlar[7], inputlar[8]);
    int predicted = classifier.Compute(query);
    string label = codes.Translate("Class", predicted);
    return label;
}
示例12: SerializationTest
/// <summary>
///   Loads a serialized Naive Bayes model from an embedded resource and checks
///   that it still classifies Mitchell's tennis example correctly.
/// </summary>
public void SerializationTest()
{
    DataTable dataset = new DataTable("Mitchell's Tennis Example");
    dataset.Columns.Add("Day", "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");

    dataset.Rows.Add("D1", "Sunny", "Hot", "High", "Weak", "No");
    dataset.Rows.Add("D2", "Sunny", "Hot", "High", "Strong", "No");
    dataset.Rows.Add("D3", "Overcast", "Hot", "High", "Weak", "Yes");
    dataset.Rows.Add("D4", "Rain", "Mild", "High", "Weak", "Yes");
    dataset.Rows.Add("D5", "Rain", "Cool", "Normal", "Weak", "Yes");
    dataset.Rows.Add("D6", "Rain", "Cool", "Normal", "Strong", "No");
    dataset.Rows.Add("D7", "Overcast", "Cool", "Normal", "Strong", "Yes");
    dataset.Rows.Add("D8", "Sunny", "Mild", "High", "Weak", "No");
    dataset.Rows.Add("D9", "Sunny", "Cool", "Normal", "Weak", "Yes");
    dataset.Rows.Add("D10", "Rain", "Mild", "Normal", "Weak", "Yes");
    dataset.Rows.Add("D11", "Sunny", "Mild", "Normal", "Strong", "Yes");
    dataset.Rows.Add("D12", "Overcast", "Mild", "High", "Strong", "Yes");
    dataset.Rows.Add("D13", "Overcast", "Hot", "Normal", "Weak", "Yes");
    dataset.Rows.Add("D14", "Rain", "Mild", "High", "Strong", "No");

    // Build the codebook used to encode queries and decode predictions.
    Codification codes = new Codification(dataset,
        "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");

    // Deserialize the previously-trained model from the embedded resource.
    var bayes = Serializer.Load<NaiveBayes<GeneralDiscreteDistribution>>(new MemoryStream(Resources.nb));

    Assert.AreEqual(bayes.InputCount, 4);
    Assert.AreEqual(bayes.ClassCount, 2);

    double logLik;
    double[] likelihoods;

    // Query: a sunny, cool, humid and windy day.
    double[] observation = codes.Translate("Sunny", "Cool", "High", "Strong").ToDouble();
    int predicted = bayes.Compute(observation, out logLik, out likelihoods);
    string label = codes.Translate("PlayTennis", predicted);

    Assert.AreEqual("No", label);
    Assert.AreEqual(0, predicted);
    Assert.AreEqual(0.795, likelihoods[0], 1e-3);
    Assert.AreEqual(1, likelihoods.Sum(), 1e-10);
    Assert.IsFalse(double.IsNaN(likelihoods[0]));
    Assert.AreEqual(2, likelihoods.Length);
}
示例13: learn_test_mitchell
/// <summary>
///   Tests NaiveBayesLearning with explicitly-initialized per-variable
///   distributions on Mitchell's tennis example (mixed discrete/continuous).
///   The #region doc_mitchell_* spans are extracted into published documentation;
///   keep them intact.
/// </summary>
public void learn_test_mitchell()
{
#region doc_mitchell_1
// We will represent Mitchell's Tennis example using a DataTable. However,
// the use of a DataTable is not required in order to use the Naive Bayes.
// Please take a look at the other examples below for simpler approaches.
DataTable data = new DataTable("Mitchell's Tennis Example");
data.Columns.Add("Day", "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");
// We will set Temperature and Humidity to be continuous
data.Columns["Temperature"].DataType = typeof(double);
data.Columns["Humidity"].DataType = typeof(double);
// Add some data
data.Rows.Add("D1", "Sunny", 38.0, 96.0, "Weak", "No");
data.Rows.Add("D2", "Sunny", 39.0, 90.0, "Strong", "No");
data.Rows.Add("D3", "Overcast", 38.0, 75.0, "Weak", "Yes");
data.Rows.Add("D4", "Rain", 25.0, 87.0, "Weak", "Yes");
data.Rows.Add("D5", "Rain", 12.0, 30.0, "Weak", "Yes");
data.Rows.Add("D6", "Rain", 11.0, 35.0, "Strong", "No");
data.Rows.Add("D7", "Overcast", 10.0, 40.0, "Strong", "Yes");
data.Rows.Add("D8", "Sunny", 24.0, 90.0, "Weak", "No");
data.Rows.Add("D9", "Sunny", 12.0, 26.0, "Weak", "Yes");
data.Rows.Add("D10", "Rain", 25, 30.0, "Weak", "Yes");
data.Rows.Add("D11", "Sunny", 26.0, 40.0, "Strong", "Yes");
data.Rows.Add("D12", "Overcast", 27.0, 97.0, "Strong", "Yes");
data.Rows.Add("D13", "Overcast", 39.0, 41.0, "Weak", "Yes");
data.Rows.Add("D14", "Rain", 23.0, 98.0, "Strong", "No");
#endregion
#region doc_mitchell_2
// Create a new codification codebook to
// convert strings into discrete symbols
Codification codebook = new Codification(data);
#endregion
#region doc_mitchell_3
// Some distributions require constructor parameters, and as such, cannot
// be automatically initialized by the learning algorithm. For this reason,
// we might need to specify how each component should be initialized:
IUnivariateFittableDistribution[] priors =
{
new GeneralDiscreteDistribution(codebook["Outlook"].Symbols), // 3 possible values (Sunny, overcast, rain)
new NormalDistribution(), // Continuous value (Celsius)
new NormalDistribution(), // Continuous value (percentage)
new GeneralDiscreteDistribution(codebook["Wind"].Symbols) // 2 possible values (Weak, strong)
};
// Create a new Naive Bayes classifiers for the two classes
var learner = new NaiveBayesLearning<IUnivariateFittableDistribution>()
{
// Tell the learner how to initialize the distributions
Distribution = (classIndex, variableIndex) => priors[variableIndex]
};
// Extract symbols from data and train the classifier
DataTable symbols = codebook.Apply(data);
double[][] inputs = symbols.ToArray("Outlook", "Temperature", "Humidity", "Wind");
int[] outputs = symbols.ToArray<int>("PlayTennis");
// Learn the Naive Bayes model
var naiveBayes = learner.Learn(inputs, outputs);
#endregion
#region doc_mitchell_4
// Create an instance representing a "sunny, cool, humid and windy day":
double[] instance = new double[]
{
codebook.Translate(columnName:"Outlook", value:"Sunny"), // encodes to symbol 0
12.0,
90.0,
codebook.Translate(columnName:"Wind", value:"Strong") // encodes to symbol 1
};
// We can obtain a class prediction using
int predicted = naiveBayes.Decide(instance);
// Or compute probabilities of each class using
double[] probabilities = naiveBayes.Probabilities(instance);
// Or obtain the log-likelihood of prediction
double ll = naiveBayes.LogLikelihood(instance);
// Finally, the result can be translated back using
string result = codebook.Translate("PlayTennis", predicted); // Should be "No"
#endregion
Assert.AreEqual("No", result);
Assert.AreEqual(0, predicted);
Assert.AreEqual(0.840, probabilities[0], 1e-3);
Assert.AreEqual(-10.493243476691351, ll, 1e-6);
Assert.AreEqual(1, probabilities.Sum(), 1e-10);
Assert.AreEqual(2, probabilities.Length);
}
示例14: CreateMitchellExample
//
//You can use the following additional attributes as you write your tests:
//
//Use ClassInitialize to run code before running the first test in the class
//[ClassInitialize()]
//public static void MyClassInitialize(TestContext testContext)
//{
//}
//
//Use ClassCleanup to run code after all tests in a class have run
//[ClassCleanup()]
//public static void MyClassCleanup()
//{
//}
//
//Use TestInitialize to run code before running each test
//[TestInitialize()]
//public void MyTestInitialize()
//{
//}
//
//Use TestCleanup to run code after each test has run
//[TestCleanup()]
//public void MyTestCleanup()
//{
//}
//
#endregion
/// <summary>
///   Builds Mitchell's tennis example, trains an ID3 decision tree on it with
///   zero error, verifies the tree reproduces every training row, and returns
///   the tree together with the encoded training data via out parameters.
/// </summary>
/// <param name="tree">Receives the trained decision tree.</param>
/// <param name="inputs">Receives the codified input rows.</param>
/// <param name="outputs">Receives the codified class labels.</param>
public static void CreateMitchellExample(out DecisionTree tree, out int[][] inputs, out int[] outputs)
{
    DataTable dataset = new DataTable("Mitchell's Tennis Example");
    dataset.Columns.Add("Day", "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");

    dataset.Rows.Add("D1", "Sunny", "Hot", "High", "Weak", "No");
    dataset.Rows.Add("D2", "Sunny", "Hot", "High", "Strong", "No");
    dataset.Rows.Add("D3", "Overcast", "Hot", "High", "Weak", "Yes");
    dataset.Rows.Add("D4", "Rain", "Mild", "High", "Weak", "Yes");
    dataset.Rows.Add("D5", "Rain", "Cool", "Normal", "Weak", "Yes");
    dataset.Rows.Add("D6", "Rain", "Cool", "Normal", "Strong", "No");
    dataset.Rows.Add("D7", "Overcast", "Cool", "Normal", "Strong", "Yes");
    dataset.Rows.Add("D8", "Sunny", "Mild", "High", "Weak", "No");
    dataset.Rows.Add("D9", "Sunny", "Cool", "Normal", "Weak", "Yes");
    dataset.Rows.Add("D10", "Rain", "Mild", "Normal", "Weak", "Yes");
    dataset.Rows.Add("D11", "Sunny", "Mild", "Normal", "Strong", "Yes");
    dataset.Rows.Add("D12", "Overcast", "Mild", "High", "Strong", "Yes");
    dataset.Rows.Add("D13", "Overcast", "Hot", "Normal", "Weak", "Yes");
    dataset.Rows.Add("D14", "Rain", "Mild", "High", "Strong", "No");

    // Encode the string columns as integer symbols.
    Codification codes = new Codification(dataset);

    // One decision variable per input column, sized by its symbol count.
    DecisionVariable[] features =
    {
        new DecisionVariable("Outlook", codes["Outlook"].Symbols),         // Sunny, Overcast, Rain
        new DecisionVariable("Temperature", codes["Temperature"].Symbols), // Hot, Mild, Cool
        new DecisionVariable("Humidity", codes["Humidity"].Symbols),       // High, Normal
        new DecisionVariable("Wind", codes["Wind"].Symbols)                // Weak, Strong
    };

    int classCount = codes["PlayTennis"].Symbols; // "Yes" / "No"
    tree = new DecisionTree(features, classCount);
    ID3Learning id3 = new ID3Learning(tree);

    // Translate the training table and learn the tree; it must fit perfectly.
    DataTable encoded = codes.Apply(dataset);
    inputs = encoded.ToArray<int>("Outlook", "Temperature", "Humidity", "Wind");
    outputs = encoded.ToArray<int>("PlayTennis");
    double error = id3.Run(inputs, outputs);
    Assert.AreEqual(0, error);

    // The trained tree must reproduce the label of every training row.
    foreach (DataRow row in dataset.Rows)
    {
        var input = codes.Translate(row, "Outlook", "Temperature", "Humidity", "Wind");
        int predicted = tree.Compute(input);
        string actual = codes.Translate("PlayTennis", predicted);
        string expected = row["PlayTennis"] as string;
        Assert.AreEqual(expected, actual);
    }

    // Spot-check one decoded end-to-end prediction.
    {
        string answer = codes.Translate("PlayTennis",
            tree.Compute(codes.Translate("Sunny", "Hot", "High", "Strong")));
        Assert.AreEqual("No", answer);
    }
}
示例15: ComputeTest
/// <summary>
///   Tests the NaiveBayesLearning teacher on Mitchell's (fully discrete) tennis
///   example. The #region doc_* spans are extracted into published
///   documentation; keep them intact.
/// </summary>
public void ComputeTest()
{
#region doc_mitchell
DataTable data = new DataTable("Mitchell's Tennis Example");
data.Columns.Add("Day", "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");
data.Rows.Add("D1", "Sunny", "Hot", "High", "Weak", "No");
data.Rows.Add("D2", "Sunny", "Hot", "High", "Strong", "No");
data.Rows.Add("D3", "Overcast", "Hot", "High", "Weak", "Yes");
data.Rows.Add("D4", "Rain", "Mild", "High", "Weak", "Yes");
data.Rows.Add("D5", "Rain", "Cool", "Normal", "Weak", "Yes");
data.Rows.Add("D6", "Rain", "Cool", "Normal", "Strong", "No");
data.Rows.Add("D7", "Overcast", "Cool", "Normal", "Strong", "Yes");
data.Rows.Add("D8", "Sunny", "Mild", "High", "Weak", "No");
data.Rows.Add("D9", "Sunny", "Cool", "Normal", "Weak", "Yes");
data.Rows.Add("D10", "Rain", "Mild", "Normal", "Weak", "Yes");
data.Rows.Add("D11", "Sunny", "Mild", "Normal", "Strong", "Yes");
data.Rows.Add("D12", "Overcast", "Mild", "High", "Strong", "Yes");
data.Rows.Add("D13", "Overcast", "Hot", "Normal", "Weak", "Yes");
data.Rows.Add("D14", "Rain", "Mild", "High", "Strong", "No");
#endregion
#region doc_codebook
// Create a new codification codebook to
// convert strings into discrete symbols
Codification codebook = new Codification(data,
"Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");
// Extract input and output pairs to train
DataTable symbols = codebook.Apply(data);
int[][] inputs = symbols.ToArray<int>("Outlook", "Temperature", "Humidity", "Wind");
int[] outputs = symbols.ToArray<int>("PlayTennis");
#endregion
#region doc_learn
// Create a new Naive Bayes learning
var learner = new NaiveBayesLearning();
// Learn a Naive Bayes model from the examples
NaiveBayes nb = learner.Learn(inputs, outputs);
#endregion
#region doc_test
// Consider we would like to know whether one should play tennis at a
// sunny, cool, humid and windy day. Let us first encode this instance
int[] instance = codebook.Translate("Sunny", "Cool", "High", "Strong");
// Let us obtain the numeric output that represents the answer
int c = nb.Decide(instance); // answer will be 0
// Now let us convert the numeric output to an actual "Yes" or "No" answer
string result = codebook.Translate("PlayTennis", c); // answer will be "No"
// We can also extract the probabilities for each possible answer
double[] probs = nb.Probabilities(instance); // { 0.795, 0.205 }
#endregion
Assert.AreEqual("No", result);
Assert.AreEqual(0, c);
Assert.AreEqual(0.795, probs[0], 1e-3);
Assert.AreEqual(0.205, probs[1], 1e-3);
Assert.AreEqual(1, probs.Sum(), 1e-10);
Assert.IsFalse(double.IsNaN(probs[0]));
Assert.AreEqual(2, probs.Length);
}