本文整理汇总了C#中HiddenMarkovClassifier类的典型用法代码示例。如果您正苦于以下问题:C# HiddenMarkovClassifier类的具体用法?C# HiddenMarkovClassifier怎么用?C# HiddenMarkovClassifier使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
HiddenMarkovClassifier类的所属命名空间在原文中未能提取(从示例看疑为Accord.NET的Accord.Statistics.Models.Markov——待确认)。在下文中一共展示了HiddenMarkovClassifier类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: Recognize
/// <summary>
/// Extracts features from a sound signal, quantizes them against the code book,
/// runs the classifier, and reports the tag of the highest-likelihood inner model.
/// Returns the class index produced by <c>hmm.Compute</c>.
/// </summary>
public int Recognize(ISoundSignalReader signal, HiddenMarkovClassifier hmm, out string name,
SignalVisitor voiceVisitor = null)
{
    var extractor = new FeatureUtility(_engineParameters);
    signal.Reset();

    // Only the first feature set of the signal is used for recognition.
    var firstFeatureSet = extractor.ExtractFeatures(signal, voiceVisitor).First();
    var observations = _codeBook.Quantize(firstFeatureSet.Select(item => new Point(item)).ToArray());

    double[] responsibilities;
    var classIndex = hmm.Compute(observations, out responsibilities);

    // Scan every inner model and keep the tag of the one with the highest likelihood.
    var bestLikelihood = Double.MinValue;
    name = string.Empty;
    foreach (var model in hmm.Models)
    {
        var likelihood = model.Evaluate(observations);
        if (likelihood > bestLikelihood)
        {
            bestLikelihood = likelihood;
            name = model.Tag.ToString();
        }
    }

    return classIndex;
}
示例2: btnCreate_Click
/// <summary>
/// Creates the ensemble
/// </summary>
/// <summary>
/// Creates the classifier ensemble from the loaded sequence table: one inner
/// model per distinct label, with the per-label state count taken from the grid.
/// </summary>
private void btnCreate_Click(object sender, EventArgs e)
{
    var source = dgvSequenceSource.DataSource as DataTable;
    if (source == null)
    {
        MessageBox.Show("Please load some data by clicking 'Open' under the 'File' menu first. " +
            "A sample dataset can be found in the folder 'Resources' contained in the same " +
            "directory as this application.");
        return;
    }

    // One row per distinct (Label, States) pair defines one class.
    var distinct = source.DefaultView.ToTable(true, "Label", "States");
    int classCount = distinct.Rows.Count;

    var categories = new string[classCount];
    var states = new int[classCount];
    for (int i = 0; i < classCount; i++)
    {
        // Label name and the number of hidden states used to recognize it.
        categories[i] = distinct.Rows[i]["Label"] as string;
        states[i] = int.Parse(distinct.Rows[i]["States"] as string);
    }

    hmmc = new HiddenMarkovClassifier(classCount, states, 3, categories);
    dgvModels.DataSource = hmmc.Models;
}
示例3: CreateModel1
/// <summary>
/// Builds and trains a continuous-density HMM sequence classifier that tells a
/// univariate ascending ramp (class 0) apart from its reversal (class 1).
/// </summary>
public static HiddenMarkovClassifier<NormalDistribution> CreateModel1()
{
    double[][] sequences =
    {
        new double[] { 0, 1, 2, 3, 4 }, // class 0: ascending
        new double[] { 4, 3, 2, 1, 0 }, // class 1: descending
    };
    int[] labels = { 0, 1 };

    // Two ergodic two-state models with a Normal emission density each.
    var classifier = new HiddenMarkovClassifier<NormalDistribution>(
        2, new Ergodic(2), new NormalDistribution());

    // Baum-Welch per inner model; stop once log-likelihood changes less than 0.0001.
    var teacher = new HiddenMarkovClassifierLearning<NormalDistribution>(classifier,
        i => new BaumWelchLearning<NormalDistribution>(classifier.Models[i])
        {
            Tolerance = 0.0001,
            Iterations = 0
        });

    teacher.Run(sequences, labels);
    return classifier;
}
示例4: LearnTest
/// <summary>
/// Trains a discrete HMM classifier on two classes of binary sequences and
/// verifies that every training sample is classified back to its own label.
/// </summary>
public void LearnTest()
{
    // Class 0 sequences start and end with 0; class 1 sequences start and end with 1.
    int[][] inputs =
    {
        new int[] { 0, 1, 1, 0 },
        new int[] { 0, 0, 1, 0 },
        new int[] { 0, 1, 1, 1, 0 },
        new int[] { 0, 1, 0 },
        new int[] { 1, 0, 0, 1 },
        new int[] { 1, 1, 0, 1 },
        new int[] { 1, 0, 0, 0, 1 },
        new int[] { 1, 0, 1 },
    };
    int[] outputs = { 0, 0, 0, 0, 1, 1, 1, 1 };

    // Two classes, a binary alphabet, and two hidden states per inner model.
    var classifier = new HiddenMarkovClassifier(2, new int[] { 2, 2 }, 2);

    // Baum-Welch per inner model; stop once log-likelihood changes less than 0.001.
    var teacher = new HiddenMarkovClassifierLearning(classifier,
        i => new BaumWelchLearning(classifier.Models[i])
        {
            Tolerance = 0.001,
            Iterations = 0
        });

    double likelihood = teacher.Run(inputs, outputs);

    // Every training sequence must map back to its own class.
    for (int i = 0; i < inputs.Length; i++)
    {
        int predicted = classifier.Compute(inputs[i], out likelihood);
        Assert.AreEqual(outputs[i], predicted);
    }
}
示例5: CreateModel1
/// <summary>
/// Builds and trains a classifier of Independent-density HMMs that separates a
/// 1-D multivariate ramp sequence (class 0) from its reversal (class 1), and
/// asserts the expected training log-likelihood.
/// </summary>
public static HiddenMarkovClassifier<Independent> CreateModel1()
{
    double[][][] sequences =
    {
        new[] // class 0: ascending
        {
            new double[] { 0 },
            new double[] { 1 },
            new double[] { 2 },
            new double[] { 3 },
            new double[] { 4 },
        },
        new[] // class 1: descending
        {
            new double[] { 4 },
            new double[] { 3 },
            new double[] { 2 },
            new double[] { 1 },
            new double[] { 0 },
        }
    };
    int[] labels = { 0, 1 };

    // Independent density wrapping a single Normal component.
    var density = new Independent(new NormalDistribution());
    var classifier = new HiddenMarkovClassifier<Independent>(2, new Ergodic(2), density);

    // Baum-Welch per inner model; stop once log-likelihood changes less than 0.0001.
    var teacher = new HiddenMarkovClassifierLearning<Independent>(classifier,
        i => new BaumWelchLearning<Independent>(classifier.Models[i])
        {
            Tolerance = 0.0001,
            Iterations = 0
        });

    double logLikelihood = teacher.Run(sequences, labels);
    Assert.AreEqual(-13.271981026832929d, logLikelihood);
    return classifier;
}
示例6: LearnTest1
/// <summary>
/// Trains a two-class continuous HMM classifier on a univariate ramp and its
/// reversal, then verifies the predicted labels, the training log-likelihood,
/// and the per-sequence likelihoods.
/// </summary>
public void LearnTest1()
{
    double[][] sequences =
    {
        new double[] { 0, 1, 2, 3, 4 }, // class 0: ascending
        new double[] { 4, 3, 2, 1, 0 }, // class 1: descending
    };
    int[] labels = { 0, 1 };

    // Two ergodic two-state models with Normal emission densities.
    var classifier = new HiddenMarkovClassifier<NormalDistribution>(
        2, new Ergodic(2), new NormalDistribution());

    // Baum-Welch per inner model; stop once log-likelihood changes less than 0.0001.
    var teacher = new HiddenMarkovClassifierLearning<NormalDistribution>(classifier,
        i => new BaumWelchLearning<NormalDistribution>(classifier.Models[i])
        {
            Tolerance = 0.0001,
            Iterations = 0
        });

    double logLikelihood = teacher.Run(sequences, labels);

    // Each training sequence should come back with its own label.
    double likelihood1, likelihood2;
    int c1 = classifier.Compute(sequences[0], out likelihood1); // expect 0
    int c2 = classifier.Compute(sequences[1], out likelihood2); // expect 1

    Assert.AreEqual(0, c1);
    Assert.AreEqual(1, c2);
    Assert.AreEqual(-13.271981026832929, logLikelihood, 1e-10);
    Assert.AreEqual(0.99999791320102149, likelihood1, 1e-10);
    Assert.AreEqual(0.99999791320102149, likelihood2, 1e-10);
    Assert.IsFalse(double.IsNaN(logLikelihood));
    Assert.IsFalse(double.IsNaN(likelihood1));
    Assert.IsFalse(double.IsNaN(likelihood2));
}
示例7: CreateModel1
/// <summary>
/// Builds and trains a two-class discrete HMM classifier over binary sequences
/// and returns the trained classifier.
/// </summary>
public static HiddenMarkovClassifier CreateModel1()
{
    // Class 0 sequences start and end with 0; class 1 sequences start and end with 1.
    int[][] inputs =
    {
        new int[] { 0, 1, 1, 0 },
        new int[] { 0, 0, 1, 0 },
        new int[] { 0, 1, 1, 1, 0 },
        new int[] { 0, 1, 0 },
        new int[] { 1, 0, 0, 1 },
        new int[] { 1, 1, 0, 1 },
        new int[] { 1, 0, 0, 0, 1 },
        new int[] { 1, 0, 1 },
    };
    int[] outputs = { 0, 0, 0, 0, 1, 1, 1, 1 };

    // Two classes, a binary alphabet, and two hidden states per inner model.
    var classifier = new HiddenMarkovClassifier(2, new int[] { 2, 2 }, 2);

    // Baum-Welch per inner model; stop once log-likelihood changes less than 0.001.
    var teacher = new HiddenMarkovClassifierLearning(classifier,
        i => new BaumWelchLearning(classifier.Models[i])
        {
            Tolerance = 0.001,
            Iterations = 0
        });

    teacher.Run(inputs, outputs);
    return classifier;
}
示例8: Aprender
/// <summary>
/// Trains the multivariate-normal HMM classifier on dynamic-sign feature data:
/// one forward-topology model per class, fitted with Baum-Welch.
/// </summary>
public void Aprender(IDadosSinaisDinamicos dados)
{
    // Feature-vector dimensionality, read from the first frame of the first sample.
    var dimensao = dados.CaracteristicasSinais[0][0].Length;

    hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(
        classes: dados.QuantidadeClasses,
        topology: new Forward(QuantidadeEstados),
        initial: new MultivariateNormalDistribution(dimensao));

    var aprendizado = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution>(hmm,
        indice => new BaumWelchLearning<MultivariateNormalDistribution>(hmm.Models[indice])
        {
            Tolerance = 0.001,
            Iterations = 100,
            // Small regularization keeps covariance estimates from becoming singular.
            FittingOptions = new NormalOptions { Regularization = 1e-5 }
        });

    aprendizado.Run(dados.CaracteristicasSinais, dados.IdentificadoresSinais);
}
示例9: button1_Click
/// <summary>
/// Builds and trains a small four-class discrete HMM classifier (one sequence
/// per class), classifies a probe sequence, and shows the winning model's tag
/// together with the likelihood.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    // Per-class hidden state counts and the tag assigned to each class.
    var stateCounts = new[] { 1, 2, 2, 3 };
    var tags = new[] { "ខ្ញុំ", "ទៅ", "ខ្លួន", "ក" };
    _hmmc = new HiddenMarkovClassifier(4, stateCounts, 4, tags);

    // Training data: exactly one sequence per class.
    var sequences = new[]
    {
        new[] { 1, 1, 1 },
        new[] { 0, 2 },
        new[] { 0, 1, 2 },
        new[] { 1, 2 }
    };
    var labels = new[] { 0, 1, 2, 3 };

    var teacher = new HiddenMarkovClassifierLearning(_hmmc,
        i => new BaumWelchLearning(_hmmc.Models[i])
        {
            Iterations = 0,
            Tolerance = 0.0001
        });
    teacher.Run(sequences, labels);

    // Classify a probe sequence and report the winning model's tag.
    var probe = new[] { 1, 2 };
    double likelihood;
    var label = _hmmc.Compute(probe, out likelihood);
    MessageBox.Show(_hmmc.Models[label].Tag.ToString() + " P =" + likelihood);
}
示例10: LearnTest2
/// <summary>
/// Trains a discrete HMM classifier with sequence rejection enabled, checks the
/// structure of the learned threshold model (its diagonal transitions mirror the
/// inner models'), and verifies that an out-of-class sequence is rejected (-1).
/// </summary>
public void LearnTest2()
{
// Declare some testing data
int[][] inputs = new int[][]
{
new int[] { 0,0,1,2 }, // Class 0
new int[] { 0,1,1,2 }, // Class 0
new int[] { 0,0,0,1,2 }, // Class 0
new int[] { 0,1,2,2,2 }, // Class 0
new int[] { 2,2,1,0 }, // Class 1
new int[] { 2,2,2,1,0 }, // Class 1
new int[] { 2,2,2,1,0 }, // Class 1
new int[] { 2,2,2,2,1 }, // Class 1
};
int[] outputs = new int[]
{
0,0,0,0, // First four sequences are of class 0
1,1,1,1, // Last four sequences are of class 1
};
// We are trying to predict two different classes
int classes = 2;
// Each sequence may have up to 3 symbols (0,1,2)
int symbols = 3;
// Nested models will have 3 states each
int[] states = new int[] { 3, 3 };
// Creates a new Hidden Markov Model Classifier with the given parameters
HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);
// Create a new learning algorithm to train the sequence classifier
var teacher = new HiddenMarkovClassifierLearning(classifier,
// Train each model until the log-likelihood changes less than 0.001
modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
{
Tolerance = 0.001,
Iterations = 0
}
);
// Enable support for sequence rejection
teacher.Rejection = true;
// Train the sequence classifier using the algorithm
double likelihood = teacher.Run(inputs, outputs);
// Will assert the models have learned the sequences correctly.
for (int i = 0; i < inputs.Length; i++)
{
int expected = outputs[i];
int actual = classifier.Compute(inputs[i], out likelihood);
Assert.AreEqual(expected, actual);
}
// With rejection enabled, a composite threshold model is built; its 6 states
// are the two inner 3-state models stacked, so each diagonal transition must
// match the corresponding inner-model self-transition.
HiddenMarkovModel threshold = classifier.Threshold;
Assert.AreEqual(6, threshold.States);
Assert.AreEqual(classifier.Models[0].Transitions[0, 0], threshold.Transitions[0, 0], 1e-10);
Assert.AreEqual(classifier.Models[0].Transitions[1, 1], threshold.Transitions[1, 1], 1e-10);
Assert.AreEqual(classifier.Models[0].Transitions[2, 2], threshold.Transitions[2, 2], 1e-10);
Assert.AreEqual(classifier.Models[1].Transitions[0, 0], threshold.Transitions[3, 3], 1e-10);
Assert.AreEqual(classifier.Models[1].Transitions[1, 1], threshold.Transitions[4, 4], 1e-10);
Assert.AreEqual(classifier.Models[1].Transitions[2, 2], threshold.Transitions[5, 5], 1e-10);
Assert.IsFalse(Matrix.HasNaN(threshold.Transitions));
// A sequence unlike either trained class should be rejected with label -1.
int[] r0 = new int[] { 1, 1, 0, 0, 2 };
double logRejection;
int c = classifier.Compute(r0, out logRejection);
Assert.AreEqual(-1, c);
Assert.AreEqual(0.99569011079012049, logRejection);
Assert.IsFalse(double.IsNaN(logRejection));
// The threshold model itself evaluates and decodes the rejected sequence
// to the expected log-likelihoods.
logRejection = threshold.Evaluate(r0);
Assert.AreEqual(-6.7949285513628528, logRejection, 1e-10);
Assert.IsFalse(double.IsNaN(logRejection));
threshold.Decode(r0, out logRejection);
Assert.AreEqual(-8.902077561009957, logRejection, 1e-10);
Assert.IsFalse(double.IsNaN(logRejection));
}
示例11: LogForwardTest3
/// <summary>
/// Checks that LogForward agrees (element-wise) with the log of Forward for a
/// potential function derived from a multivariate HMM classifier, and that the
/// exponentiated last row of forward values sums to the reported likelihood.
/// </summary>
public void LogForwardTest3()
{
    var density = new MultivariateNormalDistribution(3);
    var hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(2, new Ergodic(2), density);

    double[][][] inputs =
    {
        new[] { new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 } },
        new[] { new double[] { 1, 6, 2 }, new double[] { 2, 1, 6 }, new double[] { 1, 1, 0 } },
        new[] { new double[] { 9, 1, 0 }, new double[] { 0, 1, 5 }, new double[] { 0, 0, 0 } },
    };
    int[] outputs = { 0, 0, 1 };

    var function = new MarkovMultivariateFunction(hmm);
    var observations = inputs[0];

    // Reference: element-wise log of the plain Forward matrix for class 0.
    double[,] expected = Matrix.Log(Accord.Statistics.Models.Fields.
        ForwardBackwardAlgorithm.Forward(function.Factors[0], observations, 0));

    double logLikelihood;
    double[,] actual = Accord.Statistics.Models.Fields.
        ForwardBackwardAlgorithm.LogForward(function.Factors[0], observations, 0, out logLikelihood);

    Assert.IsTrue(expected.IsEqual(actual, 1e-10));

    // The exponentiated last row must sum to the overall sequence likelihood.
    double total = 0;
    for (int state = 0; state < hmm[0].States; state++)
        total += Math.Exp(actual[observations.Length - 1, state]);

    Assert.AreEqual(Math.Exp(logLikelihood), total, 1e-8);
    Assert.IsFalse(double.IsNaN(total));
}
示例12: runDiscreteDensityHiddenMarkovClassifierLearningExample
/// <summary>
/// Demonstrates learning a discrete-density hidden Markov classifier over three
/// classes of symbol sequences, then classifies a few probe sequences and prints
/// the predicted labels.
/// </summary>
static void runDiscreteDensityHiddenMarkovClassifierLearningExample()
{
    // Observation sequences should only contain symbols that are greater than
    // or equal to 0, and lesser than the number of symbols.
    int[][] observationSequences =
    {
        // First class of sequences: starts and ends with zeros, ones in the middle.
        new[] { 0, 1, 1, 1, 0 },
        new[] { 0, 0, 1, 1, 0, 0 },
        new[] { 0, 1, 1, 1, 1, 0 },
        // Second class of sequences: starts with twos and switches to ones until the end.
        new[] { 2, 2, 2, 2, 1, 1, 1, 1, 1 },
        new[] { 2, 2, 1, 2, 1, 1, 1, 1, 1 },
        new[] { 2, 2, 2, 2, 2, 1, 1, 1, 1 },
        // Third class of sequences: can start with any symbols, but ends with three.
        new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
        new[] { 0, 0, 0, 3, 3, 3, 3 },
        new[] { 1, 0, 1, 2, 2, 2, 3, 3 },
        new[] { 1, 1, 2, 3, 3, 3, 3 },
        new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
        new[] { 2, 2, 0, 3, 3, 3, 3 },
        new[] { 1, 0, 1, 2, 3, 3, 3, 3 },
        new[] { 1, 1, 2, 3, 3, 3, 3 },
    };

    // Class labels have to be zero-based and successive integers.
    int[] classLabels =
    {
        0, 0, 0,                // Sequences 1-3 are from class 0.
        1, 1, 1,                // Sequences 4-6 are from class 1.
        2, 2, 2, 2, 2, 2, 2, 2  // Sequences 7-14 are from class 2.
    };

    // Use a single forward topology for all inner models.
    ITopology forward = new Forward(states: 3);

    // Create a hidden Markov classifier with the given topology.
    HiddenMarkovClassifier hmc = new HiddenMarkovClassifier(classes: 3, topology: forward, symbols: 4);

    // Create one Baum-Welch learning algorithm per inner model.
    var trainer = new HiddenMarkovClassifierLearning(
        hmc,
        modelIndex => new BaumWelchLearning(hmc.Models[modelIndex])
        {
            Tolerance = 0.001, // iterate until log-likelihood changes less than 0.001.
            Iterations = 0     // don't place an upper limit on the number of iterations.
        }
    );

    // Call its Run method to start learning.
    double averageLogLikelihood = trainer.Run(observationSequences, classLabels);
    Console.WriteLine("average log-likelihood for the observations = {0}", averageLogLikelihood);

    // Check the output classification label for some sequences.
    int y1 = hmc.Compute(new[] { 0, 1, 1, 1, 0 }); // output is y1 = 0.
    Console.WriteLine("output class = {0}", y1);
    int y2 = hmc.Compute(new[] { 0, 0, 1, 1, 0, 0 }); // output is y2 = 0.
    Console.WriteLine("output class = {0}", y2);
    int y3 = hmc.Compute(new[] { 2, 2, 2, 2, 1, 1 }); // output is y3 = 1.
    Console.WriteLine("output class = {0}", y3);
    int y4 = hmc.Compute(new[] { 2, 2, 1, 1 }); // output is y4 = 1.
    Console.WriteLine("output class = {0}", y4);
    int y5 = hmc.Compute(new[] { 0, 0, 1, 3, 3, 3 }); // output is y5 = 2.
    // Bug fix: this line previously printed y4 (copy-paste error) instead of y5.
    Console.WriteLine("output class = {0}", y5);
    int y6 = hmc.Compute(new[] { 2, 0, 2, 2, 3, 3 }); // output is y6 = 2.
    Console.WriteLine("output class = {0}", y6);
}
示例13: CreateModel3
/// <summary>
/// Builds and trains a two-class classifier of forward, five-state HMMs whose
/// Independent emission density combines one 3-symbol discrete channel with
/// four Normal channels, trained on the module-level sequences2/labels2 data.
/// </summary>
public static HiddenMarkovClassifier<Independent> CreateModel3()
{
    // Joint density: one discrete component plus four Normal components.
    var density = new Independent(
        new GeneralDiscreteDistribution(3),
        new NormalDistribution(1),
        new NormalDistribution(2),
        new NormalDistribution(3),
        new NormalDistribution(4));

    var classifier = new HiddenMarkovClassifier<Independent>(
        2, new Forward(5), density);

    // Baum-Welch per inner model; stop once log-likelihood changes less than 0.0001.
    var teacher = new HiddenMarkovClassifierLearning<Independent>(
        classifier,
        i => new BaumWelchLearning<Independent>(classifier.Models[i])
        {
            Tolerance = 0.0001,
            Iterations = 0,
        });

    teacher.Run(sequences2, labels2);
    return classifier;
}
示例14: check4
/// <summary>
/// Cross-checks an HMM classifier against its equivalent CRF potential function:
/// for every word, class, and state, the factor potentials must reproduce the
/// HMM's initial and pairwise transition*emission products, and the per-class
/// log-likelihoods and final classification of model and HCRF must agree.
/// </summary>
private static void check4(double[][][] words, HiddenMarkovClassifier<Independent> model, MarkovMultivariateFunction target, HiddenConditionalRandomField<double[]> hcrf)
{
double actual;
double expected;
foreach (var x in words)
{
for (int c = 0; c < model.Classes; c++)
{
for (int i = 0; i < model[c].States; i++)
{
// Check initial state transitions:
// class prior * initial-state probability * emission density of the first frame.
double xa = model.Priors[c];
double xb = Math.Exp(model[c].Probabilities[i]);
double xc = model[c].Emissions[i].ProbabilityDensityFunction(x[0]);
expected = xa * xb * xc;
// Factor potential for the virtual start transition (-1 -> i) at t = 0.
actual = Math.Exp(target.Factors[c].Compute(-1, i, x, 0, c));
Assert.IsTrue(expected.IsRelativelyEqual(actual, 1e-10));
Assert.IsFalse(double.IsNaN(actual));
}
for (int t = 1; t < x.Length; t++)
{
// Check normal state transitions:
// transition probability * emission density of frame t in the target state.
for (int i = 0; i < model[c].States; i++)
{
for (int j = 0; j < model[c].States; j++)
{
double xb = Math.Exp(model[c].Transitions[i, j]);
double xc = model[c].Emissions[j].ProbabilityDensityFunction(x[t]);
expected = xb * xc;
actual = Math.Exp(target.Factors[c].Compute(i, j, x, t, c));
Assert.IsTrue(expected.IsRelativelyEqual(actual, 1e-10));
Assert.IsFalse(double.IsNaN(actual));
}
}
}
// Per-class sequence likelihoods must match between HMM and HCRF.
actual = Math.Exp(model.LogLikelihood(x, c));
expected = Math.Exp(hcrf.LogLikelihood(x, c));
Assert.AreEqual(expected, actual, 1e-10);
Assert.IsFalse(double.IsNaN(actual));
// The predicted class label must match as well.
actual = model.Compute(x);
expected = hcrf.Compute(x);
Assert.AreEqual(expected, actual);
Assert.IsFalse(double.IsNaN(actual));
}
}
}
示例15: LearnTest2
public void LearnTest2()
{
// Declare some testing data
int[][] inputs = new int[][]
{
new int[] { 0,0,1,2 }, // Class 0
new int[] { 0,1,1,2 }, // Class 0
new int[] { 0,0,0,1,2 }, // Class 0
new int[] { 0,1,2,2,2 }, // Class 0
new int[] { 2,2,1,0 }, // Class 1
new int[] { 2,2,2,1,0 }, // Class 1
new int[] { 2,2,2,1,0 }, // Class 1
new int[] { 2,2,2,2,1 }, // Class 1
};
int[] outputs = new int[]
{
0,0,0,0, // First four sequences are of class 0
1,1,1,1, // Last four sequences are of class 1
};
// We are trying to predict two different classes
int classes = 2;
// Each sequence may have up to 3 symbols (0,1,2)
int symbols = 3;
// Nested models will have 3 states each
int[] states = new int[] { 3, 3 };
// Creates a new Hidden Markov Model Classifier with the given parameters
HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);
// Create a new learning algorithm to train the sequence classifier
var teacher = new HiddenMarkovClassifierLearning(classifier,
// Train each model until the log-likelihood changes less than 0.001
modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
{
Tolerance = 0.001,
Iterations = 0
}
);
// Enable support for sequence rejection
teacher.Rejection = true;
// Train the sequence classifier using the algorithm
double likelihood = teacher.Run(inputs, outputs);
HiddenMarkovModel threshold = classifier.Threshold;
Assert.AreEqual(6, threshold.States);
Assert.AreEqual(classifier.Models[0].Transitions[0, 0], threshold.Transitions[0, 0], 1e-10);
Assert.AreEqual(classifier.Models[0].Transitions[1, 1], threshold.Transitions[1, 1], 1e-10);
Assert.AreEqual(classifier.Models[0].Transitions[2, 2], threshold.Transitions[2, 2], 1e-10);
Assert.AreEqual(classifier.Models[1].Transitions[0, 0], threshold.Transitions[3, 3], 1e-10);
Assert.AreEqual(classifier.Models[1].Transitions[1, 1], threshold.Transitions[4, 4], 1e-10);
Assert.AreEqual(classifier.Models[1].Transitions[2, 2], threshold.Transitions[5, 5], 1e-10);
for (int i = 0; i < 3; i++)
for (int j = 3; j < 6; j++)
Assert.AreEqual(Double.NegativeInfinity, threshold.Transitions[i, j]);
for (int i = 3; i < 6; i++)
for (int j = 0; j < 3; j++)
Assert.AreEqual(Double.NegativeInfinity, threshold.Transitions[i, j]);
Assert.IsFalse(Matrix.HasNaN(threshold.Transitions));
classifier.Sensitivity = 0.5;
// Will assert the models have learned the sequences correctly.
for (int i = 0; i < inputs.Length; i++)
{
int expected = outputs[i];
int actual = classifier.Compute(inputs[i], out likelihood);
Assert.AreEqual(expected, actual);
}
int[] r0 = new int[] { 1, 1, 0, 0, 2 };
double logRejection;
int c = classifier.Compute(r0, out logRejection);
Assert.AreEqual(-1, c);
Assert.AreEqual(0.99906957195279988, logRejection);
Assert.IsFalse(double.IsNaN(logRejection));
logRejection = threshold.Evaluate(r0);
Assert.AreEqual(-4.5653702970734793, logRejection, 1e-10);
Assert.IsFalse(double.IsNaN(logRejection));
//.........这里部分代码省略.........