This article collects typical usage examples of the C# Beta.GetMeanAndVariance method. If you are wondering how to call Beta.GetMeanAndVariance in C#, what it does, or what its usage looks like in practice, the curated code examples below may help. You can also explore further usage examples of its containing class, Beta.
Three code examples of the Beta.GetMeanAndVariance method are shown below, sorted by popularity by default.
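Before turning to the full examples, the following minimal sketch shows what GetMeanAndVariance returns and the Beta-to-Gaussian moment matching that all three examples rely on. It assumes the Infer.NET Beta and Gaussian classes; the namespace shown is the one used by current Microsoft.ML.Probabilistic releases (older builds expose the same types under MicrosoftResearch.Infer.Distributions), and the class name and parameter values are illustrative only.

using System;
using Microsoft.ML.Probabilistic.Distributions;

class BetaMomentsSketch
{
    static void Main()
    {
        // Beta(a, b) with a = 1 + clicks and b = 1 + non-clicks, as in the examples below
        Beta b = new Beta(11.0, 91.0);
        double mean, variance;
        b.GetMeanAndVariance(out mean, out variance);
        // For Beta(a, b): mean = a / (a + b), variance = a*b / ((a + b)^2 * (a + b + 1))
        Console.WriteLine("mean = {0:F4}, variance = {1:F6}", mean, variance);
        // Moment-match the Beta with a Gaussian -- the pattern used throughout this page
        Gaussian g = Gaussian.FromMeanAndVariance(mean, variance);
        Console.WriteLine(g);
    }
}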
Example 1: getClickObservations
// Get click observations for each chunk and label class
static private Gaussian[][][] getClickObservations(int numLabs, int chunkSize, int[] labels, int[] clicks, int[] exams)
{
    int nData = labels.Length;
    int numChunks = (nData + chunkSize - 1) / chunkSize;
    Gaussian[][][] chunks = new Gaussian[numChunks][][];
    int[] obsX = new int[numLabs];
    int startChunk = 0;
    int endChunk = 0;
    for (int c = 0; c < numChunks; c++) {
        startChunk = endChunk;
        endChunk = startChunk + chunkSize;
        if (endChunk > nData)
            endChunk = nData;
        int[] labCnts = getLabelCounts(numLabs, labels, startChunk, endChunk);
        chunks[c] = new Gaussian[numLabs][];
        Gaussian[][] currChunk = chunks[c];
        for (int l = 0; l < numLabs; l++) {
            currChunk[l] = new Gaussian[labCnts[l]];
            obsX[l] = 0;
        }
        for (int d = startChunk; d < endChunk; d++) {
            int lab = labels[d];
            int nC = clicks[d];     // Number of clicks
            int nE = exams[d];      // Number of examinations
            int nNC = nE - nC;      // Number of non-clicks
            double b0 = 1.0 + nC;   // Observations of clicks
            double b1 = 1.0 + nNC;  // Observations of no clicks
            Beta b = new Beta(b0, b1);
            double m, v;
            b.GetMeanAndVariance(out m, out v);
            // Moment-match the Beta with a Gaussian observation
            Gaussian g = new Gaussian();
            g.SetMeanAndVariance(m, v);
            currChunk[lab][obsX[lab]++] = g;
        }
    }
    return chunks;
}
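The helper above relies on a getLabelCounts routine that is not shown on this page. As a rough illustration of how it might be called, the following hypothetical snippet uses made-up parallel arrays (the values carry no meaning beyond the example):

// Hypothetical call; labels lie in 0..numLabs-1 and the three arrays are parallel
int numLabs = 3;
int chunkSize = 4;
int[] labels = { 0, 1, 2, 1, 0, 2, 1, 0 };
int[] clicks = { 10, 50, 200, 40, 5, 180, 60, 8 };
int[] exams  = { 20, 100, 400, 100, 10, 400, 150, 20 };
Gaussian[][][] chunks = getClickObservations(numLabs, chunkSize, labels, clicks, exams);
// chunks[c][l] then holds the moment-matched Gaussian click observations
// for label class l within chunk c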
Example 2: LogisticProposalDistribution
/// <summary>
/// Find the Laplace approximation for Beta(Logistic(x)) * Gaussian(x)
/// </summary>
/// <param name="beta">Beta distribution</param>
/// <param name="gauss">Gaussian distribution</param>
/// <returns>A proposal distribution</returns>
public static Gaussian LogisticProposalDistribution(Beta beta, Gaussian gauss)
{
    if (beta.IsUniform())
        return new Gaussian(gauss);
    // If gauss is uniform, m,p = 0 below, and the following code will just ignore the Gaussian
    // and do a Laplace approximation for Beta(Logistic(x))
    double c = beta.TrueCount - 1;
    double d = beta.FalseCount - 1;
    double m = gauss.GetMean();
    double p = gauss.Precision;
    // We want to find the mode of
    //     ln(g(x)) = c.ln(f(x)) + d.ln(1 - f(x)) - 0.5p((x - m)^2) + constant
    // First deriv:
    //     h(x) = (ln(g(x)))' = c.(1 - f(x)) - d.f(x) - p(x - m)
    // Second deriv:
    //     h'(x) = (ln(g(x)))'' = -(c + d).f'(x) - p
    // Use Newton-Raphson to find the unique root of h(x).
    // g(x) is log-concave so Newton-Raphson should converge quickly.
    // Set the initial point by projecting beta
    // to a Gaussian and taking the mean of the product:
    double bMean, bVar;
    beta.GetMeanAndVariance(out bMean, out bVar);
    Gaussian prod = new Gaussian();
    double invLogisticMean = Math.Log(bMean) - Math.Log(1.0 - bMean);
    prod.SetToProduct(Gaussian.FromMeanAndVariance(invLogisticMean, bVar), gauss);
    double xnew = prod.GetMean();
    double x = 0, fx, dfx, hx, dhx = 0;
    int maxIters = 100; // Should only need a handful of iterations
    int cnt = 0;
    do {
        x = xnew;
        fx = MMath.Logistic(x);
        dfx = fx * (1.0 - fx);
        // Find the root of h(x)
        hx = c * (1.0 - fx) - d * fx - p * (x - m);
        dhx = -(c + d) * dfx - p;
        xnew = x - (hx / dhx); // The Newton step
        if (Math.Abs(x - xnew) < 0.00001)
            break;
    } while (++cnt < maxIters);
    if (cnt >= maxIters)
        throw new ApplicationException("Unable to find proposal distribution mode");
    return Gaussian.FromMeanAndPrecision(x, -dhx);
}
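As a hypothetical sanity check of the method above (the parameter values are made up), one might compute a proposal for a Beta pseudo-likelihood on the logistic scale combined with a standard Gaussian prior:

// Hypothetical usage; Beta(5, 2) and N(0, 1) are arbitrary choices
Beta beta = new Beta(5.0, 2.0);
Gaussian prior = Gaussian.FromMeanAndPrecision(0.0, 1.0);
Gaussian proposal = LogisticProposalDistribution(beta, prior);
Console.WriteLine("Proposal: {0}", proposal);
// The proposal mean approximates the mode of Beta(Logistic(x)) * N(x; 0, 1),
// and its precision is the negated second derivative of the log density at that mode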
Example 3: Run
public void Run()
{
    // Number of label classes for this example
    int numLabels = 3;
    // Train the model
    ClickModelMarginals marginals = Model1(numLabels, false);
    if (marginals == null)
        return;
    //-----------------------------------------------------------------------------
    // The prediction model
    //-----------------------------------------------------------------------------
    // The observations will be in the form of an array of distributions
    Variable<int> numberOfObservations = Variable.New<int>().Named("NumObs");
    Range r = new Range(numberOfObservations).Named("N");
    VariableArray<Gaussian> observationDistribs = Variable.Array<Gaussian>(r).Named("Obs");
    // Use the marginals from the trained model
    Variable<double> scoreMean = Variable.Random(marginals.marginalScoreMean).Named("scoreMean");
    Variable<double> scorePrec = Variable.Random(marginals.marginalScorePrec).Named("scorePrec");
    Variable<double> judgePrec = Variable.Random(marginals.marginalJudgePrec).Named("judgePrec");
    Variable<double> clickPrec = Variable.Random(marginals.marginalClickPrec).Named("clickPrec");
    Variable<double>[] thresholds = new Variable<double>[numLabels + 1];
    // Variables for each observation
    VariableArray<double> scores = Variable.Array<double>(r).Named("Scores");
    VariableArray<double> scoresJ = Variable.Array<double>(r).Named("ScoresJ");
    VariableArray<double> scoresC = Variable.Array<double>(r).Named("ScoresC");
    scores[r] = Variable.GaussianFromMeanAndPrecision(scoreMean, scorePrec).ForEach(r);
    scoresJ[r] = Variable.GaussianFromMeanAndPrecision(scores[r], judgePrec);
    scoresC[r] = Variable.GaussianFromMeanAndPrecision(scores[r], clickPrec);
    // Constrain to the click observation
    Variable.ConstrainEqualRandom(scoresC[r], observationDistribs[r]);
    // The threshold variables
    thresholds[0] = Variable.GaussianFromMeanAndVariance(Double.NegativeInfinity, 0.0).Named("thresholds0");
    for (int i = 1; i < thresholds.Length - 1; i++)
        thresholds[i] = Variable.Random(marginals.marginalThresh[i]).Named("thresholds" + i);
    thresholds[thresholds.Length - 1] = Variable.GaussianFromMeanAndVariance(Double.PositiveInfinity, 0.0).Named("thresholds" + (thresholds.Length - 1));
    // Boolean label variables
    VariableArray<bool>[] testLabels = new VariableArray<bool>[numLabels];
    for (int j = 0; j < numLabels; j++) {
        testLabels[j] = Variable.Array<bool>(r).Named("TestLabels" + j);
        testLabels[j][r] = Variable.IsBetween(scoresJ[r], thresholds[j], thresholds[j + 1]);
    }
    //--------------------------------------------------------------------
    // Running the prediction model
    //--------------------------------------------------------------------
    int[] clicks = { 10, 100, 1000, 9, 99, 999, 10, 10, 10 };
    int[] exams = { 20, 200, 2000, 10, 100, 1000, 100, 1000, 10000 };
    Gaussian[] obs = new Gaussian[clicks.Length];
    for (int i = 0; i < clicks.Length; i++) {
        int nC = clicks[i]; // Number of clicks
        int nE = exams[i];  // Number of examinations
        int nNC = nE - nC;  // Number of non-clicks
        Beta b = new Beta(1.0 + nC, 1.0 + nNC);
        double m, v;
        b.GetMeanAndVariance(out m, out v);
        obs[i] = Gaussian.FromMeanAndVariance(m, v);
    }
    numberOfObservations.ObservedValue = obs.Length;
    observationDistribs.ObservedValue = obs;
    InferenceEngine engine = new InferenceEngine();
    Gaussian[] latentScore = engine.Infer<Gaussian[]>(scores);
    Bernoulli[][] predictedLabels = new Bernoulli[numLabels][];
    for (int j = 0; j < numLabels; j++)
        predictedLabels[j] = engine.Infer<Bernoulli[]>(testLabels[j]);
    Console.WriteLine("\n****** Some Predictions ******\n");
    Console.WriteLine("Clicks\tExams\t\tScore\t\tLabel0\t\tLabel1\t\tLabel2");
    for (int i = 0; i < clicks.Length; i++) {
        Console.WriteLine("{0}\t{1}\t\t{2}\t\t{3}\t\t{4}\t\t{5}",
            clicks[i], exams[i], latentScore[i].GetMean().ToString("F4"),
            predictedLabels[0][i].GetProbTrue().ToString("F4"),
            predictedLabels[1][i].GetProbTrue().ToString("F4"),
            predictedLabels[2][i].GetProbTrue().ToString("F4"));
    }
}
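For the first test point above (10 clicks out of 20 examinations), the loop builds Beta(11, 11): its mean is 11/22 = 0.5 and its variance is 11·11/(22² · 23) ≈ 0.0109, so the click observation passed to the prediction model is approximately Gaussian(0.5, 0.0109). The exact predictions printed by Run depend on the trained marginals returned by Model1, which is not shown on this page.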