This article collects typical usage examples of the weka.core.Instances.resampleWithWeights method in C#. If you have been wondering how to use Instances.resampleWithWeights in C#, or are looking for concrete examples of it, the hand-picked code examples below may help. You can also explore further usage examples of the containing class, weka.core.Instances.
One code example of Instances.resampleWithWeights is shown below; examples are sorted by popularity by default. You can upvote the examples you like or find useful, and your ratings help the system recommend better C# code examples.
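Before the full example, here is a minimal sketch of the basic call pattern. It is not taken from the example below: data stands for an already-loaded weka.core.Instances object with its class index set, and rand for an instance of whatever Random type this C# port of Weka expects for resampleWithWeights (java.util.Random in IKVM-based builds); both names are placeholders.

    // Collect the per-instance weights to resample by (any non-negative values work).
    double[] weights = new double[data.numInstances()];
    for (int i = 0; i < weights.Length; i++)
    {
        weights[i] = data.instance(i).weight();
    }
    // Draw a bootstrap sample of the same size as data, with selection probabilities
    // proportional to the supplied weights; data itself is left unchanged.
    Instances resampled = data.resampleWithWeights(rand, weights);

The example that follows uses this same pattern inside the m_UseResampling branch of a boosting iteration.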
Example 1: performIteration
/// <summary> Performs one boosting iteration.</summary>
private void performIteration(double[][] trainYs, double[][] trainFs, double[][] probs, Instances data, double origSumOfWeights)
{
    if (m_Debug)
    {
        System.Console.Error.WriteLine("Training classifier " + (m_NumGenerated + 1));
    }
    // Build the new models
    for (int j = 0; j < m_NumClasses; j++)
    {
        if (m_Debug)
        {
            System.Console.Error.WriteLine("\t...for class " + (j + 1) + " (" + m_ClassAttribute.name() + "=" + m_ClassAttribute.value_Renamed(j) + ")");
        }
        // Make copy because we want to save the weights
        Instances boostData = new Instances(data);
        // Set instance pseudoclass and weights
        for (int i = 0; i < probs.Length; i++)
        {
            // Compute response and weight
            double p = probs[i][j];
            double z, actual = trainYs[i][j];
            if (actual == 1 - m_Offset)
            {
                z = 1.0 / p;
                if (z > Z_MAX)
                {
                    // threshold
                    z = Z_MAX;
                }
            }
            else
            {
                z = (-1.0) / (1.0 - p);
                if (z < -Z_MAX)
                {
                    // threshold
                    z = -Z_MAX;
                }
            }
            double w = (actual - p) / z;
            // Set values for instance
            Instance current = boostData.instance(i);
            current.setValue(boostData.classIndex(), z);
            current.Weight = current.weight() * w;
        }
        // Scale the weights (helps with some base learners)
        double sumOfWeights = boostData.sumOfWeights();
        double scalingFactor = (double) origSumOfWeights / sumOfWeights;
        for (int i = 0; i < probs.Length; i++)
        {
            Instance current = boostData.instance(i);
            current.Weight = current.weight() * scalingFactor;
        }
        // Select instances to train the classifier on
        Instances trainData = boostData;
        if (m_WeightThreshold < 100)
        {
            trainData = selectWeightQuantile(boostData, (double) m_WeightThreshold / 100);
        }
        else
        {
            if (m_UseResampling)
            {
                double[] weights = new double[boostData.numInstances()];
                for (int kk = 0; kk < weights.Length; kk++)
                {
                    weights[kk] = boostData.instance(kk).weight();
                }
                trainData = boostData.resampleWithWeights(m_RandomInstance, weights);
            }
        }
        // Build the classifier
        m_Classifiers[j][m_NumGenerated].buildClassifier(trainData);
    }
    // Evaluate / increment trainFs from the classifier
    for (int i = 0; i < trainFs.Length; i++)
    {
        double[] pred = new double[m_NumClasses];
        double predSum = 0;
        for (int j = 0; j < m_NumClasses; j++)
        {
            pred[j] = m_Shrinkage * m_Classifiers[j][m_NumGenerated].classifyInstance(data.instance(i));
            predSum += pred[j];
        }
        predSum /= m_NumClasses;
        for (int j = 0; j < m_NumClasses; j++)
        {
            trainFs[i][j] += (pred[j] - predSum) * (m_NumClasses - 1) / m_NumClasses;
        }
        //......... part of the code is omitted here .........
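In this example, resampleWithWeights serves as a common alternative for base learners that cannot exploit instance weights directly: instead of training on the weighted copy boostData, the code copies the current weights into an array and draws a weighted bootstrap sample, so the base classifier sees an unweighted training set whose composition reflects the boosting weights. When m_WeightThreshold is below 100, the weight-quantile selection branch is used instead of resampling.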