本文整理汇总了C#中weka.core.Instances.insertAttributeAt方法的典型用法代码示例。如果您正苦于以下问题:C# Instances.insertAttributeAt方法的具体用法?C# Instances.insertAttributeAt怎么用?C# Instances.insertAttributeAt使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类weka.core.Instances
的用法示例。
在下文中一共展示了Instances.insertAttributeAt方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: test
//......... part of the code is omitted here (truncated example) .........
}
// Print the class value and the class label of every instance,
// flagging instances whose class value is missing.
System.Console.Out.WriteLine("\nClass values and labels of instances:\n");
for (i = 0; i < instances.numInstances(); i++)
{
Instance inst = instances.instance(i);
System.Console.Out.Write(inst.classValue() + "\t");
System.Console.Out.Write(inst.toString(inst.classIndex()));
// NOTE(review): re-fetches instances.instance(i) although 'inst' already holds it.
if (instances.instance(i).classIsMissing())
{
System.Console.Out.WriteLine("\tis missing");
}
else
{
System.Console.Out.WriteLine();
}
}
// Create random weights.
// Assign every instance a uniform random weight from [0, 1).
System.Console.Out.WriteLine("\nCreating random weights for instances.");
for (i = 0; i < instances.numInstances(); i++)
{
instances.instance(i).Weight = random.NextDouble();
}
// Print all instances and their weights (and the sum of weights).
System.Console.Out.WriteLine("\nInstances and their weights:\n");
System.Console.Out.WriteLine(instances.instancesAndWeights());
System.Console.Out.Write("\nSum of weights: ");
System.Console.Out.WriteLine(instances.sumOfWeights());
// Insert an attribute
// Copy the data set, then insert a new attribute named "Inserted" at position 0;
// the copy keeps the original 'instances' untouched for the header comparison below.
secondInstances = new Instances(instances);
Attribute testAtt = new Attribute("Inserted");
secondInstances.insertAttributeAt(testAtt, 0);
System.Console.Out.WriteLine("\nSet with inserted attribute:\n");
//UPGRADE_TODO: Method 'java.io.PrintStream.println' was converted to 'System.Console.Out.WriteLine' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javaioPrintStreamprintln_javalangObject'"
System.Console.Out.WriteLine(secondInstances);
System.Console.Out.WriteLine("\nClass name: " + secondInstances.classAttribute().name());
// Delete the attribute
// Remove the attribute just inserted, restoring the original header layout.
secondInstances.deleteAttributeAt(0);
System.Console.Out.WriteLine("\nSet with attribute deleted:\n");
//UPGRADE_TODO: Method 'java.io.PrintStream.println' was converted to 'System.Console.Out.WriteLine' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javaioPrintStreamprintln_javalangObject'"
System.Console.Out.WriteLine(secondInstances);
System.Console.Out.WriteLine("\nClass name: " + secondInstances.classAttribute().name());
// Test if headers are equal
// After insert + delete the header should match the original set again.
System.Console.Out.WriteLine("\nHeaders equal: " + instances.equalHeaders(secondInstances) + "\n");
// Print data in internal format.
// Dump the raw internal double values; missing values are shown as "?".
System.Console.Out.WriteLine("\nData (internal values):\n");
for (i = 0; i < instances.numInstances(); i++)
{
for (j = 0; j < instances.numAttributes(); j++)
{
if (instances.instance(i).isMissing(j))
{
System.Console.Out.Write("? ");
}
else
{
System.Console.Out.Write(instances.instance(i).value_Renamed(j) + " ");
}
}
System.Console.Out.WriteLine();
}
示例2: PerformTraining
/// <summary>
/// Builds the learning model for classification: appends a nominal "Class"
/// attribute to the instance list, labels the instances, then constructs and
/// trains the classifier selected in the parameter dialog.
/// </summary>
/// <param name="WindowForClassificationParam">Dialog supplying the user-selected algorithm and its parameters.</param>
/// <param name="InstancesList">List of instances to train on; a class attribute is appended to it in place.</param>
/// <param name="TextBoxForFeedback">Text box for the results (can be NULL).</param>
/// <param name="PanelForVisualFeedback">Panel to display visual results if available (can be NULL).</param>
/// <param name="ModelEvaluation">Out: set to null here; presumably assigned in the omitted tail of the method — TODO confirm.</param>
/// <param name="IsCellular">True to label with cellular phenotype classes, false to use well classes.</param>
/// <returns>The trained classifier, or null when no algorithm was selected.</returns>
public Classifier PerformTraining(FormForClassificationInfo WindowForClassificationParam, Instances InstancesList, /*int NumberofClusters,*/ RichTextBox TextBoxForFeedback,
Panel PanelForVisualFeedback, out weka.classifiers.Evaluation ModelEvaluation, bool IsCellular)
{
// weka.classifiers.Evaluation ModelEvaluation = null;
// FormForClassificationInfo WindowForClassificationParam = new FormForClassificationInfo(GlobalInfo);
ModelEvaluation = null;
// if (WindowForClassificationParam.ShowDialog() != System.Windows.Forms.DialogResult.OK) return null;
// weka.classifiers.Evaluation ModelEvaluation = new Evaluation(
// Ask the dialog which algorithm the user picked; bail out if none was chosen.
cParamAlgo ClassifAlgoParams = WindowForClassificationParam.GetSelectedAlgoAndParameters();
if (ClassifAlgoParams == null) return null;
//this.Cursor = Cursors.WaitCursor;
// cParamAlgo ClassificationAlgo = WindowForClassificationParam.GetSelectedAlgoAndParameters();
cListValuesParam Parameters = ClassifAlgoParams.GetListValuesParam();
//Classifier this.CurrentClassifier = null;
// -------------------------- Classification -------------------------------
// create the instances
// InstancesList = this.ListInstances;
// Build the nominal label set: cellular phenotype names or well-class names.
this.attValsWithoutClasses = new FastVector();
if (IsCellular)
for (int i = 0; i < cGlobalInfo.ListCellularPhenotypes.Count; i++)
this.attValsWithoutClasses.addElement(cGlobalInfo.ListCellularPhenotypes[i].Name);
else
for (int i = 0; i < cGlobalInfo.ListWellClasses.Count; i++)
this.attValsWithoutClasses.addElement(cGlobalInfo.ListWellClasses[i].Name);
// Append the nominal "Class" attribute as the last attribute of the data set.
InstancesList.insertAttributeAt(new weka.core.Attribute("Class", this.attValsWithoutClasses), InstancesList.numAttributes());
//int A = Classes.Count;
// Label each instance from the parallel 'Classes' list.
// NOTE(review): assumes Classes.Count <= InstancesList.numInstances() — confirm against callers.
for (int i = 0; i < Classes.Count; i++)
InstancesList.get(i).setValue(InstancesList.numAttributes() - 1, Classes[i]);
InstancesList.setClassIndex(InstancesList.numAttributes() - 1);
// Train on a full copy so the classifier does not mutate the caller's list.
weka.core.Instances train = new weka.core.Instances(InstancesList, 0, InstancesList.numInstances());
if (PanelForVisualFeedback != null)
PanelForVisualFeedback.Controls.Clear();
#region List classifiers
#region J48
// J48 decision tree: wire the dialog controls onto the classifier options.
if (ClassifAlgoParams.Name == "J48")
{
this.CurrentClassifier = new weka.classifiers.trees.J48();
((J48)this.CurrentClassifier).setMinNumObj((int)Parameters.ListDoubleValues.Get("numericUpDownMinInstLeaf").Value);
((J48)this.CurrentClassifier).setConfidenceFactor((float)Parameters.ListDoubleValues.Get("numericUpDownConfFactor").Value);
((J48)this.CurrentClassifier).setNumFolds((int)Parameters.ListDoubleValues.Get("numericUpDownNumFolds").Value);
((J48)this.CurrentClassifier).setUnpruned((bool)Parameters.ListCheckValues.Get("checkBoxUnPruned").Value);
((J48)this.CurrentClassifier).setUseLaplace((bool)Parameters.ListCheckValues.Get("checkBoxLaplacianSmoothing").Value);
((J48)this.CurrentClassifier).setSeed((int)Parameters.ListDoubleValues.Get("numericUpDownSeedNumber").Value);
((J48)this.CurrentClassifier).setSubtreeRaising((bool)Parameters.ListCheckValues.Get("checkBoxSubTreeRaising").Value);
// CurrentClassif.SetJ48Tree((J48)this.CurrentClassifier, Classes.Length);
this.CurrentClassifier.buildClassifier(train);
// display results training
// display tree
// Render the learned tree into the feedback panel, sized to fill it.
if (PanelForVisualFeedback != null)
{
GViewer GraphView = DisplayTree(GlobalInfo, ((J48)this.CurrentClassifier), IsCellular).gViewerForTreeClassif;
GraphView.Size = new System.Drawing.Size(PanelForVisualFeedback.Width, PanelForVisualFeedback.Height);
GraphView.Anchor = (AnchorStyles.Bottom | AnchorStyles.Top | AnchorStyles.Left | AnchorStyles.Right);
PanelForVisualFeedback.Controls.Clear();
PanelForVisualFeedback.Controls.Add(GraphView);
}
}
#endregion
#region Random Tree
else if (ClassifAlgoParams.Name == "RandomTree")
{
this.CurrentClassifier = new weka.classifiers.trees.RandomTree();
// A max depth of 0 encodes "unlimited" when the corresponding checkbox is set.
if ((bool)Parameters.ListCheckValues.Get("checkBoxMaxDepthUnlimited").Value)
((RandomTree)this.CurrentClassifier).setMaxDepth(0);
else
((RandomTree)this.CurrentClassifier).setMaxDepth((int)Parameters.ListDoubleValues.Get("numericUpDownMaxDepth").Value);
((RandomTree)this.CurrentClassifier).setSeed((int)Parameters.ListDoubleValues.Get("numericUpDownSeed").Value);
((RandomTree)this.CurrentClassifier).setMinNum((double)Parameters.ListDoubleValues.Get("numericUpDownMinWeight").Value);
// Backfitting enabled: use the requested fold count; 0 disables backfitting.
if ((bool)Parameters.ListCheckValues.Get("checkBoxIsBackfitting").Value)
{
((RandomTree)this.CurrentClassifier).setNumFolds((int)Parameters.ListDoubleValues.Get("numericUpDownBackFittingFolds").Value);
}
else
{
((RandomTree)this.CurrentClassifier).setNumFolds(0);
}
this.CurrentClassifier.buildClassifier(train);
//string StringForTree = ((RandomTree)this.CurrentClassifier).graph().Remove(0, ((RandomTree)this.CurrentClassifier).graph().IndexOf("{") + 2);
//......... part of the code is omitted here (truncated example) .........
示例3: buildClassifier
/// <summary>
/// Builds the boosted classifier (LogitBoost): validates the input data,
/// copies it, creates per-class copies of the base classifier and — when
/// m_NumFolds &gt; 1 — runs cross-validation to choose the number of iterations.
/// </summary>
/// <param name="data">Training instances; must have a non-numeric class and no string attributes.</param>
/// <exception cref="Exception">If the class is numeric, no base classifier is set, or string attributes are present.</exception>
public virtual void buildClassifier(Instances data)
{
m_RandomInstance = new Random(m_Seed);
Instances boostData;
int classIndex = data.classIndex();
// LogitBoost is a classification method — a numeric class cannot be boosted.
if (data.classAttribute().Numeric)
{
throw new Exception("LogitBoost can't handle a numeric class!");
}
if (m_Classifier == null)
{
throw new System.Exception("A base classifier has not been specified!");
}
// Base learners that cannot consume instance weights are driven by resampling instead.
if (!(m_Classifier is WeightedInstancesHandler) && !m_UseResampling)
{
m_UseResampling = true;
}
if (data.checkForStringAttributes())
{
throw new Exception("Cannot handle string attributes!");
}
if (m_Debug)
{
System.Console.Error.WriteLine("Creating copy of the training data");
}
m_NumClasses = data.numClasses();
m_ClassAttribute = data.classAttribute();
// Create a copy of the data
// Work on a copy so the caller's data is not modified, then drop
// instances whose class value is missing.
data = new Instances(data);
data.deleteWithMissingClass();
// Create the base classifiers
if (m_Debug)
{
System.Console.Error.WriteLine("Creating base classifiers");
}
// One array of base-classifier copies per class, NumIterations copies each.
m_Classifiers = new Classifier[m_NumClasses][];
for (int j = 0; j < m_NumClasses; j++)
{
m_Classifiers[j] = Classifier.makeCopies(m_Classifier, this.NumIterations);
}
// Do we want to select the appropriate number of iterations
// using cross-validation?
int bestNumIterations = this.NumIterations;
if (m_NumFolds > 1)
{
if (m_Debug)
{
System.Console.Error.WriteLine("Processing first fold.");
}
// Array for storing the results
double[] results = new double[this.NumIterations];
// Iterate throught the cv-runs
for (int r = 0; r < m_NumRuns; r++)
{
// Stratify the data
data.randomize(m_RandomInstance);
data.stratify(m_NumFolds);
// Perform the cross-validation
for (int i = 0; i < m_NumFolds; i++)
{
// Get train and test folds
Instances train = data.trainCV(m_NumFolds, i, m_RandomInstance);
Instances test = data.testCV(m_NumFolds, i);
// Make class numeric
// Swap the nominal class for a numeric "'pseudo class'" attribute at the
// same index, so regression-style base learners can fit working responses.
Instances trainN = new Instances(train);
trainN.ClassIndex = - 1;
trainN.deleteAttributeAt(classIndex);
trainN.insertAttributeAt(new weka.core.Attribute("'pseudo class'"), classIndex);
trainN.ClassIndex = classIndex;
// Header-only (0-instance) template of the numeric-class data set.
m_NumericClassData = new Instances(trainN, 0);
// Get class values
int numInstances = train.numInstances();
// Jagged-array initialization (converted from Java's double[n][m]):
// trainFs holds per-instance, per-class scores; trainYs the targets.
double[][] tmpArray = new double[numInstances][];
for (int i2 = 0; i2 < numInstances; i2++)
{
tmpArray[i2] = new double[m_NumClasses];
}
double[][] trainFs = tmpArray;
double[][] tmpArray2 = new double[numInstances][];
for (int i3 = 0; i3 < numInstances; i3++)
{
tmpArray2[i3] = new double[m_NumClasses];
}
double[][] trainYs = tmpArray2;
for (int j = 0; j < m_NumClasses; j++)
{
//......... part of the code is omitted here (truncated example) .........