This page collects typical usage examples of the C# method weka.core.Instances.setClassIndex. If you are unsure what Instances.setClassIndex does in C#, or how to call it, the curated code samples below may help. You can also read more about its containing class, weka.core.Instances.
The following shows 10 code examples of Instances.setClassIndex, sorted by popularity by default. You can vote for the examples you like or find useful; your feedback helps surface better C# code samples.
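Before the examples, here is a minimal sketch of the basic pattern: WEKA (used from C# through an IKVM-compiled weka.dll) does not know which attribute is the class label until setClassIndex is called, so it is usually set right after loading the data. The file name data.arff and the NaiveBayes choice below are placeholders, not taken from any of the examples.

using weka.core;
using weka.classifiers.bayes;

public static class SetClassIndexSketch
{
    public static void Run()
    {
        // Load an ARFF file; the ARFF format carries no class information, so classIndex() starts at -1.
        Instances data = new Instances(new java.io.BufferedReader(new java.io.FileReader("data.arff")));
        if (data.classIndex() == -1)
            data.setClassIndex(data.numAttributes() - 1); // treat the last attribute as the class

        // Training would fail with an exception if the class index were never set.
        NaiveBayes nb = new NaiveBayes();
        nb.buildClassifier(data);
    }
}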
Example 1: Classify
public static string Classify(bool useRubine, float duration, bool righthandedness, List<float> SpeakerAngles, PointCollection pointHist, StylusPointCollection S, List<List<int>> hist, List<List<int>> ihist)
{
    // Convert all parameters to the format used in GestureTests
    List<Vector2> InterpretedPoints = new List<Vector2>();
    List<Vector2> StylusPoints = new List<Vector2>();
    List<Vector2> VelocityHistory = new List<Vector2>();
    List<Vector2> InverseVelocityHistory = new List<Vector2>();
    foreach (Point P in pointHist)
        InterpretedPoints.Add(new Vector2((float)P.X, (float)P.Y));
    foreach (StylusPoint P in S)
        StylusPoints.Add(new Vector2((float)P.X, (float)P.Y));
    for (int i = 0; i < hist[0].Count; i++)
    {
        VelocityHistory.Add(new Vector2(hist[0][i], hist[1][i]));
        InverseVelocityHistory.Add(new Vector2(ihist[0][i], ihist[1][i]));
    }

    // Create a new sample, compute the features, and classify
    GS = new GestureSample(GestureTests.Types.GestureType.unknown, righthandedness, duration, SpeakerAngles, InterpretedPoints, StylusPoints, VelocityHistory, InverseVelocityHistory);
    GS.ComputeFeatures(GestureFeatures.PointsStroke);
    if (useRubine)
        return EC.Recognizer.Classify(GS).ToString();

    // Otherwise classify with the WEKA model: write the sample to an ARFF file,
    // reload it as Instances, and mark attribute 0 as the class before predicting.
    WriteARFF();
    Instances test = new Instances(new java.io.FileReader("outfile.arff"));
    test.setClassIndex(0);
    double clsLabel = cls.classifyInstance(test.instance(0));
    test.instance(0).setClassValue(clsLabel);

    // Return the appropriate label
    return ((GestureType2D)((int)clsLabel + 1)).ToString();
}
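Example 1 depends on class-level members that are not shown here (cls, GS, EC, WriteARFF). As a purely illustrative sketch, the cls classifier it uses could have been prepared along these lines; the file name training.arff and the choice of J48 are assumptions, not part of the original project.

// Hypothetical setup of the `cls` field used in Example 1.
static weka.classifiers.Classifier cls;

static void TrainGestureClassifier()
{
    // The training file name is a placeholder; the class is read from attribute 0,
    // matching test.setClassIndex(0) in the Classify method above.
    weka.core.Instances train = new weka.core.Instances(new java.io.FileReader("training.arff"));
    train.setClassIndex(0);

    cls = new weka.classifiers.trees.J48(); // assumed algorithm
    cls.buildClassifier(train);
}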
Example 2: classifyTest
// Test the classification result of each map that a user played,
// with the data available as if they were playing through it
public static void classifyTest(String dataString, String playerID)
{
    String results = "";
    try
    {
        java.io.StringReader stringReader = new java.io.StringReader(dataString);
        java.io.BufferedReader buffReader = new java.io.BufferedReader(stringReader);
        /* NOTE THAT FOR NAIVE BAYES ALL WEIGHTS CAN BE = 1 */
        //weka.core.converters.ConverterUtils.DataSource source = new weka.core.converters.ConverterUtils.DataSource("iris.arff");
        weka.core.Instances data = new weka.core.Instances(buffReader); //source.getDataSet();
        // Set the class attribute if the data format does not provide this information
        // (the XRFF format, for example, stores the class attribute information as well)
        if (data.classIndex() == -1)
            data.setClassIndex(data.numAttributes() - 1);

        weka.classifiers.Classifier cl;
        for (int i = 3; i < data.numInstances(); i++)
        {
            cl = new weka.classifiers.bayes.NaiveBayes();
            //cl = new weka.classifiers.trees.J48();
            //cl = new weka.classifiers.lazy.IB1();
            //cl = new weka.classifiers.functions.MultilayerPerceptron();
            //((weka.classifiers.functions.MultilayerPerceptron)cl).setHiddenLayers("12"); // only valid when cl is a MultilayerPerceptron

            // Train on the first i instances and cross-validate on that subset
            weka.core.Instances subset = new weka.core.Instances(data, 0, i);
            cl.buildClassifier(subset);
            weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(subset);
            eval.crossValidateModel(cl, subset, 3, new java.util.Random(1));
            results = results + eval.pctCorrect(); // For accuracy measurement

            /* For Matthews correlation coefficient */
            //double TP = eval.numTruePositives(1);
            //double FP = eval.numFalsePositives(1);
            //double TN = eval.numTrueNegatives(1);
            //double FN = eval.numFalseNegatives(1);
            //double correlationCoeff = ((TP*TN)-(FP*FN))/Math.Sqrt((TP+FP)*(TP+FN)*(TN+FP)*(TN+FN));
            //results = results + correlationCoeff;

            if (i != data.numInstances() - 1)
                results = results + ", ";
            if (i == data.numInstances() - 1)
                Debug.Log("Player: " + playerID + ", Num Maps: " + data.numInstances() + ", AUC: " + eval.areaUnderROC(1));
        }
    }
    catch (java.lang.Exception ex)
    {
        Debug.LogError(ex.getMessage());
    }

    // Write values to a file for reading into MATLAB
    // For accuracy
    StreamWriter writer = new StreamWriter("DataForMatlab/" + playerID + "_CrossFoldValidations_NeuralNet.txt");
    //StreamWriter writer = new StreamWriter("DataForMatlab/" + playerID + "_CrossFoldCorrCoeff.txt"); // For Matthews cc
    writer.WriteLine(results);
    writer.Close();
    Debug.Log(playerID + " has been written to file");
}
Example 3: InitializeClassifier
/* Use when the player logs in to initially create the classifier with data from the server */
public void InitializeClassifier(String dataString)
{
    try
    {
        java.io.StringReader stringReader = new java.io.StringReader(dataString);
        java.io.BufferedReader buffReader = new java.io.BufferedReader(stringReader);
        playerData = new weka.core.Instances(buffReader);

        /* State where in each Instance the class attribute is, if it's not already specified by the file */
        if (playerData.classIndex() == -1)
            playerData.setClassIndex(playerData.numAttributes() - 1);

        /* NAIVE BAYES */
        //classifier = new weka.classifiers.bayes.NaiveBayes();

        /* NEURAL NET */
        //classifier = new weka.classifiers.functions.MultilayerPerceptron();
        //((weka.classifiers.functions.MultilayerPerceptron)classifier).setHiddenLayers("12");

        /* J48 TREE */
        //classifier = new weka.classifiers.trees.J48();

        /* IB1 NEAREST NEIGHBOUR */
        //classifier = new weka.classifiers.lazy.IB1();

        /* RANDOM FOREST */
        classifier = new weka.classifiers.trees.RandomForest();

        classifier.buildClassifier(playerData);
        Debug.Log("Initialized Classifier");
    }
    catch (java.lang.Exception ex)
    {
        Debug.LogError(ex.getMessage());
    }
}
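Once InitializeClassifier has run, the trained model is typically asked for predictions on new instances. The method below is not part of the original class; it is a hedged sketch of how the classifier and playerData fields from this example could be used together.

// Hypothetical helper: predict the class of one instance with the classifier built above.
public void PredictExample()
{
    weka.core.Instance inst = playerData.instance(0); // any instance sharing playerData's header
    double label = classifier.classifyInstance(inst);
    double[] dist = classifier.distributionForInstance(inst); // per-class probabilities

    Debug.Log("Predicted class: " + playerData.classAttribute().value((int)label)
              + " with probability " + dist[(int)label]);
}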
Example 4: CreateInstanceForNClasses
/// <summary>
/// Create a single instance for WEKA
/// </summary>
/// <param name="InfoClass">Class information (provides the number of classes)</param>
/// <returns>the weka Instances</returns>
public Instances CreateInstanceForNClasses(cInfoClass InfoClass)
{
    List<double> AverageList = new List<double>();
    for (int i = 0; i < Parent.ListDescriptors.Count; i++)
        if (Parent.ListDescriptors[i].IsActive()) AverageList.Add(GetAverageValuesList()[i]);

    weka.core.FastVector atts = new FastVector();
    List<string> NameList = Parent.ListDescriptors.GetListNameActives();
    for (int i = 0; i < NameList.Count; i++)
        atts.addElement(new weka.core.Attribute(NameList[i]));

    weka.core.FastVector attVals = new FastVector();
    for (int i = 0; i < InfoClass.NumberOfClass; i++)
        attVals.addElement("Class" + i);
    atts.addElement(new weka.core.Attribute("Class__", attVals));

    Instances data1 = new Instances("SingleInstance", atts, 0);

    double[] newTable = new double[AverageList.Count + 1];
    Array.Copy(AverageList.ToArray(), 0, newTable, 0, AverageList.Count);
    //newTable[AverageList.Count] = 1;

    data1.add(new DenseInstance(1.0, newTable));
    data1.setClassIndex(data1.numAttributes() - 1);

    return data1;
}
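The single-row dataset returned by CreateInstanceForNClasses is only useful once a model scores it. A possible caller is sketched below; trainedModel is an assumed, previously built weka.classifiers.Classifier, not something defined in this example.

// Hypothetical caller; `trainedModel` (a previously built weka.classifiers.Classifier) is assumed.
public string PredictSingle(cInfoClass InfoClass, weka.classifiers.Classifier trainedModel)
{
    Instances single = CreateInstanceForNClasses(InfoClass);
    double predicted = trainedModel.classifyInstance(single.instance(0));
    single.instance(0).setClassValue(predicted);
    return single.classAttribute().value((int)predicted); // e.g. "Class2"
}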
Example 5: analyze
// ---- OPERATIONS ----

/// <summary>
/// Analyze the time series data. The similarity matrices are created
/// and filled with Euclidean distances based on the tolerance values
/// for similarity.
/// </summary>
/// <param name="data">data to be analyzed</param>
public override void analyze(Instances data)
{
    data.setClassIndex(data.numAttributes() - 1);
    m_data = data;
    m_rangeTemplates.setUpper(data.numAttributes());

    //Date startFT = new Date();
    // compute fourier transform
    FourierTransform dftFilter = new FourierTransform();
    dftFilter.setInputFormat(data);
    dftFilter.setNumCoeffs(getNumCoeffs());
    dftFilter.setUseFFT(getUseFFT());
    Instances fourierdata = Filter.useFilter(data, dftFilter);
    Date endFT = new Date();
    // time taken for FT
    //m_DFTTime = new Date(endFT.getTime() - startFT.getTime());

    int numdim = data.numAttributes();
    //ORIGINAL LINE: m_distancesFreq = new double[numdim][numdim];
    //JAVA TO VB & C# CONVERTER NOTE: The following call to the 'RectangularArrays' helper class reproduces the rectangular array initialization that is automatic in Java:
    m_distancesFreq = RectangularArrays.ReturnRectangularDoubleArray(numdim, numdim);
    //ORIGINAL LINE: m_distancesTime = new double[numdim][numdim];
    //JAVA TO VB & C# CONVERTER NOTE: The following call to the 'RectangularArrays' helper class reproduces the rectangular array initialization that is automatic in Java:
    m_distancesTime = RectangularArrays.ReturnRectangularDoubleArray(numdim, numdim);

    //long ftDistTime = 0;
    //long tDistTime = 0;

    // compute similarity matrices
    for (int i = 0; i < data.numAttributes(); ++i)
    {
        for (int j = 0; j < i; j++)
        {
            // not for template sequences
            if (m_rangeTemplates.isInRange(i) && m_rangeTemplates.isInRange(j))
            {
                continue;
            }

            //Date startFTDist = new Date();
            // Compute the Euclidean distance between 2 dims using FT
            double[] reCT = fourierdata.attributeToDoubleArray(2 * i);
            double[] imCT = fourierdata.attributeToDoubleArray(2 * i + 1);
            double[] reCS = fourierdata.attributeToDoubleArray(2 * j);
            double[] imCS = fourierdata.attributeToDoubleArray(2 * j + 1);
            m_distancesFreq[i][j] = computeEuclidean(reCT, imCT, reCS, imCS);

            // if found similar using FT
            if (m_distancesFreq[i][j] <= m_epsilon)
            {
                // then compute normal Euclidean distances between the 2 dims
                double[] x = data.attributeToDoubleArray(i);
                double[] y = data.attributeToDoubleArray(j);
                m_distancesTime[i][j] = computeEuclidean(x, y);
            }

            //Date endFTDist = new Date();
            // time taken for computing similarity based on FT
            //ftDistTime += (endFTDist.getTime() - startFTDist.getTime());

            // Date startDist = new Date();
            //// compute similarity matrices (brute force)
            // double[] x1 = data.attributeToDoubleArray(i);
            // double[] y1 = data.attributeToDoubleArray(j);
            // computeEuclidean(x1, y1);
            // Date endDist = new Date();
            //// time taken for computing similarity based brute force method
            // tDistTime += (endDist.getTime() - startDist.getTime());
        }
    }
    //m_FTEuclideanTime = new Date(ftDistTime);
    //m_EuclideanTime = new Date(tDistTime);
}
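The computeEuclidean helpers called above are not included in this excerpt. A plausible shape for them, assuming a plain Euclidean distance in the time domain and over real/imaginary Fourier coefficients in the frequency domain:

// Sketch only: the real implementations are not shown in this example.
private static double computeEuclidean(double[] x, double[] y)
{
    double sum = 0.0;
    for (int k = 0; k < x.Length; k++)
        sum += (x[k] - y[k]) * (x[k] - y[k]);
    return System.Math.Sqrt(sum);
}

private static double computeEuclidean(double[] re1, double[] im1, double[] re2, double[] im2)
{
    // Distance between two series represented by their Fourier coefficients.
    double sum = 0.0;
    for (int k = 0; k < re1.Length; k++)
        sum += (re1[k] - re2[k]) * (re1[k] - re2[k]) + (im1[k] - im2[k]) * (im1[k] - im2[k]);
    return System.Math.Sqrt(sum);
}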
Example 6: PerformTraining
/// <summary>
/// Build the learning model for classification
/// </summary>
/// <param name="WindowForClassificationParam">Dialog holding the selected algorithm and its parameters</param>
/// <param name="InstancesList">list of instances</param>
/// <param name="TextBoxForFeedback">Text box for the results (can be NULL)</param>
/// <param name="PanelForVisualFeedback">Panel to display visual results if available (can be NULL)</param>
/// <param name="ModelEvaluation">Returns the model evaluation</param>
/// <param name="IsCellular">Whether classes are cellular phenotypes rather than well classes</param>
public Classifier PerformTraining(FormForClassificationInfo WindowForClassificationParam, Instances InstancesList, /*int NumberofClusters,*/ RichTextBox TextBoxForFeedback,
    Panel PanelForVisualFeedback, out weka.classifiers.Evaluation ModelEvaluation, bool IsCellular)
{
    // weka.classifiers.Evaluation ModelEvaluation = null;
    // FormForClassificationInfo WindowForClassificationParam = new FormForClassificationInfo(GlobalInfo);
    ModelEvaluation = null;
    // if (WindowForClassificationParam.ShowDialog() != System.Windows.Forms.DialogResult.OK) return null;
    // weka.classifiers.Evaluation ModelEvaluation = new Evaluation(

    cParamAlgo ClassifAlgoParams = WindowForClassificationParam.GetSelectedAlgoAndParameters();
    if (ClassifAlgoParams == null) return null;

    //this.Cursor = Cursors.WaitCursor;
    // cParamAlgo ClassificationAlgo = WindowForClassificationParam.GetSelectedAlgoAndParameters();
    cListValuesParam Parameters = ClassifAlgoParams.GetListValuesParam();

    //Classifier this.CurrentClassifier = null;

    // -------------------------- Classification -------------------------------
    // create the instances
    // InstancesList = this.ListInstances;
    this.attValsWithoutClasses = new FastVector();

    if (IsCellular)
        for (int i = 0; i < cGlobalInfo.ListCellularPhenotypes.Count; i++)
            this.attValsWithoutClasses.addElement(cGlobalInfo.ListCellularPhenotypes[i].Name);
    else
        for (int i = 0; i < cGlobalInfo.ListWellClasses.Count; i++)
            this.attValsWithoutClasses.addElement(cGlobalInfo.ListWellClasses[i].Name);

    InstancesList.insertAttributeAt(new weka.core.Attribute("Class", this.attValsWithoutClasses), InstancesList.numAttributes());
    //int A = Classes.Count;
    for (int i = 0; i < Classes.Count; i++)
        InstancesList.get(i).setValue(InstancesList.numAttributes() - 1, Classes[i]);

    InstancesList.setClassIndex(InstancesList.numAttributes() - 1);
    weka.core.Instances train = new weka.core.Instances(InstancesList, 0, InstancesList.numInstances());

    if (PanelForVisualFeedback != null)
        PanelForVisualFeedback.Controls.Clear();

    #region List classifiers

    #region J48
    if (ClassifAlgoParams.Name == "J48")
    {
        this.CurrentClassifier = new weka.classifiers.trees.J48();
        ((J48)this.CurrentClassifier).setMinNumObj((int)Parameters.ListDoubleValues.Get("numericUpDownMinInstLeaf").Value);
        ((J48)this.CurrentClassifier).setConfidenceFactor((float)Parameters.ListDoubleValues.Get("numericUpDownConfFactor").Value);
        ((J48)this.CurrentClassifier).setNumFolds((int)Parameters.ListDoubleValues.Get("numericUpDownNumFolds").Value);
        ((J48)this.CurrentClassifier).setUnpruned((bool)Parameters.ListCheckValues.Get("checkBoxUnPruned").Value);
        ((J48)this.CurrentClassifier).setUseLaplace((bool)Parameters.ListCheckValues.Get("checkBoxLaplacianSmoothing").Value);
        ((J48)this.CurrentClassifier).setSeed((int)Parameters.ListDoubleValues.Get("numericUpDownSeedNumber").Value);
        ((J48)this.CurrentClassifier).setSubtreeRaising((bool)Parameters.ListCheckValues.Get("checkBoxSubTreeRaising").Value);
        // CurrentClassif.SetJ48Tree((J48)this.CurrentClassifier, Classes.Length);

        this.CurrentClassifier.buildClassifier(train);

        // display training results
        // display tree
        if (PanelForVisualFeedback != null)
        {
            GViewer GraphView = DisplayTree(GlobalInfo, ((J48)this.CurrentClassifier), IsCellular).gViewerForTreeClassif;
            GraphView.Size = new System.Drawing.Size(PanelForVisualFeedback.Width, PanelForVisualFeedback.Height);
            GraphView.Anchor = (AnchorStyles.Bottom | AnchorStyles.Top | AnchorStyles.Left | AnchorStyles.Right);
            PanelForVisualFeedback.Controls.Clear();
            PanelForVisualFeedback.Controls.Add(GraphView);
        }
    }
    #endregion

    #region Random Tree
    else if (ClassifAlgoParams.Name == "RandomTree")
    {
        this.CurrentClassifier = new weka.classifiers.trees.RandomTree();

        if ((bool)Parameters.ListCheckValues.Get("checkBoxMaxDepthUnlimited").Value)
            ((RandomTree)this.CurrentClassifier).setMaxDepth(0);
        else
            ((RandomTree)this.CurrentClassifier).setMaxDepth((int)Parameters.ListDoubleValues.Get("numericUpDownMaxDepth").Value);

        ((RandomTree)this.CurrentClassifier).setSeed((int)Parameters.ListDoubleValues.Get("numericUpDownSeed").Value);
        ((RandomTree)this.CurrentClassifier).setMinNum((double)Parameters.ListDoubleValues.Get("numericUpDownMinWeight").Value);

        if ((bool)Parameters.ListCheckValues.Get("checkBoxIsBackfitting").Value)
        {
            ((RandomTree)this.CurrentClassifier).setNumFolds((int)Parameters.ListDoubleValues.Get("numericUpDownBackFittingFolds").Value);
        }
        else
        {
            ((RandomTree)this.CurrentClassifier).setNumFolds(0);
        }

        this.CurrentClassifier.buildClassifier(train);
        //string StringForTree = ((RandomTree)this.CurrentClassifier).graph().Remove(0, ((RandomTree)this.CurrentClassifier).graph().IndexOf("{") + 2);
//......... the remainder of this method is omitted here .........
Example 7: CreateInstancesWithClassesWithPlateBasedDescriptor
/// <summary>
/// Create an instances structure with classes for supervised methods
/// </summary>
/// <param name="NumberOfClass">Number of classes</param>
/// <returns>the weka Instances</returns>
public Instances CreateInstancesWithClassesWithPlateBasedDescriptor(int NumberOfClass)
{
    weka.core.FastVector atts = new FastVector();
    int columnNo = 0;
    for (int i = 0; i < ParentScreening.ListPlateBaseddescriptorNames.Count; i++)
    {
        atts.addElement(new weka.core.Attribute(ParentScreening.ListPlateBaseddescriptorNames[i]));
        columnNo++;
    }

    weka.core.FastVector attVals = new FastVector();
    for (int i = 0; i < NumberOfClass; i++)
        attVals.addElement("Class" + (i).ToString());
    atts.addElement(new weka.core.Attribute("Class", attVals));

    Instances data1 = new Instances("MyRelation", atts, 0);

    int IdxWell = 0;
    foreach (cWell CurrentWell in this.ListActiveWells)
    {
        if (CurrentWell.GetCurrentClassIdx() == -1) continue;
        double[] vals = new double[data1.numAttributes()];

        int IdxCol = 0;
        for (int Col = 0; Col < ParentScreening.ListPlateBaseddescriptorNames.Count; Col++)
        {
            vals[IdxCol++] = CurrentWell.ListPlateBasedDescriptors[Col].GetValue();
        }
        vals[columnNo] = CurrentWell.GetCurrentClassIdx();
        data1.add(new DenseInstance(1.0, vals));
        IdxWell++;
    }
    data1.setClassIndex(data1.numAttributes() - 1);

    return data1;
}
Example 8: CreateInstancesWithClasses
/// <summary>
/// Create an instances structure with classes for supervised methods
/// </summary>
/// <param name="ListClassSelected">For each class, whether it should be included</param>
/// <returns>the weka Instances</returns>
public Instances CreateInstancesWithClasses(List<bool> ListClassSelected)
{
    weka.core.FastVector atts = new FastVector();
    int columnNo = 0;
    for (int i = 0; i < ParentScreening.ListDescriptors.Count; i++)
    {
        if (ParentScreening.ListDescriptors[i].IsActive() == false) continue;
        atts.addElement(new weka.core.Attribute(ParentScreening.ListDescriptors[i].GetName()));
        columnNo++;
    }

    weka.core.FastVector attVals = new FastVector();
    foreach (var item in cGlobalInfo.ListWellClasses)
    {
        attVals.addElement(item.Name);
    }
    atts.addElement(new weka.core.Attribute("ClassAttribute", attVals));

    Instances data1 = new Instances("MyRelation", atts, 0);

    int IdxWell = 0;
    foreach (cWell CurrentWell in this.ListActiveWells)
    {
        if (!ListClassSelected[CurrentWell.GetCurrentClassIdx()]) continue;
        double[] vals = new double[data1.numAttributes()];

        int IdxCol = 0;
        for (int Col = 0; Col < ParentScreening.ListDescriptors.Count; Col++)
        {
            if (ParentScreening.ListDescriptors[Col].IsActive() == false) continue;
            vals[IdxCol++] = CurrentWell.ListSignatures[Col].GetValue();
        }
        vals[columnNo] = CurrentWell.GetCurrentClassIdx();
        data1.add(new DenseInstance(1.0, vals));
        IdxWell++;
    }
    data1.setClassIndex(data1.numAttributes() - 1);

    return data1;
}
Example 9: classifyTest
// Test the classification result of each map that a user played,
// with the data available as if they were playing through it
public static void classifyTest(String dataString, String playerID)
{
    try
    {
        java.io.StringReader stringReader = new java.io.StringReader(dataString);
        java.io.BufferedReader buffReader = new java.io.BufferedReader(stringReader);
        /* NOTE THAT FOR NAIVE BAYES ALL WEIGHTS CAN BE = 1 */
        //weka.core.converters.ConverterUtils.DataSource source = new weka.core.converters.ConverterUtils.DataSource("iris.arff");
        weka.core.Instances thisData = new weka.core.Instances(buffReader); //source.getDataSet();
        if (thisData.classIndex() == -1)
            thisData.setClassIndex(thisData.numAttributes() - 1);

        weka.core.Instances thisUniqueData = new weka.core.Instances(thisData);
        if (thisUniqueData.classIndex() == -1)
            thisUniqueData.setClassIndex(thisUniqueData.numAttributes() - 1);
        thisUniqueData.delete();

        if (allUniqueData == null)
        {
            allUniqueData = new weka.core.Instances(thisData);
            if (allUniqueData.classIndex() == -1)
                allUniqueData.setClassIndex(allUniqueData.numAttributes() - 1);
            allUniqueData.delete();
        }

        weka.core.InstanceComparator com = new weka.core.InstanceComparator(false);

        for (int i = 0; i < thisData.numInstances(); i++)
        {
            bool dup = false;
            for (int j = 0; j < allUniqueData.numInstances(); j++)
            {
                if (com.compare(thisData.instance(i), allUniqueData.instance(j)) == 0)
                {
                    Debug.Log("Duplicate found!");
                    dup = true;
                    break;
                }
            }
            if (!dup)
                allUniqueData.add(thisData.instance(i));
            else
                dupInstances++;
        }

        for (int i = 0; i < thisData.numInstances(); i++)
        {
            bool dup = false;
            for (int j = 0; j < thisUniqueData.numInstances(); j++)
            {
                if (com.compare(thisData.instance(i), thisUniqueData.instance(j)) == 0)
                {
                    Debug.Log("Duplicate found!");
                    dup = true;
                    break;
                }
            }
            if (!dup)
                thisUniqueData.add(thisData.instance(i));
            else
                dupInstancesSamePlayer++;
        }

        //Debug.Log("All Data Instance Count = " + thisData.numInstances());
        //Debug.Log("Unique Data Instance Count = " + thisUniqueData.numInstances());
        //Debug.Log("Done!");
    }
    catch (java.lang.Exception ex)
    {
        Debug.LogError(ex.getMessage());
    }
}
Example 10: createWhyInstances
private Instances createWhyInstances()
{
    FastVector fvWhy = createWhyFastVector();
    Instances whyInstances = new Instances("WhyInstances", fvWhy, listSecondaryWhyCandidates.Count);

    foreach (Token candidate in listSecondaryWhyCandidates)
    {
        if (candidate.Value == null) continue;
        Instance whyInstance = createSingleWhyInstance(fvWhy, candidate);
        whyInstance.setDataset(whyInstances);
        whyInstances.add(whyInstance);
    }

    whyInstances.setClassIndex(fvWhy.size() - 1);
    return whyInstances;
}
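The createWhyFastVector helper determines the attribute layout and, in particular, appends the class attribute last, which is why the class index above is fvWhy.size() - 1. Its real implementation is not shown in this example; below is a purely hypothetical sketch with placeholder feature and class names.

// Hypothetical sketch of createWhyFastVector; attribute and class names are placeholders.
private FastVector createWhyFastVector()
{
    FastVector fv = new FastVector();
    fv.addElement(new weka.core.Attribute("distanceFromVerb"));    // placeholder numeric features
    fv.addElement(new weka.core.Attribute("tokenPosition"));

    FastVector classValues = new FastVector();
    classValues.addElement("WHY");
    classValues.addElement("NOT_WHY");
    fv.addElement(new weka.core.Attribute("class", classValues));  // class attribute appended last

    return fv;
}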