This article collects typical code examples of the C++ method FeatureVectorPtr::MLClass. If you are wondering how FeatureVectorPtr::MLClass is used in practice, the curated examples below should help; they are also a good starting point for exploring the enclosing FeatureVectorPtr class.
The following shows 10 code examples of FeatureVectorPtr::MLClass, sorted by popularity.
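All ten examples rely on the same convention: FeatureVector exposes MLClass as an overloaded getter/setter pair, so example->MLClass () reads the assigned class label and example->MLClass (somePtr) writes it. The minimal, self-contained sketch below illustrates only that calling convention; the types are simplified stand-ins, not the real library classes, which carry far more state.
#include <string>
#include <vector>
#include <iostream>
// Hypothetical stand-ins for the library types; only the MLClass getter/setter pattern matters here.
class MLClass
{
public:
  explicit MLClass (const std::string& _name): name (_name) {}
  std::string name;
};
typedef MLClass* MLClassPtr;
class FeatureVector
{
public:
  explicit FeatureVector (int numFeatures): features (numFeatures, 0.0f), mlClass (NULL) {}
  MLClassPtr MLClass () const          {return mlClass;}      // getter: which class this example belongs to
  void       MLClass (MLClassPtr c)    {mlClass = c;}          // setter: assign (or re-assign) the class
private:
  std::vector<float> features;
  MLClassPtr         mlClass;
};
typedef FeatureVector* FeatureVectorPtr;
int main ()
{
  MLClass copepod ("Copepod");
  FeatureVectorPtr src = new FeatureVector (3);
  src->MLClass (&copepod);                       // label the source example
  FeatureVectorPtr encoded = new FeatureVector (3);
  encoded->MLClass (src->MLClass ());            // carry the label over, exactly as the examples below do
  std::cout << encoded->MLClass ()->name << std::endl;
  delete src;
  delete encoded;
  return 0;
}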
Example 1: CreateEncodedFeatureVector
FeatureVectorListPtr FeatureEncoder::CreateEncodedFeatureVector (FeatureVectorList& srcData)
{
if (srcData.AllFieldsAreNumeric ())
return srcData.DuplicateListAndContents ();
FeatureVectorListPtr encodedFeatureVectorList = new FeatureVectorList (destFileDesc, true);
FeatureVectorList::iterator idx;
for (idx = srcData.begin (); idx != srcData.end (); idx++)
{
FeatureVectorPtr srcExample = *idx;
XSpacePtr encodedData = EncodeAExample (srcExample);
kkint32 zed = 0;
FeatureVectorPtr encodedFeatureVector = new FeatureVector (codedNumOfFeatures);
while (encodedData[zed].index != -1)
{
encodedFeatureVector->AddFeatureData (encodedData[zed].index, (float)encodedData[zed].value);
zed++;
}
encodedFeatureVector->MLClass (srcExample->MLClass ());
encodedFeatureVectorList->PushOnBack (encodedFeatureVector);
delete encodedData;
encodedData = NULL;
}
return encodedFeatureVectorList;
} /* CreateEncodedFeatureVector */
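Example 1 walks an XSpace array that is terminated by a sentinel entry whose index is -1, copies each index/value pair into a new FeatureVector, and then transfers the class label with MLClass. A minimal sketch of that sentinel-terminated traversal, using a hypothetical local struct instead of the library's XSpace type (which, going by the example, exposes the same index/value fields):
#include <vector>
#include <iostream>
// Hypothetical sparse entry mirroring the index/value access seen in Example 1.
struct SparseEntry
{
  int    index;    // -1 marks the end of the array
  double value;
};
int main ()
{
  SparseEntry encoded[] = { {0, 0.25}, {3, 1.0}, {7, -2.5}, {-1, 0.0} };
  std::vector<float> dense (10, 0.0f);
  int zed = 0;
  while (encoded[zed].index != -1)     // stop at the sentinel, just like Example 1
  {
    dense[encoded[zed].index] = (float)encoded[zed].value;
    ++zed;
  }
  std::cout << "non-zero entries copied: " << zed << std::endl;
  return 0;
}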
Example 2: EncodeAExample
FeatureVectorPtr FeatureEncoder::EncodeAExample (FileDescConstPtr encodedFileDesc,
FeatureVectorPtr src
)
{
FeatureVectorPtr encodedExample = new FeatureVector (numEncodedFeatures);
encodedExample->MLClass (src->MLClass ());
encodedExample->PredictedClass (src->PredictedClass ());
//encodedExample->Version (src->Version ());
encodedExample->TrainWeight (src->TrainWeight ());
const float* featureData = src->FeatureData ();
kkint32 x;
for (x = 0; x < numOfFeatures; x++)
{
float featureVal = featureData [srcFeatureNums[x]];
kkint32 y = destFeatureNums[x];
switch (destWhatToDo[x])
{
case FeWhatToDo::FeAsIs:
{
encodedExample->AddFeatureData (y, featureVal);
}
break;
case FeWhatToDo::FeBinary:
{
for (kkint32 z = 0; z < cardinalityDest[x]; z++)
{
float bVal = ((kkint32)featureVal == z);
encodedExample->AddFeatureData (y, bVal);
y++;
}
}
break;
case FeWhatToDo::FeScale:
{
encodedExample->AddFeatureData (y, (featureVal / (float)cardinalityDest[x]));
}
break;
}
}
return encodedExample;
} /* EncodeAExample */
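Example 2 applies one of three encodings per source feature: FeAsIs copies the value unchanged, FeBinary expands a nominal value into one indicator slot per category (one-hot), and FeScale divides the value by the category count. A small standalone sketch of the one-hot branch, under the assumption that the nominal value is a small non-negative integer category index:
#include <vector>
#include <iostream>
// One-hot encode a nominal feature value into 'cardinality' slots, as the FeBinary case does.
std::vector<float> OneHotEncode (int featureVal, int cardinality)
{
  std::vector<float> slots (cardinality, 0.0f);
  for (int z = 0; z < cardinality; ++z)
    slots[z] = (featureVal == z) ? 1.0f : 0.0f;
  return slots;
}
int main ()
{
  std::vector<float> encoded = OneHotEncode (2, 4);   // category 2 of 4  ->  0 0 1 0
  for (size_t i = 0; i < encoded.size (); ++i)
    std::cout << encoded[i] << " ";
  std::cout << std::endl;
  return 0;
}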
Example 3: ExtractListOfClasses
MLClassListPtr FeatureVectorList::ExtractListOfClasses () const
{
MLClassPtr lastClass = NULL;
map<MLClassPtr,MLClassPtr> ptrIndex;
map<MLClassPtr,MLClassPtr>::iterator ptrIndexItr;
FeatureVectorList::const_iterator idx;
for (idx = begin (); idx != end (); ++idx)
{
FeatureVectorPtr example = *idx;
MLClassPtr newClass = example->MLClass ();
if (newClass == lastClass)
continue;
lastClass = newClass;
ptrIndexItr = ptrIndex.find (newClass);
if (ptrIndexItr == ptrIndex.end ())
{
lastClass = newClass;
ptrIndex.insert (pair<MLClassPtr,MLClassPtr> (newClass, newClass));
}
}
MLClassListPtr classes = new MLClassList ();
for (ptrIndexItr = ptrIndex.begin (); ptrIndexItr != ptrIndex.end (); ++ptrIndexItr)
classes->PushOnBack (ptrIndexItr->first);
return classes;
} /* ExtractListOfClasses */
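Example 3 collects the distinct MLClass pointers occurring in a list, using a map purely as a pointer set and short-circuiting on consecutive duplicates because examples are frequently grouped by class. The same idea as a self-contained sketch with std::set, with string labels standing in for the MLClassPtr values:
#include <set>
#include <string>
#include <vector>
#include <iostream>
int main ()
{
  std::vector<std::string> labels = {"Copepod", "Copepod", "Larvacean", "Copepod", "Detritus"};
  std::set<std::string> classes;
  std::string lastClass;
  for (size_t i = 0; i < labels.size (); ++i)
  {
    if (labels[i] == lastClass)      // cheap skip for runs of the same class
      continue;
    lastClass = labels[i];
    classes.insert (labels[i]);
  }
  for (std::set<std::string>::const_iterator it = classes.begin (); it != classes.end (); ++it)
    std::cout << *it << std::endl;
  return 0;
}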
Example 4: EncodedFeatureVectorList
FeatureVectorListPtr FeatureEncoder2::EncodedFeatureVectorList (const FeatureVectorList& srcData) const
{
if (srcData.AllFieldsAreNumeric ())
return srcData.DuplicateListAndContents ();
FeatureVectorListPtr encodedFeatureVectorList = new FeatureVectorList (encodedFileDesc, true);
FeatureVectorList::const_iterator idx;
for (idx = srcData.begin (); idx != srcData.end (); idx++)
{
FeatureVectorPtr srcExample = *idx;
FeatureVectorPtr encodedFeatureVector = EncodeAExample (srcExample);
encodedFeatureVector->MLClass (srcExample->MLClass ());
encodedFeatureVectorList->PushOnBack (encodedFeatureVector);
}
return encodedFeatureVectorList;
} /* EncodedFeatureVectorList */
Example 5: SplitForestCoverFile
void SplitForestCoverFile ()
{
RunLog log;
MLClassConstList mlClasses;
bool cancelFlag = false;
bool successful;
bool changesMade = false;
FeatureVectorListPtr images = FeatureFileIOC45::Driver ()->LoadFeatureFile
("covtype_alpha.data", mlClasses, -1, cancelFlag, successful, changesMade, log);
FileDescPtr fileDesc = images->FileDesc ();
images->RandomizeOrder ();
images->RandomizeOrder ();
images->RandomizeOrder ();
images->RandomizeOrder ();
images->RandomizeOrder ();
MLClassConstPtr lodgepolePine = mlClasses.GetMLClassPtr ("Lodgepole_Pine");
MLClassConstPtr spruceFir = mlClasses.GetMLClassPtr ("Spruce_Fir");
int lodgepolePineTrainCount = 0;
int spruceFirTrainCount = 0;
FeatureVectorList::iterator idx;
FeatureVectorListPtr trainData = new FeatureVectorList (fileDesc, false, log, 10000);
FeatureVectorListPtr testData = new FeatureVectorList (fileDesc, false, log, 10000);
int c = 0;
for (idx = images->begin (); idx != images->end (); idx++)
{
FeatureVectorPtr i = *idx;
if ((c % 5000) == 0)   // report progress every 5000 examples
cout << c << endl;
if (i->MLClass () == lodgepolePine)
{
if (lodgepolePineTrainCount < 56404)
{
trainData->PushOnBack (i);
lodgepolePineTrainCount++;
}
else
{
testData->PushOnBack (i);
}
}
else if (i->MLClass () == spruceFir)
{
if (spruceFirTrainCount < 42480)
{
trainData->PushOnBack (i);
spruceFirTrainCount++;
}
else
{
testData->PushOnBack (i);
}
}
c++;
}
KKU::uint numExamplesWritten = 0;
FeatureFileIOC45::Driver ()->SaveFeatureFile
("CovType_TwoClass.data",
trainData->AllFeatures (),
*trainData,
numExamplesWritten,
cancelFlag,
successful,
log
);
FeatureFileIOC45::Driver ()->SaveFeatureFile
("CovType_TwoClass.test",
testData->AllFeatures (),
*testData,
numExamplesWritten,
cancelFlag,
successful,
log
);
delete trainData;
delete testData;
delete images;
} /* SplitForestCoverFile */
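Example 5 randomizes the order of the loaded examples and then splits them into train and test sets by testing MLClass against two target classes and capping the per-class training counts. A simplified, self-contained version of that split, with hypothetical labels and caps:
#include <algorithm>
#include <map>
#include <random>
#include <string>
#include <utility>
#include <vector>
#include <iostream>
int main ()
{
  typedef std::pair<std::string, int> Example;        // (class label, example id)
  std::vector<Example> examples;
  for (int i = 0; i < 100; ++i)
    examples.push_back (Example ((i % 3 == 0) ? "Spruce_Fir" : "Lodgepole_Pine", i));
  std::mt19937 rng (20250101);                         // fixed seed; the example shuffles several times instead
  std::shuffle (examples.begin (), examples.end (), rng);
  std::map<std::string, int> trainCap;                 // hypothetical per-class caps
  trainCap["Lodgepole_Pine"] = 40;
  trainCap["Spruce_Fir"]     = 20;
  std::vector<Example> trainData, testData;
  std::map<std::string, int> trainCount;
  for (size_t i = 0; i < examples.size (); ++i)
  {
    const std::string& cls = examples[i].first;
    if (trainCount[cls] < trainCap[cls])
    {
      trainData.push_back (examples[i]);
      ++trainCount[cls];
    }
    else
    {
      testData.push_back (examples[i]);
    }
  }
  std::cout << "train: " << trainData.size () << "  test: " << testData.size () << std::endl;
  return 0;
}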
Example 6: EncodeIntoSparseMatrix
void FeatureEncoder::EncodeIntoSparseMatrix
(FeatureVectorListPtr src,
ClassAssignments& assignments,
XSpacePtr& xSpace,
kkint32& totalxSpaceUsed,
struct svm_problem& prob,
RunLog& log
)
{
FeatureVectorListPtr compressedExamples = NULL;
FeatureVectorListPtr examplesToUseFoXSpace = NULL;
kkint32 xSpaceUsed = 0;
totalxSpaceUsed = 0;
examplesToUseFoXSpace = src;
kkint32 numOfExamples = examplesToUseFoXSpace->QueueSize ();
//kkint32 elements = numOfExamples * xSpaceNeededPerExample;
prob.l = numOfExamples;
prob.y = (double*)malloc (prob.l * sizeof (double));
prob.x = (struct svm_node **) malloc (prob.l * sizeof (struct svm_node*));
prob.index = new kkint32[prob.l];
prob.exampleNames.clear ();
kkint32 numNeededXspaceNodes = DetermineNumberOfNeededXspaceNodes (examplesToUseFoXSpace);
kkint32 totalBytesForxSpaceNeeded = (numNeededXspaceNodes + 10) * sizeof (struct svm_node); // I added '10' to elements because I am paranoid
xSpace = (struct svm_node*) malloc (totalBytesForxSpaceNeeded);
if (xSpace == NULL)
{
log.Level (-1) << endl << endl << endl
<< " FeatureEncoder::Compress *** Failed to allocates space for 'xSpace' ****" << endl
<< endl
<< " Space needed [" << totalBytesForxSpaceNeeded << "]" << endl
<< " Num of Examples [" << numOfExamples << "]" << endl
<< " Num XSpaceNodesNeeded [" << numNeededXspaceNodes << "]" << endl
<< endl;
// we will have to allocate space for each individual training example separately.
//throw "FeatureEncoder::Compress Allocation of memory for xSpace Failed.";
}
prob.W = NULL;
kkint32 i = 0;
FeatureVectorPtr example = NULL;
MLClassPtr lastMlClass = NULL;
kkint16 lastClassNum = -1;
kkint32 bytesOfxSpacePerExample = xSpaceNeededPerExample * sizeof (struct svm_node);
for (i = 0; i < prob.l; i++)
{
if (totalxSpaceUsed > numNeededXspaceNodes)
{
log.Level (-1) << endl << endl
<< "FeatureEncoder::Compress ***ERROR*** We have exceeded the number of XSpace nodes allocated." << endl
<< endl;
}
example = examplesToUseFoXSpace->IdxToPtr (i);
if (example->MLClass () != lastMlClass)
{
lastMlClass = example->MLClass ();
lastClassNum = assignments.GetNumForClass (lastMlClass);
}
prob.y[i] = lastClassNum;
prob.index[i] = i;
prob.exampleNames.push_back (osGetRootName (example->ExampleFileName ()));
if (prob.W)
{
prob.W[i] = example->TrainWeight () * c_Param;
if (example->TrainWeight () <= 0.0f)
{
log.Level (-1) << endl
<< "FeatureEncoder::EncodeIntoSparseMatrix ***ERROR*** Example[" << example->ExampleFileName () << "]" << endl
<< " has a TrainWeight value of 0 or less defaulting to 1.0" << endl
<< endl;
prob.W[i] = 1.0 * c_Param;
}
}
if (xSpace == NULL)
{
struct svm_node* xSpaceThisExample = (struct svm_node*) malloc (bytesOfxSpacePerExample);
prob.x[i] = xSpaceThisExample;
EncodeAExample (example, prob.x[i], xSpaceUsed);
if (xSpaceUsed < xSpaceNeededPerExample)
{
kkint32 bytesNeededForThisExample = xSpaceUsed * sizeof (struct svm_node);
struct svm_node* smallerXSpaceThisExample = (struct svm_node*) malloc (bytesNeededForThisExample);
memcpy (smallerXSpaceThisExample, xSpaceThisExample, bytesNeededForThisExample);
free (xSpaceThisExample);
//......... the rest of this example is omitted .........
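Example 6 packs every FeatureVector into the sparse node array that libsvm-style solvers expect: prob.y holds the numeric class assignment looked up from MLClass, prob.x[i] points at a block of index/value nodes, and each example's node list ends with index -1. The svm_problem used above is an extended variant (index, exampleNames and W are not in stock libsvm), so the sketch below defines minimal local structs that mirror the stock layout; it illustrates the packing scheme rather than the library's actual code:
#include <cstdlib>
#include <iostream>
// Minimal local mirrors of the libsvm node/problem layout (defined here so the sketch is self-contained).
struct svm_node    {int index; double value;};
struct svm_problem {int l; double* y; svm_node** x;};
int main ()
{
  // Two tiny dense examples with their numeric class assignments.
  double dense[2][3] = { {0.0, 2.5, 1.0}, {4.0, 0.0, 0.0} };
  int    classNum[2] = {0, 1};
  svm_problem prob;
  prob.l = 2;
  prob.y = (double*) malloc (prob.l * sizeof (double));
  prob.x = (svm_node**) malloc (prob.l * sizeof (svm_node*));
  for (int i = 0; i < prob.l; ++i)
  {
    prob.y[i] = classNum[i];
    // Worst case: every feature non-zero, plus one sentinel node.
    svm_node* nodes = (svm_node*) malloc (4 * sizeof (svm_node));
    int used = 0;
    for (int f = 0; f < 3; ++f)
    {
      if (dense[i][f] != 0.0)
      {
        nodes[used].index = f;
        nodes[used].value = dense[i][f];
        ++used;
      }
    }
    nodes[used].index = -1;            // sentinel terminates this example's node list
    nodes[used].value = 0.0;
    prob.x[i] = nodes;
  }
  std::cout << "packed " << prob.l << " examples" << std::endl;
  for (int i = 0; i < prob.l; ++i)
    free (prob.x[i]);
  free (prob.x);
  free (prob.y);
  return 0;
}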
Example 7: FeatureDataReSink
FeatureVectorListPtr FeatureFileIO::FeatureDataReSink (FactoryFVProducerPtr _fvProducerFactory,
const KKStr& _dirName,
const KKStr& _fileName,
MLClassPtr _unknownClass,
bool _useDirectoryNameForClassName,
MLClassList& _mlClasses,
VolConstBool& _cancelFlag,
bool& _changesMade,
KKB::DateTime& _timeStamp,
RunLog& _log
)
{
_changesMade = false;
_timeStamp = DateTime ();
if (_unknownClass == NULL)
_unknownClass = MLClass::GetUnKnownClassStatic ();
KKStr className = _unknownClass->Name ();
_log.Level (10) << "FeatureFileIO::FeatureDataReSink dirName: " << _dirName << endl
<< " fileName: " << _fileName << " UnKnownClass: " << className << endl;
KKStr fullFeatureFileName = osAddSlash (_dirName) + _fileName;
bool successful = true;
KKStr fileNameToOpen;
if (_dirName.Empty ())
fileNameToOpen = _fileName;
else
fileNameToOpen = osAddSlash (_dirName) + _fileName;
bool versionsAreSame = false;
FeatureVectorListPtr origFeatureVectorData
= LoadFeatureFile (fileNameToOpen, _mlClasses, -1, _cancelFlag, successful, _changesMade, _log);
if (origFeatureVectorData == NULL)
{
successful = false;
origFeatureVectorData = _fvProducerFactory->ManufacturFeatureVectorList (true);
}
if (_cancelFlag)
{
delete origFeatureVectorData; origFeatureVectorData = NULL;
return _fvProducerFactory->ManufacturFeatureVectorList (true);
}
FeatureVectorListPtr origFeatureData = NULL;
if (successful &&
(&typeid (*origFeatureVectorData) == _fvProducerFactory->FeatureVectorListTypeId ()) &&
((*(origFeatureVectorData->FileDesc ())) == (*(_fvProducerFactory->FileDesc ())))
)
{
origFeatureData = origFeatureVectorData;
}
else
{
origFeatureData = _fvProducerFactory->ManufacturFeatureVectorList (true);
delete origFeatureVectorData;
origFeatureVectorData = NULL;
}
KKStr fileSpec = osAddSlash (_dirName) + "*.*";
KKStrListPtr fileNameList = osGetListOfFiles (fileSpec);
if (!fileNameList)
{
// There are no Image Files, so we need to return an Empty List of Image Features.
if (origFeatureData->QueueSize () > 0)
_changesMade = true;
delete origFeatureData; origFeatureData = NULL;
return _fvProducerFactory->ManufacturFeatureVectorList (true);
}
FeatureVectorProducerPtr fvProducer = _fvProducerFactory->ManufactureInstance (_log);
if (successful)
{
if (origFeatureData->Version () == fvProducer->Version ())
{
versionsAreSame = true;
_timeStamp = osGetFileDateTime (fileNameToOpen);
}
else
{
_changesMade = true;
}
}
else
{
delete origFeatureData;
origFeatureData = _fvProducerFactory->ManufacturFeatureVectorList (true);
//......... the rest of this example is omitted .........
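Example 7 reloads a cached feature file, but only trusts it when the stored feature-vector version matches the current producer's version; otherwise the data is flagged as changed so it can be recomputed. A sketch of that version-gated reuse, with hypothetical names for the cache record:
#include <string>
#include <iostream>
// Hypothetical cache record; the real code compares the loaded list's Version () against the producer's.
struct FeatureCache
{
  int         version;
  std::string fileName;
};
// Returns true when the cached features can be reused as-is.
bool CanReuseCache (const FeatureCache& cache, int producerVersion, bool loadSucceeded, bool& changesMade)
{
  if (!loadSucceeded)
  {
    changesMade = true;                // nothing usable on disk; everything must be recomputed
    return false;
  }
  if (cache.version != producerVersion)
  {
    changesMade = true;                // stale features; recompute and rewrite the file
    return false;
  }
  return true;                         // versions agree; keep the cached data and its timestamp
}
int main ()
{
  FeatureCache cache = {3, "hermit_crab.data"};
  bool changesMade = false;
  std::cout << CanReuseCache (cache, 3, true, changesMade) << std::endl;   // 1: reuse
  std::cout << CanReuseCache (cache, 4, true, changesMade) << std::endl;   // 0: recompute
  return 0;
}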
Example 8: ComputeFeatureVector
FeatureVectorPtr GrayScaleImagesFVProducer::ComputeFeatureVector (const Raster& srcImage,
const MLClassPtr knownClass,
RasterListPtr intermediateImages,
float priorReductionFactor,
RunLog& runLog
)
{
FeatureVectorPtr fv = new FeatureVector (maxNumOfFeatures);
fv->MLClass (knownClass);
float* featureData = fv->FeatureDataAlter ();
fv->Version (Version ());
kkint32 areaBeforeReduction = 0;
float weighedSizeBeforeReduction = 0.0f;
kkint32 row = 0;
kkuint32 intensityHistBuckets[8];
srcImage.CalcAreaAndIntensityFeatures (areaBeforeReduction,
weighedSizeBeforeReduction,
intensityHistBuckets
);
kkint32 srcHeight = srcImage.Height ();
kkint32 srcWidth = srcImage.Width ();
kkint32 reducedHeight = srcHeight;
kkint32 reducedWidth = srcWidth;
kkint32 reducedSquareArea = reducedHeight * reducedWidth;
kkint32 reductionMultiple = 1;
while (reducedSquareArea > totPixsForMorphOps)
{
++reductionMultiple;
reducedHeight = (srcHeight + reductionMultiple - 1) / reductionMultiple;
reducedWidth = (srcWidth + reductionMultiple - 1) / reductionMultiple;
reducedSquareArea = reducedHeight * reducedWidth;
}
float totalReductionMultiple = priorReductionFactor * (float)reductionMultiple;
float totalReductionMultipleSquared = totalReductionMultiple * totalReductionMultiple;
delete[] workRaster1Rows; workRaster1Rows = new uchar*[reducedHeight];   // arrays from new[] must be released with delete[]
delete[] workRaster2Rows; workRaster2Rows = new uchar*[reducedHeight];
delete[] workRaster3Rows; workRaster3Rows = new uchar*[reducedHeight];
uchar* wp1 = workRaster1Area;
uchar* wp2 = workRaster2Area;
uchar* wp3 = workRaster3Area;
for (row = 0; row < reducedHeight; ++row)
{
workRaster1Rows[row] = wp1;
workRaster2Rows[row] = wp2;
workRaster3Rows[row] = wp3;
wp1 += reducedWidth;
wp2 += reducedWidth;
wp3 += reducedWidth;
}
Raster workRaster1 (reducedHeight, reducedWidth, workRaster1Area, workRaster1Rows);
Raster workRaster2 (reducedHeight, reducedWidth, workRaster2Area, workRaster2Rows);
Raster workRaster3 (reducedHeight, reducedWidth, workRaster3Area, workRaster3Rows);
Raster const * initRaster = NULL;
RasterPtr wr1 = NULL;
RasterPtr wr2 = NULL;
if (reductionMultiple > 1)
{
try
{
ReductionByMultiple (reductionMultiple, srcImage, workRaster1);
}
catch (...)
{
runLog.Level (-1) << endl << "GrayScaleImagesFVProducer::ComputeFeatureVector ***ERROR*** Exception calling 'ReductionByMultiple'." << endl << endl;
return NULL;
}
initRaster = &workRaster1;
wr1 = &workRaster2;
wr2 = &workRaster3;
}
else
{
initRaster = &srcImage;
wr1 = &workRaster1;
wr2 = &workRaster2;
}
if (areaBeforeReduction < 20)
{
for (kkint32 tp = 0; tp < maxNumOfFeatures; tp++)
featureData[tp] = 9999999;
return fv;
}
//......... the rest of this example is omitted .........
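Example 8 shrinks large images before the morphological feature computations: it searches for the smallest integer reduction multiple whose reduced height x width stays within a pixel budget (totPixsForMorphOps), using ceiling division for the reduced dimensions. The same search as a standalone sketch with an assumed budget:
#include <iostream>
// Smallest integer factor k such that ceil(h/k) * ceil(w/k) <= budget.
int ReductionMultiple (int srcHeight, int srcWidth, int budget)
{
  int k = 1;
  int reducedHeight = srcHeight;
  int reducedWidth  = srcWidth;
  while (reducedHeight * reducedWidth > budget)
  {
    ++k;
    reducedHeight = (srcHeight + k - 1) / k;   // ceiling division, as in the example
    reducedWidth  = (srcWidth  + k - 1) / k;
  }
  return k;
}
int main ()
{
  // Hypothetical budget; the library's totPixsForMorphOps value is not shown in the excerpt.
  int budget = 100000;
  std::cout << ReductionMultiple (1200, 900, budget) << std::endl;   // prints 4  (300 x 225 = 67500)
  return 0;
}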
Example 9: GradeExamplesAgainstGroundTruth
void GradeClassification::GradeExamplesAgainstGroundTruth (FeatureVectorListPtr examplesToGrade,
FeatureVectorListPtr groundTruth
)
{
log.Level (10) << "GradeClassification::GradeExamplesAgainstGroundTruth" << endl;
groundTruth->SortByRootName ();
MLClassConstPtr unknownClass = mlClasses->GetUnKnownClass ();
MLClassConstListPtr classes = NULL;
{
MLClassConstListPtr examplesToGradeClasses = examplesToGrade->ExtractMLClassConstList ();
MLClassConstListPtr groundTruthClasses = groundTruth->ExtractMLClassConstList ();
classes = MLClassConstList::MergeClassList (*examplesToGradeClasses, *groundTruthClasses);
delete examplesToGradeClasses;
delete groundTruthClasses;
}
uint16 maxHierarchialLevel = 0;
{
MLClassConstList::iterator idx;
for (idx = classes->begin (); idx != classes->end (); idx++)
{
MLClassConstPtr c = *idx;
maxHierarchialLevel = Max (maxHierarchialLevel, c->NumHierarchialLevels ());
}
}
// Create ConfusionMatrix objects for each possible level of the hierarchy. The 'resultsSummary' vector will
// end up owning the instances of 'ConfusionMatrix2' and the destructor will be responsible for deleting them.
uint curLevel = 0;
vector<ConfusionMatrix2Ptr> cmByLevel;
for (curLevel = 0; curLevel < maxHierarchialLevel; curLevel++)
{
MLClassConstListPtr classesThisLevel = classes->ExtractListOfClassesForAGivenHierarchialLevel (curLevel);
ConfusionMatrix2Ptr cm = new ConfusionMatrix2 (*classesThisLevel);
cmByLevel.push_back (cm);
}
ConfusionMatrix2 cm (*classes);
ImageFeaturesList::iterator idx;
for (idx = examplesToGrade->begin (); idx != examplesToGrade->end (); idx++)
{
ImageFeaturesPtr exampleToGrade = *idx;
MLClassConstPtr predictedClass = exampleToGrade->MLClass ();
float origSize = exampleToGrade->OrigSize ();
float probability = exampleToGrade->Probability ();
KKStr rootName = osGetRootName (exampleToGrade->ImageFileName ());
FeatureVectorPtr groundTruthExample = groundTruth->LookUpByRootName (rootName);
MLClassConstPtr groundTruthClass = unknownClass;
if (groundTruthExample)
groundTruthClass = groundTruthExample->MLClass ();
cm.Increment (groundTruthClass, predictedClass, (int)origSize, probability, log);
for (curLevel = 0; curLevel < maxHierarchialLevel; curLevel++)
{
MLClassConstPtr groundTruthClasssThisLevel = groundTruthClass->MLClassForGivenHierarchialLevel (curLevel);
MLClassConstPtr predictedClassThisLevel = predictedClass->MLClassForGivenHierarchialLevel (curLevel);
cmByLevel[curLevel]->Increment (groundTruthClasssThisLevel, predictedClassThisLevel, (int)origSize, probability, log);
}
}
//cm.PrintTrueFalsePositivesTabDelimited (*report);
{
// report Hierarchial results
for (curLevel = 0; curLevel < maxHierarchialLevel; curLevel++)
{
log.Level (10) << "GradeClassification::GradeExamplesAgainstGroundTruth Printing Level[" << curLevel << "]" << endl;
*report << endl << endl << endl
<< "Confusion Matrix Training Level[" << maxHierarchialLevel << "] Preduction Level[" << (curLevel + 1) << "]" << endl
<< endl;
cmByLevel[curLevel]->PrintConfusionMatrixTabDelimited (*report);
resultsSummary.push_back (SummaryRec (maxHierarchialLevel, curLevel + 1, cmByLevel[curLevel]));
}
*report << endl << endl << endl;
}
log.Level (10) << "GradeClassification::GradeExamplesAgainstGroundTruth Exiting" << endl;
} /* GradeExamplesAgainstGroundTruth */
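Example 9 tallies each graded example into a confusion matrix keyed by (ground-truth class, predicted class), with the prediction read from MLClass on the classified example and the truth looked up by root file name. A compact tally using only standard-library containers, as a rough stand-in for ConfusionMatrix2:
#include <map>
#include <string>
#include <utility>
#include <vector>
#include <iostream>
int main ()
{
  typedef std::pair<std::string, std::string> TruthPredicted;
  // (ground truth, predicted) pairs; in Example 9 these come from groundTruthExample->MLClass ()
  // and exampleToGrade->MLClass ().
  std::vector<TruthPredicted> graded;
  graded.push_back (TruthPredicted ("Copepod", "Copepod"));
  graded.push_back (TruthPredicted ("Copepod", "Detritus"));
  graded.push_back (TruthPredicted ("Detritus", "Detritus"));
  std::map<TruthPredicted, int> confusion;
  int correct = 0;
  for (size_t i = 0; i < graded.size (); ++i)
  {
    ++confusion[graded[i]];
    if (graded[i].first == graded[i].second)
      ++correct;
  }
  std::cout << "accuracy: " << (double)correct / (double)graded.size () << std::endl;
  for (std::map<TruthPredicted, int>::const_iterator it = confusion.begin (); it != confusion.end (); ++it)
    std::cout << it->first.first << " -> " << it->first.second << " : " << it->second << std::endl;
  return 0;
}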
Example 10: GradeUsingTrainingConfiguration
void GradeClassification::GradeUsingTrainingConfiguration ()
{
log.Level (10) << "GradeClassification::GradeUsingTrainingConfiguration" << endl;
delete mlClasses;
mlClasses = config->ExtractClassList ();
bool changesMadeToTrainingLibraries = false;
KKU::DateTime latestImageTimeStamp;
log.Level (10) << "GradeUsingTrainingConfiguration Loading Training Data." << endl;
FeatureVectorListPtr trainingData = config->LoadFeatureDataFromTrainingLibraries (latestImageTimeStamp, changesMadeToTrainingLibraries, cancelFlag);
if (!trainingData)
{
log.Level (-1) << endl << endl << endl
<< "GradedlClassification::GradeUsingTrainingConfiguration ***ERROR***" << endl
<< endl
<< " Could not load training data file Configuration File[" << configFileName << "]" << endl
<< endl
<< endl;
Abort (true);
return;
}
uint maxLevelsOfHiearchy = config->NumHierarchialLevels ();
uint hierarchyLevel = 0;
while (hierarchyLevel < maxLevelsOfHiearchy)
{
log.Level (10) << "GradeUsingTrainingConfiguration Hierarchy Level[" << hierarchyLevel << "]" << endl;
TrainingConfiguration2Ptr configThisLevel = config->GenerateAConfiguraionForAHierarchialLevel (hierarchyLevel);
FeatureVectorListPtr trainingDataThisLevel = trainingData->ExtractExamplesForHierarchyLevel (hierarchyLevel);
FeatureVectorListPtr groundTruthThisLevel = groundTruth->ExtractExamplesForHierarchyLevel (hierarchyLevel);
FeatureVectorListPtr groundTruthThisLevelClassified = new FeatureVectorList (*groundTruthThisLevel, true);
KKStr statusMessage;
TrainingProcess2 trainer (configThisLevel,
trainingDataThisLevel,
NULL, // No report file,
trainingDataThisLevel->FileDesc (),
log,
false, // false = features are not already normalized.
cancelFlag,
statusMessage
);
trainer.CreateModelsFromTrainingData ();
{
Classifier2 classifier (&trainer, log);
FeatureVectorList::iterator idx;
for (idx = groundTruthThisLevelClassified->begin (); idx != groundTruthThisLevelClassified->end (); idx++)
{
FeatureVectorPtr fv = *idx;
MLClassConstPtr ic = classifier.ClassifyAImage (*fv);
fv->MLClass (ic);
}
}
GradeExamplesAgainstGroundTruth (groundTruthThisLevelClassified, groundTruthThisLevel);
delete groundTruthThisLevelClassified; groundTruthThisLevelClassified = NULL;
delete groundTruthThisLevel; groundTruthThisLevel = NULL;
delete trainingDataThisLevel; trainingDataThisLevel = NULL;
hierarchyLevel++;
}
ReportResults ();
delete trainingData;
} /* GradeUsingTrainingConfiguration */