本文整理汇总了C++中FeatureVectorPtr类的典型用法代码示例。如果您正苦于以下问题:C++ FeatureVectorPtr类的具体用法?C++ FeatureVectorPtr怎么用?C++ FeatureVectorPtr使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了FeatureVectorPtr类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: for
MLClassListPtr FeatureVectorList::ExtractListOfClasses () const
{
  // Builds a list of the distinct MLClass pointers referenced by the
  // examples in this list (ordered by pointer value, the map's key order).
  // Returns a newly allocated MLClassList; the caller owns it.
  MLClassPtr lastClass = NULL;   // cache: consecutive examples often share a class
  map<MLClassPtr,MLClassPtr> ptrIndex;
  map<MLClassPtr,MLClassPtr>::iterator ptrIndexItr;

  FeatureVectorList::const_iterator idx;
  for (idx = begin (); idx != end (); ++idx)
  {
    FeatureVectorPtr example = *idx;
    MLClassPtr newClass = example->MLClass ();
    if (newClass == lastClass)
      continue;   // same class as the previous example; skip the map work
    lastClass = newClass;
    // map::insert is a no-op when the key is already present, so the
    // original find()-then-insert (and its duplicated 'lastClass'
    // assignment) was redundant.
    ptrIndex.insert (pair<MLClassPtr,MLClassPtr> (newClass, newClass));
  }

  MLClassListPtr classes = new MLClassList ();
  for (ptrIndexItr = ptrIndex.begin (); ptrIndexItr != ptrIndex.end (); ++ptrIndexItr)
    classes->PushOnBack (ptrIndexItr->first);
  return classes;
}  /* ExtractListOfClasses */
示例2: if
FeatureVectorListPtr FeatureEncoder::CreateEncodedFeatureVector (FeatureVectorList& srcData)
{
  // Returns a new list holding encoded copies of every example in 'srcData'.
  // When all fields are already numeric no encoding is needed and a deep
  // copy of the source list is returned instead.
  // The returned list owns its contents (owner flag = true); caller owns the list.
  if (srcData.AllFieldsAreNumeric ())
    return srcData.DuplicateListAndContents ();

  FeatureVectorListPtr encodedFeatureVectorList = new FeatureVectorList (destFileDesc, true);

  FeatureVectorList::iterator idx;
  for (idx = srcData.begin (); idx != srcData.end (); idx++)
  {
    FeatureVectorPtr srcExample = *idx;

    // EncodeAExample returns a sparse (index, value) array terminated by
    // an entry whose index == -1.
    XSpacePtr encodedData = EncodeAExample (srcExample);

    kkint32 zed = 0;
    FeatureVectorPtr encodedFeatureVector = new FeatureVector (codedNumOfFeatures);
    while (encodedData[zed].index != -1)
    {
      encodedFeatureVector->AddFeatureData (encodedData[zed].index, (float)encodedData[zed].value);
      zed++;
    }
    encodedFeatureVector->MLClass (srcExample->MLClass ());

    encodedFeatureVectorList->PushOnBack (encodedFeatureVector);

    // NOTE(review): 'encodedData' is indexed as an array above; if
    // EncodeAExample allocates it with new[], this must be delete[] —
    // confirm how XSpacePtr memory is allocated.
    delete encodedData;
    encodedData = NULL;
  }
  return encodedFeatureVectorList;
}  /* CreateEncodedFeatureVector */
示例3: sizeof
kkMemSize FeatureVectorList::MemoryConsumedEstimated () const
{
  // Rough footprint estimate: the container header, the file-name string,
  // plus the estimate reported by each contained example.
  kkMemSize total = sizeof (FeatureVectorList) + fileName.MemoryConsumedEstimated ();
  for (FeatureVectorList::const_iterator it = begin (); it != end (); ++it)
    total += (*it)->MemoryConsumedEstimated ();
  return total;
}  /* MemoryConsumedEstimated */
示例4: Strip
void Strip ()
{
bool cancelFlag = false;
bool successful = false;
bool changesMade = false;
RunLog log;
FeatureFileIOPtr driver = FeatureFileIO::FileFormatFromStr ("C45");
MLClassList mlClasses;
FeatureVectorListPtr data =
driver->LoadFeatureFile ("D:\\Pices\\Reports\\FeatureDataFiles\\AllValidatedImages_ForJonathon\\AllValidatedDataNorm.data",
mlClasses,
-1,
cancelFlag,
successful,
changesMade,
log
);
FeatureVectorListPtr stripped = new FeatureVectorList (data->FileDesc (), false);
FeatureVectorList::const_iterator idx;
for (idx = data->begin (); idx != data->end (); ++idx)
{
FeatureVectorPtr fv = *idx;
KKStr fn = fv->ExampleFileName ();
if (fn.StartsWith ("SML") || (fn.StartsWith ("SMP")))
{
}
else
{
stripped->PushOnBack (fv);
}
}
kkuint32 numExamplesWritten = 90;
driver->SaveFeatureFile ("D:\\Pices\\Reports\\FeatureDataFiles\\AllValidatedImages_ForJonathon\\AllValidatedData1209.data",
data->AllFeatures (),
*stripped,
numExamplesWritten,
cancelFlag,
successful,
log
);
}
示例5: KKCheck
void FeatureVectorList::ResetFileDesc (FileDescConstPtr newFileDesc)
{
  // Replaces the list's file description and resizes every contained
  // example to the new field count.
  // BUG FIX: the check previously validated the OLD member 'fileDesc'
  // instead of the incoming 'newFileDesc' (the message already said so).
  KKCheck (newFileDesc, "FeatureVectorList::ResetFileDesc ***ERROR*** newFileDesc == NULL.")
  fileDesc = newFileDesc;
  numOfFeatures = fileDesc->NumOfFields ();
  for (iterator idx = begin (); idx != end (); idx++)
  {
    FeatureVectorPtr i = *idx;
    i->ResetNumOfFeatures (numOfFeatures);
  }
}  /* ResetFileDesc */
示例6: CompareTwoExamples
kkint32 ImageFeaturesNodeKey::CompareTwoExamples (const FeatureVectorPtr i1,
                                                  const FeatureVectorPtr i2
                                                 ) const
{
  // Lexicographic comparison of the two examples' feature arrays:
  // returns -1 if i1 sorts before i2, +1 if after, 0 when no feature
  // differs over i1's feature count.
  const float* lhs = i1->FeatureDataConst ();
  const float* rhs = i2->FeatureDataConst ();
  const kkint32 featureCount = i1->NumOfFeatures ();
  for (kkint32 f = 0; f < featureCount; ++f)
  {
    if (lhs[f] < rhs[f])
      return -1;
    if (lhs[f] > rhs[f])
      return 1;
  }
  return 0;
}  /* CompareTwoExamples */
示例7: for
kkint32 FeatureEncoder::DetermineNumberOfNeededXspaceNodes (FeatureVectorListPtr src) const
{
  // Counts how many sparse (svm_node / XSpace) entries are required to
  // encode every example in 'src', given the per-feature encoding plan
  // (destWhatToDo / cardinalityDest).  One extra node per example is
  // counted for its (-1) terminator entry.
  // CLEANUP: removed the unused local 'y' (assigned from destFeatureNums
  // and incremented, but never read).
  kkint32 xSpaceNodesNeeded = 0;
  FeatureVectorList::const_iterator idx;
  for (idx = src->begin (); idx != src->end (); ++idx)
  {
    FeatureVectorPtr fv = *idx;
    const float* featureData = fv->FeatureData ();
    for (kkint32 x = 0; x < numOfFeatures; x++)
    {
      float featureVal = featureData [srcFeatureNums[x]];
      switch (destWhatToDo[x])
      {
        case FeWhatToDo::FeAsIs:
          // Sparse encoding: only non-zero values consume a node.
          if (featureVal != 0.0)
            xSpaceNodesNeeded++;
          break;

        case FeWhatToDo::FeBinary:
          // One 0/1 indicator per category; only the matching category
          // produces a non-zero node.
          for (kkint32 z = 0; z < cardinalityDest[x]; z++)
          {
            if ((kkint32)featureVal == z)
              xSpaceNodesNeeded++;
          }
          break;

        case FeWhatToDo::FeScale:
          // Scaled values keep their zero/non-zero status, so the same
          // non-zero test applies.
          if (featureVal != (float)0.0)
            xSpaceNodesNeeded++;
          break;
      }
    }
    xSpaceNodesNeeded++;   // terminator node for this example
  }
  return xSpaceNodesNeeded;
}  /* DetermineNumberOfNeededXspaceNodes */
示例8: SplitForestCoverFile
void SplitForestCoverFile ()
{
  // Splits the forest-cover feature file into a two-class train/test pair:
  // the first N examples of each of the two target classes go to the
  // training file, the remainder to the test file.
  RunLog log;
  MLClassConstList mlClasses;
  bool cancelFlag  = false;
  bool successful  = false;   // BUG FIX: was uninitialized
  bool changesMade = false;

  FeatureVectorListPtr images = FeatureFileIOC45::Driver ()->LoadFeatureFile
      ("covtype_alpha.data", mlClasses, -1, cancelFlag, successful, changesMade, log);

  FileDescPtr fileDesc = images->FileDesc ();

  // Shuffle several times so the train/test split is effectively random.
  images->RandomizeOrder ();
  images->RandomizeOrder ();
  images->RandomizeOrder ();
  images->RandomizeOrder ();
  images->RandomizeOrder ();

  MLClassConstPtr lodgepolePine = mlClasses.GetMLClassPtr ("Lodgepole_Pine");
  MLClassConstPtr spruceFir     = mlClasses.GetMLClassPtr ("Spruce_Fir");

  int lodgepolePineTrainCount = 0;
  int spruceFirTrainCount     = 0;

  FeatureVectorList::iterator idx;
  // Both lists are non-owning views (owner = false); 'images' keeps
  // ownership of every example.
  FeatureVectorListPtr trainData = new FeatureVectorList (fileDesc, false, log, 10000);
  FeatureVectorListPtr testData  = new FeatureVectorList (fileDesc, false, log, 10000);

  int c = 0;
  for (idx = images->begin (); idx != images->end (); idx++)
  {
    FeatureVectorPtr i = *idx;

    // BUG FIX: was 'if (c % 5000)', which printed progress on every
    // example EXCEPT multiples of 5000 — the opposite of the intent.
    if ((c % 5000) == 0)
      cout << c << endl;

    if (i->MLClass () == lodgepolePine)
    {
      if (lodgepolePineTrainCount < 56404)
      {
        trainData->PushOnBack (i);
        lodgepolePineTrainCount++;
      }
      else
      {
        testData->PushOnBack (i);
      }
    }
    else if (i->MLClass () == spruceFir)
    {
      if (spruceFirTrainCount < 42480)
      {
        trainData->PushOnBack (i);
        spruceFirTrainCount++;
      }
      else
      {
        testData->PushOnBack (i);
      }
    }

    c++;
  }

  KKU::uint numExamplesWritten = 0;

  FeatureFileIOC45::Driver ()->SaveFeatureFile
      ("CovType_TwoClass.data",
       trainData->AllFeatures (),
       *trainData,
       numExamplesWritten,
       cancelFlag,
       successful,
       log
      );

  FeatureFileIOC45::Driver ()->SaveFeatureFile
      ("CovType_TwoClass.test",
       testData->AllFeatures (),
       *testData,
       numExamplesWritten,
       cancelFlag,
       successful,
       log
      );

  // Non-owning views first, then the owning list.
  delete trainData;
  delete testData;
  delete images;
}  /* SplitForestCoverFile */
示例9: FeatureVectorList
void FeatureFileConverter::ConvertData ()
{
  // Prepares the loaded feature data ('data' member) for writing to the
  // destination file:
  //   1) removes examples whose features are ALL zero,
  //   2) reports class statistics (and optionally feature statistics),
  //   3) optionally renames classes to enumerated names ("000", "001", ...).
  // (The remainder of this function is omitted in this listing.)
  cout << endl
       << "Saving [" << data->QueueSize () << "] records to data file[" << destFileName << "]" << endl
       << endl;

  bool successful = false;
  int numOfFeatures = data->NumOfFeatures ();
  int numWithAllZeros = 0;

  {
    FeatureVectorListPtr newData = new FeatureVectorList (srcFileDesc, true, log);

    // Will store examples that have all zero's for all features in "zeroData"
    // container.  This way they can be deleted from memory later and not result
    // in a memory leak.  This has to be done because they are not going to
    // be placed into newData which is going to become the owner of all the
    // examples.
    FeatureVectorListPtr zeroData = new FeatureVectorList (srcFileDesc, true, log);

    // How many have all 0's for feature data.
    FeatureVectorList::iterator idx;
    for (idx = data->begin (); idx != data->end (); idx++)
    {
      FeatureVectorPtr i = *idx;
      bool allZeros = true;
      for (int featureNum = 0; featureNum < numOfFeatures; featureNum++)
      {
        // Re-assigned every iteration and the loop breaks on the first
        // non-zero feature, so 'allZeros' ends up true only when every
        // feature was zero.
        allZeros = (i->FeatureData (featureNum) == 0.0f);
        if (!allZeros)
          break;
      }

      if (allZeros)
      {
        numWithAllZeros++;
        zeroData->PushOnBack (i);
      }
      else
      {
        newData->PushOnBack (i);
      }
    }

    // Hand ownership of the surviving examples to 'newData'; deleting
    // 'zeroData' (owner = true) frees the all-zero examples.
    data->Owner (false);
    delete data;
    data = newData;
    delete zeroData;
  }

  *report << endl
          << endl
          << "Num of data items with all zero feature data [" << numWithAllZeros << "]" << endl
          << endl;

  *report << data->ClassStatisticsStr ();
  *report << endl << endl << endl;

  if (statistics)
  {
    *report << "Class Statistics:" << endl;
    data->PrintClassStatistics (*report);
    *report << endl << endl;

    *report << "Feature Statistics:" << endl;
    data->PrintFeatureStatisticsByClass (*report);
  }

  if (enumerateClasses)
  {
    // We are going to change the name of the classes to numbers enumerated by className.
    MLClassConstListPtr mlClasses = data->ExtractMLClassConstList ();
    mlClasses->SortByName ();

    MLClassConstListPtr newClassNames = new MLClassConstList ();
    int classIdx = 0;
    MLClassConstList::iterator idx;
    for (idx = mlClasses->begin (); idx != mlClasses->end (); idx++)
    {
      KKStr newName = StrFormatInt (classIdx, "zzz0");
      // GetMLClassPtr creates the enumerated class entry in 'newClassNames'
      // as a side effect; the returned pointer itself is not used here.
      MLClassConstPtr mlClass = newClassNames->GetMLClassPtr (newName);
      classIdx++;
    }

    FeatureVectorList::iterator idx2;
    for (idx2 = data->begin (); idx2 != data->end (); idx2++)
    {
      // Map each example's class to its enumerated replacement by index.
      MLClassConstPtr c = (*idx2)->MLClass ();
      int classIndex = mlClasses->PtrToIdx (c);
      (*idx2)->MLClass (newClassNames->IdxToPtr (classIndex));
    }

    delete mlClasses;     mlClasses = NULL;
    delete newClassNames; newClassNames = NULL;
  }
//......... remainder of this function omitted in the original listing .........
示例10: for
void GradeClassification::ValidateThatBothListsHaveSameEntries (FeatureVectorList& groundTruth,
                                                                FeatureVectorList& examplesToGrade,
                                                                bool& theyAreTheSame
                                                               )
{
  // Cross-checks the two lists by root image name, reporting (in both
  // directions) any example present in one list but missing from the
  // other.  'theyAreTheSame' is cleared as soon as any mismatch is found.
  theyAreTheSame = true;

  int missingExamplesToGrade     = 0;
  int missingGroundTruthExamples = 0;

  // Both lists are sorted so LookUpByRootName can do its lookups.
  groundTruth.SortByRootName ();
  examplesToGrade.SortByRootName ();

  *report << endl << endl << endl
          << "Missing Examples To Grade" << endl
          << "=========================" << endl;

  ImageFeaturesList::iterator it;
  for (it = groundTruth.begin (); it != groundTruth.end (); it++)
  {
    FeatureVectorPtr truthEntry = *it;
    KKStr rootName = osGetRootName (truthEntry->ImageFileName ());
    FeatureVectorPtr match = examplesToGrade.LookUpByRootName (rootName);
    if (match)
      continue;
    theyAreTheSame = false;
    *report << rootName << "\t" << "*** MISSING ***" << "\t" << truthEntry->ImageFileName () << endl;
    missingExamplesToGrade++;
  }

  if (missingExamplesToGrade == 0)
    *report << " *** None ***" << endl;

  *report << endl << endl << endl
          << "Missing Ground Truth Examples" << endl
          << "=============================" << endl;

  for (it = examplesToGrade.begin (); it != examplesToGrade.end (); it++)
  {
    FeatureVectorPtr gradeEntry = *it;
    KKStr rootName = osGetRootName (gradeEntry->ImageFileName ());
    FeatureVectorPtr match = groundTruth.LookUpByRootName (rootName);
    if (match)
      continue;
    theyAreTheSame = false;
    *report << rootName << "\t" << "*** MISSING ***" << "\t" << gradeEntry->ImageFileName () << "\t" << endl;
    missingGroundTruthExamples++;
  }

  if (missingGroundTruthExamples == 0)
    *report << " *** None ***" << endl;
}  /* ValidateThatBothListsHaveSameEntries */
示例11: PushOnFront
void FeatureVectorList::PushOnFront (FeatureVectorPtr example)
{
  // Adds 'example' at the front of the list after verifying its feature
  // count matches the list's; any previously established sort order is
  // invalidated.
  // FIX: corrected "NumOfFeaturess" typo in the error message.
  KKCheck (example->NumOfFeatures () == numOfFeatures, "FeatureVectorList::PushOnFront Mismatch numOfFeatures: " << numOfFeatures << " example->NumOfFeatures: " << example->NumOfFeatures ())
  KKQueue<FeatureVector>::PushOnFront (example);
  curSortOrder = IFL_SortOrder::IFL_UnSorted;
}  /* PushOnFront */
示例12: FeatureVector
FeatureVectorPtr GrayScaleImagesFVProducer::ComputeFeatureVector (const Raster& srcImage,
                                                                  const MLClassPtr knownClass,
                                                                  RasterListPtr intermediateImages,
                                                                  float priorReductionFactor,
                                                                  RunLog& runLog
                                                                 )
{
  // Computes the grayscale feature vector for 'srcImage'.  Large images
  // are first reduced by an integer factor so the morphological work area
  // stays within 'totPixsForMorphOps' pixels.
  // Returns a newly allocated FeatureVector (caller owns it), or NULL if
  // the size-reduction step throws.
  // (The remainder of this function is omitted in this listing.)
  FeatureVectorPtr fv = new FeatureVector (maxNumOfFeatures);
  fv->MLClass (knownClass);
  float* featureData = fv->FeatureDataAlter ();
  fv->Version (Version ());

  kkint32 areaBeforeReduction = 0;
  float weighedSizeBeforeReduction = 0.0f;
  kkint32 row = 0;

  kkuint32 intensityHistBuckets[8];
  srcImage.CalcAreaAndIntensityFeatures (areaBeforeReduction,
                                         weighedSizeBeforeReduction,
                                         intensityHistBuckets
                                        );

  kkint32 srcHeight = srcImage.Height ();
  kkint32 srcWidth = srcImage.Width ();

  // Find the smallest integer reduction factor that brings the reduced
  // image area down to at most 'totPixsForMorphOps'.
  kkint32 reducedHeight = srcHeight;
  kkint32 reducedWidth = srcWidth;
  kkint32 reducedSquareArea = reducedHeight * reducedWidth;
  kkint32 reductionMultiple = 1;
  while (reducedSquareArea > totPixsForMorphOps)
  {
    ++reductionMultiple;
    reducedHeight = (srcHeight + reductionMultiple - 1) / reductionMultiple;  // ceiling division
    reducedWidth = (srcWidth + reductionMultiple - 1) / reductionMultiple;
    reducedSquareArea = reducedHeight * reducedWidth;
  }

  float totalReductionMultiple = priorReductionFactor * (float)reductionMultiple;
  float totalReductionMultipleSquared = totalReductionMultiple * totalReductionMultiple;

  // Rebuild the row-pointer tables over the member work buffers
  // (workRaster1Area/2/3); only the row tables depend on reducedHeight.
  // NOTE(review): these tables are allocated with new[] but released with
  // scalar delete — undefined behavior; they should be delete[].
  delete workRaster1Rows; workRaster1Rows = new uchar*[reducedHeight];
  delete workRaster2Rows; workRaster2Rows = new uchar*[reducedHeight];
  delete workRaster3Rows; workRaster3Rows = new uchar*[reducedHeight];

  uchar* wp1 = workRaster1Area;
  uchar* wp2 = workRaster2Area;
  uchar* wp3 = workRaster3Area;
  for (row = 0; row < reducedHeight; ++row)
  {
    workRaster1Rows[row] = wp1;
    workRaster2Rows[row] = wp2;
    workRaster3Rows[row] = wp3;
    wp1 += reducedWidth;
    wp2 += reducedWidth;
    wp3 += reducedWidth;
  }

  // Wrap the shared member buffers as Raster objects of the reduced size.
  Raster workRaster1 (reducedHeight, reducedWidth, workRaster1Area, workRaster1Rows);
  Raster workRaster2 (reducedHeight, reducedWidth, workRaster2Area, workRaster2Rows);
  Raster workRaster3 (reducedHeight, reducedWidth, workRaster3Area, workRaster3Rows);

  Raster const * initRaster = NULL;
  RasterPtr wr1 = NULL;
  RasterPtr wr2 = NULL;

  if (reductionMultiple > 1)
  {
    try
    {
      ReductionByMultiple (reductionMultiple, srcImage, workRaster1);
    }
    catch (...)
    {
      runLog.Level (-1) << endl << "GrayScaleImagesFVProducer::ComputeFeatureVector ***ERROR*** Exception calling 'ReductionByMultiple'." << endl << endl;
      // NOTE(review): 'fv' is leaked on this early return; a 'delete fv;'
      // before returning NULL would fix it.
      return NULL;
    }
    initRaster = &workRaster1;
    wr1 = &workRaster2;
    wr2 = &workRaster3;
  }
  else
  {
    // No reduction needed: work directly off the source image.
    initRaster = &srcImage;
    wr1 = &workRaster1;
    wr2 = &workRaster2;
  }

  if (areaBeforeReduction < 20)
  {
    // Degenerate (tiny) image: flag every feature with a sentinel value
    // and return early.
    for (kkint32 tp = 0; tp < maxNumOfFeatures; tp++)
      featureData[tp] = 9999999;
    return fv;
  }
//......... remainder of this function omitted in the original listing .........
示例13: DateTime
//.........这里部分代码省略.........
}
origFeatureData->SortByRootName (false);
FeatureVectorListPtr extractedFeatures = _fvProducerFactory->ManufacturFeatureVectorList (true);
extractedFeatures->Version (fvProducer->Version ());
fileNameList->Sort (false);
KKStrList::iterator fnIDX;
fnIDX = fileNameList->begin (); // fileNameList
KKStrPtr imageFileName;
kkuint32 numImagesFoundInOrigFeatureData = 0;
kkuint32 numOfNewFeatureExtractions = 0;
for (fnIDX = fileNameList->begin (); (fnIDX != fileNameList->end ()) && (!_cancelFlag); ++fnIDX)
{
imageFileName = *fnIDX;
// pv414-_002_20140414-162243_02068814-1261.bmp
KKStr rootName = osGetRootName (*imageFileName);
if (rootName == "pv414-_002_20140414-162243_02068814-1261")
cout << "Stop Here." << endl;
bool validImageFileFormat = SupportedImageFileFormat (*imageFileName);
if (!validImageFileFormat)
continue;
bool featureVectorCoputaionSuccessful = false;
FeatureVectorPtr origFV = origFeatureData->BinarySearchByName (*imageFileName);
if (origFV)
numImagesFoundInOrigFeatureData++;
if (origFV && versionsAreSame)
{
featureVectorCoputaionSuccessful = true;
if (_useDirectoryNameForClassName)
{
if (origFV->MLClass () != _unknownClass)
{
_changesMade = true;
origFV->MLClass (_unknownClass);
}
}
else if ((origFV->MLClass ()->UnDefined ()) && (origFV->MLClass () != _unknownClass))
{
_changesMade = true;
origFV->MLClass (_unknownClass);
}
extractedFeatures->PushOnBack (origFV);
origFeatureData->DeleteEntry (origFV);
}
else
{
// We either DON'T have an original image or versions are not the same.
KKStr fullFileName = osAddSlash (_dirName) + (*imageFileName);
FeatureVectorPtr fv = NULL;
try
{
示例14: FeatureVector
FeatureVectorPtr FeatureEncoder::EncodeAExample (FileDescConstPtr encodedFileDesc,
                                                 FeatureVectorPtr src
                                                )
{
  // Produces a new FeatureVector holding the encoded form of 'src';
  // class, predicted class, and training weight are carried over.
  // Caller owns the returned object.
  FeatureVectorPtr result = new FeatureVector (numEncodedFeatures);
  result->MLClass (src->MLClass ());
  result->PredictedClass (src->PredictedClass ());
  //result->Version (src->Version ());
  result->TrainWeight (src->TrainWeight ());

  const float* srcData = src->FeatureData ();
  for (kkint32 fIdx = 0; fIdx < numOfFeatures; ++fIdx)
  {
    float value = srcData [srcFeatureNums[fIdx]];
    kkint32 destIdx = destFeatureNums[fIdx];
    switch (destWhatToDo[fIdx])
    {
      case FeWhatToDo::FeAsIs:
        // Numeric field: copied straight through.
        result->AddFeatureData (destIdx, value);
        break;

      case FeWhatToDo::FeBinary:
        // Categorical field: expanded into one 0/1 indicator per category.
        for (kkint32 cat = 0; cat < cardinalityDest[fIdx]; ++cat)
        {
          float indicator = ((kkint32)value == cat) ? 1.0f : 0.0f;
          result->AddFeatureData (destIdx, indicator);
          ++destIdx;
        }
        break;

      case FeWhatToDo::FeScale:
        // Normalized by the field's cardinality.
        result->AddFeatureData (destIdx, (value / (float)cardinalityDest[fIdx]));
        break;
    }
  }
  return result;
}  /* EncodeAExample */
示例15: sizeof
void FeatureEncoder::EncodeIntoSparseMatrix
                               (FeatureVectorListPtr src,
                                ClassAssignments& assignments,
                                XSpacePtr& xSpace,
                                kkint32& totalxSpaceUsed,
                                struct svm_problem& prob,
                                RunLog& log
                               )
{
  // Encodes every example in 'src' into the libsvm sparse problem
  // structures: 'prob' receives labels, indices, names and per-example
  // node pointers; 'xSpace' is a single malloc'ed pool of svm_node
  // entries sized by DetermineNumberOfNeededXspaceNodes.
  // (The remainder of this function is omitted in this listing.)
  FeatureVectorListPtr compressedExamples = NULL;
  FeatureVectorListPtr examplesToUseFoXSpace = NULL;
  kkint32 xSpaceUsed = 0;
  totalxSpaceUsed = 0;

  examplesToUseFoXSpace = src;

  kkint32 numOfExamples = examplesToUseFoXSpace->QueueSize ();
  //kkint32 elements = numOfExamples * xSpaceNeededPerExample;

  prob.l = numOfExamples;
  prob.y = (double*)malloc (prob.l * sizeof (double));
  prob.x = (struct svm_node **) malloc (prob.l * sizeof (struct svm_node*));
  prob.index = new kkint32[prob.l];
  prob.exampleNames.clear ();

  kkint32 numNeededXspaceNodes = DetermineNumberOfNeededXspaceNodes (examplesToUseFoXSpace);

  kkint32 totalBytesForxSpaceNeeded = (numNeededXspaceNodes + 10) * sizeof (struct svm_node);  // I added '10' to elements because I am paranoid

  xSpace = (struct svm_node*) malloc (totalBytesForxSpaceNeeded);
  if (xSpace == NULL)
  {
    log.Level (-1) << endl << endl << endl
                   << " FeatureEncoder::Compress *** Failed to allocates space for 'xSpace' ****" << endl
                   << endl
                   << " Space needed [" << totalBytesForxSpaceNeeded << "]" << endl
                   << " Num of Examples [" << numOfExamples << "]" << endl
                   << " Num XSpaceNodesNeeded [" << numNeededXspaceNodes << "]" << endl
                   << endl;
    // we sill have to allocate space for each individual training example separately.
    //throw "FeatureEncoder::Compress Allocation of memory for xSpace Failed.";
  }

  prob.W = NULL;

  kkint32 i = 0;
  FeatureVectorPtr example = NULL;
  MLClassPtr lastMlClass = NULL;
  kkint16 lastClassNum = -1;

  kkint32 bytesOfxSpacePerExample = xSpaceNeededPerExample * sizeof (struct svm_node);

  for (i = 0; i < prob.l; i++)
  {
    if (totalxSpaceUsed > numNeededXspaceNodes)
    {
      // Overrun of the pre-sized node pool; logged but not recovered here.
      log.Level (-1) << endl << endl
                     << "FeatureEncoder::Compress ***ERROR*** We have exceeded the number of XSpace nodes allocated." << endl
                     << endl;
    }

    example = examplesToUseFoXSpace->IdxToPtr (i);

    // Cache the class -> numeric-label mapping; consecutive examples
    // often share a class.
    if (example->MLClass () != lastMlClass)
    {
      lastMlClass = example->MLClass ();
      lastClassNum = assignments.GetNumForClass (lastMlClass);
    }

    prob.y[i] = lastClassNum;
    prob.index[i] = i;
    prob.exampleNames.push_back (osGetRootName (example->ExampleFileName ()));

    // NOTE(review): prob.W was set to NULL above and is not reassigned in
    // the visible code, so this per-example weight branch is dead here —
    // presumably a remnant of weighted-SVM support; confirm against the
    // omitted remainder of the function.
    if (prob.W)
    {
      prob.W[i] = example->TrainWeight () * c_Param;
      if (example->TrainWeight () <= 0.0f)
      {
        log.Level (-1) << endl
                       << "FeatureEncoder::EncodeIntoSparseMatrix ***ERROR*** Example[" << example->ExampleFileName () << "]" << endl
                       << "  has a TrainWeight value of 0 or less defaulting to 1.0" << endl
                       << endl;
        prob.W[i] = 1.0 * c_Param;
      }
    }

    // Fallback path when the pooled allocation failed: allocate this
    // example's nodes individually.
    if (xSpace == NULL)
    {
      struct svm_node* xSpaceThisExample = (struct svm_node*) malloc (bytesOfxSpacePerExample);
      prob.x[i] = xSpaceThisExample;
      EncodeAExample (example, prob.x[i], xSpaceUsed);
      if (xSpaceUsed < xSpaceNeededPerExample)
      {
        // Shrink the per-example buffer to what was actually used.
        kkint32 bytesNeededForThisExample = xSpaceUsed * sizeof (struct svm_node);
        struct svm_node* smallerXSpaceThisExample = (struct svm_node*) malloc (bytesNeededForThisExample);
        memcpy (smallerXSpaceThisExample, xSpaceThisExample, bytesNeededForThisExample);
        free (xSpaceThisExample);
//......... remainder of this function omitted in the original listing .........