This article collects typical usage examples of the C++ method Statistics::Average. If you are wondering exactly what Statistics::Average does, how to call it, or where to find example code for it, the hand-picked examples below may help. You can also explore further usage examples of Statistics, the class that declares this method.
The 15 code examples of Statistics::Average shown below are sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
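Before the examples, here is a minimal sketch of the usage pattern behind all of them: accumulate values with AddData() and then query Average() and the other accessors. The header path and namespace are assumptions based on the ISIS library these examples come from and may differ in your installation.

// Minimal sketch (assumed header; adjust to your ISIS installation).
#include <iostream>
#include "Statistics.h"

int main() {
  Isis::Statistics stats;

  // Accumulate a small buffer of values; special pixels (Null, HRS, ...)
  // would be counted separately and excluded from the mean.
  double values[] = { 1.0, 2.0, 3.0, 4.0 };
  stats.AddData(values, 4);

  // Average() is the mean of the valid pixels added so far.
  std::cout << "Mean:         " << stats.Average() << std::endl;
  std::cout << "Valid pixels: " << stats.ValidPixels() << std::endl;
  return 0;
}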
Example 1: pvlOut
void pvlOut(Statistics stats1, Statistics stats2, QString name, int start,
            int end, PvlObject *one, PvlObject *two) {
  // Summarize the first set of statistics into a PVL group.
  PvlGroup left(name);
  left += PvlKeyword("StartLine", toString(start + 1));
  left += PvlKeyword("EndLine", toString(end));
  left += PvlKeyword("TotalPixels", toString(stats1.TotalPixels()));
  left += PvlKeyword("ValidPixels", toString(stats1.ValidPixels()));
  if(stats1.ValidPixels() > 0) {
    left += PvlKeyword("Mean", toString(stats1.Average()));
    left += PvlKeyword("StandardDeviation", toString(stats1.StandardDeviation()));
    left += PvlKeyword("Minimum", toString(stats1.Minimum()));
    left += PvlKeyword("Maximum", toString(stats1.Maximum()));
  }
  one->addGroup(left);

  // Summarize the second set of statistics into a matching group.
  PvlGroup right(name);
  right += PvlKeyword("StartLine", toString(start + 1));
  right += PvlKeyword("EndLine", toString(end));
  right += PvlKeyword("TotalPixels", toString(stats2.TotalPixels()));
  right += PvlKeyword("ValidPixels", toString(stats2.ValidPixels()));
  if(stats2.ValidPixels() > 0) {
    right += PvlKeyword("Mean", toString(stats2.Average()));
    right += PvlKeyword("StandardDeviation", toString(stats2.StandardDeviation()));
    right += PvlKeyword("Minimum", toString(stats2.Minimum()));
    right += PvlKeyword("Maximum", toString(stats2.Maximum()));
  }
  two->addGroup(right);
}
Example 2: TranslateCode
// Translate the code once it is found
void TranslateCode() {
  // Read the code from the image
  Chip chip(8*RADIUS, 64*RADIUS);
  chip.TackCube(codeSample + 3*RADIUS, codeLine + 31*RADIUS);
  chip.Load(cube);

  for (int j=0; j<32; j++) {
    for (int i=0; i<4; i++) {
      Statistics stats;
      // Get the average of the subchip
      for (int x=1; x<=2*RADIUS; x++) {
        for (int y=1; y<=2*RADIUS; y++) {
          stats.AddData(chip.GetValue(i*2*RADIUS + x, j*2*RADIUS + y));
        }
      }
      // See if the bit is on or off
      if (stats.Average() > 20000)
        code[i][31-j] = true;
      else
        code[i][31-j] = false;
    }
  }
}
Example 3: main
int main() {
  Isis::Preference::Preferences(true);
  cerr << "GroupedStatistics unitTest!!!\n\n";

  // test constructor
  cerr << "testing constructor...\n\n";
  GroupedStatistics *groupedStats = new GroupedStatistics();

  // test AddStatistic
  cerr << "testing AddStatistic...\n\n";
  groupedStats->AddStatistic("Height", 71.5);

  // test copy constructor
  cerr << "testing copy constructor...\n\n";
  GroupedStatistics *groupedStats2 = new GroupedStatistics(*groupedStats);

  // test GetStatistics
  cerr << "testing GetStatistics...\n";
  Statistics stats = groupedStats2->GetStatistics("Height");
  cerr << "  " << stats.Average() << "\n\n";

  // test GetStatisticTypes
  cerr << "testing GetStatisticTypes...\n";
  QVector< QString > statTypes = groupedStats->GetStatisticTypes();
  for(int i = 0; i < statTypes.size(); i++)
    cerr << "  " << statTypes[i].toStdString() << "\n";
  cerr << "\n";

  // test destructor
  delete groupedStats;
  delete groupedStats2;
  return 0;
}
Example 4: guess
/**
 * @brief Compute the initial guess of the fit
 *
 * This method provides the non-linear fit with an initial guess of the
 * solution. It involves a linear fit to the latter half of the data to
 * provide the first two coefficients, the difference of the averages of the
 * residuals at both ends of the data set as the third, and -5 divided by the
 * last line time as the final (fourth) element...a bit involved really.
 *
 * @return NLVector 4-element vector of the initial guess coefficients
 */
NonLinearLSQ::NLVector DriftCorrect::guess() {
  int n = _data.dim();
  int nb = n - _badLines;

  HiVector b1 = _data.subarray(0, nb-1);
  LowPassFilterComp gfilter(b1, _history, _sWidth, _sIters);

  int nb2 = nb/2;
  _b2 = gfilter.ref();
  HiVector cc = poly_fit(_b2.subarray(nb2, _b2.dim()-1), nb2-1);

  // Compute the 3rd term guess by getting the average of the residual
  // at both ends of the data set.
  Statistics s;

  // Get the head of the data set
  int n0 = MIN(nb, 20);
  for ( int k = 0 ; k < n0 ; k++ ) {
    double d = _b2[k] - (cc[0] + cc[1] * _timet(k));
    s.AddData(&d, 1);
  }
  double head = s.Average();

  // Get the tail of the data set
  s.Reset();
  n0 = (int) (0.9 * nb);
  for ( int l = n0 ; l < nb ; l++ ) {
    double d = _b2[l] - (cc[0] + cc[1] * _timet(l));
    s.AddData(&d, 1);
  }
  double tail = s.Average();

  // Populate the guess with the results
  NLVector g(4, 0.0);
  g[0] = cc[0];
  g[1] = cc[1];
  g[2] = head - tail;
  g[3] = -5.0 / _timet(nb-1);

  _guess = g;
  _history.add("Guess[" + ToString(_guess[0]) + "," +
               ToString(_guess[1]) + "," +
               ToString(_guess[2]) + "," +
               ToString(_guess[3]) + "]");
  return (g);
}
Example 5: CheckFramelets
/**
 * This method performs pass 1 on one image. It analyzes each framelet's
 * statistics and populates the necessary global variables.
 *
 * @param progress Progress message
 * @param theCube Current cube that needs processing
 *
 * @return bool True if the file contains a valid framelet
 */
bool CheckFramelets(string progress, Cube &theCube) {
  bool foundValidFramelet = false;
  LineManager mgr(theCube);
  Progress prog;
  prog.SetText(progress);
  prog.SetMaximumSteps(theCube.Lines());
  prog.CheckStatus();

  vector<double> frameletAvgs;
  // We need to store off the framelet information, because if no good
  // framelets were found then no data should be added to the
  // global variable for framelets, just files.
  vector< pair<int,double> > excludedFrameletsTmp;
  Statistics frameletStats;

  for(int line = 1; line <= theCube.Lines(); line++) {
    // Reset the statistics at the start of each framelet
    if((line-1) % numFrameLines == 0) {
      frameletStats.Reset();
    }

    mgr.SetLine(line);
    theCube.Read(mgr);
    frameletStats.AddData(mgr.DoubleBuffer(), mgr.size());

    // At the last line of a framelet, decide whether to keep or exclude it
    if((line-1) % numFrameLines == numFrameLines-1) {
      if(IsSpecial(frameletStats.StandardDeviation()) ||
         frameletStats.StandardDeviation() > maxStdev) {
        excludedFrameletsTmp.push_back(
          pair<int,double>((line-1)/numFrameLines, frameletStats.StandardDeviation())
        );
      }
      else {
        foundValidFramelet = true;
      }
      frameletAvgs.push_back(frameletStats.Average());
    }
    prog.CheckStatus();
  }

  inputFrameletAverages.push_back(frameletAvgs);

  if(foundValidFramelet) {
    for(unsigned int i = 0; i < excludedFrameletsTmp.size(); i++) {
      excludedFramelets.insert(pair< pair<int,int>, double>(
        pair<int,int>(currImage, excludedFrameletsTmp[i].first),
        excludedFrameletsTmp[i].second
      ));
    }
  }

  return foundValidFramelet;
}
Example 6: cimage_dark
void HiImageClean::cimage_dark() {
  // Combine calibration region
  std::vector<H2DBuf> blobs;
  blobs.push_back(_caldark);
  blobs.push_back(_ancdark);
  H2DBuf dark = appendLines(blobs);

  int nsamples(dark.dim2());
  int nlines(dark.dim1());

  // Compute averages for the dark area
  int firstDark(4);
  int ndarks(dark.dim2() - firstDark);
  _predark = H1DBuf(nlines);
  for (int line = 0 ; line < nlines ; line++) {
    Statistics darkave;
    darkave.AddData(&dark[line][firstDark], ndarks);
    _predark[line] = darkave.Average();
  }

  // Get statistics to determine the state of the dark data and the next course of action
  _darkStats.Reset();
  _darkStats.AddData(&_predark[0], _predark.dim1());
  if (_darkStats.ValidPixels() <= 0) {
    std::ostringstream mess;
    mess << "No valid pixels in calibration/ancillary dark regions, "
         << "binning = " << _binning << std::ends;
    throw(iException::Message(iException::Programmer, mess.str(), _FILEINFO_));
  }

  // Now apply a smoothing filter
  QuickFilter smooth(_predark.dim1(), _filterWidth, 1);
  smooth.AddLine(&_predark[0]);
  nsamples = smooth.Samples();

  _dark = H1DBuf(nsamples);
  for (int s = 0 ; s < nsamples ; s++) {
    _dark[s] = smooth.Average(s);
  }

  // Now apply to all calibration data
  BigInt nbad(0);
  _calimg  = row_apply(_calimg,  _dark, 0, nbad, 1.0);
  _calbuf  = row_apply(_calbuf,  _dark, 0, nbad, 1.0);
  _caldark = row_apply(_caldark, _dark, 0, nbad, 1.0);
  _ancbuf  = row_apply(_ancbuf,  _dark, _firstImageLine, nbad, 1.0);
  _ancdark = row_apply(_ancdark, _dark, _firstImageLine, nbad, 1.0);
  return;
}
Example 7: gatherAverages
void gatherAverages(Buffer &in) {
  Statistics lineStats;
  lineStats.AddData(in.DoubleBuffer(), in.size());

  double average = lineStats.Average();
  lineAverages[in.Band() - 1][in.Line() - 1] = average;

  // The cube average will finish being calculated before the correction is applied.
  if(!IsSpecial(average)) {
    cubeAverage[in.Band() - 1] += average;
  }
  else {
    numIgnoredLines++;
  }
}
Example 8: PvlStats
// Return a PVL group containing the statistical information
PvlGroup PvlStats(Statistics &stats, const QString &name) {
  // Construct a label with the results
  PvlGroup results(name);
  if(stats.ValidPixels() != 0) {
    results += PvlKeyword("Average", toString(stats.Average()));
    results += PvlKeyword("StandardDeviation", toString(stats.StandardDeviation()));
    results += PvlKeyword("Variance", toString(stats.Variance()));
    results += PvlKeyword("Minimum", toString(stats.Minimum()));
    results += PvlKeyword("Maximum", toString(stats.Maximum()));
  }
  results += PvlKeyword("TotalPixels", toString(stats.TotalPixels()));
  results += PvlKeyword("ValidPixels", toString(stats.ValidPixels()));
  results += PvlKeyword("NullPixels", toString(stats.NullPixels()));
  results += PvlKeyword("LisPixels", toString(stats.LisPixels()));
  results += PvlKeyword("LrsPixels", toString(stats.LrsPixels()));
  results += PvlKeyword("HisPixels", toString(stats.HisPixels()));
  results += PvlKeyword("HrsPixels", toString(stats.HrsPixels()));
  return results;
}
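A hedged sketch of how a caller might use a helper like PvlStats: fill a Statistics object from a buffer, build the group, and print it. This fragment assumes it lives inside an ISIS application where PvlStats above is in scope and the Pvl and SpecialPixel headers are included; the buffer contents are illustrative only.

// Hypothetical caller fragment (assumes "Pvl.h" and "SpecialPixel.h" are included).
Statistics bandStats;
double buffer[] = { 10.0, 12.0, 11.0, Isis::Null, 13.0 };  // one Null pixel for illustration
bandStats.AddData(buffer, 5);

// Build the group and append it to a PVL document, e.g. for the print file.
Pvl results;
results.addGroup(PvlStats(bandStats, "BandStatistics"));
std::cout << results << std::endl;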
Example 9: IsisMain
void IsisMain() {
  Preference::Preferences(true);

  // Import the VICAR file into an ISIS cube
  ProcessImportVicar p;
  Pvl vlab;
  p.SetVicarFile("unitTest.img", vlab);
  p.SetOutputCube("TO");
  p.StartProcess();
  p.EndProcess();
  cout << vlab << endl;

  // Re-open the imported cube and report its statistics
  Process p2;
  CubeAttributeInput att;
  QString file = Application::GetUserInterface().GetFileName("TO");
  Cube *icube = p2.SetInputCube(file, att);
  Statistics *stat = icube->statistics();
  cout << stat->Average() << endl;
  cout << stat->Variance() << endl;
  p2.EndProcess();
  QFile::remove(file);
}
Example 10: getStats
//**********************************************************
// DOUSER - Get statistics on a column or row of pixels
//**********************************************************
void getStats(Buffer &in) {
  Statistics stats;
  stats.AddData(in.DoubleBuffer(), in.size());

  band.push_back(in.Band());
  element.push_back(in.Sample());

  // Sort the input buffer
  vector<double> pixels;
  for(int i = 0; i < in.size(); i++) {
    if(IsValidPixel(in[i])) pixels.push_back(in[i]);
  }
  sort(pixels.begin(), pixels.end());

  // Now obtain the median value and store in the median vector
  int size = pixels.size();
  if(size != 0) {
    int med = size / 2;
    if(size % 2 == 0) {
      median.push_back((pixels[med-1] + pixels[med]) / 2.0);
    }
    else {
      median.push_back(pixels[med]);
    }
  }
  else {
    median.push_back(Isis::Null);
  }

  // Store the statistics in the appropriate vectors
  average.push_back(stats.Average());
  stddev.push_back(stats.StandardDeviation());
  validpixels.push_back(stats.ValidPixels());
  minimum.push_back(stats.Minimum());
  maximum.push_back(stats.Maximum());
}
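The Statistics class used in these examples reports the mean but not a median, which is why getStats computes one by hand. As a minimal sketch under that assumption, the same median logic can be factored into a standalone helper (IsValidPixel and Isis::Null come from the ISIS SpecialPixel header; the helper name is hypothetical):

#include <algorithm>
#include <vector>
#include "SpecialPixel.h"   // assumed ISIS header for IsValidPixel and Isis::Null

// Hypothetical helper mirroring the median logic above; returns Isis::Null
// when no valid pixels are present.
double MedianOfValid(const std::vector<double> &pixels) {
  std::vector<double> valid;
  for (size_t i = 0; i < pixels.size(); i++) {
    if (Isis::IsValidPixel(pixels[i])) valid.push_back(pixels[i]);
  }
  if (valid.empty()) return Isis::Null;

  std::sort(valid.begin(), valid.end());
  size_t med = valid.size() / 2;
  if (valid.size() % 2 == 0) {
    return (valid[med-1] + valid[med]) / 2.0;
  }
  return valid[med];
}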
Example 11: writeFlat
// Function to write the stats values to a flat file
void writeFlat(ofstream &os, Statistics &s) {
  os << ValidateValue(s.Minimum()) << "," <<
        ValidateValue(s.Maximum()) << "," <<
        ValidateValue(s.Average()) << "," <<
        ValidateValue(s.StandardDeviation()) << ",";
}
Example 12: cimage_mask
void HiImageClean::cimage_mask() {
  // Combine calibration region
  std::vector<H2DBuf> blobs;
  blobs.push_back(_calbuf);
  blobs.push_back(_calimg);
  blobs.push_back(_caldark);
  H2DBuf calibration = appendSamples(blobs);

  // Set the mask depending on the binning mode
  _firstMaskLine = 20;
  _lastMaskLine  = 39;
  switch (_binning) {
    case 1:
      _firstMaskLine = 21;
      _lastMaskLine  = 38;
      break;
    case 2:
      _firstMaskLine = 21;
      _lastMaskLine  = 29;
      break;
    case 3:
      _firstMaskLine = 21;
      _lastMaskLine  = 26;
      break;
    case 4:
      _firstMaskLine = 21;
      _lastMaskLine  = 24;
      break;
    case 8:
      _firstMaskLine = 21;
      _lastMaskLine  = 22;
      break;
    case 16:
      _firstMaskLine = 21;
      _lastMaskLine  = 21;
      break;
    default:
      std::ostringstream msg;
      msg << "Invalid binning mode (" << _binning
          << ") - valid are 1-4, 8 and 16" << std::ends;
      throw(iException::Message(iException::Programmer, msg.str(), _FILEINFO_));
  }

  // Initialize lines and samples of mask area of interest
  int nsamples(calibration.dim2());
  int nlines(_lastMaskLine - _firstMaskLine + 1);

  // Compute averages for the mask area
  _premask = H1DBuf(nsamples);
  for (int samp = 0 ; samp < nsamples; samp++) {
    H1DBuf maskcol = slice(calibration, samp);
    Statistics maskave;
    maskave.AddData(&maskcol[_firstMaskLine], nlines);
    _premask[samp] = maskave.Average();
  }
  _mask = _premask.copy();

  // Get statistics to determine state of mask and next course of action
  _maskStats.Reset();
  _maskStats.AddData(&_premask[0], nsamples);
  if (_maskStats.ValidPixels() <= 0) {
    std::ostringstream mess;
    mess << "No valid pixels in calibration mask region in lines "
         << (_firstMaskLine+1) << " to " << (_lastMaskLine+1) << ", binning = "
         << _binning << std::ends;
    throw(iException::Message(iException::Programmer, mess.str(), _FILEINFO_));
  }

  // If there are any missing values, replace with mins/maxs of region
  if (_maskStats.TotalPixels() != _maskStats.ValidPixels()) {
    for (int samp = 0 ; samp < nsamples ; samp++) {
      if (Pixel::IsLow(_premask[samp]) || Pixel::IsNull(_premask[samp])) {
        _mask[samp] = _maskStats.Minimum();
      }
      else if (Pixel::IsHigh(_premask[samp])) {
        _mask[samp] = _maskStats.Maximum();
      }
    }
  }

  // Now apply to all calibration data
  BigInt nbad(0);
  _calimg  = column_apply(_calimg,  _mask, _firstImageSample,  nbad, 1.0);
  _calbuf  = column_apply(_calbuf,  _mask, _firstBufferSample, nbad, 1.0);
  _caldark = column_apply(_caldark, _mask, _firstDarkSample,   nbad, 1.0);
  _ancbuf  = column_apply(_ancbuf,  _mask, _firstBufferSample, nbad, 1.0);
  _ancdark = column_apply(_ancdark, _mask, _firstDarkSample,   nbad, 1.0);
  return;
}
Example 13: calculateSpecificEnergy
/**
 * This calculates the coefficients for specific energy corrections
 */
void calculateSpecificEnergy(Cube *icube) {
  PvlGroup &inst = icube->label()->findGroup("Instrument", Pvl::Traverse);
  bool vis = (inst["Channel"][0] != "IR");

  double coefficient = 1.0;
  if(inst["GainMode"][0] == "HIGH") {
    coefficient /= 2;
  }
  if(vis && inst["SamplingMode"][0] == "HI-RES") {
    coefficient *= 3;
  }
  if(vis) {
    coefficient /= toDouble(inst["ExposureDuration"][1]) / 1000.0;
  }
  else {
    coefficient /= (toDouble(inst["ExposureDuration"][0]) * 1.01725) / 1000.0 - 0.004;
  }

  QString specEnergyFile = "$cassini/calibration/vims/";
  if(vis) {
    specEnergyFile += "vis_perf_v????.cub";
  }
  else {
    specEnergyFile += "ir_perf_v????.cub";
  }
  QString waveCalFile = "$cassini/calibration/vims/wavecal_v????.cub";

  FileName specEnergyFileName(specEnergyFile);
  specEnergyFileName = specEnergyFileName.highestVersion();
  FileName waveCalFileName(waveCalFile);
  waveCalFileName = waveCalFileName.highestVersion();

  Cube specEnergyCube;
  specEnergyCube.open(specEnergyFileName.expanded());
  Cube waveCalCube;
  waveCalCube.open(waveCalFileName.expanded());

  LineManager specEnergyMgr(specEnergyCube);
  LineManager waveCalMgr(waveCalCube);

  for(int i = 0; i < icube->bandCount(); i++) {
    Statistics specEnergyStats;
    Statistics waveCalStats;

    if(vis) {
      specEnergyMgr.SetLine(1, i + 1);
      waveCalMgr.SetLine(1, i + 1);
    }
    else {
      specEnergyMgr.SetLine(1, i + 1);
      // ir starts at band 97
      waveCalMgr.SetLine(1, i + 96 + 1);
    }

    specEnergyCube.read(specEnergyMgr);
    waveCalCube.read(waveCalMgr);

    specEnergyStats.AddData(specEnergyMgr.DoubleBuffer(), specEnergyMgr.size());
    waveCalStats.AddData(waveCalMgr.DoubleBuffer(), waveCalMgr.size());

    double bandCoefficient = coefficient * specEnergyStats.Average() * waveCalStats.Average();
    specificEnergyCorrections.push_back(bandCoefficient);
  }
}
Example 14: IsisMain
// Main Program
void IsisMain() {
  UserInterface &ui = Application::GetUserInterface();
  Isis::FileName fromFile = ui.GetFileName("FROM");

  Isis::Cube inputCube;
  inputCube.open(fromFile.expanded());

  // Check to make sure we got the cube properly
  if(!inputCube.isOpen()) {
    QString msg = "Could not open FROM cube" + fromFile.expanded();
    throw IException(IException::User, msg, _FILEINFO_);
  }

  ProcessByLine processByLine;
  Cube *icube = processByLine.SetInputCube("FROM");
  int totalSamples = icube->sampleCount();

  // We'll be going through the cube by line, manually differentiating
  // between phases
  Isis::LineManager lineManager(inputCube);
  lineManager.begin();

  Table hifix("HiRISE Ancillary");
  int channel = icube->group("Instrument")["ChannelNumber"];
  if(channel == 0) {
    phases = channel0Phases;
  }
  else {
    phases = channel1Phases;
  }

  int binning_mode = icube->group("Instrument")["Summing"];
  if(binning_mode != 1 && binning_mode != 2) {
    /*IString msg = "You may only use input with binning mode 1 or 2, not";
    msg += binning_mode;
    throw iException::Message(iException::User, msg, _FILEINFO_);*/
    DestripeForOtherBinningModes(totalSamples);
  }
  else {
    // Adjust phase breaks based on the binning mode
    for(int i = 0 ; i < num_phases ; i++) {
      phases[i] /= binning_mode;
    }

    // Phases must be able to stretch across the entire cube
    if(totalSamples != phases[3]) {
      QString required_samples = toString(phases[3]);
      QString bin_QString = toString(binning_mode);
      QString msg = "image must have exactly ";
      msg += required_samples;
      msg += " samples per line for binning mode ";
      msg += bin_QString;
      throw IException(IException::User, msg, _FILEINFO_);
    }

    // Index starts at 1 and will go up to totalLines. This must be done since
    // lines go into different statistics vectors based on their index
    myIndex = 1;
    processByLine.StartProcess(getStats);

    // This program is trying to find horizontal striping in the image that occurs
    // in every other line, but at runtime we do not know whether that striping
    // occurs on the odd numbered lines (1, 3, 5, etc.) or the even numbered
    // ones (2, 4, 6, etc.). The below algorithm determines which of these is the
    // case.
    QString parity = ui.GetString("PARITY");
    if(parity == "EVEN") {
      offset = 1;
    }
    else if(parity == "ODD") {
      offset = 0;
    }
    else {
      // PRECONDITION: getStats must have been run
      long double maxDiff = 0;
      int maxDiffIndex = 0;
      for(int i = 0 ; i < num_phases ; i++) {
        long double thisDiff;
        thisDiff = lineStats[i].Average() - stats.Average();
        if(thisDiff < 0) {
          thisDiff *= -1;
        }
        if(thisDiff > maxDiff) {
          maxDiff = thisDiff;
          maxDiffIndex = i;
        }
      }
      if(maxDiffIndex == 1 || maxDiffIndex == 3) {
        offset = 1;
      }
      else {
        offset = 0;
      }
    }
    // Again we must reset the index, because we apply corrections only on every
    //......... remainder of this example omitted .........
Example 15: CreateTemporaryData
/**
 * This method is the pass 2 processing routine. A ProcessByBrick
 * will call this method for sets of data (depending on the camera
 * type) and this method is responsible for writing the entire output
 * temporary cube.
 *
 * @param in Input raw image data, not including excluded files
 */
void CreateTemporaryData(Buffer &in) {
  /**
   * Line scan cameras process by frame columns.
   */
  if(cameraType == LineScan) {
    // The statistics of every column of data need to be known
    // before we can write to the temp file. Gather stats for this
    // column.
    Statistics inputColStats;
    for(int i = 0; i < in.size(); i++) {
      inputColStats.AddData(in[i]);

      // We'll also need the stats for the entire frame in order to
      // normalize and in order to decide whether or not we want
      // to toss out the frame
      inputFrameStats.AddData(in[i]);
    }

    // Store off the column stats
    outputTmpAverages[in.Sample()-1] = inputColStats.Average();
    outputTmpCounts[in.Sample()-1] = inputColStats.ValidPixels();

    // Test if this is the last column and we've got all of our stats
    if(in.Sample() == numOutputSamples) {
      // Decide if we want this data
      if(IsSpecial(inputFrameStats.StandardDeviation()) ||
         inputFrameStats.StandardDeviation() > maxStdev) {
        // We don't want this data...
        // CreateNullData is a helper method for this case that
        // nulls out the stats
        CreateNullData();

        // Record the exclusion
        PvlGroup currExclusion("ExcludedLines");
        currExclusion += PvlKeyword("FrameStartLine", iString(in.Line()));
        currExclusion += PvlKeyword("ValidPixels", iString(inputFrameStats.ValidPixels()));
        if(!IsSpecial(inputFrameStats.StandardDeviation()))
          currExclusion += PvlKeyword("StandardDeviation", inputFrameStats.StandardDeviation());
        else
          currExclusion += PvlKeyword("StandardDeviation", "N/A");
        excludedDetails[excludedDetails.size()-1].AddGroup(currExclusion);
      }

      // Let's write our data... CreateNullData took care of nulls for us
      // Band 1 is our normalized average
      oLineMgr->SetLine(oLineMgr->Line(), 1);
      for(int i = 0; i < (int)outputTmpAverages.size(); i++) {
        if(!IsSpecial(outputTmpAverages[i])) {
          (*oLineMgr)[i] = outputTmpAverages[i] / inputFrameStats.Average();
        }
        else {
          (*oLineMgr)[i] = Isis::Null;
        }
      }
      ocube->Write(*oLineMgr);

      oLineMgr->SetLine(oLineMgr->Line(), 2);
      // Band 2 is our valid dn counts
      for(int i = 0; i < (int)outputTmpCounts.size(); i++) {
        (*oLineMgr)[i] = outputTmpCounts[i];
        numInputDns[i] += (int)(outputTmpCounts[i] + 0.5);
      }
      ocube->Write(*oLineMgr);
      (*oLineMgr)++;

      inputFrameStats.Reset();
    }
  }
  else if(cameraType == Framing || cameraType == PushFrame) {
    // Framing cameras and push frames are treated identically;
    // the framelet size for a framelet in the framing camera
    // is the entire image!
    int framelet = (in.Line()-1) / numFrameLines;

    double stdev;
    bool excluded = Excluded(currImage, framelet, stdev);

    if(excluded && ((in.Line()-1) % numFrameLines == 0)) {
      PvlGroup currExclusion("ExcludedFramelet");
      currExclusion += PvlKeyword("FrameletStartLine", iString(in.Line()));
      currExclusion += PvlKeyword("FrameletNumber", (in.Line()-1) / numFrameLines);
      if(!IsSpecial(stdev)) {
        currExclusion += PvlKeyword("StandardDeviation", stdev);
      }
      else {
        currExclusion += PvlKeyword("StandardDeviation",
        //......... remainder of this example omitted .........