This article collects typical usage examples of the C++ Statistics::AddData method. If you are wondering what exactly C++ Statistics::AddData does, how to call it, or where to find working examples, the curated code examples here may help. You can also explore further usage examples of the Statistics class that this method belongs to.
Fifteen code examples of the Statistics::AddData method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
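Before the individual examples, here is a minimal sketch of the pattern they all share: values are accumulated into an Isis::Statistics object through AddData, and the summary values are then read back. The header name and the helper function are assumptions made purely for illustration; the bulk AddData(pointer, count) call, the single-value AddData(double) overload (used in Examples 3 and 14), and the Average/StandardDeviation/ValidPixels accessors are the ones that appear throughout the examples below.
#include "Statistics.h"   // assumed ISIS header name; adjust to your build layout

// Hypothetical helper, illustration only: accumulate one line of pixels and
// report whether any valid (non-special) pixels were seen.
bool summarizeLine(const double *pixels, int count, double &avg, double &sdev) {
  Isis::Statistics stats;
  stats.AddData(pixels, count);                 // bulk overload used by most examples below
  if (stats.ValidPixels() <= 0) return false;   // special pixels (e.g. Null) are excluded
  avg  = stats.Average();
  sdev = stats.StandardDeviation();
  return true;
}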
Example 1: FixSeams
/**
* This is the main loop over the cube data. Statistics are used to
* determine which of the two bricks actually contains DNs. The framelet
* with DNs is corrected by RemoveSeam, and the remembered offsets (kept
* as a speed optimization) are cleared when a new band begins.
*/
void FixSeams(vector<Buffer *> &inBuffers, vector<Buffer *> &outBuffers) {
Buffer &evenBuffer = *inBuffers[0];
Buffer &oddBuffer = *inBuffers[1];
Buffer &outEvenBuffer = *outBuffers[0];
Buffer &outOddBuffer = *outBuffers[1];
outEvenBuffer.Copy(evenBuffer);
outOddBuffer.Copy(oddBuffer);
Statistics evenStats;
evenStats.AddData(evenBuffer.DoubleBuffer(), evenBuffer.size());
Statistics oddStats;
oddStats.AddData(oddBuffer.DoubleBuffer(), oddBuffer.size());
int framelet = (evenBuffer.Line() - 1) / frameletSize;
if(framelet == 0) {
frameletOffsetsForBand.clear();
}
if(evenStats.ValidPixels() > oddStats.ValidPixels()) {
RemoveSeam(outEvenBuffer, framelet, evenBuffer.Band(), false);
}
else {
RemoveSeam(outOddBuffer, framelet, oddBuffer.Band(), true);
}
}
Example 2: getStats
//**********************************************************
// Get statistics on a line of pixels and break it into phases
//**********************************************************
//Add all the data to the stats Statistics object. When we compare which of
// the lines (%4 = 0, %4 = 1, %4 = 2, %4 = 3) is the furthest from the
// total average, we use stats for the "total average"
void getStats(Buffer &in) {
stats.AddData(in.DoubleBuffer(), in.size());
//Phase 1 processing
{
Buffer proc(phases[0], 1, 1, in.PixelType());
for(int quad1 = 0 ; quad1 < phases[0] ; quad1++) {
proc[quad1] = in[quad1];
}
Statistics temp;
temp.AddData(proc.DoubleBuffer(), proc.size());
lines[0].push_back(temp);
stats.AddData(proc.DoubleBuffer(), proc.size());
lineStats[0].AddData(proc.DoubleBuffer(), proc.size());
}
//Phase 2 processing
{
Buffer proc(phases[1] - phases[0], 1, 1, in.PixelType());
for(int quad2 = phases[0] ; quad2 < phases[1] ; quad2++) {
proc[quad2 - phases[0]] = in[quad2];
}
Statistics temp;
temp.AddData(proc.DoubleBuffer(), proc.size());
lines[1].push_back(temp);
stats.AddData(proc.DoubleBuffer(), proc.size());
lineStats[1].AddData(proc.DoubleBuffer(), proc.size());
}
//Phase 3 processing
{
Buffer proc(phases[2] - phases[1], 1, 1, in.PixelType());
for(int quad3 = phases[1] ; quad3 < phases[2] ; quad3++) {
proc[quad3 - phases[1]] = in[quad3];
}
Statistics temp;
temp.AddData(proc.DoubleBuffer(), proc.size());
lines[2].push_back(temp);
stats.AddData(proc.DoubleBuffer(), proc.size());
lineStats[2].AddData(proc.DoubleBuffer(), proc.size());
}
//Phase 4 processing
{
Buffer proc(phases[3] - phases[2], 1, 1, in.PixelType());
for(int quad4 = phases[2] ; quad4 < phases[3] ; quad4++) {
proc[quad4 - phases[2]] = in[quad4];
}
Statistics temp;
temp.AddData(proc.DoubleBuffer(), proc.size());
lines[3].push_back(temp);
stats.AddData(proc.DoubleBuffer(), proc.size());
lineStats[3].AddData(proc.DoubleBuffer(), proc.size());
}
myIndex++;
}
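The comment at the top of this example says the per-phase statistics are later compared against the overall average kept in stats. That comparison step is not part of the snippet; the following is only a hypothetical sketch of what it might look like, assuming lineStats is the array of four per-phase Statistics accumulators and the second argument is the overall accumulator used above.
#include <cmath>   // std::fabs

// Hypothetical sketch (not in the original code): pick the phase whose
// average is furthest from the overall average.
int furthestPhase(Isis::Statistics lineStats[4], Isis::Statistics &overall) {
  int worst = 0;
  double worstDiff = 0.0;
  for (int phase = 0; phase < 4; phase++) {
    double diff = std::fabs(lineStats[phase].Average() - overall.Average());
    if (diff > worstDiff) {
      worstDiff = diff;
      worst = phase;
    }
  }
  return worst;
}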
Example 3: TranslateCode
// translate the code once it is found
void TranslateCode() {
// read the code from the image
Chip chip(8*RADIUS, 64*RADIUS);
chip.TackCube(codeSample+3*RADIUS, codeLine+31*RADIUS);
chip.Load(cube);
for (int j=0; j<32; j++) {
for (int i=0; i<4; i++) {
Statistics stats;
// Get the average of the subchip
for (int x=1; x<=2*RADIUS; x++) {
for (int y=1; y<=2*RADIUS; y++) {
stats.AddData(chip.GetValue(i*2*RADIUS + x,j*2*RADIUS + y));
}
}
// see if it is on or off
if (stats.Average() > 20000)
code[i][31-j] = true;
else code[i][31-j] = false;
}
}
// Note: this trailing nested loop has an empty body and therefore no effect.
for (int j=0; j<32; j++) {
for (int i=0; i<4; i++) {
}
}
}
Example 4: computeStretch
//! Compute automatic stretch for a portion of the cube
void ChipViewport::computeStretch(Stretch &stretch, bool force) {
if (p_stretchLocked && !force) {
stretch = *p_stretch;
}
else {
Statistics stats;
for (int line = 1; line < p_chip->Lines(); line++) {
for (int samp = 1; samp < p_chip->Samples(); samp++) {
double value = p_chip->GetValue(samp, line);
stats.AddData(&value, 1);
}
}
Histogram hist(stats.BestMinimum(), stats.BestMaximum());
for (int line = 1; line <= p_chip->Lines(); line++) {
for (int samp = 1; samp <= p_chip->Samples(); samp++) {
double value = p_chip->GetValue(samp, line);
hist.AddData(&value, 1);
}
}
stretch.ClearPairs();
if (hist.Percent(0.5) != hist.Percent(99.5)) {
stretch.AddPair(hist.Percent(0.5), 0.0);
stretch.AddPair(hist.Percent(99.5), 255.0);
}
else {
stretch.AddPair(-DBL_MAX, 0.0);
stretch.AddPair(DBL_MAX, 255.0);
}
*p_stretch = stretch;
}
}
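For context on the two AddPair calls at the end: a Stretch is defined by input/output pairs and, assuming the usual piecewise-linear ISIS Stretch semantics, maps intermediate DNs by linear interpolation. A small numeric illustration, with made-up percentile values standing in for hist.Percent(0.5) and hist.Percent(99.5):
// Illustration only; 100.0 and 900.0 are invented percentile values.
Stretch stretch;
stretch.AddPair(100.0, 0.0);
stretch.AddPair(900.0, 255.0);
// Assuming linear interpolation between pairs, a DN halfway between the
// percentiles maps to mid-gray:
double display = stretch.Map(500.0);   // (500-100)/(900-100) * 255 = 127.5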
Example 5: guess
/**
* @brief Compute the initial guess of the fit
*
* This method provides the non-linear fit with an initial guess of the
* solution. It involves a linear fit to the latter half of the data to
* provide the first two coefficients, the difference of the averages of the
* residuals at the head and tail of the data set as the third, and -5 divided
* by the last line time as the final (fourth) element...a bit involved really.
*
* @return NLVector 4-element vector of the initial guess coefficients
*/
NonLinearLSQ::NLVector DriftCorrect::guess() {
int n = _data.dim();
int nb = n - _badLines;
HiVector b1 = _data.subarray(0, nb-1);
LowPassFilterComp gfilter(b1, _history, _sWidth, _sIters);
int nb2 = nb/2;
_b2 = gfilter.ref();
HiVector cc = poly_fit(_b2.subarray(nb2,_b2.dim()-1), nb2-1);
// Compute the 3rd term guess by getting the average of the residual
// at both ends of the data set.
Statistics s;
// Get the head of the data set
int n0 = MIN(nb, 20);
for ( int k = 0 ; k < n0 ; k++ ) {
double d = _b2[k] - (cc[0] + cc[1] * _timet(k));
s.AddData(&d, 1);
}
double head = s.Average();
// Get the tail of the data set
s.Reset();
n0 = (int) (0.9 * nb);
for ( int l = n0 ; l < nb ; l++ ) {
double d = _b2[l] - (cc[0] + cc[1] * _timet(l));
s.AddData(&d, 1);
}
double tail = s.Average();
// Populate the guess with the results
NLVector g(4, 0.0);
g[0] = cc[0];
g[1] = cc[1];
g[2] = head-tail;
g[3] = -5.0/_timet(nb-1);
_guess = g;
_history.add("Guess["+ToString(_guess[0])+ ","+
ToString(_guess[1])+ ","+
ToString(_guess[2])+ ","+
ToString(_guess[3])+ "]");
return (g);
}
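Taken together, the four coefficients suggest (this is inferred from how the guess is assembled, not stated in the snippet) a drift model of the form Z(t) ≈ g[0] + g[1]*t + g[2]*exp(g[3]*t): the linear fit to the latter half of the data supplies g[0] and g[1], the head-minus-tail residual difference sets the amplitude g[2] of the exponential term, and g[3] = -5 / t_last makes that term essentially decay away by the last line time (exp(-5) ≈ 0.007).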
Example 6: CheckFramelets
/**
* This method performs pass1 on one image. It analyzes each framelet's
* statistics and populates the necessary global variable.
*
* @param progress Progress message
* @param theCube Current cube that needs processing
*
* @return bool True if the file contains a valid framelet
*/
bool CheckFramelets(string progress, Cube &theCube) {
bool foundValidFramelet = false;
LineManager mgr(theCube);
Progress prog;
prog.SetText(progress);
prog.SetMaximumSteps(theCube.Lines());
prog.CheckStatus();
vector<double> frameletAvgs;
// We need to store off the framelet information, because if no good
// framelets were found then no data should be added to the
// global variable for framelets, just files.
vector< pair<int,double> > excludedFrameletsTmp;
Statistics frameletStats;
for(int line = 1; line <= theCube.Lines(); line++) {
if((line-1) % numFrameLines == 0) {
frameletStats.Reset();
}
mgr.SetLine(line);
theCube.Read(mgr);
frameletStats.AddData(mgr.DoubleBuffer(), mgr.size());
if((line-1) % numFrameLines == numFrameLines-1) {
if(IsSpecial(frameletStats.StandardDeviation()) ||
frameletStats.StandardDeviation() > maxStdev) {
excludedFrameletsTmp.push_back(
pair<int,double>((line-1)/numFrameLines, frameletStats.StandardDeviation())
);
}
else {
foundValidFramelet = true;
}
frameletAvgs.push_back(frameletStats.Average());
}
prog.CheckStatus();
}
inputFrameletAverages.push_back(frameletAvgs);
if(foundValidFramelet) {
for(unsigned int i = 0; i < excludedFrameletsTmp.size(); i++) {
excludedFramelets.insert(pair< pair<int,int>, double>(
pair<int,int>(currImage, excludedFrameletsTmp[i].first),
excludedFrameletsTmp[i].second
)
);
}
}
return foundValidFramelet;
}
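To make the framelet bookkeeping concrete, with numFrameLines = 128 (a value chosen only for illustration): framelet 0 spans lines 1 through 128, so the Reset fires at line 1 (where (line-1) % 128 == 0), the standard-deviation test and the stored average happen at line 128 (where (line-1) % 128 == 127), and the framelet index recorded for exclusions is (line-1)/128 = 0.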
Example 7: GatherStatistics
// Gather statistics for the cube
void GatherStatistics(Buffer &in) {
// Number of samples per line that intersect with the next and the
// previous images
unsigned int intersect;
// Check if samples equal 682 or 683
if (in.size() == 682 || in.size() == 683) {
intersect = 18;
}
// If not the above case, then we perform an algorithm to account for binning
else {
// Number of intersecting samples is directly related to total
// number of samples in the line, with 2048 being the maximum possible
unsigned int div = 2048 / in.size();
intersect = 48 / div;
}
g_s.AddData(&in[0], in.size());
g_sl.AddData(&in[0], intersect);
g_sr.AddData(&in[in.size()-intersect], intersect);
}
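As a quick check of the binning branch, following the arithmetic in the code: a full-resolution 2048-sample line gives div = 2048/2048 = 1 and intersect = 48; a 1024-sample line gives div = 2 and intersect = 24; a 512-sample line gives div = 4 and intersect = 12. The 682/683-sample case is simply special-cased to 18.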
Example 8: cimage_dark
void HiImageClean::cimage_dark() {
// Combine calibration region
std::vector<H2DBuf> blobs;
blobs.push_back(_caldark);
blobs.push_back(_ancdark);
H2DBuf dark = appendLines(blobs);
int nsamples(dark.dim2());
int nlines(dark.dim1());
// Compute averages for the dark area
int firstDark(4);
int ndarks(dark.dim2()-firstDark);
_predark = H1DBuf(nlines);
for (int line = 0 ; line < nlines ; line++) {
Statistics darkave;
darkave.AddData(&dark[line][firstDark], ndarks);
_predark[line] = darkave.Average();
}
// Get statistics to determine state of the dark region and next course of action
_darkStats.Reset();
_darkStats.AddData(&_predark[0], _predark.dim1());
if (_darkStats.ValidPixels() <= 0) {
std::ostringstream mess;
mess << "No valid pixels in calibration/ancillary dark regions, "
<< "binning = " << _binning << std::ends;
throw(iException::Message(iException::Programmer,mess.str(),_FILEINFO_));
}
// Now apply a smoothing filter
QuickFilter smooth(_predark.dim1(), _filterWidth, 1);
smooth.AddLine(&_predark[0]);
nsamples = smooth.Samples();
_dark = H1DBuf(nsamples);
for (int s = 0 ; s < nsamples ; s++) {
_dark[s] = smooth.Average(s);
}
// Now apply to all calibration data
BigInt nbad(0);
_calimg = row_apply(_calimg, _dark, 0, nbad, 1.0);
_calbuf = row_apply(_calbuf, _dark, 0, nbad, 1.0);
_caldark = row_apply(_caldark, _dark, 0, nbad, 1.0);
_ancbuf = row_apply(_ancbuf, _dark, _firstImageLine, nbad, 1.0);
_ancdark = row_apply(_ancdark, _dark, _firstImageLine, nbad, 1.0);
return;
}
Example 9: gatherAverages
void gatherAverages(Buffer &in) {
Statistics lineStats;
lineStats.AddData(in.DoubleBuffer(), in.size());
double average = lineStats.Average();
lineAverages[in.Band() - 1][in.Line() - 1] = average;
// The cube average is finalized later, before the correction is applied.
if(!IsSpecial(average)) {
cubeAverage[in.Band() - 1] += average;
}
else {
numIgnoredLines ++;
}
}
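The finalization of cubeAverage is not part of this snippet. Purely as a hypothetical sketch (the validLineCount vector and the loop itself are invented here, not taken from the application), the later step would look something like:
// Hypothetical finalization step, not shown in the example above:
// convert the accumulated per-band sums into per-band means.
for (unsigned int band = 0; band < cubeAverage.size(); band++) {
  if (validLineCount[band] > 0) {              // invented bookkeeping vector
    cubeAverage[band] /= validLineCount[band];
  }
}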
Example 10: getStats
//**********************************************************
// DOUSER - Get statistics on a column or row of pixels
//**********************************************************
void getStats(Buffer &in) {
Statistics stats;
stats.AddData(in.DoubleBuffer(), in.size());
band.push_back(in.Band());
element.push_back(in.Sample());
// Sort the input buffer
vector<double> pixels;
for(int i = 0; i < in.size(); i++) {
if(IsValidPixel(in[i])) pixels.push_back(in[i]);
}
sort(pixels.begin(), pixels.end());
// Now obtain the median value and store in the median vector
int size = pixels.size();
if(size != 0) {
int med = size / 2;
if(size % 2 == 0) {
median.push_back((pixels[med-1] + pixels[med]) / 2.0);
}
else {
median.push_back(pixels[med]);
}
}
else {
median.push_back(Isis::Null);
}
// Store the statistics in the appropriate vectors
average.push_back(stats.Average());
stddev.push_back(stats.StandardDeviation());
validpixels.push_back(stats.ValidPixels());
minimum.push_back(stats.Minimum());
maximum.push_back(stats.Maximum());
}
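A quick worked example of the median branch: for a sorted valid-pixel list {3, 5, 8, 10}, size is 4 and med is 2, so the median is (pixels[1] + pixels[2]) / 2 = 6.5; for {3, 5, 8}, size is 3 and med is 1, so the median is pixels[1] = 5.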
Example 11: IsisMain
void IsisMain() {
// We will be processing by line
ProcessByLine p;
// Setup the input and output cubes
Cube *icube = p.SetInputCube("FROM");
PvlKeyword &status = icube->group("RESEAUS")["STATUS"];
UserInterface &ui = Application::GetUserInterface();
QString in = ui.GetFileName("FROM");
// Check reseau status and make sure it is not nominal or removed
if((QString)status == "Nominal") {
QString msg = "Input file [" + in +
"] appears to have nominal reseau status. You must run findrx first.";
throw IException(IException::User, msg, _FILEINFO_);
}
if((QString)status == "Removed") {
QString msg = "Input file [" + in +
"] appears to already have reseaus removed.";
throw IException(IException::User, msg, _FILEINFO_);
}
status = "Removed";
p.SetOutputCube("TO");
// Start the processing
p.StartProcess(cpy);
p.EndProcess();
// Get the user entered dimensions
sdim = ui.GetInteger("SDIM");
ldim = ui.GetInteger("LDIM");
// Get other user entered options
QString out = ui.GetFileName("TO");
resvalid = ui.GetBoolean("RESVALID");
action = ui.GetString("ACTION");
// Open the output cube
Cube cube;
cube.open(out, "rw");
PvlGroup &res = cube.label()->findGroup("RESEAUS", Pvl::Traverse);
// Get reseau line, sample, type, and valid Keywords
PvlKeyword lines = res.findKeyword("LINE");
PvlKeyword samps = res.findKeyword("SAMPLE");
PvlKeyword type = res.findKeyword("TYPE");
PvlKeyword valid = res.findKeyword("VALID");
int numres = lines.size();
Brick brick(sdim, ldim, 1, cube.pixelType());
for(int res = 0; res < numres; res++) {
if((resvalid == 0 || toInt(valid[res]) == 1) && toInt(type[res]) != 0) {
int baseSamp = (int)(toDouble(samps[res]) + 0.5) - (sdim / 2);
int baseLine = (int)(toDouble(lines[res]) + 0.5) - (ldim / 2);
brick.SetBasePosition(baseSamp, baseLine, 1);
cube.read(brick);
if(action == "NULL") {
for(int i = 0; i < brick.size(); i++) brick[i] = Isis::Null;
}
else if(action == "BILINEAR") {
Statistics stats;
double array[sdim][ldim];
for(int s = 0; s < sdim; s++) {
for(int l = 0; l < ldim; l++) {
int index = l * sdim + s;
array[s][l] = brick[index];
// Add perimeter data to stats object for calculations
if(s == 0 || l == 0 || s == (sdim - 1) || l == (ldim - 1)) {
stats.AddData(&array[s][l], 1);
}
}
}
// Get the average and standard deviation of the perimeter of the brick
double avg = stats.Average();
double sdev = stats.StandardDeviation();
// Top Edge Reseau
if(toInt(type[res]) == 2) {
int l1 = 0;
int l2 = ldim - 1;
for(int s = 0; s < sdim; s++) {
array[s][l1] = array[s][l2];
}
}
// Left Edge Reseau
else if(toInt(type[res]) == 4) {
int s1 = 0;
int s2 = sdim - 1;
for(int l = 0; l < ldim; l++) {
array[s1][l] = array[s2][l];
}
}
// Right Edge Reseau
else if(toInt(type[res]) == 6) {
int s1 = 0;
int s2 = sdim - 1;
for(int l = 0; l < ldim; l++) {
//......... (part of the code is omitted here) .........
Example 12: cimage_mask
void HiImageClean::cimage_mask() {
// Combine calibration region
std::vector<H2DBuf> blobs;
blobs.push_back(_calbuf);
blobs.push_back(_calimg);
blobs.push_back(_caldark);
H2DBuf calibration = appendSamples(blobs);
// Set the mask depending on the binning mode
_firstMaskLine = 20;
_lastMaskLine = 39;
switch (_binning) {
case 1:
_firstMaskLine = 21;
_lastMaskLine = 38;
break;
case 2:
_firstMaskLine = 21;
_lastMaskLine = 29;
break;
case 3:
_firstMaskLine = 21;
_lastMaskLine = 26;
break;
case 4:
_firstMaskLine = 21;
_lastMaskLine = 24;
break;
case 8:
_firstMaskLine = 21;
_lastMaskLine = 22;
break;
case 16:
_firstMaskLine = 21;
_lastMaskLine = 21;
break;
default:
std::ostringstream msg;
msg << "Invalid binning mode (" << _binning
<< ") - valid are 1-4, 8 and 16" << std::ends;
throw(iException::Message(iException::Programmer,msg.str(),_FILEINFO_));
}
// Initialize lines and samples of mask area of interest
int nsamples(calibration.dim2());
int nlines(_lastMaskLine - _firstMaskLine + 1);
// Compute averages for the mask area
_premask = H1DBuf(nsamples);
for (int samp = 0 ; samp < nsamples; samp++) {
H1DBuf maskcol = slice(calibration, samp);
Statistics maskave;
maskave.AddData(&maskcol[_firstMaskLine], nlines);
_premask[samp] = maskave.Average();
}
_mask = _premask.copy();
// Get statistics to determine state of mask and next course of action
_maskStats.Reset();
_maskStats.AddData(&_premask[0], nsamples);
if (_maskStats.ValidPixels() <= 0) {
std::ostringstream mess;
mess << "No valid pixels in calibration mask region in lines "
<< (_firstMaskLine+1) << " to " << (_lastMaskLine+1) << ", binning = "
<< _binning << std::ends;
throw(iException::Message(iException::Programmer,mess.str(),_FILEINFO_));
}
// If there are any missing values, replace with mins/maxs of region
if (_maskStats.TotalPixels() != _maskStats.ValidPixels()) {
for (int samp = 0 ; samp < nsamples ; samp++) {
if (Pixel::IsLow(_premask[samp]) || Pixel::IsNull(_premask[samp])) {
_mask[samp] = _maskStats.Minimum();
}
else if (Pixel::IsHigh(_premask[samp])) {
_mask[samp] = _maskStats.Maximum();
}
}
}
// Now apply to all calibration data
BigInt nbad(0);
_calimg = column_apply(_calimg, _mask, _firstImageSample, nbad, 1.0);
_calbuf = column_apply(_calbuf, _mask, _firstBufferSample, nbad, 1.0);
_caldark = column_apply(_caldark, _mask, _firstDarkSample, nbad, 1.0);
_ancbuf = column_apply(_ancbuf, _mask, _firstBufferSample, nbad, 1.0);
_ancdark = column_apply(_ancdark, _mask, _firstDarkSample, nbad, 1.0);
return;
}
Example 13: calculateSpecificEnergy
/**
* This calculates the coefficients for specific energy corrections
*/
void calculateSpecificEnergy(Cube *icube) {
PvlGroup &inst = icube->label()->findGroup("Instrument", Pvl::Traverse);
bool vis = (inst["Channel"][0] != "IR");
double coefficient = 1.0;
if(inst["GainMode"][0] == "HIGH") {
coefficient /= 2;
}
if(vis && inst["SamplingMode"][0] == "HI-RES") {
coefficient *= 3;
}
if(vis) {
coefficient /= toDouble(inst["ExposureDuration"][1]) / 1000.0;
}
else {
coefficient /= (toDouble(inst["ExposureDuration"][0]) * 1.01725) / 1000.0 - 0.004;
}
QString specEnergyFile = "$cassini/calibration/vims/";
if(vis) {
specEnergyFile += "vis_perf_v????.cub";
}
else {
specEnergyFile += "ir_perf_v????.cub";
}
QString waveCalFile = "$cassini/calibration/vims/wavecal_v????.cub";
FileName specEnergyFileName(specEnergyFile);
specEnergyFileName = specEnergyFileName.highestVersion();
FileName waveCalFileName(waveCalFile);
waveCalFileName = waveCalFileName.highestVersion();
Cube specEnergyCube;
specEnergyCube.open(specEnergyFileName.expanded());
Cube waveCalCube;
waveCalCube.open(waveCalFileName.expanded());
LineManager specEnergyMgr(specEnergyCube);
LineManager waveCalMgr(waveCalCube);
for(int i = 0; i < icube->bandCount(); i++) {
Statistics specEnergyStats;
Statistics waveCalStats;
if(vis) {
specEnergyMgr.SetLine(1, i + 1);
waveCalMgr.SetLine(1, i + 1);
}
else {
specEnergyMgr.SetLine(1, i + 1);
// ir starts at band 97
waveCalMgr.SetLine(1, i + 96 + 1);
}
specEnergyCube.read(specEnergyMgr);
waveCalCube.read(waveCalMgr);
specEnergyStats.AddData(specEnergyMgr.DoubleBuffer(), specEnergyMgr.size());
waveCalStats.AddData(waveCalMgr.DoubleBuffer(), waveCalMgr.size());
double bandCoefficient = coefficient * specEnergyStats.Average() * waveCalStats.Average();
specificEnergyCorrections.push_back(bandCoefficient);
}
}
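To make the coefficient logic concrete (the exposure value below is invented purely for illustration): for a VIS cube with GainMode HIGH, SamplingMode HI-RES, and ExposureDuration[1] = 5000 ms, the running coefficient is 1.0 / 2 * 3 = 1.5, and dividing by 5000/1000 = 5 s gives 0.3; the value stored for each band is then 0.3 * specEnergyStats.Average() * waveCalStats.Average() for that band.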
Example 14: getStatistics
/**
* Retrieve the statistics based on the box size
* and point on the cube.
*
* @param p
*/
void StatisticsTool::getStatistics(QPoint p) {
MdiCubeViewport *cvp = cubeViewport();
if(cvp == NULL) return;
double sample, line;
cvp->viewportToCube(p.x(), p.y(), sample, line);
// If we are outside of the cube, do nothing
if((sample < 0.5) || (line < 0.5) ||
(sample > cvp->cubeSamples() + 0.5) || (line > cvp->cubeLines() + 0.5)) {
return;
}
int isamp = (int)(sample + 0.5);
int iline = (int)(line + 0.5);
Statistics stats;
Brick *brick = new Brick(1, 1, 1, cvp->cube()->pixelType());
QVector<QVector<double> > pixelData(p_boxLines, QVector<double>(p_boxSamps, Null));
double lineDiff = p_boxLines / 2.0;
double sampDiff = p_boxSamps / 2.0;
p_ulSamp = isamp - (int)floor(sampDiff);
p_ulLine = iline - (int)floor(lineDiff);
int x, y;
y = p_ulLine;
for(int i = 0; i < p_boxLines; i++) {
x = p_ulSamp;
if(y < 1 || y > cvp->cubeLines()) {
y++;
continue;
}
for(int j = 0; j < p_boxSamps; j++) {
if(x < 1 || x > cvp->cubeSamples()) {
x++;
continue;
}
brick->SetBasePosition(x, y, cvp->grayBand());
cvp->cube()->read(*brick);
stats.AddData(brick->at(0));
pixelData[i][j] = brick->at(0);
x++;
}
y++;
}
p_visualDisplay->setPixelData(pixelData, p_ulSamp, p_ulLine);
if (stats.ValidPixels()) {
p_minLabel->setText(QString("Minimum: %1").arg(stats.Minimum()));
p_maxLabel->setText(QString("Maximum: %1").arg(stats.Maximum()));
p_avgLabel->setText(QString("Average: %1").arg(stats.Average()));
p_stdevLabel->setText(QString("Standard Dev: %1").arg(stats.StandardDeviation(), 0, 'f', 6));
}
else {
p_minLabel->setText(QString("Minimum: n/a"));
p_maxLabel->setText(QString("Maximum: n/a"));
p_avgLabel->setText(QString("Average: n/a"));
p_stdevLabel->setText(QString("Standard Dev: n/a"));
}
p_set = true;
resizeScrollbars();
}
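For example, with a 5x5 box (p_boxSamps = p_boxLines = 5) and a click that lands on cube coordinates sample 100.2, line 200.7: isamp = 100 and iline = 201, sampDiff = lineDiff = 2.5, so p_ulSamp = 98 and p_ulLine = 199, and the loops walk samples 98 through 102 and lines 199 through 203, skipping any positions that fall off the cube.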
Example 15: IsisMain
void IsisMain() {
Process p;
// Get the list of names of input CCD cubes to stitch together
FileList flist;
UserInterface &ui = Application::GetUserInterface();
flist.Read(ui.GetFilename("FROMLIST"));
if (flist.size() < 1) {
string msg = "The list file[" + ui.GetFilename("FROMLIST") +
" does not contain any filenames";
throw iException::Message(iException::User,msg,_FILEINFO_);
}
string projection("Equirectangular");
if(ui.WasEntered("MAP")) {
Pvl mapfile(ui.GetFilename("MAP"));
projection = (string) mapfile.FindGroup("Mapping")["ProjectionName"];
}
if(ui.WasEntered("PROJECTION")) {
projection = ui.GetString("PROJECTION");
}
// Gather other user inputs to projection
string lattype = ui.GetString("LATTYPE");
string londir = ui.GetString("LONDIR");
string londom = ui.GetString("LONDOM");
int digits = ui.GetInteger("PRECISION");
// Fix them for mapping group
lattype = (lattype == "PLANETOCENTRIC") ? "Planetocentric" : "Planetographic";
londir = (londir == "POSITIVEEAST") ? "PositiveEast" : "PositiveWest";
Progress prog;
prog.SetMaximumSteps(flist.size());
prog.CheckStatus();
Statistics scaleStat;
Statistics longitudeStat;
Statistics latitudeStat;
Statistics equiRadStat;
Statistics poleRadStat;
PvlObject fileset("FileSet");
// Save major equatorial and polar radii for the last occurring file
double eqRad;
double eq2Rad;
double poleRad;
string target("Unknown");
for (unsigned int i = 0 ; i < flist.size() ; i++) {
// Set the input image, get the camera model, and a basic mapping
// group
Cube cube;
cube.Open(flist[i]);
int lines = cube.Lines();
int samples = cube.Samples();
PvlObject fmap("File");
fmap += PvlKeyword("Name",flist[i]);
fmap += PvlKeyword("Lines", lines);
fmap += PvlKeyword("Samples", samples);
Camera *cam = cube.Camera();
Pvl mapping;
cam->BasicMapping(mapping);
PvlGroup &mapgrp = mapping.FindGroup("Mapping");
mapgrp.AddKeyword(PvlKeyword("ProjectionName",projection),Pvl::Replace);
mapgrp.AddKeyword(PvlKeyword("LatitudeType",lattype),Pvl::Replace);
mapgrp.AddKeyword(PvlKeyword("LongitudeDirection",londir),Pvl::Replace);
mapgrp.AddKeyword(PvlKeyword("LongitudeDomain",londom),Pvl::Replace);
// Get the radii
double radii[3];
cam->Radii(radii);
eqRad = radii[0] * 1000.0;
eq2Rad = radii[1] * 1000.0;
poleRad = radii[2] * 1000.0;
target = cam->Target();
equiRadStat.AddData(&eqRad, 1);
poleRadStat.AddData(&poleRad, 1);
// Get resolution
double lowres = cam->LowestImageResolution();
double hires = cam->HighestImageResolution();
scaleStat.AddData(&lowres, 1);
scaleStat.AddData(&hires, 1);
double pixres = (lowres+hires)/2.0;
double scale = Scale(pixres, poleRad, eqRad);
mapgrp.AddKeyword(PvlKeyword("PixelResolution",pixres),Pvl::Replace);
mapgrp.AddKeyword(PvlKeyword("Scale",scale,"pixels/degree"),Pvl::Replace);
mapgrp += PvlKeyword("MinPixelResolution",lowres,"meters");
mapgrp += PvlKeyword("MaxPixelResolution",hires,"meters");
// Get the universal ground range
//......... (part of the code is omitted here) .........