

C++ DataSet::read method code examples

This article collects typical usage examples of the C++ h5::DataSet::read method. If you are unsure what DataSet::read does, how to call it, or what real code that uses it looks like, the curated examples here should help. You can also browse further usage examples of the enclosing class, h5::DataSet.


A total of 15 code examples of the DataSet::read method are shown below, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the system recommend better C++ code examples.
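Before the collected examples, here is a minimal sketch of the basic call pattern: open a file and a dataset, query the dataspace for the element count, then call DataSet::read with a buffer and a matching memory datatype. The file name "data.h5" and dataset name "values" are illustrative placeholders, not taken from any of the examples below.

#include <vector>
#include "H5Cpp.h"

std::vector<double> readWholeDataset()
{
    // Open the file read-only and the 1-D dataset inside it (names are placeholders).
    H5::H5File file("data.h5", H5F_ACC_RDONLY);
    H5::DataSet ds = file.openDataSet("values");

    // Query the dataspace to find out how many elements to allocate.
    H5::DataSpace space = ds.getSpace();
    hsize_t dims[1] = {0};
    space.getSimpleExtentDims(dims, NULL);

    // Read the whole dataset into the buffer, converting to native doubles.
    std::vector<double> values(dims[0]);
    ds.read(values.data(), H5::PredType::NATIVE_DOUBLE);
    return values;
}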

Example 1: loadModel

void FeaturePointsRANSAC::loadModel(std::string modelPath)
{
	H5::H5File h5Model;

	try {
		h5Model = H5::H5File(modelPath, H5F_ACC_RDONLY);
	}
	catch (H5::Exception& e) {
		std::string msg( std::string( "Could not open HDF5 file \n" ) + e.getCDetailMsg() );
		throw msg;
	}

	// Load the Shape
	H5::Group modelReconstructive = h5Model.openGroup("/shape/ReconstructiveModel/model");
	H5::DataSet dsMean = modelReconstructive.openDataSet("./mean");
	hsize_t dims[1];
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);	// dsMean.getSpace() leaks memory... maybe a hdf5 bug, maybe vlenReclaim(...) could be a fix. No idea.
	//H5::DataSpace dsp = dsMean.getSpace();
	//dsp.close();

	std::cout << "Dims: " << dims[0] << std::endl;		// TODO: I guess this whole part could be done A LOT better!
	float* testData = new float[dims[0]];
	dsMean.read(testData, H5::PredType::NATIVE_FLOAT);
	this->modelMeanShp.reserve(dims[0]);

	for (unsigned int i=0; i < dims[0]; ++i)	{
		modelMeanShp.push_back(testData[i]);
	}
	delete[] testData;
	testData = NULL;
	dsMean.close();

	// // Load the Texture
	H5::Group modelReconstructiveTex = h5Model.openGroup("/color/ReconstructiveModel/model");
	H5::DataSet dsMeanTex = modelReconstructiveTex.openDataSet("./mean");
	hsize_t dimsTex[1];
	dsMeanTex.getSpace().getSimpleExtentDims(dimsTex, NULL);
	std::cout << "Dims: " << dimsTex[0] << std::endl;		// TODO: I guess this whole part could be done A LOT better!
	float* testDataTex = new float[dimsTex[0]];
	dsMeanTex.read(testDataTex, H5::PredType::NATIVE_FLOAT);
	this->modelMeanTex.reserve(dimsTex[0]);

	for (unsigned int i=0; i < dimsTex[0]; ++i)	{
		modelMeanTex.push_back(testDataTex[i]);
	}
	delete[] testDataTex;
	testDataTex = NULL;
	dsMeanTex.close();

	h5Model.close();
}
Developer: herohuyongtao, Project: FeatureDetection, Lines: 51, Source: FeaturePointsModelRANSAC.cpp

Example 2: memspace

arma::Mat<uint16_t> readLUT(const std::string& path)
{
    H5::H5File file (path.c_str(), H5F_ACC_RDONLY);
    H5::DataSet ds = file.openDataSet("LUT");

    H5::DataSpace filespace = ds.getSpace();
    int ndims = filespace.getSimpleExtentNdims();
    assert(ndims == 2);
    hsize_t dims[2] = {1, 1};
    filespace.getSimpleExtentDims(dims);

    H5::DataSpace memspace (ndims, dims);

    arma::Mat<uint16_t> res (dims[0], dims[1]);

    ds.read(res.memptr(), H5::PredType::NATIVE_UINT16, memspace, filespace);

    filespace.close();
    memspace.close();
    ds.close();
    file.close();

    // NOTE: Armadillo stores data in column-major order, while HDF5 uses
    // row-major ordering. Above, we read the data directly from HDF5 into
    // the arma matrix, so it was implicitly transposed. The next function
    // fixes this problem.
    arma::inplace_trans(res);
    return res;
}
Developer: jbradt, Project: mcopt, Lines: 29, Source: utils.cpp

Example 3: mem_space

    inline void
    read_values(H5::DataSet& dataset, H5::DataSpace& data_space,
        dimension const& dimx, dimension const& dimy, dimension const& dimz,
        double* values)
    {
        using namespace H5;

        // Define the hyperslab for file based data.
        hsize_t data_offset[dimension::dim] = {
            dimx.offset_, dimy.offset_, dimz.offset_
        };
        hsize_t data_count[dimension::dim] = {
            dimx.count_, dimy.count_, dimz.count_
        };
        data_space.selectHyperslab(H5S_SELECT_SET, data_count, data_offset);

        // Memory dataspace.
        DataSpace mem_space (dimension::dim, data_count);

        // Define the hyperslab for data in memory.
        hsize_t mem_offset[dimension::dim] = { 0, 0, 0 };
        mem_space.selectHyperslab(H5S_SELECT_SET, data_count, mem_offset);

        // Read data to memory.
        dataset.read(values, PredType::NATIVE_DOUBLE, mem_space, data_space);
    }
Developer: brycelelbach, Project: hpx, Lines: 26, Source: read_values.cpp

Example 4: ReadTable

      void ReadTable(const int& tableNum, void* buf, 
            const H5::DataType& dType){

         std::string tNum = Num2Table(tableNum);
         dSet_ = file_->openDataSet(tNum);
         dSet_.read( buf, dType);
      }
Developer: rseal, Project: HDF5R, Lines: 7, Source: HDF5.hpp

Example 5: loadStackHDF5

bool loadStackHDF5( const char* fileName, Image4DSimple& img )
{
#ifdef USE_HDF5
    H5::Exception::dontPrint();
    H5::H5File file( fileName, H5F_ACC_RDONLY );

    for ( size_t i = 0; i < file.getObjCount(); i++ )
    {
        H5std_string name = file.getObjnameByIdx( i );
        if ( name == "Channels" )
        {
            H5::Group channels = file.openGroup( name );

            // Grab the attributes
            H5::Attribute attr = channels.openAttribute( "width" );
            H5::DataType type = attr.getDataType();
            long width, height;
            attr.read( type, &width );
            attr.close();

            attr = channels.openAttribute( "height" );
            attr.read( type, &height );
            attr.close();

            int num_channels = 0;
            // Count the number of channels
            for ( size_t obj = 0; obj < channels.getNumObjs(); obj++ )
                if ( channels.getObjTypeByIdx( obj ) == H5G_DATASET )
                    num_channels++;

            int channel_idx = 0;
            for ( size_t obj = 0; obj < channels.getNumObjs(); obj++ )
            {
                if ( channels.getObjTypeByIdx( obj ) == H5G_DATASET )
                {
                    H5std_string ds_name = channels.getObjnameByIdx( obj );
                    H5::DataSet data = channels.openDataSet( ds_name );
                    uint8_t* buffer = new uint8_t[ data.getStorageSize() ];
                    data.read( buffer, data.getDataType() );
                    QByteArray qbarray( ( const char* )buffer, data.getStorageSize() );
                    data.close();

                    if ( !loadIndexedStackFFMpeg( &qbarray, img, channel_idx++, num_channels,
                                                  width, height ) )
                    {
                        v3d_msg( "Error happened in HDF file reading. Stop. \n", false );
                        return false;
                    }

                    delete [] buffer;
                }
            }
        }
    }

#endif

    return true;
}
Developer: Vaa3D, Project: v3d_external, Lines: 59, Source: loadV3dFFMpeg.cpp

Example 6: memspace

	/**
	 * @brief Returns a pointer to a std::vector<float> containing the values of the selected variable
	 *
	 * This allocates a new std::vector<float> pointer.  Make sure you
	 * delete the contents when you done using it, or you will have a memory leak.
	 *
	 * @param variable
	 * @return std::vector<float> containing the values of the selected variable.
	 */
	std::vector<float>* HDF5FileReader::getVariable(const std::string& variable)
	{
		std::vector<float>* variableData = new std::vector<float>();

		if (this->doesVariableExist(variable))
		{
			//std::cout << "reading " << variable << std::endl;
			//get variable number
//			long variableNum = this->getVariableID(variable);

			//std::cout << "variableNum for " << variable << ": " << variableNum << std::endl;
			//get dim sizes

			H5::Group group = this->current_file->openGroup("Variables");
			//cout << "variable: " << variable << ": " << counts[0] << endl;
			H5::DataSet * dataset = new H5::DataSet(group.openDataSet(variable));
			H5::DataSpace dataspace = dataset->getSpace();
			int rank = dataspace.getSimpleExtentNdims(); //should be 1
			hsize_t count[1];
			hsize_t offset[1] = {0};
			// count[0] must be filled in before it is used to size the buffer below
			dataspace.getSimpleExtentDims(count, NULL);

			//std::cout << "count[0]: " << count[0] << std::endl;
			float * buffer = new float[count[0]];

			dataspace.selectHyperslab(H5S_SELECT_SET, count, offset);

			H5::DataSpace memspace( rank, count);
			memspace.selectHyperslab(H5S_SELECT_SET, count, offset);

			dataset->read(buffer, H5::PredType::NATIVE_FLOAT, memspace, dataspace);
			//std::cout << "after read" << std::endl;

			//add data to vector type, and delete original array
			variableData->reserve(count[0]);
			for (int i = 0; i < count[0]; i++)
			{
				variableData->push_back(buffer[i]);
			}
			//std::cout << "after adding to variableData vector" << std::endl;

			delete[] buffer;
			delete dataset;
			//std::cout << "finished reading " << variable << std::endl;
			//std::cout << "size of variable: " << variableData.size() << std::endl;
			//std::cout << "dimSizes[0]: " << dimSizes[0] << std::endl;

		}

		return variableData;
	}
Developer: NeelSavani, Project: ccmc-software, Lines: 62, Source: HDF5FileReader.cpp
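As the comment on getVariable notes, the vector is allocated on the heap and ownership passes to the caller, who must delete it. A minimal usage sketch follows; the already-opened reader and the variable name "rho" are assumptions for illustration, not part of the example above.

#include <iostream>
#include <vector>

void printVariableSize(HDF5FileReader& reader)               // reader assumed to be opened elsewhere
{
    std::vector<float>* data = reader.getVariable("rho");    // "rho" is a placeholder variable name
    std::cout << "values read: " << data->size() << std::endl;
    delete data;                                              // frees the heap allocation made by getVariable
}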

Example 7: getGroup

int HDF5IO::loadInt(const std::string& GroupName, const std::string& Name)
{
    try{
      H5::Group FG = getGroup( GroupName );
      H5::DataSet DataSet = FG.openDataSet( Name.c_str());
      int x;
      DataSet.read(&x,H5::PredType::NATIVE_INT);
      FG.close();
      return x;
    }catch( const H5::GroupIException& not_found_error ){
      RUNTIME_ERROR("No dataset found in loadInt. ");
    }
}
Developer: chengyanlai, Project: ExactDiagonalization, Lines: 13, Source: hdf5io.cpp

Example 8: range_error

NDArray<T, Nd> NDArray<T,Nd>::ReadFromH5(const H5::DataSet& h5Dset) {
  H5::DataSpace dspace = h5Dset.getSpace(); 
  int ndim = dspace.getSimpleExtentNdims(); 
  if (ndim>Nd) 
      throw std::range_error("Too many dimensions in H5 dataset for NDArray");
  hsize_t dimSize[ndim];
  dspace.getSimpleExtentDims(dimSize); 
  std::array<std::size_t, Nd> dimSizeArr;
  dimSizeArr.fill(1);  // pad unused trailing dimensions; dimSize only has ndim entries
  for (int i=0; i<ndim; ++i) dimSizeArr[i] = dimSize[i];
  NDArray<T, Nd> arr(dimSizeArr);
  // Read in data here
  H5::DataType h5DType = GetH5DataType<T>();
  h5Dset.read(arr.mData, h5DType);
  return arr;
}
Developer: ermalrrapaj, Project: two_phase_skyrme, Lines: 15, Source: NDArray.hpp

Example 9: ComplexType

ComplexType HDF5IO::loadComplex(const std::string& GroupName, const std::string& Name)
{
  try{
    H5::CompType ComplexDataType = this->openCompType("complex");
    H5::Group FG = getGroup( GroupName );
    H5::DataSet DataSet = FG.openDataSet(Name.c_str());
    ComplexType C;
    RealType RealImag[2];
    DataSet.read(RealImag, ComplexDataType);
    FG.close();
    return ComplexType(RealImag[0],RealImag[1]);
  }catch( const H5::GroupIException& not_found_error ){
    RUNTIME_ERROR("No dataset found in loadComplex. ");
  }
}
Developer: chengyanlai, Project: ExactDiagonalization, Lines: 15, Source: hdf5io.cpp

Example 10: getNodeType

    bool readDataset1D(const H5::H5File &file, const std::string &name, std::vector<_Tp> &data)
    {
        H5::DataSet dataset = file.openDataSet(name);
        H5::DataSpace dataspace = dataset.getSpace();
        hsize_t dims_out[1];
        int rank = dataspace.getSimpleExtentDims( dims_out, NULL);

        int _type;
        bool read = getNodeType(dataset,  _type);
        read &= (_type == StorageNode::SEQ);
        read &= (rank  == 1);

        if (!read)
            return read;
        data.resize(dims_out[0]);
        dataset.read(data.data(), dataset.getDataType());
        return true;
    }
Developer: yxliang, Project: binaryStorage, Lines: 18, Source: h5persistence.hpp

Example 11: strreadbuf

OXSXDataSet
DataSetIO::LoadDataSet(const std::string& filename_){
    // Get Data Set
    H5::H5File  file(filename_, H5F_ACC_RDONLY);
    H5::DataSet dataSet = file.openDataSet("observations");
 
    // read meta information
    unsigned nObs = 0;
    H5::Attribute nameAtt  = dataSet.openAttribute("observed_quantities");
    H5::Attribute countAtt  = dataSet.openAttribute("n_observables");
    H5std_string strreadbuf("");
    nameAtt.read(nameAtt.getDataType(), strreadbuf);
    countAtt.read(countAtt.getDataType(), &nObs);

    // Read data out as 1D array
    hsize_t nData = 0;
    dataSet.getSpace().getSimpleExtentDims(&nData, NULL);
    size_t nEntries = nData/nObs;

    std::vector<double> flatData(nData, 0);
    dataSet.read(&flatData.at(0), H5::PredType::NATIVE_DOUBLE);

    assert(nData%nObs == 0); // logic error in writing file (this class!) if assert fails.

    // Assemble into an OXSX data set
    OXSXDataSet oxsxDataSet;

    // Set the variable names
    oxsxDataSet.SetObservableNames(UnpackString(strreadbuf, fDelimiter));

    // then the data
    std::vector<double> oneEventObs(nObs, 0);
    for(size_t i = 0; i < nEntries; i++){
        for(size_t j = 0; j < nObs; j++)
            oneEventObs[j] = flatData.at(i * nObs + j);
        
        oxsxDataSet.AddEntry(EventData(oneEventObs));
    }
      
    return oxsxDataSet;
}
Developer: arushanova, Project: oxsx, Lines: 41, Source: DataSetIO.cpp

Example 12: memspace

std::vector<double> readlastrow( H5::DataSet& ds )
{
  H5::DataSpace origspace = ds.getSpace();
  int rank = origspace.getSimpleExtentNdims();
  hsize_t dims[rank];
  int ndims = origspace.getSimpleExtentDims( dims, NULL);
  hsize_t nrows=dims[0];
  hsize_t ncols=dims[1];
  std::vector<double> returnvect( ncols );

  
  
  hsize_t targrowoffset = nrows-1;
  hsize_t targcoloffset = 0;
  hsize_t dimsmem[2] = {1,  ncols};   // a single row of ncols values in memory (2-D dataset assumed)
  H5::DataSpace memspace(2, dimsmem);

  hsize_t offset[2] = { targrowoffset, targcoloffset };
  origspace.selectHyperslab( H5S_SELECT_SET, dimsmem, offset );
  ds.read( returnvect.data(), H5::PredType::NATIVE_DOUBLE, memspace, origspace );

  return returnvect;
}
Developer: flyingfalling, Project: psweep2, Lines: 23, Source: testhdf5ids.cpp

Example 13: mem_space

    inline void
    read_values(H5::DataSet& dataset, H5::DataSpace& data_space,
        hsize_t offset, hsize_t count, double* values)
    {
        using namespace H5;

        // Define hyperslab for file based data
        hsize_t data_offset[1] = { offset };
        hsize_t data_count[1] = { count };
        data_space.selectHyperslab(H5S_SELECT_SET, data_count, data_offset);

        // memory dataspace
        hsize_t mem_dims[1] = { count };
        DataSpace mem_space (1, mem_dims);

        // Define hyperslab for data in memory
        hsize_t mem_offset[1] = { 0 };
        hsize_t mem_count[1] = { count };
        mem_space.selectHyperslab(H5S_SELECT_SET, mem_count, mem_offset);

        // read data to memory
        dataset.read(values, PredType::NATIVE_DOUBLE, mem_space, data_space);
    }
Developer: 7ev3n, Project: hpx, Lines: 23, Source: read_values.cpp

Example 14: loadStatismoModel

PcaModel PcaModel::loadStatismoModel(path h5file, PcaModel::ModelType modelType)
{
	logging::Logger logger = Loggers->getLogger("shapemodels");
	PcaModel model;

	// Load the shape or color model from the .h5 file
	string h5GroupType;
	if (modelType == ModelType::SHAPE) {
		h5GroupType = "shape";
	} else if (modelType == ModelType::COLOR) {
		h5GroupType = "color";
	}

	H5::H5File h5Model;

	try {
		h5Model = H5::H5File(h5file.string(), H5F_ACC_RDONLY);
	}
	catch (H5::Exception& e) {
		string errorMessage = "Could not open HDF5 file: " + string(e.getCDetailMsg());
		logger.error(errorMessage);
		throw errorMessage;
	}

	// Load either the shape or texture mean
	string h5Group = "/" + h5GroupType + "/model";
	H5::Group modelReconstructive = h5Model.openGroup(h5Group);

	// Read the mean
	H5::DataSet dsMean = modelReconstructive.openDataSet("./mean");
	hsize_t dims[2];
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);	// dsMean.getSpace() leaks memory... maybe a hdf5 bug, maybe vlenReclaim(...) could be a fix. No idea.
	//H5::DataSpace dsp = dsMean.getSpace();
	//dsp.close();
	Loggers->getLogger("shapemodels").debug("Dimensions of the model mean: " + lexical_cast<string>(dims[0]));
	model.mean = Mat(1, dims[0], CV_32FC1); // Use a row-vector, because of faster memory access and I'm not sure the memory block is allocated contiguously if we have multiple rows.
	dsMean.read(model.mean.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
	model.mean = model.mean.t(); // Transpose it to a col-vector
	dsMean.close();

	// Read the eigenvalues
	dsMean = modelReconstructive.openDataSet("./pcaVariance");
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);
	Loggers->getLogger("shapemodels").debug("Dimensions of the pcaVariance: " + lexical_cast<string>(dims[0]));
	model.eigenvalues = Mat(1, dims[0], CV_32FC1);
	dsMean.read(model.eigenvalues.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
	model.eigenvalues = model.eigenvalues.t();
	dsMean.close();

	// Read the PCA basis matrix
	dsMean = modelReconstructive.openDataSet("./pcaBasis");
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);
	Loggers->getLogger("shapemodels").debug("Dimensions of the PCA basis matrix: " + lexical_cast<string>(dims[0]) + ", " + lexical_cast<string>(dims[1]));
	model.pcaBasis = Mat(dims[0], dims[1], CV_32FC1);
	dsMean.read(model.pcaBasis.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
	dsMean.close();

	modelReconstructive.close(); // close the model-group

	// Read the noise variance (not implemented)
	/*dsMean = modelReconstructive.openDataSet("./noiseVariance");
	float noiseVariance = 10.0f;
	dsMean.read(&noiseVariance, H5::PredType::NATIVE_FLOAT);
	dsMean.close(); */

	// Read the triangle-list
	string representerGroupName = "/" + h5GroupType + "/representer";
	H5::Group representerGroup = h5Model.openGroup(representerGroupName);
	dsMean = representerGroup.openDataSet("./reference-mesh/triangle-list");
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);
	Loggers->getLogger("shapemodels").debug("Dimensions of the triangle-list: " + lexical_cast<string>(dims[0]) + ", " + lexical_cast<string>(dims[1]));
	Mat triangles(dims[0], dims[1], CV_32SC1);
	dsMean.read(triangles.ptr<int>(0), H5::PredType::NATIVE_INT32);
	dsMean.close();
	representerGroup.close();
	model.triangleList.resize(triangles.rows);
	for (unsigned int i = 0; i < model.triangleList.size(); ++i) {
		model.triangleList[i][0] = triangles.at<int>(i, 0);
		model.triangleList[i][1] = triangles.at<int>(i, 1);
		model.triangleList[i][2] = triangles.at<int>(i, 2);
	}

	// Load the landmarks mappings:
	// load the reference-mesh
	representerGroup = h5Model.openGroup(representerGroupName);
	dsMean = representerGroup.openDataSet("./reference-mesh/vertex-coordinates");
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);
	Loggers->getLogger("shapemodels").debug("Dimensions of the reference-mesh vertex-coordinates matrix: " + lexical_cast<string>(dims[0]) + ", " + lexical_cast<string>(dims[1]));
	Mat referenceMesh(dims[0], dims[1], CV_32FC1);
	dsMean.read(referenceMesh.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
	dsMean.close();
	representerGroup.close();

	// convert to 3 vectors with the x, y and z coordinates for easy searching
	vector<float> refx(referenceMesh.col(0).clone());
	vector<float> refy(referenceMesh.col(1).clone());
	vector<float> refz(referenceMesh.col(2).clone());

	// load the landmarks info (mapping name <-> reference (x, y, z)-coords)
	H5::Group landmarksGroup = h5Model.openGroup("/metadata/landmarks");
//......... (remaining code omitted) .........
Developer: nqthiep, Project: FeatureDetection, Lines: 101, Source: PcaModel.cpp

Example 15: load

bool TStellarData::load(const std::string& fname, const std::string& group, const std::string& dset,
			double err_floor, double default_EBV) {
	H5::H5File *file = H5Utils::openFile(fname);
	if(file == NULL) { return false; }
	
	H5::Group *gp = H5Utils::openGroup(file, group);
	if(gp == NULL) {
		delete file;
		return false;
	}
	
	H5::DataSet dataset = gp->openDataSet(dset);
	
	/*
	 *  Photometry
	 */
	
	// Datatype
	hsize_t nbands = NBANDS;
	H5::ArrayType f4arr(H5::PredType::NATIVE_FLOAT, 1, &nbands);
	H5::ArrayType u4arr(H5::PredType::NATIVE_UINT32, 1, &nbands);
	H5::CompType dtype(sizeof(TFileData));
	dtype.insertMember("obj_id", HOFFSET(TFileData, obj_id), H5::PredType::NATIVE_UINT64);
	dtype.insertMember("l", HOFFSET(TFileData, l), H5::PredType::NATIVE_DOUBLE);
	dtype.insertMember("b", HOFFSET(TFileData, b), H5::PredType::NATIVE_DOUBLE);
	dtype.insertMember("mag", HOFFSET(TFileData, mag), f4arr);
	dtype.insertMember("err", HOFFSET(TFileData, err), f4arr);
	dtype.insertMember("maglimit", HOFFSET(TFileData, maglimit), f4arr);
	dtype.insertMember("nDet", HOFFSET(TFileData, N_det), u4arr);
	dtype.insertMember("EBV", HOFFSET(TFileData, EBV), H5::PredType::NATIVE_FLOAT);
	
	// Dataspace
	hsize_t length;
	H5::DataSpace dataspace = dataset.getSpace();
	dataspace.getSimpleExtentDims(&length);
	
	// Read in dataset
	TFileData* data_buf = new TFileData[length];
	dataset.read(data_buf, dtype);
	//std::cerr << "# Read in dimensions." << std::endl;
	
	// Fix magnitude limits
	for(int n=0; n<nbands; n++) {
		float tmp;
		float maglim_replacement = 25.;
		
		// Find the 95th percentile of valid magnitude limits
		std::vector<float> maglimit;
		for(hsize_t i=0; i<length; i++) {
			tmp = data_buf[i].maglimit[n];
			
			if((tmp > 10.) && (tmp < 40.) && (!isnan(tmp))) {
				maglimit.push_back(tmp);
			}
		}
		
		//std::sort(maglimit.begin(), maglimit.end());
		if(maglimit.size() != 0) {
			maglim_replacement = percentile(maglimit, 95.);
		}
		
		// Replace missing magnitude limits with the 95th percentile magnitude limit
		for(hsize_t i=0; i<length; i++) {
			tmp = data_buf[i].maglimit[n];
			
			if(!((tmp > 10.) && (tmp < 40.)) || isnan(tmp)) {
				//std::cout << i << ", " << n << ":  " << tmp << std::endl;
				data_buf[i].maglimit[n] = maglim_replacement;
			}
		}
	}
	
	//int n_filtered = 0;
	//int n_M_dwarfs = 0;
	
	TMagnitudes mag_tmp;
	for(size_t i=0; i<length; i++) {
		mag_tmp.set(data_buf[i], err_floor);
		star.push_back(mag_tmp);
		
		//int n_informative = 0;
		
		// Remove g-band
		//mag_tmp.m[0] = 0.;
		//mag_tmp.err[0] = 1.e10;
		
		//double g_err = mag_tmp.err[0];
		//mag_tmp.err[0] = sqrt(g_err*g_err + 0.1*0.1);
		
		// Filter bright end
                // TODO: Put this into query_lsd.py
		/*for(int j=0; j<NBANDS; j++) {
			if((mag_tmp.err[j] < 1.e9) && (mag_tmp.m[j] < 14.)) {
				mag_tmp.err[j] = 1.e10;
				mag_tmp.m[j] = 0.;
			}
			
			if(mag_tmp.err[j] < 1.e9) {
				n_informative++;
			}
//......... (remaining code omitted) .........
Developer: gregreen, Project: bayestar, Lines: 101, Source: data.cpp


Note: The h5::DataSet::read examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets are drawn from open-source projects contributed by their respective authors; the source code copyright remains with the original authors, and redistribution and use should follow each project's License. Please do not republish without permission.