本文整理汇总了C++中vector_double::reserve方法的典型用法代码示例。如果您正在寻找以下问题的答案:C++ vector_double::reserve方法的具体用法?C++ vector_double::reserve怎么用?C++ vector_double::reserve使用的例子?那么这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类vector_double的用法示例。
在下文中一共展示了vector_double::reserve方法的1个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: read_particles_restart
// extracted from Particles3Dcomm.cpp
//
// Load the restart particle data for one species on this MPI rank from the
// rank's own HDF5 restart file ("<restart_dir>/restart<rank>.hdf") into the
// caller-supplied per-particle arrays.
//
// Parameters:
//   vct            - Cartesian MPI topology; used here only to obtain the
//                    rank (for the file name and the rank-0 progress message)
//   species_number - index of the particle species to read
//   u, v, w        - per-particle velocity components (filled by this call)
//   q              - per-particle charge (filled by this call)
//   x, y, z        - per-particle positions (filled by this call)
//   t              - additional per-particle scalar (filled by this call);
//                    exact meaning not visible from this excerpt
//
// NOTE(review): only the reads of x and y are visible below; the reads of the
// remaining arrays (and the H5Fclose) are in the elided tail of this function.
void Collective::read_particles_restart(
const VCtopology3D* vct,
int species_number,
vector_double& u,
vector_double& v,
vector_double& w,
vector_double& q,
vector_double& x,
vector_double& y,
vector_double& z,
vector_double& t)const
{
#ifdef NO_HDF5
// Without HDF5 support a restart is impossible; eprintf reports the error
// (presumably fatally -- confirm eprintf's semantics in this project).
eprintf("Require HDF5 to read from restart file.");
#else
// Announce the load once: only rank 0, and only for the first species.
if (vct->getCartesian_rank() == 0 && species_number == 0)
{
printf("LOADING PARTICLES FROM RESTART FILE in %s/restart.hdf\n",
getRestartDirName().c_str());
}
// Each rank reads its own file: "<restart_dir>/restart<rank>.hdf".
stringstream ss;
ss << vct->getCartesian_rank();
string name_file = getRestartDirName() + "/restart" + ss.str() + ".hdf";
// hdf stuff
hid_t file_id, dataspace;
hid_t datatype, dataset_id;
herr_t status;
size_t size;
hsize_t dims_out[1]; /* dataset dimensions */
int status_n;
// open the hdf file
// NOTE(review): opened read-write (H5F_ACC_RDWR) although this function only
// reads; H5F_ACC_RDONLY would be safer unless later code writes -- confirm.
file_id = H5Fopen(name_file.c_str(), H5F_ACC_RDWR, H5P_DEFAULT);
if (file_id < 0) {
// NOTE(review): if eprintf does not abort, execution continues with an
// invalid file_id and the H5Dopen2 calls below will fail -- confirm.
eprintf("couldn't open file: %s\n"
"\tRESTART NOT POSSIBLE", name_file.c_str());
//cout << "couldn't open file: " << name_file << endl;
//cout << "RESTART NOT POSSIBLE" << endl;
}
//find the last cycle
int lastcycle=0;
dataset_id = H5Dopen2(file_id, "/last_cycle", H5P_DEFAULT); // HDF 1.8.8
// NOTE(review): 'status' return codes here and below are stored but never
// checked; a failed read leaves lastcycle at 0 silently.
status = H5Dread(dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &lastcycle);
status = H5Dclose(dataset_id);
stringstream species_name;
species_name << species_number;
// Open the x-coordinate dataset of this species at the last saved cycle;
// its extent determines the particle count for ALL the arrays read below.
ss.str("");ss << "/particles/species_" << species_number << "/x/cycle_" << lastcycle;
dataset_id = H5Dopen2(file_id, ss.str().c_str(), H5P_DEFAULT); // HDF 1.8.8
datatype = H5Dget_type(dataset_id);
size = H5Tget_size(datatype);
dataspace = H5Dget_space(dataset_id); /* dataspace handle */
status_n = H5Sget_simple_extent_dims(dataspace, dims_out, NULL);
// get how many particles there are on this processor for this species
// NOTE(review): this call duplicates the one directly above; one of the two
// is redundant (harmless, but should be removed).
status_n = H5Sget_simple_extent_dims(dataspace, dims_out, NULL);
const int nop = dims_out[0]; // number of particles in this process
//Particles3Dcomm::resize_SoA(nop);
{
//
// allocate space for particles including padding
//
// Capacity is rounded up to a multiple of DVECWIDTH (presumably the SIMD
// vector width -- confirm) so vectorized kernels can safely over-read past
// nop; the logical size set by resize() below stays at the true count.
const int padded_nop = roundup_to_multiple(nop,DVECWIDTH);
u.reserve(padded_nop);
v.reserve(padded_nop);
w.reserve(padded_nop);
q.reserve(padded_nop);
x.reserve(padded_nop);
y.reserve(padded_nop);
z.reserve(padded_nop);
t.reserve(padded_nop);
//
// define size of particle data
//
u.resize(nop);
v.resize(nop);
w.resize(nop);
q.resize(nop);
x.resize(nop);
y.resize(nop);
z.resize(nop);
t.resize(nop);
}
// get x
// NOTE(review): &x[0] is undefined if nop == 0; an empty-rank restart would
// need a guard (or x.data()) -- confirm whether nop can be zero here.
status = H5Dread(dataset_id, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &x[0]);
// close the data set
status = H5Dclose(dataset_id);
// get y
ss.str("");ss << "/particles/species_" << species_number << "/y/cycle_" << lastcycle;
dataset_id = H5Dopen2(file_id, ss.str().c_str(), H5P_DEFAULT); // HDF 1.8.8
status = H5Dread(dataset_id, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &y[0]);
status = H5Dclose(dataset_id);
// get z
//.........这里部分代码省略.........