This article collects typical usage examples of the C++ function VecCreateSeqWithArray. If you are unsure what VecCreateSeqWithArray does, how it is called, or simply want to see it used in real code, the selected examples below should help.
15 code examples of VecCreateSeqWithArray are shown below, ordered roughly by popularity.
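Before turning to the examples, here is a minimal sketch of the basic call pattern, assuming a PETSc release where VecCreateSeqWithArray takes a block-size argument (3.2 and later). The key point is that the Vec only views the caller's array: it never copies or frees it, so the array must outlive the Vec.

#include <petscvec.h>

int main(int argc,char **argv)
{
  Vec            x;
  PetscScalar    data[4] = {1.0,2.0,3.0,4.0};   /* caller-owned storage */
  PetscReal      nrm;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  /* block size 1, length 4; the Vec uses "data" in place, no copy is made */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,4,data,&x);CHKERRQ(ierr);
  ierr = VecNorm(x,NORM_2,&nrm);CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_SELF,"||x|| = %g\n",(double)nrm);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);          /* "data" is not freed here */
  ierr = PetscFinalize();
  return ierr;
}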
Example 1: MatBlockMatSetPreallocation_BlockMat
PetscErrorCode MatBlockMatSetPreallocation_BlockMat(Mat A,PetscInt bs,PetscInt nz,PetscInt *nnz)
{
Mat_BlockMat *bmat = (Mat_BlockMat*)A->data;
PetscErrorCode ierr;
PetscInt i;
PetscFunctionBegin;
ierr = PetscLayoutSetBlockSize(A->rmap,bs);CHKERRQ(ierr);
ierr = PetscLayoutSetBlockSize(A->cmap,bs);CHKERRQ(ierr);
ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
ierr = PetscLayoutGetBlockSize(A->rmap,&bs);CHKERRQ(ierr);
if (nz == PETSC_DEFAULT || nz == PETSC_DECIDE) nz = 5;
if (nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nz cannot be less than 0: value %d",nz);
if (nnz) {
for (i=0; i<A->rmap->n/bs; i++) {
if (nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be less than 0: local row %d value %d",i,nnz[i]);
if (nnz[i] > A->cmap->n/bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nnz cannot be greater than row length: local row %d value %d rowlength %d",i,nnz[i],A->cmap->n/bs);
}
}
bmat->mbs = A->rmap->n/bs;
ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,bs,NULL,&bmat->right);CHKERRQ(ierr);
ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,bs,NULL,&bmat->middle);CHKERRQ(ierr);
ierr = VecCreateSeq(PETSC_COMM_SELF,bs,&bmat->left);CHKERRQ(ierr);
if (!bmat->imax) {
ierr = PetscMalloc2(A->rmap->n,&bmat->imax,A->rmap->n,&bmat->ilen);CHKERRQ(ierr);
ierr = PetscLogObjectMemory((PetscObject)A,2*A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
}
if (nnz) {
nz = 0;
for (i=0; i<A->rmap->n/A->rmap->bs; i++) {
bmat->imax[i] = nnz[i];
nz += nnz[i];
}
} else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Currently requires block row by row preallocation");
/* bmat->ilen will count nonzeros in each row so far. */
for (i=0; i<bmat->mbs; i++) bmat->ilen[i] = 0;
/* allocate the matrix space */
ierr = MatSeqXAIJFreeAIJ(A,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr);
ierr = PetscMalloc3(nz,&bmat->a,nz,&bmat->j,A->rmap->n+1,&bmat->i);CHKERRQ(ierr);
ierr = PetscLogObjectMemory((PetscObject)A,(A->rmap->n+1)*sizeof(PetscInt)+nz*(sizeof(PetscScalar)+sizeof(PetscInt)));CHKERRQ(ierr);
bmat->i[0] = 0;
for (i=1; i<bmat->mbs+1; i++) {
bmat->i[i] = bmat->i[i-1] + bmat->imax[i-1];
}
bmat->singlemalloc = PETSC_TRUE;
bmat->free_a = PETSC_TRUE;
bmat->free_ij = PETSC_TRUE;
bmat->nz = 0;
bmat->maxnz = nz;
A->info.nz_unneeded = (double)bmat->maxnz;
ierr = MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
PetscFunctionReturn(0);
}
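Example 1 (like Examples 3 and 5 below) passes a NULL array to VecCreateSeqWithArray: the Vec is created without storage of its own and a buffer is attached only when it is needed. A minimal hedged sketch of that pattern, assuming the usual VecPlaceArray()/VecResetArray() pairing; the function and variable names here are illustrative, not taken from the BlockMat source.

#include <petscvec.h>

/* Sketch only: a Vec created with a NULL array owns no storage.  The caller
   attaches a borrowed buffer right before use and detaches it afterwards. */
static PetscErrorCode use_placeholder_vec(Vec right,PetscScalar *work)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecPlaceArray(right,work);CHKERRQ(ierr);   /* attach caller-owned storage */
  /* ... use "right", e.g. as the input of a small local multiply ... */
  ierr = VecResetArray(right);CHKERRQ(ierr);        /* detach; "work" stays valid  */
  PetscFunctionReturn(0);
}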
Example 2: BVCreate_Mat
PETSC_EXTERN PetscErrorCode BVCreate_Mat(BV bv)
{
PetscErrorCode ierr;
BV_MAT *ctx;
PetscInt nloc,bs;
PetscBool seq;
char str[50];
PetscFunctionBegin;
ierr = PetscNewLog(bv,&ctx);CHKERRQ(ierr);
bv->data = (void*)ctx;
ierr = PetscObjectTypeCompare((PetscObject)bv->t,VECMPI,&ctx->mpi);CHKERRQ(ierr);
if (!ctx->mpi) {
ierr = PetscObjectTypeCompare((PetscObject)bv->t,VECSEQ,&seq);CHKERRQ(ierr);
if (!seq) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot create a BVMAT from a non-standard template vector");
}
ierr = VecGetLocalSize(bv->t,&nloc);CHKERRQ(ierr);
ierr = VecGetBlockSize(bv->t,&bs);CHKERRQ(ierr);
ierr = MatCreateDense(PetscObjectComm((PetscObject)bv->t),nloc,PETSC_DECIDE,PETSC_DECIDE,bv->m,NULL,&ctx->A);CHKERRQ(ierr);
ierr = MatAssemblyBegin(ctx->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
ierr = MatAssemblyEnd(ctx->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
ierr = PetscLogObjectParent((PetscObject)bv,(PetscObject)ctx->A);CHKERRQ(ierr);
if (((PetscObject)bv)->name) {
ierr = PetscSNPrintf(str,50,"%s_0",((PetscObject)bv)->name);CHKERRQ(ierr);
ierr = PetscObjectSetName((PetscObject)ctx->A,str);CHKERRQ(ierr);
}
if (ctx->mpi) {
ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)bv->t),bs,nloc,PETSC_DECIDE,NULL,&bv->cv[0]);CHKERRQ(ierr);
ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)bv->t),bs,nloc,PETSC_DECIDE,NULL,&bv->cv[1]);CHKERRQ(ierr);
} else {
ierr = VecCreateSeqWithArray(PetscObjectComm((PetscObject)bv->t),bs,nloc,NULL,&bv->cv[0]);CHKERRQ(ierr);
ierr = VecCreateSeqWithArray(PetscObjectComm((PetscObject)bv->t),bs,nloc,NULL,&bv->cv[1]);CHKERRQ(ierr);
}
bv->ops->mult = BVMult_Mat;
bv->ops->multvec = BVMultVec_Mat;
bv->ops->multinplace = BVMultInPlace_Mat;
bv->ops->multinplacetrans = BVMultInPlaceTranspose_Mat;
bv->ops->axpy = BVAXPY_Mat;
bv->ops->dot = BVDot_Mat;
bv->ops->dotvec = BVDotVec_Mat;
bv->ops->scale = BVScale_Mat;
bv->ops->norm = BVNorm_Mat;
/*bv->ops->orthogonalize = BVOrthogonalize_Mat;*/
bv->ops->matmult = BVMatMult_Mat;
bv->ops->copy = BVCopy_Mat;
bv->ops->resize = BVResize_Mat;
bv->ops->getcolumn = BVGetColumn_Mat;
bv->ops->restorecolumn = BVRestoreColumn_Mat;
bv->ops->getarray = BVGetArray_Mat;
bv->ops->restorearray = BVRestoreArray_Mat;
bv->ops->destroy = BVDestroy_Mat;
if (!ctx->mpi) bv->ops->view = BVView_Mat;
PetscFunctionReturn(0);
}
Example 3: PCSetUp_TFS
static PetscErrorCode PCSetUp_TFS(PC pc)
{
PC_TFS *tfs = (PC_TFS*)pc->data;
Mat A = pc->pmat;
Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data;
PetscErrorCode ierr;
PetscInt *localtoglobal,ncol,i;
PetscBool ismpiaij;
/*
PetscBool issymmetric;
PetscReal tol = 0.0;
*/
PetscFunctionBegin;
if (A->cmap->N != A->rmap->N) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_SIZ,"matrix must be square");
ierr = PetscObjectTypeCompare((PetscObject)pc->pmat,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
if (!ismpiaij) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Currently only supports MPIAIJ matrices");
/* generate the local to global mapping */
ncol = a->A->cmap->n + a->B->cmap->n;
ierr = PetscMalloc((ncol)*sizeof(PetscInt),&localtoglobal);CHKERRQ(ierr);
for (i=0; i<a->A->cmap->n; i++) {
localtoglobal[i] = A->cmap->rstart + i + 1;
}
for (i=0; i<a->B->cmap->n; i++) {
localtoglobal[i+a->A->cmap->n] = a->garray[i] + 1;
}
/* generate the vectors needed for the local solves */
ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,a->A->rmap->n,PETSC_NULL,&tfs->b);CHKERRQ(ierr);
ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,a->A->cmap->n,PETSC_NULL,&tfs->xd);CHKERRQ(ierr);
ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,a->B->cmap->n,PETSC_NULL,&tfs->xo);CHKERRQ(ierr);
tfs->nd = a->A->cmap->n;
/* ierr = MatIsSymmetric(A,tol,&issymmetric); */
/* if (issymmetric) { */
ierr = PetscBarrier((PetscObject)pc);CHKERRQ(ierr);
if (A->symmetric) {
tfs->xxt = XXT_new();
ierr = XXT_factor(tfs->xxt,localtoglobal,A->rmap->n,ncol,(void*)PCTFSLocalMult_TFS,pc);CHKERRQ(ierr);
pc->ops->apply = PCApply_TFS_XXT;
} else {
tfs->xyt = XYT_new();
ierr = XYT_factor(tfs->xyt,localtoglobal,A->rmap->n,ncol,(void*)PCTFSLocalMult_TFS,pc);CHKERRQ(ierr);
pc->ops->apply = PCApply_TFS_XYT;
}
ierr = PetscFree(localtoglobal);CHKERRQ(ierr);
PetscFunctionReturn(0);
}
Example 4: Epetra_PETScAIJMatrix::Multiply
int Epetra_PETScAIJMatrix::Multiply(bool TransA,
const Epetra_MultiVector& X,
Epetra_MultiVector& Y) const
{
(void)TransA;
int NumVectors = X.NumVectors();
if (NumVectors!=Y.NumVectors()) EPETRA_CHK_ERR(-1); // X and Y must have same number of vectors
double ** xptrs;
double ** yptrs;
X.ExtractView(&xptrs);
Y.ExtractView(&yptrs);
if (RowMatrixImporter()!=0) {
if (ImportVector_!=0) {
if (ImportVector_->NumVectors()!=NumVectors) { delete ImportVector_; ImportVector_= 0;}
}
if (ImportVector_==0) ImportVector_ = new Epetra_MultiVector(RowMatrixColMap(),NumVectors);
ImportVector_->Import(X, *RowMatrixImporter(), Insert);
ImportVector_->ExtractView(&xptrs);
}
double *vals=0;
int length;
Vec petscX, petscY;
int ierr;
for (int i=0; i<NumVectors; i++) {
# ifdef HAVE_MPI
ierr=VecCreateMPIWithArray(Comm_->Comm(),X.MyLength(),X.GlobalLength(),xptrs[i],&petscX); CHKERRQ(ierr);
ierr=VecCreateMPIWithArray(Comm_->Comm(),Y.MyLength(),Y.GlobalLength(),yptrs[i],&petscY); CHKERRQ(ierr);
# else //FIXME untested
ierr=VecCreateSeqWithArray(Comm_->Comm(),X.MyLength(),xptrs[i],&petscX); CHKERRQ(ierr);
ierr=VecCreateSeqWithArray(Comm_->Comm(),Y.MyLength(),yptrs[i],&petscY); CHKERRQ(ierr);
# endif
ierr = MatMult(Amat_,petscX,petscY);CHKERRQ(ierr);
ierr = VecGetArray(petscY,&vals);CHKERRQ(ierr);
ierr = VecGetLocalSize(petscY,&length);CHKERRQ(ierr);
for (int j=0; j<length; j++) yptrs[i][j] = vals[j];
ierr = VecRestoreArray(petscY,&vals);CHKERRQ(ierr);
}
VecDestroy(petscX); VecDestroy(petscY);
double flops = NumGlobalNonzeros();
flops *= 2.0;
flops *= (double) NumVectors;
UpdateFlops(flops);
return(0);
} //Multiply()
Example 5: DMDAGlobalToNaturalAllCreate
/*@
DMDAGlobalToNaturalAllCreate - Creates a scatter context that maps the entire
global vector onto each processor (every rank receives all entries) in natural numbering
Collective on DMDA
Input Parameter:
. da - the distributed array context
Output Parameter:
. scatter - the scatter context
Level: advanced
.keywords: distributed array, global to local, begin, coarse problem
.seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode DMDAGlobalToNaturalAllCreate(DM da,VecScatter *scatter)
{
PetscErrorCode ierr;
PetscInt N;
IS from,to;
Vec tmplocal,global;
AO ao;
DM_DA *dd = (DM_DA*)da->data;
PetscFunctionBegin;
PetscValidHeaderSpecific(da,DM_CLASSID,1);
PetscValidPointer(scatter,2);
ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);
/* create the scatter context */
ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,dd->Nlocal,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
ierr = VecGetSize(global,&N);CHKERRQ(ierr);
ierr = ISCreateStride(PetscObjectComm((PetscObject)da),N,0,1,&to);CHKERRQ(ierr);
ierr = AOPetscToApplicationIS(ao,to);CHKERRQ(ierr);
ierr = ISCreateStride(PetscObjectComm((PetscObject)da),N,0,1,&from);CHKERRQ(ierr);
ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dd->w,N,0,&tmplocal);CHKERRQ(ierr);
ierr = VecScatterCreate(global,from,tmplocal,to,scatter);CHKERRQ(ierr);
ierr = VecDestroy(&tmplocal);CHKERRQ(ierr);
ierr = VecDestroy(&global);CHKERRQ(ierr);
ierr = ISDestroy(&from);CHKERRQ(ierr);
ierr = ISDestroy(&to);CHKERRQ(ierr);
PetscFunctionReturn(0);
}
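A hedged sketch of how the scatter created above is typically applied: the destination is a sequential vector that holds the full global length on every rank. The helper name below is illustrative.

#include <petscdmda.h>

/* Sketch only: gather an entire DMDA global vector onto every process in
   natural ordering, using the scatter from DMDAGlobalToNaturalAllCreate(). */
static PetscErrorCode gather_natural(DM da,Vec global)
{
  VecScatter     scatter;
  Vec            natural;
  PetscInt       N;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMDAGlobalToNaturalAllCreate(da,&scatter);CHKERRQ(ierr);
  ierr = VecGetSize(global,&N);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,N,&natural);CHKERRQ(ierr);
  ierr = VecScatterBegin(scatter,global,natural,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,global,natural,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* every rank now holds the whole vector in natural ordering */
  ierr = VecDestroy(&natural);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}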
Example 6: PetscVector::copyFromArray
void PetscVector::copyFromArray( double v[] )
{
int ierr;
Vec sv;
IS is;
VecScatter ctx;
ierr = VecCreateSeqWithArray(PETSC_COMM_SELF, n, v, &sv);
assert(ierr == 0);
ierr = ISCreateStride(PETSC_COMM_WORLD, n, 0, 1, &is);
assert( ierr == 0);
ierr = VecScatterCreate( sv, is, pv, is, &ctx);
assert( ierr == 0);
ierr = VecScatterBegin( sv, pv,INSERT_VALUES,SCATTER_FORWARD,
ctx);
assert( ierr == 0);
ierr = VecScatterEnd( sv, pv,INSERT_VALUES,SCATTER_FORWARD,
ctx);
assert( ierr == 0);
ierr = VecScatterDestroy(ctx);
assert( ierr == 0);
ierr = ISDestroy( is );
assert(ierr == 0);
ierr = VecDestroy( sv );
assert(ierr == 0);
}
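Example 6 targets an older PETSc calling convention: VecCreateSeqWithArray without a block-size argument, VecScatterBegin/End with the scatter context as the last argument, and destroy routines taking the object by value. A hedged sketch of the same copy against the current signatures, written as a free function so it stands alone; the names pv, n and v mirror the example, everything else is illustrative.

#include <petscvec.h>

/* Sketch only: scatter a caller-owned sequential array "v" of length n into
   the distributed vector "pv", using the current PETSc calling sequences. */
static PetscErrorCode copy_from_array(Vec pv,PetscInt n,PetscScalar v[])
{
  Vec            sv;
  IS             is;
  VecScatter     ctx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n,v,&sv);CHKERRQ(ierr);  /* block size 1 */
  ierr = ISCreateStride(PETSC_COMM_WORLD,n,0,1,&is);CHKERRQ(ierr);
  ierr = VecScatterCreate(sv,is,pv,is,&ctx);CHKERRQ(ierr);
  /* the scatter context is now the first argument of Begin/End */
  ierr = VecScatterBegin(ctx,sv,pv,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(ctx,sv,pv,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* destroy routines now take the address of the object */
  ierr = VecScatterDestroy(&ctx);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  ierr = VecDestroy(&sv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}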
Example 7: PetscSparseMtrx::times
void
PetscSparseMtrx :: times(const FloatArray &x, FloatArray &answer) const
{
if ( this->giveNumberOfColumns() != x.giveSize() ) {
OOFEM_ERROR("Dimension mismatch");
}
#ifdef __PARALLEL_MODE
if ( emodel->isParallel() ) {
OOFEM_ERROR("PetscSparseMtrx :: times - Not implemented");
}
#endif
Vec globX, globY;
VecCreateSeqWithArray(PETSC_COMM_SELF, 1, x.giveSize(), x.givePointer(), & globX);
VecCreate(PETSC_COMM_SELF, & globY);
VecSetType(globY, VECSEQ);
VecSetSizes(globY, PETSC_DECIDE, this->nRows);
MatMult(this->mtrx, globX, globY);
double *ptr;
VecGetArray(globY, & ptr);
answer.resize(this->nRows);
for ( int i = 0; i < this->nRows; i++ ) {
answer(i) = ptr [ i ];
}
VecRestoreArray(globY, & ptr);
VecDestroy(&globX);
VecDestroy(&globY);
}
Example 8: SAMpatchPETSc::normL2
Real SAMpatchPETSc::normL2(const Vector& x, char dofType) const
{
#ifdef HAVE_MPI
if (adm.isParallel()) {
if (dofIS.find(dofType) == dofIS.end())
setupIS(dofType);
Vec lx;
VecCreateSeqWithArray(PETSC_COMM_SELF, 1, x.size(), x.data(), &lx);
Vec gx;
VecCreate(*adm.getCommunicator(), &gx);
VecSetSizes(gx, dofIS[dofType].nDofs, PETSC_DETERMINE);
VecSetFromOptions(gx);
PetscInt n;
VecGetSize(gx, &n);
if (!dofIS[dofType].scatterCreated) {
VecScatterCreate(lx, dofIS[dofType].local, gx, dofIS[dofType].global, &dofIS[dofType].ctx);
dofIS[dofType].scatterCreated = true;
}
VecScatterBegin(dofIS[dofType].ctx, lx, gx, INSERT_VALUES, SCATTER_FORWARD);
VecScatterEnd(dofIS[dofType].ctx, lx, gx, INSERT_VALUES, SCATTER_FORWARD);
PetscReal d;
VecNorm(gx, NORM_2, &d);
VecDestroy(&lx);
VecDestroy(&gx);
return d / sqrt(double(n));
}
#endif
return this->SAM::normL2(x, dofType);
}
Example 9: ProbeVolume::writeSubMeshHDF5
// Write the sub mesh into a HDF5 file.
PetscErrorCode ProbeVolume::writeSubMeshHDF5(const std::string &filePath)
{
PetscErrorCode ierr;
PetscFunctionBeginUser;
// only the first process in the communicator writes the sub-mesh into a file
if (commRank == 0)
{
// because only one process is involved in writing the sub-mesh,
// we need to create a temporary viewer
PetscViewer viewer2;
ierr = PetscViewerCreate(PETSC_COMM_SELF, &viewer2); CHKERRQ(ierr);
ierr = PetscViewerSetType(viewer2, PETSCVIEWERHDF5); CHKERRQ(ierr);
ierr = PetscViewerFileSetMode(viewer2, FILE_MODE_WRITE); CHKERRQ(ierr);
ierr = PetscViewerFileSetName(
viewer2, filePath.c_str()); CHKERRQ(ierr);
ierr = PetscViewerHDF5PushGroup(viewer2, "mesh"); CHKERRQ(ierr);
std::vector<std::string> dirs{"x", "y", "z"};
for (unsigned int d = 0; d < coord.size(); ++d)
{
Vec tmp;
ierr = VecCreateSeqWithArray(PETSC_COMM_SELF, 1, nPtsDir[d],
&coord[d][0], &tmp); CHKERRQ(ierr);
ierr = PetscObjectSetName((PetscObject) tmp,
dirs[d].c_str()); CHKERRQ(ierr);
ierr = VecView(tmp, viewer2); CHKERRQ(ierr);
ierr = VecDestroy(&tmp); CHKERRQ(ierr);
}
ierr = PetscViewerDestroy(&viewer2); CHKERRQ(ierr);
}
PetscFunctionReturn(0);
} // ProbeVolume::writeSubMeshHDF5
Example 10: BVCreate_Contiguous
PETSC_EXTERN PetscErrorCode BVCreate_Contiguous(BV bv)
{
PetscErrorCode ierr;
BV_CONTIGUOUS *ctx;
PetscInt j,nloc,bs;
PetscBool seq;
char str[50];
PetscFunctionBegin;
ierr = PetscNewLog(bv,&ctx);CHKERRQ(ierr);
bv->data = (void*)ctx;
ierr = PetscObjectTypeCompare((PetscObject)bv->t,VECMPI,&ctx->mpi);CHKERRQ(ierr);
if (!ctx->mpi) {
ierr = PetscObjectTypeCompare((PetscObject)bv->t,VECSEQ,&seq);CHKERRQ(ierr);
if (!seq) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot create a contiguous BV from a non-standard template vector");
}
ierr = VecGetLocalSize(bv->t,&nloc);CHKERRQ(ierr);
ierr = VecGetBlockSize(bv->t,&bs);CHKERRQ(ierr);
ierr = PetscMalloc1(bv->m*nloc,&ctx->array);CHKERRQ(ierr);
ierr = PetscMemzero(ctx->array,bv->m*nloc*sizeof(PetscScalar));CHKERRQ(ierr);
ierr = PetscMalloc1(bv->m,&ctx->V);CHKERRQ(ierr);
for (j=0;j<bv->m;j++) {
if (ctx->mpi) {
ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)bv->t),bs,nloc,PETSC_DECIDE,ctx->array+j*nloc,ctx->V+j);CHKERRQ(ierr);
} else {
ierr = VecCreateSeqWithArray(PetscObjectComm((PetscObject)bv->t),bs,nloc,ctx->array+j*nloc,ctx->V+j);CHKERRQ(ierr);
}
}
ierr = PetscLogObjectParents(bv,bv->m,ctx->V);CHKERRQ(ierr);
if (((PetscObject)bv)->name) {
for (j=0;j<bv->m;j++) {
ierr = PetscSNPrintf(str,50,"%s_%D",((PetscObject)bv)->name,j);CHKERRQ(ierr);
ierr = PetscObjectSetName((PetscObject)ctx->V[j],str);CHKERRQ(ierr);
}
}
bv->ops->mult = BVMult_Contiguous;
bv->ops->multvec = BVMultVec_Contiguous;
bv->ops->multinplace = BVMultInPlace_Contiguous;
bv->ops->multinplacetrans = BVMultInPlaceTranspose_Contiguous;
bv->ops->axpy = BVAXPY_Contiguous;
bv->ops->dot = BVDot_Contiguous;
bv->ops->dotvec = BVDotVec_Contiguous;
bv->ops->scale = BVScale_Contiguous;
bv->ops->norm = BVNorm_Contiguous;
/*bv->ops->orthogonalize = BVOrthogonalize_Contiguous;*/
bv->ops->matmult = BVMatMult_Contiguous;
bv->ops->copy = BVCopy_Contiguous;
bv->ops->resize = BVResize_Contiguous;
bv->ops->getcolumn = BVGetColumn_Contiguous;
bv->ops->getarray = BVGetArray_Contiguous;
bv->ops->destroy = BVDestroy_Contiguous;
PetscFunctionReturn(0);
}
Example 11: VecMPISetGhost
/*@
VecMPISetGhost - Sets the ghost points for an MPI ghost vector
Collective on Vec
Input Parameters:
+ vv - the MPI vector
. nghost - number of local ghost points
- ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
Notes:
Use VecGhostGetLocalForm() to access the local, ghosted representation
of the vector.
This also automatically sets the ISLocalToGlobalMapping() for this vector.
You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).
Level: advanced
Concepts: vectors^ghosted
.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
@*/
PetscErrorCode VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
PetscErrorCode ierr;
PetscBool flg;
PetscFunctionBegin;
ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
/* if this is already a fully built VECMPI, destroy it and rebuild it with ghosting */
if (flg) {
PetscInt n,N;
Vec_MPI *w;
PetscScalar *larray;
IS from,to;
ISLocalToGlobalMapping ltog;
PetscInt rstart,i,*indices;
MPI_Comm comm = ((PetscObject)vv)->comm;
n = vv->map->n;
N = vv->map->N;
ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,PETSC_NULL);CHKERRQ(ierr);
w = (Vec_MPI *)(vv)->data;
/* Create local representation */
ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
ierr = PetscLogObjectParent(vv,w->localrep);CHKERRQ(ierr);
ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);
/*
Create scatter context for scattering (updating) ghost values
*/
ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
ierr = PetscLogObjectParent(vv,w->localupdate);CHKERRQ(ierr);
ierr = ISDestroy(&to);CHKERRQ(ierr);
ierr = ISDestroy(&from);CHKERRQ(ierr);
/* set local to global mapping for ghosted vector */
ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
ierr = VecGetOwnershipRange(vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
for (i=0; i<n; i++) {
indices[i] = rstart + i;
}
for (i=0; i<nghost; i++) {
indices[n+i] = ghosts[i];
}
ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
} else if (vv->ops->create == VecCreate_MPI) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
else if (!((PetscObject)vv)->type_name) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
PetscFunctionReturn(0);
}
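A hedged usage sketch for VecMPISetGhost, following the order required by the notes above (sizes and type are set before ghosting is added). The local size and ghost indices are placeholders only.

#include <petscvec.h>

/* Sketch only: turn a plain VECMPI into a ghosted vector and refresh its
   ghost values through the local form. */
static PetscErrorCode ghost_demo(MPI_Comm comm)
{
  Vec            x,xlocal;
  const PetscInt ghosts[2] = {0,1};   /* placeholder global indices owned by other ranks */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate(comm,&x);CHKERRQ(ierr);
  ierr = VecSetSizes(x,4,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetType(x,VECMPI);CHKERRQ(ierr);
  ierr = VecMPISetGhost(x,2,ghosts);CHKERRQ(ierr);
  /* ... set the owned entries of x ... */
  ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostGetLocalForm(x,&xlocal);CHKERRQ(ierr);      /* length n + nghost */
  /* ... read owned and ghost entries from xlocal ... */
  ierr = VecGhostRestoreLocalForm(x,&xlocal);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}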
Example 12: op_create_vec
static Vec op_create_vec ( const op_dat vec ) {
assert( vec );
Vec p_vec;
// Create a PETSc vector and pass it the user-allocated storage
VecCreateSeqWithArray(MPI_COMM_SELF,vec->dim * vec->set->size,(PetscScalar*)vec->data,&p_vec);
VecAssemblyBegin(p_vec);
VecAssemblyEnd(p_vec);
return p_vec;
}
Example 13: VecCreateGhostBlockWithArray
/*@C
VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
the caller allocates the array space. Indices in the ghost region are based on blocks.
Collective on MPI_Comm
Input Parameters:
+ comm - the MPI communicator to use
. bs - block size
. n - local vector length
. N - global vector length (or PETSC_DECIDE to have calculated if n is given)
. nghost - number of local ghost blocks
. ghosts - global indices of ghost blocks (or PETSC_NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
- array - the space to store the vector values (as long as n + nghost*bs)
Output Parameter:
. vv - the global vector representation (without ghost points as part of vector)
Notes:
Use VecGhostGetLocalForm() to access the local, ghosted representation
of the vector.
n is the local vector size (total local size not the number of blocks) while nghost
is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
portion is bs*nghost
Level: advanced
Concepts: vectors^creating ghosted
Concepts: vectors^creating with array
.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
VecCreateGhostWithArray(), VecCreateGhostBlock()
@*/
PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
PetscErrorCode ierr;
Vec_MPI *w;
PetscScalar *larray;
IS from,to;
ISLocalToGlobalMapping ltog;
PetscInt rstart,i,nb,*indices;
PetscFunctionBegin;
*vv = 0;
if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
if (nghost < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
if (n % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
/* Create global representation */
ierr = VecCreate(comm,vv);CHKERRQ(ierr);
ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
w = (Vec_MPI *)(*vv)->data;
/* Create local representation */
ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
/*
Create scatter context for scattering (updating) ghost values
*/
ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
ierr = ISDestroy(&to);CHKERRQ(ierr);
ierr = ISDestroy(&from);CHKERRQ(ierr);
/* set local to global mapping for ghosted vector */
nb = n/bs;
ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
for (i=0; i<nb; i++) {
indices[i] = rstart + i*bs;
}
for (i=0; i<nghost; i++) {
indices[nb+i] = ghosts[i];
}
ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
PetscFunctionReturn(0);
}
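A hedged sketch of calling VecCreateGhostBlockWithArray with caller-provided storage of length n + nghost*bs, as described in the notes above; all sizes and indices are placeholders.

#include <petscvec.h>

/* Sketch only: block size 2, two local blocks (n = 4) and one ghost block,
   so the caller-owned buffer must hold n + nghost*bs = 6 entries. */
static PetscErrorCode ghost_block_demo(MPI_Comm comm)
{
  const PetscInt bs = 2,n = 4,nghost = 1;
  const PetscInt ghosts[1] = {0};        /* placeholder ghost block index */
  PetscScalar    array[6] = {0.0};       /* n + nghost*bs entries         */
  Vec            x;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,PETSC_DECIDE,nghost,ghosts,array,&x);CHKERRQ(ierr);
  /* ... assemble x, then VecGhostUpdateBegin/End as for any ghosted vector ... */
  ierr = VecDestroy(&x);CHKERRQ(ierr);   /* "array" remains caller-owned */
  PetscFunctionReturn(0);
}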
Example 14: MatMPIAIJCRL_create_aijcrl
PetscErrorCode MatMPIAIJCRL_create_aijcrl(Mat A)
{
Mat_MPIAIJ *a = (Mat_MPIAIJ*)(A)->data;
Mat_SeqAIJ *Aij = (Mat_SeqAIJ*)(a->A->data), *Bij = (Mat_SeqAIJ*)(a->B->data);
Mat_AIJCRL *aijcrl = (Mat_AIJCRL*) A->spptr;
PetscInt m = A->rmap->n; /* Number of rows in the matrix. */
PetscInt nd = a->A->cmap->n; /* number of columns in diagonal portion */
PetscInt *aj = Aij->j,*bj = Bij->j; /* column indices from the CSR representation of the diagonal and off-diagonal blocks */
PetscInt i, j,rmax = 0,*icols, *ailen = Aij->ilen, *bilen = Bij->ilen;
PetscScalar *aa = Aij->a,*ba = Bij->a,*acols,*array;
PetscErrorCode ierr;
PetscFunctionBegin;
/* determine the row with the most columns */
for (i=0; i<m; i++) {
rmax = PetscMax(rmax,ailen[i]+bilen[i]);
}
aijcrl->nz = Aij->nz+Bij->nz;
aijcrl->m = A->rmap->n;
aijcrl->rmax = rmax;
ierr = PetscFree2(aijcrl->acols,aijcrl->icols);CHKERRQ(ierr);
ierr = PetscMalloc2(rmax*m,PetscScalar,&aijcrl->acols,rmax*m,PetscInt,&aijcrl->icols);CHKERRQ(ierr);
acols = aijcrl->acols;
icols = aijcrl->icols;
for (i=0; i<m; i++) {
for (j=0; j<ailen[i]; j++) {
acols[j*m+i] = *aa++;
icols[j*m+i] = *aj++;
}
for (; j<ailen[i]+bilen[i]; j++) {
acols[j*m+i] = *ba++;
icols[j*m+i] = nd + *bj++;
}
for (; j<rmax; j++) { /* empty column entries */
acols[j*m+i] = 0.0;
icols[j*m+i] = (j) ? icols[(j-1)*m+i] : 0; /* handle case where row is EMPTY */
}
}
ierr = PetscInfo1(A,"Percentage of 0's introduced for vectorized multiply %g\n",1.0-((double)(aijcrl->nz))/((double)(rmax*m)));CHKERRQ(ierr);
ierr = PetscFree(aijcrl->array);CHKERRQ(ierr);
ierr = PetscMalloc((a->B->cmap->n+nd)*sizeof(PetscScalar),&array);CHKERRQ(ierr);
/* the buffer is B->cmap->n + nd entries long: xwork views the first nd entries and fwork (below) views the tail, so gathered off-process values land in the same array */
ierr = VecDestroy(&aijcrl->xwork);CHKERRQ(ierr);
ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)A),1,nd,PETSC_DECIDE,array,&aijcrl->xwork);CHKERRQ(ierr);
ierr = VecDestroy(&aijcrl->fwork);CHKERRQ(ierr);
ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,a->B->cmap->n,array+nd,&aijcrl->fwork);CHKERRQ(ierr);
aijcrl->array = array;
aijcrl->xscat = a->Mvctx;
PetscFunctionReturn(0);
}
Example 15: SetUp
virtual void SetUp() {
// setup FilePath directories
FilePath::set_io_dirs(".",UNIT_TESTS_SRC_DIR,"",".");
Profiler::initialize();
PetscInitialize(0,PETSC_NULL,PETSC_NULL,PETSC_NULL);
FilePath mesh_file( "fields/one_element_2d.msh", FilePath::input_file);
mesh= new Mesh;
ifstream in(string( mesh_file ).c_str());
mesh->read_gmsh_from_stream(in);
dh = new DOFHandlerMultiDim(*mesh);
VecCreateSeqWithArray(PETSC_COMM_SELF, 1, 3, dof_values, &v);
}