This page collects typical usage examples of the C++ function PetscObjectTypeCompare. If you are wondering what PetscObjectTypeCompare does, how to call it, or what real-world uses of it look like, the curated examples below should help.
A total of 15 code examples of PetscObjectTypeCompare are shown; by default they are ordered by popularity.
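Before the examples, here is a minimal sketch of the calling pattern they all share: cast the object to (PetscObject), pass the type-name string, and read the result from a PetscBool flag. The helper name PrintIfASCII and the printed message are invented for illustration; the calls themselves are the standard PETSc API.
#include <petscviewer.h>

/* Minimal sketch: check whether a viewer is an ASCII viewer before printing. */
static PetscErrorCode PrintIfASCII(PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr);
  if (isascii) {  /* only ASCII viewers understand PetscViewerASCIIPrintf() */
    ierr = PetscViewerASCIIPrintf(viewer,"this viewer is an ASCII viewer\n");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}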
Example 1: PetscDrawDivideViewPort
/*@
PetscDrawSplitViewPort - Splits a window shared by several processes into smaller
view ports, one for each process.
Collective on PetscDraw
Input Parameter:
. draw - the drawing context
Level: advanced
Concepts: drawing^in subset of window
.seealso: PetscDrawDivideViewPort(), PetscDrawSetViewPort()
@*/
PetscErrorCode PetscDrawSplitViewPort(PetscDraw draw)
{
PetscErrorCode ierr;
PetscMPIInt rank,size;
PetscInt n;
PetscBool isnull;
PetscReal xl,xr,yl,yr,h;
PetscFunctionBegin;
PetscValidHeaderSpecific(draw,PETSC_DRAW_CLASSID,1);
ierr = PetscObjectTypeCompare((PetscObject)draw,PETSC_DRAW_NULL,&isnull);CHKERRQ(ierr);
if (isnull) PetscFunctionReturn(0);
ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)draw),&rank);CHKERRQ(ierr);
ierr = MPI_Comm_size(PetscObjectComm((PetscObject)draw),&size);CHKERRQ(ierr);
n = (PetscInt)(.1 + PetscSqrtReal((PetscReal)size));
while (n*n < size) n++;
h = 1.0/n;
xl = (rank % n)*h;
xr = xl + h;
yl = (rank/n)*h;
yr = yl + h;
ierr = PetscDrawLine(draw,xl,yl,xl,yr,PETSC_DRAW_BLACK);CHKERRQ(ierr);
ierr = PetscDrawLine(draw,xl,yr,xr,yr,PETSC_DRAW_BLACK);CHKERRQ(ierr);
ierr = PetscDrawLine(draw,xr,yr,xr,yl,PETSC_DRAW_BLACK);CHKERRQ(ierr);
ierr = PetscDrawLine(draw,xr,yl,xl,yl,PETSC_DRAW_BLACK);CHKERRQ(ierr);
ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
draw->port_xl = xl + .1*h;
draw->port_xr = xr - .1*h;
draw->port_yl = yl + .1*h;
draw->port_yr = yr - .1*h;
if (draw->ops->setviewport) {
ierr = (*draw->ops->setviewport)(draw,xl,yl,xr,yr);CHKERRQ(ierr);
}
PetscFunctionReturn(0);
}
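A short usage sketch for the routine above, assuming a standard PETSc main program; the window title and size are arbitrary placeholders.
#include <petscdraw.h>

/* Sketch: give each MPI rank its own viewport in one shared draw window. */
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscDraw      draw;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = PetscDrawCreate(PETSC_COMM_WORLD,NULL,"Split viewport demo",PETSC_DECIDE,PETSC_DECIDE,400,400,&draw);CHKERRQ(ierr);
  ierr = PetscDrawSetFromOptions(draw);CHKERRQ(ierr);
  ierr = PetscDrawSplitViewPort(draw);CHKERRQ(ierr);  /* each rank now draws in its own sub-rectangle */
  /* ... per-rank drawing calls go here ... */
  ierr = PetscDrawDestroy(&draw);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}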
Example 2: MatView_SeqAIJ_Inode
PetscErrorCode MatView_SeqAIJ_Inode(Mat A,PetscViewer viewer)
{
Mat_SeqAIJ *a=(Mat_SeqAIJ*)A->data;
PetscErrorCode ierr;
PetscBool iascii;
PetscViewerFormat format;
PetscFunctionBegin;
ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
if (iascii) {
ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
if (format == PETSC_VIEWER_ASCII_INFO_DETAIL || format == PETSC_VIEWER_ASCII_INFO) {
if (a->inode.size) {
ierr = PetscViewerASCIIPrintf(viewer,"using I-node routines: found %D nodes, limit used is %D\n",a->inode.node_count,a->inode.limit);CHKERRQ(ierr);
} else {
ierr = PetscViewerASCIIPrintf(viewer,"not using I-node routines\n");CHKERRQ(ierr);
}
}
}
PetscFunctionReturn(0);
}
Example 3: vector
/*@
VecGhostUpdateEnd - End the vector scatter to update the vector from
local representation to global or global representation to local.
Neighbor-wise Collective on Vec
Input Parameters:
+ g - the vector (obtained with VecCreateGhost() or VecDuplicate())
. insertmode - one of ADD_VALUES or INSERT_VALUES
- scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
Notes:
Use the following to update the ghost regions with correct values from the owning process
.vb
VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve
Use the following to accumulate the ghost region values onto the owning processors
.vb
VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve
To accumulate the ghost region values onto the owning processors and then update
the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve
Level: advanced
.seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
@*/
PetscErrorCode VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
Vec_MPI *v;
PetscErrorCode ierr;
PetscBool ismpi;
PetscFunctionBegin;
PetscValidHeaderSpecific(g,VEC_CLASSID,1);
ierr = PetscObjectTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
if (ismpi) {
v = (Vec_MPI*)g->data;
if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
if (!v->localupdate) PetscFunctionReturn(0);
if (scattermode == SCATTER_REVERSE) {
ierr = VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
} else {
ierr = VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
}
}
PetscFunctionReturn(0);
}
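To put VecGhostUpdateEnd() in context, here is a hedged sketch of the usual ghost-update cycle. The local size and the ghost index list are invented placeholders; the calls themselves are the standard ghost-vector API.
#include <petscvec.h>

/* Sketch: create a ghosted vector, then refresh its ghost entries from the owners. */
static PetscErrorCode GhostUpdateSketch(void)
{
  Vec            v;
  PetscInt       nlocal = 4,nghost = 2;
  PetscInt       ghosts[2] = {0,1};   /* placeholder global indices of the ghost points */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhost(PETSC_COMM_WORLD,nlocal,PETSC_DETERMINE,nghost,ghosts,&v);CHKERRQ(ierr);
  /* ... fill the locally owned part of v here ... */
  ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); /* start scatter of owner values */
  ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);   /* ghost entries now up to date */
  ierr = VecDestroy(&v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}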
Example 4: MatLMVMSetPrev
extern PetscErrorCode MatLMVMSetPrev(Mat M, Vec x, Vec g)
{
MatLMVMCtx *ctx;
PetscErrorCode ierr;
PetscBool same;
PetscFunctionBegin;
PetscValidHeaderSpecific(x,VEC_CLASSID,2);
PetscValidHeaderSpecific(g,VEC_CLASSID,3);
ierr = PetscObjectTypeCompare((PetscObject)M,MATSHELL,&same);CHKERRQ(ierr);
if (!same) SETERRQ(PETSC_COMM_SELF,1,"Matrix M is not type MatLMVM");
ierr = MatShellGetContext(M,(void**)&ctx);CHKERRQ(ierr);
if (ctx->nupdates == 0) {
ierr = MatLMVMUpdate(M,x,g);CHKERRQ(ierr);
} else {
ierr = VecCopy(x,ctx->Xprev);CHKERRQ(ierr);
ierr = VecCopy(g,ctx->Gprev);CHKERRQ(ierr);
/* TODO scaling specific terms */
}
PetscFunctionReturn(0);
}
Example 5: PCView_SPAI
static PetscErrorCode PCView_SPAI(PC pc,PetscViewer viewer)
{
PC_SPAI *ispai = (PC_SPAI*)pc->data;
PetscErrorCode ierr;
PetscBool iascii;
PetscFunctionBegin;
ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
if (iascii) {
ierr = PetscViewerASCIIPrintf(viewer," SPAI preconditioner\n");CHKERRQ(ierr);
ierr = PetscViewerASCIIPrintf(viewer," epsilon %g\n", (double)ispai->epsilon);CHKERRQ(ierr);
ierr = PetscViewerASCIIPrintf(viewer," nbsteps %d\n", ispai->nbsteps);CHKERRQ(ierr);
ierr = PetscViewerASCIIPrintf(viewer," max %d\n", ispai->max);CHKERRQ(ierr);
ierr = PetscViewerASCIIPrintf(viewer," maxnew %d\n", ispai->maxnew);CHKERRQ(ierr);
ierr = PetscViewerASCIIPrintf(viewer," block_size %d\n",ispai->block_size);CHKERRQ(ierr);
ierr = PetscViewerASCIIPrintf(viewer," cache_size %d\n",ispai->cache_size);CHKERRQ(ierr);
ierr = PetscViewerASCIIPrintf(viewer," verbose %d\n", ispai->verbose);CHKERRQ(ierr);
ierr = PetscViewerASCIIPrintf(viewer," sp %d\n", ispai->sp);CHKERRQ(ierr);
}
PetscFunctionReturn(0);
}
Example 6: PCView_ILU
static PetscErrorCode PCView_ILU(PC pc,PetscViewer viewer)
{
PC_ILU *ilu = (PC_ILU*)pc->data;
PetscErrorCode ierr;
PetscBool iascii;
PetscFunctionBegin;
ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
if (iascii) {
if (ilu->inplace) {
ierr = PetscViewerASCIIPrintf(viewer," ILU: in-place factorization\n");CHKERRQ(ierr);
} else {
ierr = PetscViewerASCIIPrintf(viewer," ILU: out-of-place factorization\n");CHKERRQ(ierr);
}
if (ilu->reusefill) {ierr = PetscViewerASCIIPrintf(viewer," ILU: Reusing fill from past factorization\n");CHKERRQ(ierr);}
if (ilu->reuseordering) {ierr = PetscViewerASCIIPrintf(viewer," ILU: Reusing reordering from past factorization\n");CHKERRQ(ierr);}
}
ierr = PCView_Factor(pc,viewer);CHKERRQ(ierr);
PetscFunctionReturn(0);
}
Example 7: VecView
/*@C
KSPGMRESMonitorKrylov - Calls VecView() for each direction in the
GMRES accumulated Krylov space.
Collective on KSP
Input Parameters:
+ ksp - the KSP context
. its - iteration number
. fgnorm - 2-norm of residual (or gradient)
- dummy - a collection of viewers (PetscViewers) created with PetscViewersCreate()
Level: intermediate
.keywords: KSP, nonlinear, vector, monitor, view, Krylov space
.seealso: KSPMonitorSet(), KSPMonitorDefault(), VecView(), PetscViewersCreate(), PetscViewersDestroy()
@*/
PetscErrorCode KSPGMRESMonitorKrylov(KSP ksp,PetscInt its,PetscReal fgnorm,void *dummy)
{
PetscViewers viewers = (PetscViewers)dummy;
KSP_GMRES *gmres = (KSP_GMRES*)ksp->data;
PetscErrorCode ierr;
Vec x;
PetscViewer viewer;
PetscBool flg;
PetscFunctionBegin;
ierr = PetscViewersGetViewer(viewers,gmres->it+1,&viewer);CHKERRQ(ierr);
ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&flg);CHKERRQ(ierr);
if (!flg) {
ierr = PetscViewerSetType(viewer,PETSCVIEWERDRAW);CHKERRQ(ierr);
ierr = PetscViewerDrawSetInfo(viewer,NULL,"Krylov GMRES Monitor",PETSC_DECIDE,PETSC_DECIDE,300,300);CHKERRQ(ierr);
}
x = VEC_VV(gmres->it+1);
ierr = VecView(x,viewer);CHKERRQ(ierr);
PetscFunctionReturn(0);
}
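A sketch of how this monitor is typically attached to an existing GMRES KSP: the PetscViewers collection is passed as the monitor context and PetscViewersDestroy is cast to the generic destroy callback, following the pattern PETSc itself uses when this monitor is enabled from the options database. The helper name is invented for illustration.
#include <petscksp.h>
#include <petscviewer.h>

/* Sketch: register KSPGMRESMonitorKrylov on a KSP that uses GMRES. */
static PetscErrorCode AttachKrylovMonitor(KSP ksp)
{
  PetscViewers   viewers;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscViewersCreate(PetscObjectComm((PetscObject)ksp),&viewers);CHKERRQ(ierr);
  ierr = KSPMonitorSet(ksp,KSPGMRESMonitorKrylov,viewers,(PetscErrorCode (*)(void**))PetscViewersDestroy);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}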
Example 8: DMLibMeshGetVariables
PetscErrorCode DMLibMeshGetVariables(DM dm, PetscInt *n, char*** varnames)
{
PetscErrorCode ierr;
PetscFunctionBegin;
PetscValidHeaderSpecific(dm,DM_CLASSID,1);
PetscBool islibmesh;
PetscInt i;
ierr = PetscObjectTypeCompare((PetscObject)dm, DMLIBMESH,&islibmesh);
if (!islibmesh) SETERRQ2(((PetscObject)dm)->comm, PETSC_ERR_ARG_WRONG, "Got DM of type %s, not of type %s", ((PetscObject)dm)->type_name, DMLIBMESH);
DM_libMesh *dlm = (DM_libMesh *)(dm->data);
PetscValidPointer(n,2);
*n = dlm->varids->size();
if(!varnames) PetscFunctionReturn(0);
ierr = PetscMalloc(*n*sizeof(char*), varnames); CHKERRQ(ierr);
i = 0;
for(std::map<std::string, unsigned int>::const_iterator it = dlm->varids->begin(); it != dlm->varids->end(); ++it){
ierr = PetscStrallocpy(it->first.c_str(), *varnames+i); CHKERRQ(ierr);
++i;
}
PetscFunctionReturn(0);
}
Example 9: BVView_Mat
PetscErrorCode BVView_Mat(BV bv,PetscViewer viewer)
{
PetscErrorCode ierr;
BV_MAT *ctx = (BV_MAT*)bv->data;
PetscViewerFormat format;
PetscBool isascii;
PetscFunctionBegin;
ierr = MatView(ctx->A,viewer);CHKERRQ(ierr);
ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr);
if (isascii) {
ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
if (format == PETSC_VIEWER_ASCII_MATLAB) {
ierr = PetscViewerASCIIPrintf(viewer,"%s=%s;clear %s\n",((PetscObject)bv)->name,((PetscObject)ctx->A)->name,((PetscObject)ctx->A)->name);CHKERRQ(ierr);
if (bv->nc) {
ierr = PetscViewerASCIIPrintf(viewer,"%s=%s(:,%D:end);\n",((PetscObject)bv)->name,((PetscObject)bv)->name,bv->nc+1);CHKERRQ(ierr);
}
}
}
PetscFunctionReturn(0);
}
Example 10: cells
/*@
MatMeshToCellGraph - Uses the ParMETIS package to convert a Mat that represents a mesh to a Mat that represents the graph of the coupling
between cells (the "dual" graph), which is suitable for partitioning with the MatPartitioning object. Use this to partition
cells of a mesh.
Collective on Mat
Input Parameters:
+ mesh - the graph that represents the mesh
- ncommonnodes - mesh elements that share this number of common nodes are considered neighbors; use 2 for triangles and
quadrilaterals, 3 for tetrahedra and 4 for hexahedra
Output Parameter:
. dual - the dual graph
Notes:
Currently requires ParMETIS to be installed and uses ParMETIS_V3_Mesh2Dual()
The columns of each row of the Mat mesh are the global vertex numbers of the vertices of that row's cell. The number of rows in mesh is the
number of cells, and the number of columns is the number of vertices.
Level: advanced
.seealso: MatMeshToVertexGraph(), MatCreateMPIAdj(), MatPartitioningCreate()
@*/
PetscErrorCode MatMeshToCellGraph(Mat mesh,PetscInt ncommonnodes,Mat *dual)
{
PetscErrorCode ierr;
PetscInt *newxadj,*newadjncy;
PetscInt numflag=0;
Mat_MPIAdj *adj = (Mat_MPIAdj *)mesh->data,*newadj;
PetscBool flg;
int status;
PetscFunctionBegin;
ierr = PetscObjectTypeCompare((PetscObject)mesh,MATMPIADJ,&flg);CHKERRQ(ierr);
if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Must use MPIAdj matrix type");
CHKMEMQ;
status = ParMETIS_V3_Mesh2Dual(mesh->rmap->range,adj->i,adj->j,&numflag,&ncommonnodes,&newxadj,&newadjncy,&((PetscObject)mesh)->comm);CHKERRQPARMETIS(status);
CHKMEMQ;
ierr = MatCreateMPIAdj(((PetscObject)mesh)->comm,mesh->rmap->n,mesh->rmap->N,newxadj,newadjncy,PETSC_NULL,dual);CHKERRQ(ierr);
newadj = (Mat_MPIAdj *)(*dual)->data;
newadj->freeaijwithfree = PETSC_TRUE; /* signal the matrix should be freed with system free since space was allocated by ParMETIS */
PetscFunctionReturn(0);
}
Example 11: FormJacobian
PetscErrorCode FormJacobian(SNES snes,Vec X,Mat *J,Mat *B,MatStructure *flag,void *ptr)
{
AppCtx *user = (AppCtx *) ptr;
PetscErrorCode ierr;
KSP ksp;
PC pc;
PetscBool ismg;
*flag = SAME_NONZERO_PATTERN;
ierr = FormJacobian_Grid(user,&user->fine,X,J,B);CHKERRQ(ierr);
/* create coarse grid jacobian for preconditioner */
ierr = SNESGetKSP(snes,&ksp);CHKERRQ(ierr);
ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
ierr = PetscObjectTypeCompare((PetscObject)pc,PCMG,&ismg);CHKERRQ(ierr);
if (ismg) {
ierr = KSPSetOperators(user->ksp_fine,user->fine.J,user->fine.J,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
/* restrict X to coarse grid */
ierr = MatMult(user->R,X,user->coarse.x);CHKERRQ(ierr);
ierr = VecPointwiseMult(user->coarse.x,user->coarse.x,user->Rscale);CHKERRQ(ierr);
/* form Jacobian on coarse grid */
if (user->redundant_build) {
/* get copy of coarse X onto each processor */
ierr = VecScatterBegin(user->tolocalall,user->coarse.x,user->localall,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
ierr = VecScatterEnd(user->tolocalall,user->coarse.x,user->localall,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
ierr = FormJacobian_Coarse(user,&user->coarse,user->localall,&user->coarse.J,&user->coarse.J);CHKERRQ(ierr);
} else {
/* coarse grid Jacobian computed in parallel */
ierr = FormJacobian_Grid(user,&user->coarse,user->coarse.x,&user->coarse.J,&user->coarse.J);CHKERRQ(ierr);
}
ierr = KSPSetOperators(user->ksp_coarse,user->coarse.J,user->coarse.J,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
}
return 0;
}
Example 12: main
int main(int argc,char **argv)
{
PetscInt i,n,start,stride;
const PetscInt *ii;
IS is;
PetscBool flg;
PetscErrorCode ierr;
ierr = PetscInitialize(&argc,&argv,(char*)0,help);CHKERRQ(ierr);
/*
Test IS of size 0
*/
ierr = ISCreateStride(PETSC_COMM_SELF,0,0,2,&is);CHKERRQ(ierr);
ierr = ISGetSize(is,&n);CHKERRQ(ierr);
if (n != 0) SETERRQ(PETSC_COMM_SELF,1,"ISCreateStride");
ierr = ISStrideGetInfo(is,&start,&stride);CHKERRQ(ierr);
if (start != 0) SETERRQ(PETSC_COMM_SELF,1,"ISStrideGetInfo");
if (stride != 2) SETERRQ(PETSC_COMM_SELF,1,"ISStrideGetInfo");
ierr = PetscObjectTypeCompare((PetscObject)is,ISSTRIDE,&flg);CHKERRQ(ierr);
if (!flg) SETERRQ(PETSC_COMM_SELF,1,"ISStride");
ierr = ISGetIndices(is,&ii);CHKERRQ(ierr);
ierr = ISRestoreIndices(is,&ii);CHKERRQ(ierr);
ierr = ISDestroy(&is);CHKERRQ(ierr);
/*
Test ISGetIndices()
*/
ierr = ISCreateStride(PETSC_COMM_SELF,10000,-8,3,&is);CHKERRQ(ierr);
ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
ierr = ISGetIndices(is,&ii);CHKERRQ(ierr);
for (i=0; i<10000; i++) {
if (ii[i] != -8 + 3*i) SETERRQ(PETSC_COMM_SELF,1,"ISGetIndices");
}
ierr = ISRestoreIndices(is,&ii);CHKERRQ(ierr);
ierr = ISDestroy(&is);CHKERRQ(ierr);
ierr = PetscFinalize();
return 0;
}
Example 13: MatDestroy_SuperLU_DIST
PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
PetscErrorCode ierr;
Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
PetscBool flg;
PetscFunctionBegin;
if (lu && lu->CleanUpSuperLU_Dist) {
/* Deallocate SuperLU_DIST storage */
if (lu->MatInputMode == GLOBAL) {
Destroy_CompCol_Matrix_dist(&lu->A_sup);
} else {
Destroy_CompRowLoc_Matrix_dist(&lu->A_sup);
if ( lu->options.SolveInitialized ) {
#if defined(PETSC_USE_COMPLEX)
zSolveFinalize(&lu->options, &lu->SOLVEstruct);
#else
dSolveFinalize(&lu->options, &lu->SOLVEstruct);
#endif
}
}
Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct);
ScalePermstructFree(&lu->ScalePermstruct);
LUstructFree(&lu->LUstruct);
/* Release the SuperLU_DIST process grid. */
superlu_gridexit(&lu->grid);
ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr);
}
ierr = PetscFree(A->spptr);CHKERRQ(ierr);
ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
if (flg) {
ierr = MatDestroy_SeqAIJ(A);CHKERRQ(ierr);
} else {
ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr);
}
PetscFunctionReturn(0);
}
Example 14: ISL2GCompose
/* Compose an IS with an ISLocalToGlobalMapping to map from IS source indices to global indices */
static PetscErrorCode ISL2GCompose(IS is,ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping *cltog)
{
PetscErrorCode ierr;
const PetscInt *idx;
PetscInt m,*idxm;
PetscBool isblock;
PetscFunctionBegin;
PetscValidHeaderSpecific(is,IS_CLASSID,1);
PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,2);
PetscValidPointer(cltog,3);
ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
if (isblock) {
PetscInt bs,lbs;
ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
ierr = ISLocalToGlobalMappingGetBlockSize(ltog,&lbs);CHKERRQ(ierr);
if (bs == lbs) {
ierr = ISGetLocalSize(is,&m);CHKERRQ(ierr);
m = m/bs;
ierr = ISBlockGetIndices(is,&idx);CHKERRQ(ierr);
ierr = PetscMalloc1(m,&idxm);CHKERRQ(ierr);
ierr = ISLocalToGlobalMappingApplyBlock(ltog,m,idx,idxm);CHKERRQ(ierr);
ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)is),bs,m,idxm,PETSC_OWN_POINTER,cltog);CHKERRQ(ierr);
ierr = ISBlockRestoreIndices(is,&idx);CHKERRQ(ierr);
PetscFunctionReturn(0);
}
}
ierr = ISGetLocalSize(is,&m);CHKERRQ(ierr);
ierr = ISGetIndices(is,&idx);CHKERRQ(ierr);
ierr = PetscMalloc1(m,&idxm);CHKERRQ(ierr);
if (ltog) {
ierr = ISLocalToGlobalMappingApply(ltog,m,idx,idxm);CHKERRQ(ierr);
} else {
ierr = PetscMemcpy(idxm,idx,m*sizeof(PetscInt));CHKERRQ(ierr);
}
ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)is),1,m,idxm,PETSC_OWN_POINTER,cltog);CHKERRQ(ierr);
ierr = ISRestoreIndices(is,&idx);CHKERRQ(ierr);
PetscFunctionReturn(0);
}
Example 15: cells
/*@
MatMeshToCellGraph - Uses the ParMETIS package to convert a Mat that represents a mesh to a Mat that represents the graph of the coupling
between cells (the "dual" graph), which is suitable for partitioning with the MatPartitioning object. Use this to partition
cells of a mesh.
Collective on Mat
Input Parameters:
+ mesh - the graph that represents the mesh
- ncommonnodes - mesh elements that share this number of common nodes are considered neighbors; use 2 for triangles and
quadrilaterals, 3 for tetrahedra and 4 for hexahedra
Output Parameter:
. dual - the dual graph
Notes:
Currently requires ParMETIS to be installed and uses ParMETIS_V3_Mesh2Dual()
$ Each row of the mesh object represents a single cell in the mesh. For triangles it has 3 entries, for quadrilaterals 4 entries,
$ for tetrahedra 4 entries and for hexahedra 8 entries. You can mix triangles and quadrilaterals in the same mesh, but cannot
$ mix tetrahedra and hexahedra
$ The columns of each row of the Mat mesh are the global vertex numbers of the vertices of that row's cell.
$ The number of rows in mesh is the number of cells, and the number of columns is the number of vertices.
Level: advanced
.seealso: MatMeshToVertexGraph(), MatCreateMPIAdj(), MatPartitioningCreate()
@*/
PetscErrorCode MatMeshToCellGraph(Mat mesh,PetscInt ncommonnodes,Mat *dual)
{
PetscErrorCode ierr;
PetscInt *newxadj,*newadjncy;
PetscInt numflag=0;
Mat_MPIAdj *adj = (Mat_MPIAdj*)mesh->data,*newadj;
PetscBool flg;
int status;
MPI_Comm comm;
PetscFunctionBegin;
ierr = PetscObjectTypeCompare((PetscObject)mesh,MATMPIADJ,&flg);CHKERRQ(ierr);
if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Must use MPIAdj matrix type");
ierr = PetscObjectGetComm((PetscObject)mesh,&comm);CHKERRQ(ierr);
PetscStackCallParmetis(ParMETIS_V3_Mesh2Dual,((idx_t*)mesh->rmap->range,(idx_t*)adj->i,(idx_t*)adj->j,(idx_t*)&numflag,(idx_t*)&ncommonnodes,(idx_t**)&newxadj,(idx_t**)&newadjncy,&comm));
ierr = MatCreateMPIAdj(PetscObjectComm((PetscObject)mesh),mesh->rmap->n,mesh->rmap->N,newxadj,newadjncy,NULL,dual);CHKERRQ(ierr);
newadj = (Mat_MPIAdj*)(*dual)->data;
newadj->freeaijwithfree = PETSC_TRUE; /* signal the matrix should be freed with system free since space was allocated by ParMETIS */
PetscFunctionReturn(0);
}