/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include <petsc-private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/

#undef __FUNCT__
#define __FUNCT__ "DMDAGetLogicalCoordinate"
/*@C
   DMDAGetLogicalCoordinate - Returns the i,j,k logical coordinate of the mesh point closest to an x,y,z point given in the coordinates of the DMDA

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
-  x,y,z - the physical coordinates

   Output Parameters:
+  II, JJ, KK - the logical coordinate (-1 on processes that do not contain that point)
-  X, Y, Z - (optional) the coordinates of the located grid point

   Level: advanced

   Notes:
   All processors that share the DMDA must call this with the same coordinate value

   Currently only 2d DMDAs are supported, so KK and Z are not set

.keywords: distributed array, get, processor subset
@*/
PetscErrorCode DMDAGetLogicalCoordinate(DM da,PetscScalar x,PetscScalar y,PetscScalar z,PetscInt *II,PetscInt *JJ,PetscInt *KK,PetscScalar *X,PetscScalar *Y,PetscScalar *Z)
{
  DM_DA          *dd = (DM_DA*)da->data;
  PetscErrorCode ierr;
  Vec            coors;
  DM             dacoors;
  DMDACoor2d     **c;
  PetscInt       i,j,xs,xm,ys,ym;
  PetscReal      d,D = PETSC_MAX_REAL,Dv;
  PetscMPIInt    rank,root;

  PetscFunctionBegin;
  if (dd->dim == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot get point from 1d DMDA");
  if (dd->dim == 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot get point from 3d DMDA");

  *II = -1;
  *JJ = -1;

  ierr = DMGetCoordinateDM(da,&dacoors);CHKERRQ(ierr);
  ierr = DMDAGetCorners(dacoors,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
  ierr = DMGetCoordinates(da,&coors);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(dacoors,coors,&c);CHKERRQ(ierr);
  /* find the closest locally owned grid point */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      d = PetscSqrtReal(PetscRealPart((c[j][i].x - x)*(c[j][i].x - x) + (c[j][i].y - y)*(c[j][i].y - y)));
      if (d < D) {
        D   = d;
        *II = i;
        *JJ = j;
      }
    }
  }
  /* only the process holding the globally closest point keeps its indices */
  ierr = MPI_Allreduce(&D,&Dv,1,MPIU_REAL,MPI_MIN,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
  if (D != Dv) {
    *II  = -1;
    *JJ  = -1;
    rank = 0;
  } else {
    *X   = c[*JJ][*II].x;
    *Y   = c[*JJ][*II].y;
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank);CHKERRQ(ierr);
    rank++;
  }
  /* summing rank+1 (zero on non-owners) identifies the broadcast root */
  ierr = MPI_Allreduce(&rank,&root,1,MPI_INT,MPI_SUM,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
  root--;
  ierr = MPI_Bcast(X,1,MPIU_REAL,root,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
  ierr = MPI_Bcast(Y,1,MPIU_REAL,root,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(dacoors,coors,&c);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
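
/*
   A minimal usage sketch for DMDAGetLogicalCoordinate() (hypothetical helper
   name and point values, for illustration only).  It assumes da is a 2d DMDA
   whose coordinates have been set, e.g. with DMDASetUniformCoordinates().
   Every rank passes the same physical point; the located grid point's
   coordinates are broadcast to all ranks in px,py, while only the owning rank
   gets non-negative logical indices i,j (k and pz are unused in 2d).

     static PetscErrorCode FindNearestGridPoint(DM da)
     {
       PetscErrorCode ierr;
       PetscInt       i,j,k;
       PetscScalar    px,py,pz;

       PetscFunctionBegin;
       ierr = DMDAGetLogicalCoordinate(da,0.5,0.5,0.0,&i,&j,&k,&px,&py,&pz);CHKERRQ(ierr);
       if (i >= 0) {
         ierr = PetscPrintf(PETSC_COMM_SELF,"Nearest grid point: logical (%D,%D), physical (%g,%g)\n",i,j,(double)PetscRealPart(px),(double)PetscRealPart(py));CHKERRQ(ierr);
       }
       PetscFunctionReturn(0);
     }
*/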

#undef __FUNCT__
#define __FUNCT__ "DMDAGetRay"
/*@C
   DMDAGetRay - Returns a vector on process zero that contains a row or column of the values in a DMDA vector

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
.  dir - Cartesian direction, either DMDA_X, DMDA_Y, or DMDA_Z
-  gp - global grid point number in this direction

   Output Parameters:
+  newvec - the new vector that can hold the values (size zero on all processes except process 0)
-  scatter - the VecScatter that will map from the original vector to the slice

   Level: advanced

   Notes:
   All processors that share the DMDA must call this with the same gp value

.keywords: distributed array, get, processor subset
@*/
PetscErrorCode DMDAGetRay(DM da,DMDADirection dir,PetscInt gp,Vec *newvec,VecScatter *scatter)
{
  PetscMPIInt    rank;
  DM_DA          *dd = (DM_DA*)da->data;
  PetscErrorCode ierr;
  IS             is;
  AO             ao;
  Vec            vec;
  PetscInt       *indices,i,j;

  PetscFunctionBegin;
  if (dd->dim == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot get slice from 1d DMDA");
  if (dd->dim == 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot get slice from 3d DMDA");
  ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank);CHKERRQ(ierr);
  if (!rank) {
    /* process 0 lists, in the natural ordering, the global indices along the requested line */
    if (dir == DMDA_Y) {
      ierr       = PetscMalloc(dd->w*dd->M*sizeof(PetscInt),&indices);CHKERRQ(ierr);
      indices[0] = gp*dd->M*dd->w;
      for (i=1; i<dd->M*dd->w; i++) indices[i] = indices[i-1] + 1;

      ierr = AOApplicationToPetsc(ao,dd->M*dd->w,indices);CHKERRQ(ierr);
      ierr = VecCreate(PETSC_COMM_SELF,newvec);CHKERRQ(ierr);
      ierr = VecSetBlockSize(*newvec,dd->w);CHKERRQ(ierr);
      ierr = VecSetSizes(*newvec,dd->M*dd->w,PETSC_DETERMINE);CHKERRQ(ierr);
      ierr = VecSetType(*newvec,VECSEQ);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,dd->w*dd->M,indices,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    } else if (dir == DMDA_X) {
      ierr       = PetscMalloc(dd->w*dd->N*sizeof(PetscInt),&indices);CHKERRQ(ierr);
      indices[0] = dd->w*gp;
      for (j=1; j<dd->w; j++) indices[j] = indices[j-1] + 1;
      for (i=1; i<dd->N; i++) {
        indices[i*dd->w] = indices[i*dd->w-1] + dd->w*dd->M - dd->w + 1;
        for (j=1; j<dd->w; j++) indices[i*dd->w + j] = indices[i*dd->w + j - 1] + 1;
      }
      ierr = AOApplicationToPetsc(ao,dd->w*dd->N,indices);CHKERRQ(ierr);
      ierr = VecCreate(PETSC_COMM_SELF,newvec);CHKERRQ(ierr);
      ierr = VecSetBlockSize(*newvec,dd->w);CHKERRQ(ierr);
      ierr = VecSetSizes(*newvec,dd->N*dd->w,PETSC_DETERMINE);CHKERRQ(ierr);
      ierr = VecSetType(*newvec,VECSEQ);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,dd->w*dd->N,indices,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Unknown DMDADirection");
  } else {
    /* all other processes contribute an empty vector and index set */
    ierr = VecCreateSeq(PETSC_COMM_SELF,0,newvec);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF,0,NULL,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
  }
  ierr = DMGetGlobalVector(da,&vec);CHKERRQ(ierr);
  ierr = VecScatterCreate(vec,is,*newvec,NULL,scatter);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(da,&vec);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
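
/*
   A minimal usage sketch for DMDAGetRay() (hypothetical helper name and grid
   point, for illustration only).  It gathers the values along the grid line
   y = 2 of a global vector v obtained from a 2d DMDA onto process 0 and
   prints them there; the gathered vector is empty on all other processes.
   The scatter may be reused for repeated extractions before being destroyed.

     static PetscErrorCode PrintRow(DM da,Vec v)
     {
       PetscErrorCode ierr;
       Vec            ray;
       VecScatter     scatter;

       PetscFunctionBegin;
       ierr = DMDAGetRay(da,DMDA_Y,2,&ray,&scatter);CHKERRQ(ierr);
       ierr = VecScatterBegin(scatter,v,ray,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
       ierr = VecScatterEnd(scatter,v,ray,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
       ierr = VecView(ray,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
       ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
       ierr = VecDestroy(&ray);CHKERRQ(ierr);
       PetscFunctionReturn(0);
     }
*/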

#undef __FUNCT__
#define __FUNCT__ "DMDAGetProcessorSubset"
/*@C
   DMDAGetProcessorSubset - Returns a communicator consisting only of the
   processors in a DMDA that own a particular global x, y, or z grid point
   (corresponding to a logical plane in a 3D grid or a line in a 2D grid).

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
.  dir - Cartesian direction, either DMDA_X, DMDA_Y, or DMDA_Z
-  gp - global grid point number in this direction

   Output Parameters:
.  comm - new communicator

   Level: advanced

   Notes:
   All processors that share the DMDA must call this with the same gp value

   This routine is particularly useful to compute boundary conditions
   or other application-specific calculations that require manipulating
   sets of data throughout a logical plane of grid points.

.keywords: distributed array, get, processor subset
@*/
PetscErrorCode DMDAGetProcessorSubset(DM da,DMDADirection dir,PetscInt gp,MPI_Comm *comm)
{
  MPI_Group      group,subgroup;
  PetscErrorCode ierr;
  PetscInt       i,ict,flag,*owners,xs,xm,ys,ym,zs,zm;
  PetscMPIInt    size,*ranks = NULL;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  flag = 0;
  ierr = DMDAGetCorners(da,&xs,&ys,&zs,&xm,&ym,&zm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);CHKERRQ(ierr);
  if (dir == DMDA_Z) {
    if (dd->dim < 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Z invalid for DMDA dim < 3");
    if (gp < 0 || gp > dd->P) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"invalid grid point");
    if (gp >= zs && gp < zs+zm) flag = 1;
  } else if (dir == DMDA_Y) {
    if (dd->dim == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Y invalid for DMDA dim = 1");
    if (gp < 0 || gp > dd->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"invalid grid point");
    if (gp >= ys && gp < ys+ym) flag = 1;
  } else if (dir == DMDA_X) {
    if (gp < 0 || gp > dd->M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"invalid grid point");
    if (gp >= xs && gp < xs+xm) flag = 1;
  } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"Invalid direction");

  ierr = PetscMalloc2(size,PetscInt,&owners,size,PetscMPIInt,&ranks);CHKERRQ(ierr);
  ierr = MPI_Allgather(&flag,1,MPIU_INT,owners,1,MPIU_INT,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
  ict  = 0;
  ierr = PetscInfo2(da,"DMDAGetProcessorSubset: dim=%D, direction=%d, procs: ",dd->dim,(int)dir);CHKERRQ(ierr);
  for (i=0; i<size; i++) {
    if (owners[i]) {
      ranks[ict] = i; ict++;
      ierr = PetscInfo1(da,"%D ",i);CHKERRQ(ierr);
    }
  }
  ierr = PetscInfo(da,"\n");CHKERRQ(ierr);
  ierr = MPI_Comm_group(PetscObjectComm((PetscObject)da),&group);CHKERRQ(ierr);
  ierr = MPI_Group_incl(group,ict,ranks,&subgroup);CHKERRQ(ierr);
  ierr = MPI_Comm_create(PetscObjectComm((PetscObject)da),subgroup,comm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&subgroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&group);CHKERRQ(ierr);
  ierr = PetscFree2(owners,ranks);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
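
/*
   A minimal usage sketch for DMDAGetProcessorSubset() (hypothetical helper
   name, for illustration only).  It obtains the communicator of the processes
   owning the plane z = 0 of a 3d DMDA, e.g. to coordinate a boundary
   condition there; a barrier over the plane's owners stands in for real work.
   MPI_Comm_create() returns MPI_COMM_NULL on processes outside the subset,
   and member processes are responsible for freeing the communicator.

     static PetscErrorCode SyncBottomPlane(DM da)
     {
       PetscErrorCode ierr;
       MPI_Comm       bottom;

       PetscFunctionBegin;
       ierr = DMDAGetProcessorSubset(da,DMDA_Z,0,&bottom);CHKERRQ(ierr);
       if (bottom != MPI_COMM_NULL) {
         ierr = MPI_Barrier(bottom);CHKERRQ(ierr);
         ierr = MPI_Comm_free(&bottom);CHKERRQ(ierr);
       }
       PetscFunctionReturn(0);
     }
*/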

#undef __FUNCT__
#define __FUNCT__ "DMDAGetProcessorSubsets"
/*@C
   DMDAGetProcessorSubsets - Returns communicators consisting only of the
   processors in a DMDA adjacent in a particular dimension,
   corresponding to a logical plane in a 3D grid or a line in a 2D grid.

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
-  dir - Cartesian direction, either DMDA_X, DMDA_Y, or DMDA_Z

   Output Parameters:
.  subcomm - new communicator

   Level: advanced

   Notes:
   This routine is useful for distributing one-dimensional data in a tensor product grid.

   Each process is placed in the subcommunicator of the processes that share its
   starting grid index in the given direction, so every process belongs to exactly
   one subcommunicator.

.keywords: distributed array, get, processor subset
@*/
PetscErrorCode DMDAGetProcessorSubsets(DM da, DMDADirection dir, MPI_Comm *subcomm)
{
  MPI_Comm       comm;
  MPI_Group      group, subgroup;
  PetscInt       subgroupSize = 0;
  PetscInt       *firstPoints;
  PetscMPIInt    size, *subgroupRanks = NULL;
  PetscInt       xs, xm, ys, ym, zs, zm, firstPoint, p;
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
  ierr = DMDAGetCorners(da, &xs, &ys, &zs, &xm, &ym, &zm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  if (dir == DMDA_Z) {
    if (dd->dim < 3) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Z invalid for DMDA dim < 3");
    firstPoint = zs;
  } else if (dir == DMDA_Y) {
    if (dd->dim == 1) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Y invalid for DMDA dim = 1");
    firstPoint = ys;
  } else if (dir == DMDA_X) {
    firstPoint = xs;
  } else SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Invalid direction");

  /* processes with the same starting grid index in this direction form one subgroup */
  ierr = PetscMalloc2(size, PetscInt, &firstPoints, size, PetscMPIInt, &subgroupRanks);CHKERRQ(ierr);
  ierr = MPI_Allgather(&firstPoint, 1, MPIU_INT, firstPoints, 1, MPIU_INT, comm);CHKERRQ(ierr);
  ierr = PetscInfo2(da,"DMDAGetProcessorSubsets: dim=%D, direction=%d, procs: ",dd->dim,(int)dir);CHKERRQ(ierr);
  for (p = 0; p < size; ++p) {
    if (firstPoints[p] == firstPoint) {
      subgroupRanks[subgroupSize++] = p;
      ierr = PetscInfo1(da, "%D ", p);CHKERRQ(ierr);
    }
  }
  ierr = PetscInfo(da, "\n");CHKERRQ(ierr);
  ierr = MPI_Comm_group(comm, &group);CHKERRQ(ierr);
  ierr = MPI_Group_incl(group, subgroupSize, subgroupRanks, &subgroup);CHKERRQ(ierr);
  ierr = MPI_Comm_create(comm, subgroup, subcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&subgroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&group);CHKERRQ(ierr);
  ierr = PetscFree2(firstPoints, subgroupRanks);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
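
/*
   A minimal usage sketch for DMDAGetProcessorSubsets() (hypothetical helper
   name, for illustration only).  It sums one value over each group of
   processes that share the same starting grid index in x.  Unlike
   DMDAGetProcessorSubset(), every process belongs to exactly one
   subcommunicator, so the reduction is performed independently within each
   slab of the process grid.

     static PetscErrorCode SumOverXSlab(DM da,PetscReal v,PetscReal *slabsum)
     {
       PetscErrorCode ierr;
       MPI_Comm       slab;

       PetscFunctionBegin;
       ierr = DMDAGetProcessorSubsets(da,DMDA_X,&slab);CHKERRQ(ierr);
       ierr = MPI_Allreduce(&v,slabsum,1,MPIU_REAL,MPI_SUM,slab);CHKERRQ(ierr);
       ierr = MPI_Comm_free(&slab);CHKERRQ(ierr);
       PetscFunctionReturn(0);
     }
*/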