/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include <petsc-private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/

#undef __FUNCT__
#define __FUNCT__ "DMDAGetLogicalCoordinate"
/*@C
   DMDAGetLogicalCoordinate - Returns the i,j,k logical coordinate of the mesh point closest to an x,y,z point in the coordinates of the DMDA

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
-  x,y,z - the physical coordinates

   Output Parameters:
+  I, J, K - the logical coordinate (-1 on processes that do not contain that point)
-  X, Y, Z - (optional) the coordinates of the located grid point

   Level: advanced

   Notes:
   All processors that share the DMDA must call this with the same coordinate value

.keywords: distributed array, get, processor subset
@*/
PetscErrorCode  DMDAGetLogicalCoordinate(DM da,PetscScalar x,PetscScalar y,PetscScalar z,PetscInt *I,PetscInt *J,PetscInt *K,PetscScalar *X,PetscScalar *Y,PetscScalar *Z)
{
  DM_DA          *dd = (DM_DA*)da->data;
  PetscErrorCode ierr;
  Vec            coors;
  DM             dacoors;
  DMDACoor2d     **c;
  PetscInt       i,j,xs,xm,ys,ym;
  PetscReal      d,D = PETSC_MAX_REAL;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);CHKERRQ(ierr);
  if (size > 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Currently not in parallel");
  if (dd->dim == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot get point from 1d DMDA");
  if (dd->dim == 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot get point from 3d DMDA");

  *I = -1;
  if (J) *J = -1;
  if (K) *K = -1;

  ierr = DMGetCoordinateDM(da,&dacoors);CHKERRQ(ierr);
  ierr = DMDAGetCorners(dacoors,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
  ierr = DMGetCoordinates(da,&coors);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(dacoors,coors,&c);CHKERRQ(ierr);
  /* brute-force search of the coordinate array for the grid point closest to (x,y) */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      d = PetscSqrtReal(PetscRealPart((c[j][i].x - x)*(c[j][i].x - x) + (c[j][i].y - y)*(c[j][i].y - y)));
      if (d < D) {
        D  = d;
        *I = i;
        *J = j;
      }
    }
  }
  if (X) *X = c[*J][*I].x;
  if (Y) *Y = c[*J][*I].y;
  ierr = DMDAVecRestoreArray(dacoors,coors,&c);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
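
/*
   Usage sketch (illustrative, not part of the original source): locate the grid point
   nearest a physical point (px,py) on a sequential 2d DMDA.  The names da, px and py
   are assumptions of this example.

     PetscInt    ci,cj;
     PetscScalar cx,cy;
     ierr = DMDAGetLogicalCoordinate(da,px,py,0.0,&ci,&cj,NULL,&cx,&cy,NULL);CHKERRQ(ierr);
     if (ci >= 0) {
       ... (ci,cj) are the logical indices of the closest point, (cx,cy) its coordinates ...
     }
*/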

#undef __FUNCT__
#define __FUNCT__ "DMDAGetRay"
/*@C
   DMDAGetRay - Returns a vector on process zero that contains a row or column of the values in a DMDA vector

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
.  dir - Cartesian direction, either DMDA_X, DMDA_Y, or DMDA_Z
-  gp - global grid point number in this direction

   Output Parameters:
+  newvec - the new vector that can hold the values (size zero on all processes except process 0)
-  scatter - the VecScatter that will map from the original vector to the slice

   Level: advanced

   Notes:
   All processors that share the DMDA must call this with the same gp value

.keywords: distributed array, get, processor subset
@*/
PetscErrorCode  DMDAGetRay(DM da,DMDADirection dir,PetscInt gp,Vec *newvec,VecScatter *scatter)
{
  PetscMPIInt    rank;
  DM_DA          *dd = (DM_DA*)da->data;
  PetscErrorCode ierr;
  IS             is;
  AO             ao;
  Vec            vec;
  PetscInt       *indices,i,j;

  PetscFunctionBegin;
  if (dd->dim == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot get slice from 1d DMDA");
  if (dd->dim == 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot get slice from 3d DMDA");
  ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank);CHKERRQ(ierr);
  if (!rank) {
    if (dir == DMDA_Y) {
      /* natural-ordering indices of the grid line with y index gp (all points along x), then mapped to the PETSc ordering */
      ierr       = PetscMalloc(dd->w*dd->M*sizeof(PetscInt),&indices);CHKERRQ(ierr);
      indices[0] = gp*dd->M*dd->w;
      for (i=1; i<dd->M*dd->w; i++) indices[i] = indices[i-1] + 1;

      ierr = AOApplicationToPetsc(ao,dd->M*dd->w,indices);CHKERRQ(ierr);
      ierr = VecCreate(PETSC_COMM_SELF,newvec);CHKERRQ(ierr);
      ierr = VecSetBlockSize(*newvec,dd->w);CHKERRQ(ierr);
      ierr = VecSetSizes(*newvec,dd->M*dd->w,PETSC_DETERMINE);CHKERRQ(ierr);
      ierr = VecSetType(*newvec,VECSEQ);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,dd->w*dd->M,indices,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    } else if (dir == DMDA_X) {
      /* natural-ordering indices of the grid line with x index gp: one block of dd->w dof per row of the grid */
      ierr       = PetscMalloc(dd->w*dd->N*sizeof(PetscInt),&indices);CHKERRQ(ierr);
      indices[0] = dd->w*gp;
      for (j=1; j<dd->w; j++) indices[j] = indices[j-1] + 1;
      for (i=1; i<dd->N; i++) {
        indices[i*dd->w] = indices[i*dd->w-1] + dd->w*dd->M - dd->w + 1;
        for (j=1; j<dd->w; j++) indices[i*dd->w + j] = indices[i*dd->w + j - 1] + 1;
      }
      ierr = AOApplicationToPetsc(ao,dd->w*dd->N,indices);CHKERRQ(ierr);
      ierr = VecCreate(PETSC_COMM_SELF,newvec);CHKERRQ(ierr);
      ierr = VecSetBlockSize(*newvec,dd->w);CHKERRQ(ierr);
      ierr = VecSetSizes(*newvec,dd->N*dd->w,PETSC_DETERMINE);CHKERRQ(ierr);
      ierr = VecSetType(*newvec,VECSEQ);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,dd->w*dd->N,indices,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Unknown DMDADirection");
  } else {
    ierr = VecCreateSeq(PETSC_COMM_SELF,0,newvec);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF,0,NULL,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
  }
  ierr = DMGetGlobalVector(da,&vec);CHKERRQ(ierr);
  ierr = VecScatterCreate(vec,is,*newvec,NULL,scatter);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(da,&vec);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
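
/*
   Usage sketch (illustrative, not part of the original source): gather the values of a
   2d DMDA global vector along the grid line with index gp in the y direction onto
   process 0.  The names da, gvec and gp are assumptions of this example.

     Vec        ray;
     VecScatter rayscatter;
     ierr = DMDAGetRay(da,DMDA_Y,gp,&ray,&rayscatter);CHKERRQ(ierr);
     ierr = VecScatterBegin(rayscatter,gvec,ray,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecScatterEnd(rayscatter,gvec,ray,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ... on process 0, ray now holds the values along that grid line ...
     ierr = VecScatterDestroy(&rayscatter);CHKERRQ(ierr);
     ierr = VecDestroy(&ray);CHKERRQ(ierr);
*/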

#undef __FUNCT__
#define __FUNCT__ "DMDAGetProcessorSubset"
/*@C
   DMDAGetProcessorSubset - Returns a communicator consisting only of the
   processors in a DMDA that own a particular global x, y, or z grid point
   (corresponding to a logical plane in a 3D grid or a line in a 2D grid).

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
.  dir - Cartesian direction, either DMDA_X, DMDA_Y, or DMDA_Z
-  gp - global grid point number in this direction

   Output Parameters:
.  comm - new communicator

   Level: advanced

   Notes:
   All processors that share the DMDA must call this with the same gp value

   This routine is particularly useful to compute boundary conditions
   or other application-specific calculations that require manipulating
   sets of data throughout a logical plane of grid points.

.keywords: distributed array, get, processor subset
@*/
PetscErrorCode  DMDAGetProcessorSubset(DM da,DMDADirection dir,PetscInt gp,MPI_Comm *comm)
{
  MPI_Group      group,subgroup;
  PetscErrorCode ierr;
  PetscInt       i,ict,flag,*owners,xs,xm,ys,ym,zs,zm;
  PetscMPIInt    size,*ranks = NULL;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  flag = 0;
  ierr = DMDAGetCorners(da,&xs,&ys,&zs,&xm,&ym,&zm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);CHKERRQ(ierr);
  if (dir == DMDA_Z) {
    if (dd->dim < 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Z invalid for DMDA dim < 3");
    if (gp < 0 || gp > dd->P) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"invalid grid point");
    if (gp >= zs && gp < zs+zm) flag = 1;
  } else if (dir == DMDA_Y) {
    if (dd->dim == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Y invalid for DMDA dim = 1");
    if (gp < 0 || gp > dd->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"invalid grid point");
    if (gp >= ys && gp < ys+ym) flag = 1;
  } else if (dir == DMDA_X) {
    if (gp < 0 || gp > dd->M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"invalid grid point");
    if (gp >= xs && gp < xs+xm) flag = 1;
  } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"Invalid direction");

  /* flag is 1 on the processes whose local subdomain contains grid point gp in direction dir */
  ierr = PetscMalloc2(size,PetscInt,&owners,size,PetscMPIInt,&ranks);CHKERRQ(ierr);
  ierr = MPI_Allgather(&flag,1,MPIU_INT,owners,1,MPIU_INT,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
  ict  = 0;
  ierr = PetscInfo2(da,"DMDAGetProcessorSubset: dim=%D, direction=%d, procs: ",dd->dim,(int)dir);CHKERRQ(ierr);
  for (i=0; i<size; i++) {
    if (owners[i]) {
      ranks[ict] = i; ict++;
      ierr = PetscInfo1(da,"%D ",i);CHKERRQ(ierr);
    }
  }
  ierr = PetscInfo(da,"\n");CHKERRQ(ierr);
  ierr = MPI_Comm_group(PetscObjectComm((PetscObject)da),&group);CHKERRQ(ierr);
  ierr = MPI_Group_incl(group,ict,ranks,&subgroup);CHKERRQ(ierr);
  ierr = MPI_Comm_create(PetscObjectComm((PetscObject)da),subgroup,comm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&subgroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&group);CHKERRQ(ierr);
  ierr = PetscFree2(owners,ranks);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
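
/*
   Usage sketch (illustrative, not part of the original source): obtain a communicator
   containing only the processes that own the z = 0 plane of a 3d DMDA and restrict a
   collective operation to that plane.  The names da and zcomm are assumptions of this
   example.

     MPI_Comm zcomm;
     ierr = DMDAGetProcessorSubset(da,DMDA_Z,0,&zcomm);CHKERRQ(ierr);
     if (zcomm != MPI_COMM_NULL) {
       ... collective work among the processes owning the z = 0 plane ...
       ierr = MPI_Comm_free(&zcomm);CHKERRQ(ierr);
     }
*/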

#undef __FUNCT__
#define __FUNCT__ "DMDAGetProcessorSubsets"
/*@C
   DMDAGetProcessorSubsets - Returns communicators consisting only of the
   processors in a DMDA adjacent in a particular dimension,
   corresponding to a logical plane in a 3D grid or a line in a 2D grid.

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
-  dir - Cartesian direction, either DMDA_X, DMDA_Y, or DMDA_Z

   Output Parameters:
.  subcomm - new communicator

   Level: advanced

   Notes:
   This routine is useful for distributing one-dimensional data in a tensor product grid.

.keywords: distributed array, get, processor subset
@*/
PetscErrorCode  DMDAGetProcessorSubsets(DM da, DMDADirection dir, MPI_Comm *subcomm)
{
  MPI_Comm       comm;
  MPI_Group      group, subgroup;
  PetscInt       subgroupSize = 0;
  PetscInt       *firstPoints;
  PetscMPIInt    size, *subgroupRanks = NULL;
  PetscInt       xs, xm, ys, ym, zs, zm, firstPoint, p;
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
  ierr = DMDAGetCorners(da, &xs, &ys, &zs, &xm, &ym, &zm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  if (dir == DMDA_Z) {
    if (dd->dim < 3) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Z invalid for DMDA dim < 3");
    firstPoint = zs;
  } else if (dir == DMDA_Y) {
    if (dd->dim == 1) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Y invalid for DMDA dim = 1");
    firstPoint = ys;
  } else if (dir == DMDA_X) {
    firstPoint = xs;
  } else SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Invalid direction");

  /* processes whose subdomains start at the same index in direction dir are placed in the same subcommunicator */
  ierr = PetscMalloc2(size, PetscInt, &firstPoints, size, PetscMPIInt, &subgroupRanks);CHKERRQ(ierr);
  ierr = MPI_Allgather(&firstPoint, 1, MPIU_INT, firstPoints, 1, MPIU_INT, comm);CHKERRQ(ierr);
  ierr = PetscInfo2(da,"DMDAGetProcessorSubsets: dim=%D, direction=%d, procs: ",dd->dim,(int)dir);CHKERRQ(ierr);
  for (p = 0; p < size; ++p) {
    if (firstPoints[p] == firstPoint) {
      subgroupRanks[subgroupSize++] = p;
      ierr = PetscInfo1(da, "%D ", p);CHKERRQ(ierr);
    }
  }
  ierr = PetscInfo(da, "\n");CHKERRQ(ierr);
  ierr = MPI_Comm_group(comm, &group);CHKERRQ(ierr);
  ierr = MPI_Group_incl(group, subgroupSize, subgroupRanks, &subgroup);CHKERRQ(ierr);
  ierr = MPI_Comm_create(comm, subgroup, subcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&subgroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&group);CHKERRQ(ierr);
  ierr = PetscFree2(firstPoints, subgroupRanks);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
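
/*
   Usage sketch (illustrative, not part of the original source): group the processes of
   a DMDA by their starting index in the x direction; each process receives the
   communicator of its own group.  The name xcomm is an assumption of this example.

     MPI_Comm xcomm;
     ierr = DMDAGetProcessorSubsets(da,DMDA_X,&xcomm);CHKERRQ(ierr);
     ... xcomm contains exactly the processes whose subdomains start at the same x index ...
     ierr = MPI_Comm_free(&xcomm);CHKERRQ(ierr);
*/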