xref: /petsc/src/dm/impls/da/dasub.c (revision dcca6d9d80ebd869fe6029bd05a3aa9faafef49e)
147c6ae99SBarry Smith 
247c6ae99SBarry Smith /*
347c6ae99SBarry Smith   Code for manipulating distributed regular arrays in parallel.
447c6ae99SBarry Smith */
547c6ae99SBarry Smith 
64035e84dSBarry Smith #include <petsc-private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
747c6ae99SBarry Smith 
847c6ae99SBarry Smith #undef __FUNCT__
90a0169f3SBarry Smith #define __FUNCT__ "DMDAGetLogicalCoordinate"
100a0169f3SBarry Smith /*@C
   DMDAGetLogicalCoordinate - Returns the i,j,k logical coordinate for the closest mesh point to an x,y,z point in the coordinates of the DMDA
120a0169f3SBarry Smith 
130a0169f3SBarry Smith    Collective on DMDA
140a0169f3SBarry Smith 
150a0169f3SBarry Smith    Input Parameters:
160a0169f3SBarry Smith +  da - the distributed array
17878cb397SSatish Balay -  x,y,z - the physical coordinates
180a0169f3SBarry Smith 
190a0169f3SBarry Smith    Output Parameters:
2033c53e3fSSatish Balay +   II, JJ, KK - the logical coordinate (-1 on processes that do not contain that point)
-   X, Y, Z - (optional) the coordinates of the located grid point
220a0169f3SBarry Smith 
230a0169f3SBarry Smith    Level: advanced
240a0169f3SBarry Smith 
250a0169f3SBarry Smith    Notes:
260a0169f3SBarry Smith    All processors that share the DMDA must call this with the same coordinate value
270a0169f3SBarry Smith 
280a0169f3SBarry Smith .keywords: distributed array, get, processor subset
290a0169f3SBarry Smith @*/
3033c53e3fSSatish Balay PetscErrorCode  DMDAGetLogicalCoordinate(DM da,PetscScalar x,PetscScalar y,PetscScalar z,PetscInt *II,PetscInt *JJ,PetscInt *KK,PetscScalar *X,PetscScalar *Y,PetscScalar *Z)
310a0169f3SBarry Smith {
320a0169f3SBarry Smith   DM_DA          *dd = (DM_DA*)da->data;
330a0169f3SBarry Smith   PetscErrorCode ierr;
340a0169f3SBarry Smith   Vec            coors;
350a0169f3SBarry Smith   DM             dacoors;
360a0169f3SBarry Smith   DMDACoor2d     **c;
370a0169f3SBarry Smith   PetscInt       i,j,xs,xm,ys,ym;
38e97f6855SBarry Smith   PetscReal      d,D = PETSC_MAX_REAL,Dv;
39e97f6855SBarry Smith   PetscMPIInt    rank,root;
400a0169f3SBarry Smith 
410a0169f3SBarry Smith   PetscFunctionBegin;
420a0169f3SBarry Smith   if (dd->dim == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot get point from 1d DMDA");
430a0169f3SBarry Smith   if (dd->dim == 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot get point from 3d DMDA");
440a0169f3SBarry Smith 
4533c53e3fSSatish Balay   *II = -1;
46e97f6855SBarry Smith   *JJ = -1;
470a0169f3SBarry Smith 
480a0169f3SBarry Smith   ierr = DMGetCoordinateDM(da,&dacoors);CHKERRQ(ierr);
490a0169f3SBarry Smith   ierr = DMDAGetCorners(dacoors,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
500a0169f3SBarry Smith   ierr = DMGetCoordinates(da,&coors);CHKERRQ(ierr);
510a0169f3SBarry Smith   ierr = DMDAVecGetArray(dacoors,coors,&c);CHKERRQ(ierr);
520a0169f3SBarry Smith   for (j=ys; j<ys+ym; j++) {
530a0169f3SBarry Smith     for (i=xs; i<xs+xm; i++) {
540a0169f3SBarry Smith       d = PetscSqrtReal(PetscRealPart( (c[j][i].x - x)*(c[j][i].x - x) + (c[j][i].y - y)*(c[j][i].y - y) ));
550a0169f3SBarry Smith       if (d < D) {
560a0169f3SBarry Smith         D   = d;
5733c53e3fSSatish Balay         *II = i;
5833c53e3fSSatish Balay         *JJ = j;
590a0169f3SBarry Smith       }
600a0169f3SBarry Smith     }
610a0169f3SBarry Smith   }
62e97f6855SBarry Smith   ierr = MPI_Allreduce(&D,&Dv,1,MPIU_REAL,MPI_MIN,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
63e97f6855SBarry Smith   if (D != Dv) {
64e97f6855SBarry Smith     *II  = -1;
65e97f6855SBarry Smith     *JJ  = -1;
66e97f6855SBarry Smith     rank = 0;
67e97f6855SBarry Smith   } else {
68e97f6855SBarry Smith     *X = c[*JJ][*II].x;
69e97f6855SBarry Smith     *Y = c[*JJ][*II].y;
70e97f6855SBarry Smith     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank);CHKERRQ(ierr);
71e97f6855SBarry Smith     rank++;
72e97f6855SBarry Smith   }
73e97f6855SBarry Smith   ierr = MPI_Allreduce(&rank,&root,1,MPI_INT,MPI_SUM,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
74e97f6855SBarry Smith   root--;
75ae95e784SBarry Smith   ierr = MPI_Bcast(X,1,MPIU_SCALAR,root,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
76ae95e784SBarry Smith   ierr = MPI_Bcast(Y,1,MPIU_SCALAR,root,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
770a0169f3SBarry Smith   ierr = DMDAVecRestoreArray(dacoors,coors,&c);CHKERRQ(ierr);
780a0169f3SBarry Smith   PetscFunctionReturn(0);
790a0169f3SBarry Smith }
800a0169f3SBarry Smith 
810a0169f3SBarry Smith #undef __FUNCT__
82db05f41bSBarry Smith #define __FUNCT__ "DMDAGetRay"
83db05f41bSBarry Smith /*@C
84db05f41bSBarry Smith    DMDAGetRay - Returns a vector on process zero that contains a row or column of the values in a DMDA vector
85db05f41bSBarry Smith 
86db05f41bSBarry Smith    Collective on DMDA
87db05f41bSBarry Smith 
88db05f41bSBarry Smith    Input Parameters:
89db05f41bSBarry Smith +  da - the distributed array
90db05f41bSBarry Smith .  vec - the vector
91db05f41bSBarry Smith .  dir - Cartesian direction, either DMDA_X, DMDA_Y, or DMDA_Z
92db05f41bSBarry Smith -  gp - global grid point number in this direction
93db05f41bSBarry Smith 
94db05f41bSBarry Smith    Output Parameters:
95db05f41bSBarry Smith +  newvec - the new vector that can hold the values (size zero on all processes except process 0)
96db05f41bSBarry Smith -  scatter - the VecScatter that will map from the original vector to the slice
97db05f41bSBarry Smith 
98db05f41bSBarry Smith    Level: advanced
99db05f41bSBarry Smith 
100db05f41bSBarry Smith    Notes:
101db05f41bSBarry Smith    All processors that share the DMDA must call this with the same gp value
102db05f41bSBarry Smith 
103db05f41bSBarry Smith .keywords: distributed array, get, processor subset
104db05f41bSBarry Smith @*/
105db05f41bSBarry Smith PetscErrorCode  DMDAGetRay(DM da,DMDADirection dir,PetscInt gp,Vec *newvec,VecScatter *scatter)
106db05f41bSBarry Smith {
107db05f41bSBarry Smith   PetscMPIInt    rank;
108db05f41bSBarry Smith   DM_DA          *dd = (DM_DA*)da->data;
109db05f41bSBarry Smith   PetscErrorCode ierr;
110db05f41bSBarry Smith   IS             is;
111db05f41bSBarry Smith   AO             ao;
112db05f41bSBarry Smith   Vec            vec;
113d1212d36SBarry Smith   PetscInt       *indices,i,j;
114db05f41bSBarry Smith 
115db05f41bSBarry Smith   PetscFunctionBegin;
116ce94432eSBarry Smith   if (dd->dim == 3) SETERRQ(PetscObjectComm((PetscObject) da), PETSC_ERR_SUP, "Cannot get slice from 3d DMDA");
117ce94432eSBarry Smith   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) da), &rank);CHKERRQ(ierr);
118c2001ae3SMatthew G. Knepley   ierr = DMDAGetAO(da, &ao);CHKERRQ(ierr);
119db05f41bSBarry Smith   if (!rank) {
120c2001ae3SMatthew G. Knepley     if (dd->dim == 1) {
121c2001ae3SMatthew G. Knepley       if (dir == DMDA_X) {
122c2001ae3SMatthew G. Knepley         ierr = PetscMalloc(dd->w * sizeof(PetscInt), &indices);CHKERRQ(ierr);
123c2001ae3SMatthew G. Knepley         indices[0] = dd->w*gp;
124eca19243SMatthew G. Knepley         for (i = 1; i < dd->w; ++i) indices[i] = indices[i-1] + 1;
125c2001ae3SMatthew G. Knepley         ierr = AOApplicationToPetsc(ao, dd->w, indices);CHKERRQ(ierr);
126c2001ae3SMatthew G. Knepley         ierr = VecCreate(PETSC_COMM_SELF, newvec);CHKERRQ(ierr);
127c2001ae3SMatthew G. Knepley         ierr = VecSetBlockSize(*newvec, dd->w);CHKERRQ(ierr);
128c2001ae3SMatthew G. Knepley         ierr = VecSetSizes(*newvec, dd->w, PETSC_DETERMINE);CHKERRQ(ierr);
129c2001ae3SMatthew G. Knepley         ierr = VecSetType(*newvec, VECSEQ);CHKERRQ(ierr);
130c2001ae3SMatthew G. Knepley         ierr = ISCreateGeneral(PETSC_COMM_SELF, dd->w, indices, PETSC_OWN_POINTER, &is);CHKERRQ(ierr);
131c2001ae3SMatthew G. Knepley       } else if (dir == DMDA_Y) SETERRQ(PetscObjectComm((PetscObject) da), PETSC_ERR_SUP, "Cannot get Y slice from 1d DMDA");
132c2001ae3SMatthew G. Knepley       else SETERRQ(PetscObjectComm((PetscObject) da), PETSC_ERR_ARG_OUTOFRANGE, "Unknown DMDADirection");
133c2001ae3SMatthew G. Knepley     } else {
134db05f41bSBarry Smith       if (dir == DMDA_Y) {
135d1212d36SBarry Smith         ierr       = PetscMalloc(dd->w*dd->M*sizeof(PetscInt),&indices);CHKERRQ(ierr);
136d1212d36SBarry Smith         indices[0] = gp*dd->M*dd->w;
1378865f1eaSKarl Rupp         for (i=1; i<dd->M*dd->w; i++) indices[i] = indices[i-1] + 1;
1388865f1eaSKarl Rupp 
139d1212d36SBarry Smith         ierr = AOApplicationToPetsc(ao,dd->M*dd->w,indices);CHKERRQ(ierr);
140db05f41bSBarry Smith         ierr = VecCreate(PETSC_COMM_SELF,newvec);CHKERRQ(ierr);
141db05f41bSBarry Smith         ierr = VecSetBlockSize(*newvec,dd->w);CHKERRQ(ierr);
142db05f41bSBarry Smith         ierr = VecSetSizes(*newvec,dd->M*dd->w,PETSC_DETERMINE);CHKERRQ(ierr);
143db05f41bSBarry Smith         ierr = VecSetType(*newvec,VECSEQ);CHKERRQ(ierr);
144d1212d36SBarry Smith         ierr = ISCreateGeneral(PETSC_COMM_SELF,dd->w*dd->M,indices,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
145db05f41bSBarry Smith       } else if (dir == DMDA_X) {
146d1212d36SBarry Smith         ierr       = PetscMalloc(dd->w*dd->N*sizeof(PetscInt),&indices);CHKERRQ(ierr);
147d1212d36SBarry Smith         indices[0] = dd->w*gp;
148d1212d36SBarry Smith         for (j=1; j<dd->w; j++) indices[j] = indices[j-1] + 1;
149d1212d36SBarry Smith         for (i=1; i<dd->N; i++) {
150d1212d36SBarry Smith           indices[i*dd->w] = indices[i*dd->w-1] + dd->w*dd->M - dd->w + 1;
151d1212d36SBarry Smith           for (j=1; j<dd->w; j++) indices[i*dd->w + j] = indices[i*dd->w + j - 1] + 1;
152d1212d36SBarry Smith         }
153d1212d36SBarry Smith         ierr = AOApplicationToPetsc(ao,dd->w*dd->N,indices);CHKERRQ(ierr);
154db05f41bSBarry Smith         ierr = VecCreate(PETSC_COMM_SELF,newvec);CHKERRQ(ierr);
155db05f41bSBarry Smith         ierr = VecSetBlockSize(*newvec,dd->w);CHKERRQ(ierr);
156db05f41bSBarry Smith         ierr = VecSetSizes(*newvec,dd->N*dd->w,PETSC_DETERMINE);CHKERRQ(ierr);
157db05f41bSBarry Smith         ierr = VecSetType(*newvec,VECSEQ);CHKERRQ(ierr);
158d1212d36SBarry Smith         ierr = ISCreateGeneral(PETSC_COMM_SELF,dd->w*dd->N,indices,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
159db05f41bSBarry Smith       } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Unknown DMDADirection");
160c2001ae3SMatthew G. Knepley     }
161db05f41bSBarry Smith   } else {
162db05f41bSBarry Smith     ierr = VecCreateSeq(PETSC_COMM_SELF, 0, newvec);CHKERRQ(ierr);
163d1212d36SBarry Smith     ierr = ISCreateGeneral(PETSC_COMM_SELF, 0, 0, PETSC_COPY_VALUES, &is);CHKERRQ(ierr);
164db05f41bSBarry Smith   }
165db05f41bSBarry Smith   ierr = DMGetGlobalVector(da, &vec);CHKERRQ(ierr);
1660298fd71SBarry Smith   ierr = VecScatterCreate(vec, is, *newvec, NULL, scatter);CHKERRQ(ierr);
167db05f41bSBarry Smith   ierr = DMRestoreGlobalVector(da, &vec);CHKERRQ(ierr);
168db05f41bSBarry Smith   ierr = ISDestroy(&is);CHKERRQ(ierr);
169db05f41bSBarry Smith   PetscFunctionReturn(0);
170db05f41bSBarry Smith }
171db05f41bSBarry Smith 
172db05f41bSBarry Smith #undef __FUNCT__
173aa219208SBarry Smith #define __FUNCT__ "DMDAGetProcessorSubset"
17447c6ae99SBarry Smith /*@C
175aa219208SBarry Smith    DMDAGetProcessorSubset - Returns a communicator consisting only of the
176aa219208SBarry Smith    processors in a DMDA that own a particular global x, y, or z grid point
17747c6ae99SBarry Smith    (corresponding to a logical plane in a 3D grid or a line in a 2D grid).
17847c6ae99SBarry Smith 
179aa219208SBarry Smith    Collective on DMDA
18047c6ae99SBarry Smith 
18147c6ae99SBarry Smith    Input Parameters:
18247c6ae99SBarry Smith +  da - the distributed array
183aa219208SBarry Smith .  dir - Cartesian direction, either DMDA_X, DMDA_Y, or DMDA_Z
18447c6ae99SBarry Smith -  gp - global grid point number in this direction
18547c6ae99SBarry Smith 
18647c6ae99SBarry Smith    Output Parameters:
18747c6ae99SBarry Smith .  comm - new communicator
18847c6ae99SBarry Smith 
18947c6ae99SBarry Smith    Level: advanced
19047c6ae99SBarry Smith 
19147c6ae99SBarry Smith    Notes:
192aa219208SBarry Smith    All processors that share the DMDA must call this with the same gp value
19347c6ae99SBarry Smith 
19447c6ae99SBarry Smith    This routine is particularly useful to compute boundary conditions
19547c6ae99SBarry Smith    or other application-specific calculations that require manipulating
19647c6ae99SBarry Smith    sets of data throughout a logical plane of grid points.
19747c6ae99SBarry Smith 
19847c6ae99SBarry Smith .keywords: distributed array, get, processor subset
19947c6ae99SBarry Smith @*/
2007087cfbeSBarry Smith PetscErrorCode  DMDAGetProcessorSubset(DM da,DMDADirection dir,PetscInt gp,MPI_Comm *comm)
20147c6ae99SBarry Smith {
20247c6ae99SBarry Smith   MPI_Group      group,subgroup;
20347c6ae99SBarry Smith   PetscErrorCode ierr;
20447c6ae99SBarry Smith   PetscInt       i,ict,flag,*owners,xs,xm,ys,ym,zs,zm;
2050298fd71SBarry Smith   PetscMPIInt    size,*ranks = NULL;
20647c6ae99SBarry Smith   DM_DA          *dd = (DM_DA*)da->data;
20747c6ae99SBarry Smith 
20847c6ae99SBarry Smith   PetscFunctionBegin;
20947c6ae99SBarry Smith   PetscValidHeaderSpecific(da,DM_CLASSID,1);
21047c6ae99SBarry Smith   flag = 0;
211aa219208SBarry Smith   ierr = DMDAGetCorners(da,&xs,&ys,&zs,&xm,&ym,&zm);CHKERRQ(ierr);
212ce94432eSBarry Smith   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);CHKERRQ(ierr);
213aa219208SBarry Smith   if (dir == DMDA_Z) {
214ce94432eSBarry Smith     if (dd->dim < 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Z invalid for DMDA dim < 3");
21547c6ae99SBarry Smith     if (gp < 0 || gp > dd->P) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"invalid grid point");
21647c6ae99SBarry Smith     if (gp >= zs && gp < zs+zm) flag = 1;
217aa219208SBarry Smith   } else if (dir == DMDA_Y) {
218ce94432eSBarry Smith     if (dd->dim == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Y invalid for DMDA dim = 1");
21947c6ae99SBarry Smith     if (gp < 0 || gp > dd->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"invalid grid point");
22047c6ae99SBarry Smith     if (gp >= ys && gp < ys+ym) flag = 1;
221aa219208SBarry Smith   } else if (dir == DMDA_X) {
22247c6ae99SBarry Smith     if (gp < 0 || gp > dd->M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"invalid grid point");
22347c6ae99SBarry Smith     if (gp >= xs && gp < xs+xm) flag = 1;
224ce94432eSBarry Smith   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"Invalid direction");
22547c6ae99SBarry Smith 
226*dcca6d9dSJed Brown   ierr = PetscMalloc2(size,&owners,size,&ranks);CHKERRQ(ierr);
227ce94432eSBarry Smith   ierr = MPI_Allgather(&flag,1,MPIU_INT,owners,1,MPIU_INT,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
22847c6ae99SBarry Smith   ict  = 0;
229aa219208SBarry Smith   ierr = PetscInfo2(da,"DMDAGetProcessorSubset: dim=%D, direction=%d, procs: ",dd->dim,(int)dir);CHKERRQ(ierr);
23047c6ae99SBarry Smith   for (i=0; i<size; i++) {
23147c6ae99SBarry Smith     if (owners[i]) {
23247c6ae99SBarry Smith       ranks[ict] = i; ict++;
23347c6ae99SBarry Smith       ierr       = PetscInfo1(da,"%D ",i);CHKERRQ(ierr);
23447c6ae99SBarry Smith     }
23547c6ae99SBarry Smith   }
23647c6ae99SBarry Smith   ierr = PetscInfo(da,"\n");CHKERRQ(ierr);
237ce94432eSBarry Smith   ierr = MPI_Comm_group(PetscObjectComm((PetscObject)da),&group);CHKERRQ(ierr);
23847c6ae99SBarry Smith   ierr = MPI_Group_incl(group,ict,ranks,&subgroup);CHKERRQ(ierr);
239ce94432eSBarry Smith   ierr = MPI_Comm_create(PetscObjectComm((PetscObject)da),subgroup,comm);CHKERRQ(ierr);
24047c6ae99SBarry Smith   ierr = MPI_Group_free(&subgroup);CHKERRQ(ierr);
24147c6ae99SBarry Smith   ierr = MPI_Group_free(&group);CHKERRQ(ierr);
24247c6ae99SBarry Smith   ierr = PetscFree2(owners,ranks);CHKERRQ(ierr);
24347c6ae99SBarry Smith   PetscFunctionReturn(0);
24447c6ae99SBarry Smith }
24547c6ae99SBarry Smith 
24647c6ae99SBarry Smith #undef __FUNCT__
247aa219208SBarry Smith #define __FUNCT__ "DMDAGetProcessorSubsets"
24847c6ae99SBarry Smith /*@C
249aa219208SBarry Smith    DMDAGetProcessorSubsets - Returns communicators consisting only of the
250aa219208SBarry Smith    processors in a DMDA adjacent in a particular dimension,
25147c6ae99SBarry Smith    corresponding to a logical plane in a 3D grid or a line in a 2D grid.
25247c6ae99SBarry Smith 
253aa219208SBarry Smith    Collective on DMDA
25447c6ae99SBarry Smith 
25547c6ae99SBarry Smith    Input Parameters:
25647c6ae99SBarry Smith +  da - the distributed array
257aa219208SBarry Smith -  dir - Cartesian direction, either DMDA_X, DMDA_Y, or DMDA_Z
25847c6ae99SBarry Smith 
25947c6ae99SBarry Smith    Output Parameters:
26047c6ae99SBarry Smith .  subcomm - new communicator
26147c6ae99SBarry Smith 
26247c6ae99SBarry Smith    Level: advanced
26347c6ae99SBarry Smith 
26447c6ae99SBarry Smith    Notes:
26547c6ae99SBarry Smith    This routine is useful for distributing one-dimensional data in a tensor product grid.
26647c6ae99SBarry Smith 
26747c6ae99SBarry Smith .keywords: distributed array, get, processor subset
26847c6ae99SBarry Smith @*/
2697087cfbeSBarry Smith PetscErrorCode  DMDAGetProcessorSubsets(DM da, DMDADirection dir, MPI_Comm *subcomm)
27047c6ae99SBarry Smith {
27147c6ae99SBarry Smith   MPI_Comm       comm;
27247c6ae99SBarry Smith   MPI_Group      group, subgroup;
27347c6ae99SBarry Smith   PetscInt       subgroupSize = 0;
27447c6ae99SBarry Smith   PetscInt       *firstPoints;
2750298fd71SBarry Smith   PetscMPIInt    size, *subgroupRanks = NULL;
27647c6ae99SBarry Smith   PetscInt       xs, xm, ys, ym, zs, zm, firstPoint, p;
27747c6ae99SBarry Smith   PetscErrorCode ierr;
27847c6ae99SBarry Smith   DM_DA          *dd = (DM_DA*)da->data;
27947c6ae99SBarry Smith 
28047c6ae99SBarry Smith   PetscFunctionBegin;
28147c6ae99SBarry Smith   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
28282f516ccSBarry Smith   ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
283aa219208SBarry Smith   ierr = DMDAGetCorners(da, &xs, &ys, &zs, &xm, &ym, &zm);CHKERRQ(ierr);
28447c6ae99SBarry Smith   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
285aa219208SBarry Smith   if (dir == DMDA_Z) {
286aa219208SBarry Smith     if (dd->dim < 3) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Z invalid for DMDA dim < 3");
28747c6ae99SBarry Smith     firstPoint = zs;
288aa219208SBarry Smith   } else if (dir == DMDA_Y) {
289aa219208SBarry Smith     if (dd->dim == 1) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Y invalid for DMDA dim = 1");
29047c6ae99SBarry Smith     firstPoint = ys;
291aa219208SBarry Smith   } else if (dir == DMDA_X) {
29247c6ae99SBarry Smith     firstPoint = xs;
29347c6ae99SBarry Smith   } else SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Invalid direction");
29447c6ae99SBarry Smith 
295*dcca6d9dSJed Brown   ierr = PetscMalloc2(size, &firstPoints, size, &subgroupRanks);CHKERRQ(ierr);
29647c6ae99SBarry Smith   ierr = MPI_Allgather(&firstPoint, 1, MPIU_INT, firstPoints, 1, MPIU_INT, comm);CHKERRQ(ierr);
297aa219208SBarry Smith   ierr = PetscInfo2(da,"DMDAGetProcessorSubset: dim=%D, direction=%d, procs: ",dd->dim,(int)dir);CHKERRQ(ierr);
29847c6ae99SBarry Smith   for (p = 0; p < size; ++p) {
29947c6ae99SBarry Smith     if (firstPoints[p] == firstPoint) {
30047c6ae99SBarry Smith       subgroupRanks[subgroupSize++] = p;
30147c6ae99SBarry Smith       ierr = PetscInfo1(da, "%D ", p);CHKERRQ(ierr);
30247c6ae99SBarry Smith     }
30347c6ae99SBarry Smith   }
30447c6ae99SBarry Smith   ierr = PetscInfo(da, "\n");CHKERRQ(ierr);
30547c6ae99SBarry Smith   ierr = MPI_Comm_group(comm, &group);CHKERRQ(ierr);
30647c6ae99SBarry Smith   ierr = MPI_Group_incl(group, subgroupSize, subgroupRanks, &subgroup);CHKERRQ(ierr);
30747c6ae99SBarry Smith   ierr = MPI_Comm_create(comm, subgroup, subcomm);CHKERRQ(ierr);
30847c6ae99SBarry Smith   ierr = MPI_Group_free(&subgroup);CHKERRQ(ierr);
30947c6ae99SBarry Smith   ierr = MPI_Group_free(&group);CHKERRQ(ierr);
31047c6ae99SBarry Smith   ierr = PetscFree2(firstPoints, subgroupRanks);CHKERRQ(ierr);
31147c6ae99SBarry Smith   PetscFunctionReturn(0);
31247c6ae99SBarry Smith }
313