xref: /petsc/src/dm/impls/da/dasub.c (revision b2566f29c2b6470df769aa9f7deb9e2726b0959e)
147c6ae99SBarry Smith 
247c6ae99SBarry Smith /*
347c6ae99SBarry Smith   Code for manipulating distributed regular arrays in parallel.
447c6ae99SBarry Smith */
547c6ae99SBarry Smith 
6af0996ceSBarry Smith #include <petsc/private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
747c6ae99SBarry Smith 
847c6ae99SBarry Smith #undef __FUNCT__
90a0169f3SBarry Smith #define __FUNCT__ "DMDAGetLogicalCoordinate"
100a0169f3SBarry Smith /*@C
110a0169f3SBarry Smith    DMDAGetLogicalCoordinate - Returns a the i,j,k logical coordinate for the closest mesh point to a x,y,z point in the coordinates of the DMDA
120a0169f3SBarry Smith 
130a0169f3SBarry Smith    Collective on DMDA
140a0169f3SBarry Smith 
150a0169f3SBarry Smith    Input Parameters:
160a0169f3SBarry Smith +  da - the distributed array
17878cb397SSatish Balay -  x,y,z - the physical coordinates
180a0169f3SBarry Smith 
190a0169f3SBarry Smith    Output Parameters:
2033c53e3fSSatish Balay +   II, JJ, KK - the logical coordinate (-1 on processes that do not contain that point)
210a0169f3SBarry Smith -   X, Y, Z, - (optional) the coordinates of the located grid point
220a0169f3SBarry Smith 
230a0169f3SBarry Smith    Level: advanced
240a0169f3SBarry Smith 
250a0169f3SBarry Smith    Notes:
260a0169f3SBarry Smith    All processors that share the DMDA must call this with the same coordinate value
270a0169f3SBarry Smith 
280a0169f3SBarry Smith .keywords: distributed array, get, processor subset
290a0169f3SBarry Smith @*/
3033c53e3fSSatish Balay PetscErrorCode  DMDAGetLogicalCoordinate(DM da,PetscScalar x,PetscScalar y,PetscScalar z,PetscInt *II,PetscInt *JJ,PetscInt *KK,PetscScalar *X,PetscScalar *Y,PetscScalar *Z)
310a0169f3SBarry Smith {
320a0169f3SBarry Smith   PetscErrorCode ierr;
330a0169f3SBarry Smith   Vec            coors;
340a0169f3SBarry Smith   DM             dacoors;
350a0169f3SBarry Smith   DMDACoor2d     **c;
360a0169f3SBarry Smith   PetscInt       i,j,xs,xm,ys,ym;
37e97f6855SBarry Smith   PetscReal      d,D = PETSC_MAX_REAL,Dv;
38e97f6855SBarry Smith   PetscMPIInt    rank,root;
390a0169f3SBarry Smith 
400a0169f3SBarry Smith   PetscFunctionBegin;
41c73cfb54SMatthew G. Knepley   if (da->dim == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot get point from 1d DMDA");
42c73cfb54SMatthew G. Knepley   if (da->dim == 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot get point from 3d DMDA");
430a0169f3SBarry Smith 
4433c53e3fSSatish Balay   *II = -1;
45e97f6855SBarry Smith   *JJ = -1;
460a0169f3SBarry Smith 
470a0169f3SBarry Smith   ierr = DMGetCoordinateDM(da,&dacoors);CHKERRQ(ierr);
480a0169f3SBarry Smith   ierr = DMDAGetCorners(dacoors,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
490a0169f3SBarry Smith   ierr = DMGetCoordinates(da,&coors);CHKERRQ(ierr);
505edff71fSBarry Smith   ierr = DMDAVecGetArrayRead(dacoors,coors,&c);CHKERRQ(ierr);
510a0169f3SBarry Smith   for (j=ys; j<ys+ym; j++) {
520a0169f3SBarry Smith     for (i=xs; i<xs+xm; i++) {
530a0169f3SBarry Smith       d = PetscSqrtReal(PetscRealPart( (c[j][i].x - x)*(c[j][i].x - x) + (c[j][i].y - y)*(c[j][i].y - y) ));
540a0169f3SBarry Smith       if (d < D) {
550a0169f3SBarry Smith         D   = d;
5633c53e3fSSatish Balay         *II = i;
5733c53e3fSSatish Balay         *JJ = j;
580a0169f3SBarry Smith       }
590a0169f3SBarry Smith     }
600a0169f3SBarry Smith   }
61*b2566f29SBarry Smith   ierr = MPIU_Allreduce(&D,&Dv,1,MPIU_REAL,MPIU_MIN,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
62e97f6855SBarry Smith   if (D != Dv) {
63e97f6855SBarry Smith     *II  = -1;
64e97f6855SBarry Smith     *JJ  = -1;
65e97f6855SBarry Smith     rank = 0;
66e97f6855SBarry Smith   } else {
67e97f6855SBarry Smith     *X = c[*JJ][*II].x;
68e97f6855SBarry Smith     *Y = c[*JJ][*II].y;
69e97f6855SBarry Smith     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank);CHKERRQ(ierr);
70e97f6855SBarry Smith     rank++;
71e97f6855SBarry Smith   }
72*b2566f29SBarry Smith   ierr = MPIU_Allreduce(&rank,&root,1,MPI_INT,MPI_SUM,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
73e97f6855SBarry Smith   root--;
74ae95e784SBarry Smith   ierr = MPI_Bcast(X,1,MPIU_SCALAR,root,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
75ae95e784SBarry Smith   ierr = MPI_Bcast(Y,1,MPIU_SCALAR,root,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
765edff71fSBarry Smith   ierr = DMDAVecRestoreArrayRead(dacoors,coors,&c);CHKERRQ(ierr);
770a0169f3SBarry Smith   PetscFunctionReturn(0);
780a0169f3SBarry Smith }
790a0169f3SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetRay"
/*@C
   DMDAGetRay - Returns a vector on process zero that contains a row or column of the values in a DMDA vector

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
.  dir - Cartesian direction, either DMDA_X, DMDA_Y, or DMDA_Z
-  gp - global grid point number in this direction

   Output Parameters:
+  newvec - the new vector that can hold the values (size zero on all processes except process 0)
-  scatter - the VecScatter that will map from the original vector to the slice

   Level: advanced

   Notes:
   All processors that share the DMDA must call this with the same gp value

.keywords: distributed array, get, processor subset
@*/
PetscErrorCode  DMDAGetRay(DM da,DMDADirection dir,PetscInt gp,Vec *newvec,VecScatter *scatter)
{
  PetscMPIInt    rank;
  DM_DA          *dd = (DM_DA*)da->data;
  PetscErrorCode ierr;
  IS             is;
  AO             ao;
  Vec            vec;
  PetscInt       *indices,i,j;

  PetscFunctionBegin;
  if (da->dim == 3) SETERRQ(PetscObjectComm((PetscObject) da), PETSC_ERR_SUP, "Cannot get slice from 3d DMDA");
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) da), &rank);CHKERRQ(ierr);
  ierr = DMDAGetAO(da, &ao);CHKERRQ(ierr);
  /* Only rank 0 builds the (global) index list for the ray; all other ranks contribute an
     empty IS so the scatter delivers the whole ray to process 0.
     Indices are first built in the natural (application) ordering -- contiguous dofs per grid
     point, x fastest -- then mapped to the PETSc (parallel) ordering via the AO.
     NOTE(review): gp is not range-checked here; an out-of-range gp presumably produces
     invalid indices -- verify against callers. */
  if (!rank) {
    if (da->dim == 1) {
      if (dir == DMDA_X) {
        /* 1d ray: the dd->w dofs stored contiguously at grid point gp */
        ierr = PetscMalloc1(dd->w, &indices);CHKERRQ(ierr);
        indices[0] = dd->w*gp;
        for (i = 1; i < dd->w; ++i) indices[i] = indices[i-1] + 1;
        ierr = AOApplicationToPetsc(ao, dd->w, indices);CHKERRQ(ierr);
        ierr = VecCreate(PETSC_COMM_SELF, newvec);CHKERRQ(ierr);
        ierr = VecSetBlockSize(*newvec, dd->w);CHKERRQ(ierr);
        ierr = VecSetSizes(*newvec, dd->w, PETSC_DETERMINE);CHKERRQ(ierr);
        ierr = VecSetType(*newvec, VECSEQ);CHKERRQ(ierr);
        /* PETSC_OWN_POINTER: the IS takes ownership of indices and frees it */
        ierr = ISCreateGeneral(PETSC_COMM_SELF, dd->w, indices, PETSC_OWN_POINTER, &is);CHKERRQ(ierr);
      } else if (dir == DMDA_Y) SETERRQ(PetscObjectComm((PetscObject) da), PETSC_ERR_SUP, "Cannot get Y slice from 1d DMDA");
      else SETERRQ(PetscObjectComm((PetscObject) da), PETSC_ERR_ARG_OUTOFRANGE, "Unknown DMDADirection");
    } else {
      if (dir == DMDA_Y) {
        /* 2d row y == gp: M grid points, dd->w dofs each, contiguous in natural ordering */
        ierr       = PetscMalloc1(dd->w*dd->M,&indices);CHKERRQ(ierr);
        indices[0] = gp*dd->M*dd->w;
        for (i=1; i<dd->M*dd->w; i++) indices[i] = indices[i-1] + 1;

        ierr = AOApplicationToPetsc(ao,dd->M*dd->w,indices);CHKERRQ(ierr);
        ierr = VecCreate(PETSC_COMM_SELF,newvec);CHKERRQ(ierr);
        ierr = VecSetBlockSize(*newvec,dd->w);CHKERRQ(ierr);
        ierr = VecSetSizes(*newvec,dd->M*dd->w,PETSC_DETERMINE);CHKERRQ(ierr);
        ierr = VecSetType(*newvec,VECSEQ);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,dd->w*dd->M,indices,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
      } else if (dir == DMDA_X) {
        /* 2d column x == gp: N grid points strided by a whole row (dd->w*dd->M) in natural ordering */
        ierr       = PetscMalloc1(dd->w*dd->N,&indices);CHKERRQ(ierr);
        indices[0] = dd->w*gp;
        for (j=1; j<dd->w; j++) indices[j] = indices[j-1] + 1;       /* dofs of the first point */
        for (i=1; i<dd->N; i++) {
          /* jump to the same column position in the next row, then fill that point's dofs */
          indices[i*dd->w] = indices[i*dd->w-1] + dd->w*dd->M - dd->w + 1;
          for (j=1; j<dd->w; j++) indices[i*dd->w + j] = indices[i*dd->w + j - 1] + 1;
        }
        ierr = AOApplicationToPetsc(ao,dd->w*dd->N,indices);CHKERRQ(ierr);
        ierr = VecCreate(PETSC_COMM_SELF,newvec);CHKERRQ(ierr);
        ierr = VecSetBlockSize(*newvec,dd->w);CHKERRQ(ierr);
        ierr = VecSetSizes(*newvec,dd->N*dd->w,PETSC_DETERMINE);CHKERRQ(ierr);
        ierr = VecSetType(*newvec,VECSEQ);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,dd->w*dd->N,indices,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
      } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Unknown DMDADirection");
    }
  } else {
    /* non-root ranks receive nothing: empty destination vector and empty index set */
    ierr = VecCreateSeq(PETSC_COMM_SELF, 0, newvec);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF, 0, 0, PETSC_COPY_VALUES, &is);CHKERRQ(ierr);
  }
  /* borrow a global vector only as a template for building the scatter */
  ierr = DMGetGlobalVector(da, &vec);CHKERRQ(ierr);
  ierr = VecScatterCreate(vec, is, *newvec, NULL, scatter);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(da, &vec);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
170db05f41bSBarry Smith 
171db05f41bSBarry Smith #undef __FUNCT__
172aa219208SBarry Smith #define __FUNCT__ "DMDAGetProcessorSubset"
17347c6ae99SBarry Smith /*@C
174aa219208SBarry Smith    DMDAGetProcessorSubset - Returns a communicator consisting only of the
175aa219208SBarry Smith    processors in a DMDA that own a particular global x, y, or z grid point
17647c6ae99SBarry Smith    (corresponding to a logical plane in a 3D grid or a line in a 2D grid).
17747c6ae99SBarry Smith 
178aa219208SBarry Smith    Collective on DMDA
17947c6ae99SBarry Smith 
18047c6ae99SBarry Smith    Input Parameters:
18147c6ae99SBarry Smith +  da - the distributed array
182aa219208SBarry Smith .  dir - Cartesian direction, either DMDA_X, DMDA_Y, or DMDA_Z
18347c6ae99SBarry Smith -  gp - global grid point number in this direction
18447c6ae99SBarry Smith 
18547c6ae99SBarry Smith    Output Parameters:
18647c6ae99SBarry Smith .  comm - new communicator
18747c6ae99SBarry Smith 
18847c6ae99SBarry Smith    Level: advanced
18947c6ae99SBarry Smith 
19047c6ae99SBarry Smith    Notes:
191aa219208SBarry Smith    All processors that share the DMDA must call this with the same gp value
19247c6ae99SBarry Smith 
19347c6ae99SBarry Smith    This routine is particularly useful to compute boundary conditions
19447c6ae99SBarry Smith    or other application-specific calculations that require manipulating
19547c6ae99SBarry Smith    sets of data throughout a logical plane of grid points.
19647c6ae99SBarry Smith 
19747c6ae99SBarry Smith .keywords: distributed array, get, processor subset
19847c6ae99SBarry Smith @*/
1997087cfbeSBarry Smith PetscErrorCode  DMDAGetProcessorSubset(DM da,DMDADirection dir,PetscInt gp,MPI_Comm *comm)
20047c6ae99SBarry Smith {
20147c6ae99SBarry Smith   MPI_Group      group,subgroup;
20247c6ae99SBarry Smith   PetscErrorCode ierr;
20347c6ae99SBarry Smith   PetscInt       i,ict,flag,*owners,xs,xm,ys,ym,zs,zm;
2040298fd71SBarry Smith   PetscMPIInt    size,*ranks = NULL;
20547c6ae99SBarry Smith   DM_DA          *dd = (DM_DA*)da->data;
20647c6ae99SBarry Smith 
20747c6ae99SBarry Smith   PetscFunctionBegin;
20847c6ae99SBarry Smith   PetscValidHeaderSpecific(da,DM_CLASSID,1);
20947c6ae99SBarry Smith   flag = 0;
210aa219208SBarry Smith   ierr = DMDAGetCorners(da,&xs,&ys,&zs,&xm,&ym,&zm);CHKERRQ(ierr);
211ce94432eSBarry Smith   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);CHKERRQ(ierr);
212aa219208SBarry Smith   if (dir == DMDA_Z) {
213c73cfb54SMatthew G. Knepley     if (da->dim < 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Z invalid for DMDA dim < 3");
21447c6ae99SBarry Smith     if (gp < 0 || gp > dd->P) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"invalid grid point");
21547c6ae99SBarry Smith     if (gp >= zs && gp < zs+zm) flag = 1;
216aa219208SBarry Smith   } else if (dir == DMDA_Y) {
217c73cfb54SMatthew G. Knepley     if (da->dim == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Y invalid for DMDA dim = 1");
21847c6ae99SBarry Smith     if (gp < 0 || gp > dd->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"invalid grid point");
21947c6ae99SBarry Smith     if (gp >= ys && gp < ys+ym) flag = 1;
220aa219208SBarry Smith   } else if (dir == DMDA_X) {
22147c6ae99SBarry Smith     if (gp < 0 || gp > dd->M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"invalid grid point");
22247c6ae99SBarry Smith     if (gp >= xs && gp < xs+xm) flag = 1;
223ce94432eSBarry Smith   } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"Invalid direction");
22447c6ae99SBarry Smith 
225dcca6d9dSJed Brown   ierr = PetscMalloc2(size,&owners,size,&ranks);CHKERRQ(ierr);
226ce94432eSBarry Smith   ierr = MPI_Allgather(&flag,1,MPIU_INT,owners,1,MPIU_INT,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
22747c6ae99SBarry Smith   ict  = 0;
228c73cfb54SMatthew G. Knepley   ierr = PetscInfo2(da,"DMDAGetProcessorSubset: dim=%D, direction=%d, procs: ",da->dim,(int)dir);CHKERRQ(ierr);
22947c6ae99SBarry Smith   for (i=0; i<size; i++) {
23047c6ae99SBarry Smith     if (owners[i]) {
23147c6ae99SBarry Smith       ranks[ict] = i; ict++;
23247c6ae99SBarry Smith       ierr       = PetscInfo1(da,"%D ",i);CHKERRQ(ierr);
23347c6ae99SBarry Smith     }
23447c6ae99SBarry Smith   }
23547c6ae99SBarry Smith   ierr = PetscInfo(da,"\n");CHKERRQ(ierr);
236ce94432eSBarry Smith   ierr = MPI_Comm_group(PetscObjectComm((PetscObject)da),&group);CHKERRQ(ierr);
23747c6ae99SBarry Smith   ierr = MPI_Group_incl(group,ict,ranks,&subgroup);CHKERRQ(ierr);
238ce94432eSBarry Smith   ierr = MPI_Comm_create(PetscObjectComm((PetscObject)da),subgroup,comm);CHKERRQ(ierr);
23947c6ae99SBarry Smith   ierr = MPI_Group_free(&subgroup);CHKERRQ(ierr);
24047c6ae99SBarry Smith   ierr = MPI_Group_free(&group);CHKERRQ(ierr);
24147c6ae99SBarry Smith   ierr = PetscFree2(owners,ranks);CHKERRQ(ierr);
24247c6ae99SBarry Smith   PetscFunctionReturn(0);
24347c6ae99SBarry Smith }
24447c6ae99SBarry Smith 
24547c6ae99SBarry Smith #undef __FUNCT__
246aa219208SBarry Smith #define __FUNCT__ "DMDAGetProcessorSubsets"
24747c6ae99SBarry Smith /*@C
248aa219208SBarry Smith    DMDAGetProcessorSubsets - Returns communicators consisting only of the
249aa219208SBarry Smith    processors in a DMDA adjacent in a particular dimension,
25047c6ae99SBarry Smith    corresponding to a logical plane in a 3D grid or a line in a 2D grid.
25147c6ae99SBarry Smith 
252aa219208SBarry Smith    Collective on DMDA
25347c6ae99SBarry Smith 
25447c6ae99SBarry Smith    Input Parameters:
25547c6ae99SBarry Smith +  da - the distributed array
256aa219208SBarry Smith -  dir - Cartesian direction, either DMDA_X, DMDA_Y, or DMDA_Z
25747c6ae99SBarry Smith 
25847c6ae99SBarry Smith    Output Parameters:
25947c6ae99SBarry Smith .  subcomm - new communicator
26047c6ae99SBarry Smith 
26147c6ae99SBarry Smith    Level: advanced
26247c6ae99SBarry Smith 
26347c6ae99SBarry Smith    Notes:
26447c6ae99SBarry Smith    This routine is useful for distributing one-dimensional data in a tensor product grid.
26547c6ae99SBarry Smith 
26647c6ae99SBarry Smith .keywords: distributed array, get, processor subset
26747c6ae99SBarry Smith @*/
2687087cfbeSBarry Smith PetscErrorCode  DMDAGetProcessorSubsets(DM da, DMDADirection dir, MPI_Comm *subcomm)
26947c6ae99SBarry Smith {
27047c6ae99SBarry Smith   MPI_Comm       comm;
27147c6ae99SBarry Smith   MPI_Group      group, subgroup;
27247c6ae99SBarry Smith   PetscInt       subgroupSize = 0;
27347c6ae99SBarry Smith   PetscInt       *firstPoints;
2740298fd71SBarry Smith   PetscMPIInt    size, *subgroupRanks = NULL;
27547c6ae99SBarry Smith   PetscInt       xs, xm, ys, ym, zs, zm, firstPoint, p;
27647c6ae99SBarry Smith   PetscErrorCode ierr;
27747c6ae99SBarry Smith 
27847c6ae99SBarry Smith   PetscFunctionBegin;
27947c6ae99SBarry Smith   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
28082f516ccSBarry Smith   ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
281aa219208SBarry Smith   ierr = DMDAGetCorners(da, &xs, &ys, &zs, &xm, &ym, &zm);CHKERRQ(ierr);
28247c6ae99SBarry Smith   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
283aa219208SBarry Smith   if (dir == DMDA_Z) {
284c73cfb54SMatthew G. Knepley     if (da->dim < 3) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Z invalid for DMDA dim < 3");
28547c6ae99SBarry Smith     firstPoint = zs;
286aa219208SBarry Smith   } else if (dir == DMDA_Y) {
287c73cfb54SMatthew G. Knepley     if (da->dim == 1) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Y invalid for DMDA dim = 1");
28847c6ae99SBarry Smith     firstPoint = ys;
289aa219208SBarry Smith   } else if (dir == DMDA_X) {
29047c6ae99SBarry Smith     firstPoint = xs;
29147c6ae99SBarry Smith   } else SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Invalid direction");
29247c6ae99SBarry Smith 
293dcca6d9dSJed Brown   ierr = PetscMalloc2(size, &firstPoints, size, &subgroupRanks);CHKERRQ(ierr);
29447c6ae99SBarry Smith   ierr = MPI_Allgather(&firstPoint, 1, MPIU_INT, firstPoints, 1, MPIU_INT, comm);CHKERRQ(ierr);
295c73cfb54SMatthew G. Knepley   ierr = PetscInfo2(da,"DMDAGetProcessorSubset: dim=%D, direction=%d, procs: ",da->dim,(int)dir);CHKERRQ(ierr);
29647c6ae99SBarry Smith   for (p = 0; p < size; ++p) {
29747c6ae99SBarry Smith     if (firstPoints[p] == firstPoint) {
29847c6ae99SBarry Smith       subgroupRanks[subgroupSize++] = p;
29947c6ae99SBarry Smith       ierr = PetscInfo1(da, "%D ", p);CHKERRQ(ierr);
30047c6ae99SBarry Smith     }
30147c6ae99SBarry Smith   }
30247c6ae99SBarry Smith   ierr = PetscInfo(da, "\n");CHKERRQ(ierr);
30347c6ae99SBarry Smith   ierr = MPI_Comm_group(comm, &group);CHKERRQ(ierr);
30447c6ae99SBarry Smith   ierr = MPI_Group_incl(group, subgroupSize, subgroupRanks, &subgroup);CHKERRQ(ierr);
30547c6ae99SBarry Smith   ierr = MPI_Comm_create(comm, subgroup, subcomm);CHKERRQ(ierr);
30647c6ae99SBarry Smith   ierr = MPI_Group_free(&subgroup);CHKERRQ(ierr);
30747c6ae99SBarry Smith   ierr = MPI_Group_free(&group);CHKERRQ(ierr);
30847c6ae99SBarry Smith   ierr = PetscFree2(firstPoints, subgroupRanks);CHKERRQ(ierr);
30947c6ae99SBarry Smith   PetscFunctionReturn(0);
31047c6ae99SBarry Smith }
311