xref: /petsc/src/dm/impls/da/dasub.c (revision b45d2f2cb7e031d9c0de5873eca80614ca7b863b)

/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include <petsc-private/daimpl.h>    /*I   "petscdmda.h"   I*/

#undef __FUNCT__
#define __FUNCT__ "DMDAGetProcessorSubset"
/*@C
   DMDAGetProcessorSubset - Returns a communicator consisting only of the
   processors in a DMDA that own a particular global x, y, or z grid point
   (corresponding to a logical plane in a 3D grid or a line in a 2D grid).

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
.  dir - Cartesian direction, either DMDA_X, DMDA_Y, or DMDA_Z
-  gp - global grid point number in this direction

   Output Parameter:
.  comm - new communicator containing only the processors that own the given grid point

   Level: advanced

   Notes:
   All processors that share the DMDA must call this with the same gp value.

   This routine is particularly useful for computing boundary conditions or
   performing other application-specific calculations that require manipulating
   sets of data throughout a logical plane of grid points.

.keywords: distributed array, get, processor subset
@*/
PetscErrorCode  DMDAGetProcessorSubset(DM da,DMDADirection dir,PetscInt gp,MPI_Comm *comm)
{
  MPI_Group      group,subgroup;
  PetscErrorCode ierr;
  PetscInt       i,ict,flag,*owners,xs,xm,ys,ym,zs,zm;
  PetscMPIInt    size,*ranks = PETSC_NULL;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  flag = 0;
  ierr = DMDAGetCorners(da,&xs,&ys,&zs,&xm,&ym,&zm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)da)->comm,&size);CHKERRQ(ierr);
  /* Determine whether this process owns the requested plane (3D) or line (2D) */
  if (dir == DMDA_Z) {
    if (dd->dim < 3) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Z invalid for DMDA dim < 3");
    if (gp < 0 || gp > dd->P) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"invalid grid point");
    if (gp >= zs && gp < zs+zm) flag = 1;
  } else if (dir == DMDA_Y) {
    if (dd->dim == 1) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Y invalid for DMDA dim = 1");
    if (gp < 0 || gp > dd->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"invalid grid point");
    if (gp >= ys && gp < ys+ym) flag = 1;
  } else if (dir == DMDA_X) {
    if (gp < 0 || gp > dd->M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"invalid grid point");
    if (gp >= xs && gp < xs+xm) flag = 1;
  } else SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Invalid direction");

  /* Gather the ownership flags from all processes and collect the ranks that own the point */
  ierr = PetscMalloc2(size,PetscInt,&owners,size,PetscMPIInt,&ranks);CHKERRQ(ierr);
  ierr = MPI_Allgather(&flag,1,MPIU_INT,owners,1,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);
  ict  = 0;
  ierr = PetscInfo2(da,"DMDAGetProcessorSubset: dim=%D, direction=%d, procs: ",dd->dim,(int)dir);CHKERRQ(ierr);
  for (i=0; i<size; i++) {
    if (owners[i]) {
      ranks[ict] = i; ict++;
      ierr = PetscInfo1(da,"%D ",i);CHKERRQ(ierr);
    }
  }
  ierr = PetscInfo(da,"\n");CHKERRQ(ierr);
  /* Create the subcommunicator from the group of owning ranks */
  ierr = MPI_Comm_group(((PetscObject)da)->comm,&group);CHKERRQ(ierr);
  ierr = MPI_Group_incl(group,ict,ranks,&subgroup);CHKERRQ(ierr);
  ierr = MPI_Comm_create(((PetscObject)da)->comm,subgroup,comm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&subgroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&group);CHKERRQ(ierr);
  ierr = PetscFree2(owners,ranks);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
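
/*
   Usage sketch (illustrative only, not part of the PETSc sources): given a 3D
   DMDA named da that was created elsewhere, obtain the communicator of the
   processes owning the global plane z = 0 and perform a collective operation
   restricted to that plane.  On processes that do not own the plane,
   MPI_Comm_create() sets the communicator to MPI_COMM_NULL; since the
   communicator is newly created, it should be freed when no longer needed.

     MPI_Comm       planecomm;
     PetscErrorCode ierr;

     ierr = DMDAGetProcessorSubset(da,DMDA_Z,0,&planecomm);CHKERRQ(ierr);
     if (planecomm != MPI_COMM_NULL) {
       ierr = MPI_Barrier(planecomm);CHKERRQ(ierr);
       ierr = MPI_Comm_free(&planecomm);CHKERRQ(ierr);
     }
*/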

#undef __FUNCT__
#define __FUNCT__ "DMDAGetProcessorSubsets"
/*@C
   DMDAGetProcessorSubsets - Returns communicators consisting only of the
   processors in a DMDA adjacent in a particular dimension,
   corresponding to a logical plane in a 3D grid or a line in a 2D grid.

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
-  dir - Cartesian direction, either DMDA_X, DMDA_Y, or DMDA_Z

   Output Parameter:
.  subcomm - new communicator containing the processors that own the same range of grid indices in direction dir as this processor

   Level: advanced

   Notes:
   Processors are grouped by the first global grid index they own in direction dir;
   each processor receives the communicator of the group it belongs to.

   This routine is useful for distributing one-dimensional data in a tensor product grid.

.keywords: distributed array, get, processor subset
@*/
PetscErrorCode  DMDAGetProcessorSubsets(DM da, DMDADirection dir, MPI_Comm *subcomm)
{
  MPI_Comm       comm;
  MPI_Group      group, subgroup;
  PetscInt       subgroupSize = 0;
  PetscInt      *firstPoints;
  PetscMPIInt    size, *subgroupRanks = PETSC_NULL;
  PetscInt       xs, xm, ys, ym, zs, zm, firstPoint, p;
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da, DM_CLASSID, 1);
  comm = ((PetscObject) da)->comm;
  ierr = DMDAGetCorners(da, &xs, &ys, &zs, &xm, &ym, &zm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  /* The first owned global index in the given direction identifies this process's group */
  if (dir == DMDA_Z) {
    if (dd->dim < 3) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Z invalid for DMDA dim < 3");
    firstPoint = zs;
  } else if (dir == DMDA_Y) {
    if (dd->dim == 1) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Y invalid for DMDA dim = 1");
    firstPoint = ys;
  } else if (dir == DMDA_X) {
    firstPoint = xs;
  } else SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Invalid direction");

  /* Gather every process's first point and collect the ranks that share this process's value */
  ierr = PetscMalloc2(size, PetscInt, &firstPoints, size, PetscMPIInt, &subgroupRanks);CHKERRQ(ierr);
  ierr = MPI_Allgather(&firstPoint, 1, MPIU_INT, firstPoints, 1, MPIU_INT, comm);CHKERRQ(ierr);
  ierr = PetscInfo2(da,"DMDAGetProcessorSubsets: dim=%D, direction=%d, procs: ",dd->dim,(int)dir);CHKERRQ(ierr);
  for (p = 0; p < size; ++p) {
    if (firstPoints[p] == firstPoint) {
      subgroupRanks[subgroupSize++] = p;
      ierr = PetscInfo1(da, "%D ", p);CHKERRQ(ierr);
    }
  }
  ierr = PetscInfo(da, "\n");CHKERRQ(ierr);
  /* Create the subcommunicator for this process's group */
  ierr = MPI_Comm_group(comm, &group);CHKERRQ(ierr);
  ierr = MPI_Group_incl(group, subgroupSize, subgroupRanks, &subgroup);CHKERRQ(ierr);
  ierr = MPI_Comm_create(comm, subgroup, subcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&subgroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&group);CHKERRQ(ierr);
  ierr = PetscFree2(firstPoints, subgroupRanks);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
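
/*
   Usage sketch (illustrative only, not part of the PETSc sources): given a 2D
   DMDA named da that was created elsewhere, obtain for each process the
   communicator of the processes that own the same range of x indices and sum
   a locally computed integer over that group.  Every process belongs to
   exactly one group, so the returned communicator is never MPI_COMM_NULL;
   since it is newly created, it should be freed when no longer needed.  The
   variable names are assumptions for the example.

     MPI_Comm       xcomm;
     PetscInt       nlocal = 1,ntotal;
     PetscErrorCode ierr;

     ierr = DMDAGetProcessorSubsets(da,DMDA_X,&xcomm);CHKERRQ(ierr);
     ierr = MPI_Allreduce(&nlocal,&ntotal,1,MPIU_INT,MPI_SUM,xcomm);CHKERRQ(ierr);
     ierr = MPI_Comm_free(&xcomm);CHKERRQ(ierr);
*/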