
/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include <petsc-private/daimpl.h>    /*I   "petscdmda.h"   I*/

#undef __FUNCT__
#define __FUNCT__ "DMDAGetRay"
/*@C
   DMDAGetRay - Returns a vector on process zero that contains a row or column of the values in a DMDA vector

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
.  dir - Cartesian direction, either DMDA_X or DMDA_Y (the DMDA must be 2d; 1d and 3d DMDAs are not supported)
-  gp - global grid point number in this direction

   Output Parameters:
+  newvec - the new vector that can hold the values (size zero on all processes except process 0)
-  scatter - the VecScatter that will map from the original vector to the slice

   Level: advanced

   Notes:
   All processors that share the DMDA must call this with the same gp value
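
   A typical calling sequence might look as follows (v is assumed to be a global vector obtained from the DMDA and j a grid index known to all ranks; both names are illustrative):
.vb
   Vec        ray;
   VecScatter scatter;
   DMDAGetRay(da,DMDA_Y,j,&ray,&scatter);
   VecScatterBegin(scatter,v,ray,INSERT_VALUES,SCATTER_FORWARD);
   VecScatterEnd(scatter,v,ray,INSERT_VALUES,SCATTER_FORWARD);
   /* ... use the entries of ray on process 0 ... */
   VecScatterDestroy(&scatter);
   VecDestroy(&ray);
.ve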

.keywords: distributed array, get, processor subset
@*/
PetscErrorCode  DMDAGetRay(DM da,DMDADirection dir,PetscInt gp,Vec *newvec,VecScatter *scatter)
{
  PetscMPIInt    rank;
  DM_DA          *dd = (DM_DA*)da->data;
  PetscErrorCode ierr;
  IS             is;
  AO             ao;
  Vec            vec;
  PetscInt       *indices,i,j;

  PetscFunctionBegin;
  if (dd->dim == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot get slice from 1d DMDA");
  if (dd->dim == 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot get slice from 3d DMDA");
  ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank);CHKERRQ(ierr);
  if (!rank) {
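    /* Process 0 builds the natural-ordering indices of the requested line;
       AOApplicationToPetsc() below converts them to the PETSc (parallel) ordering */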
    if (dir == DMDA_Y) {
      ierr       = PetscMalloc(dd->w*dd->M*sizeof(PetscInt),&indices);CHKERRQ(ierr);
      indices[0] = gp*dd->M*dd->w;
      for (i=1; i<dd->M*dd->w; i++) indices[i] = indices[i-1] + 1;

      ierr = AOApplicationToPetsc(ao,dd->M*dd->w,indices);CHKERRQ(ierr);
      ierr = VecCreate(PETSC_COMM_SELF,newvec);CHKERRQ(ierr);
      ierr = VecSetBlockSize(*newvec,dd->w);CHKERRQ(ierr);
      ierr = VecSetSizes(*newvec,dd->M*dd->w,PETSC_DETERMINE);CHKERRQ(ierr);
      ierr = VecSetType(*newvec,VECSEQ);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,dd->w*dd->M,indices,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    } else if (dir == DMDA_X) {
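      /* Column gp: each of the N grid rows contributes w consecutive dofs starting
         at x-index gp; successive rows are dd->w*dd->M entries apart in the natural ordering */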
      ierr       = PetscMalloc(dd->w*dd->N*sizeof(PetscInt),&indices);CHKERRQ(ierr);
      indices[0] = dd->w*gp;
      for (j=1; j<dd->w; j++) indices[j] = indices[j-1] + 1;
      for (i=1; i<dd->N; i++) {
        indices[i*dd->w] = indices[i*dd->w-1] + dd->w*dd->M - dd->w + 1;
        for (j=1; j<dd->w; j++) indices[i*dd->w + j] = indices[i*dd->w + j - 1] + 1;
      }
      ierr = AOApplicationToPetsc(ao,dd->w*dd->N,indices);CHKERRQ(ierr);
      ierr = VecCreate(PETSC_COMM_SELF,newvec);CHKERRQ(ierr);
      ierr = VecSetBlockSize(*newvec,dd->w);CHKERRQ(ierr);
      ierr = VecSetSizes(*newvec,dd->N*dd->w,PETSC_DETERMINE);CHKERRQ(ierr);
      ierr = VecSetType(*newvec,VECSEQ);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,dd->w*dd->N,indices,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Unknown DMDADirection");
  } else {
    ierr = VecCreateSeq(PETSC_COMM_SELF,0,newvec);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF,0,NULL,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
  }
  ierr = DMGetGlobalVector(da,&vec);CHKERRQ(ierr);
  ierr = VecScatterCreate(vec,is,*newvec,NULL,scatter);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(da,&vec);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAGetProcessorSubset"
/*@C
   DMDAGetProcessorSubset - Returns a communicator consisting only of the
   processors in a DMDA that own a particular global x, y, or z grid point
   (corresponding to a logical plane in a 3D grid or a line in a 2D grid).

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
.  dir - Cartesian direction, either DMDA_X, DMDA_Y, or DMDA_Z
-  gp - global grid point number in this direction

   Output Parameters:
.  comm - new communicator

   Level: advanced

   Notes:
   All processors that share the DMDA must call this with the same gp value

   This routine is particularly useful to compute boundary conditions
   or other application-specific calculations that require manipulating
   sets of data throughout a logical plane of grid points.
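
   An illustrative sketch (the plane index k is a placeholder; MPI_Comm_create() hands MPI_COMM_NULL to the ranks outside the subgroup):
.vb
   MPI_Comm plane;
   DMDAGetProcessorSubset(da,DMDA_Z,k,&plane);
   if (plane != MPI_COMM_NULL) {
     /* only the ranks owning global z grid point k get here */
   }
.ve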

.keywords: distributed array, get, processor subset
@*/
PetscErrorCode  DMDAGetProcessorSubset(DM da,DMDADirection dir,PetscInt gp,MPI_Comm *comm)
{
  MPI_Group      group,subgroup;
  PetscErrorCode ierr;
  PetscInt       i,ict,flag,*owners,xs,xm,ys,ym,zs,zm;
  PetscMPIInt    size,*ranks = NULL;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  flag = 0;
  ierr = DMDAGetCorners(da,&xs,&ys,&zs,&xm,&ym,&zm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);CHKERRQ(ierr);
  if (dir == DMDA_Z) {
    if (dd->dim < 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Z invalid for DMDA dim < 3");
    if (gp < 0 || gp > dd->P) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"invalid grid point");
    if (gp >= zs && gp < zs+zm) flag = 1;
  } else if (dir == DMDA_Y) {
    if (dd->dim == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Y invalid for DMDA dim = 1");
    if (gp < 0 || gp > dd->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"invalid grid point");
    if (gp >= ys && gp < ys+ym) flag = 1;
  } else if (dir == DMDA_X) {
    if (gp < 0 || gp > dd->M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"invalid grid point");
    if (gp >= xs && gp < xs+xm) flag = 1;
  } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"Invalid direction");

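  /* Gather every rank's ownership flag, then collect the owning ranks into a subgroup */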
  ierr = PetscMalloc2(size,PetscInt,&owners,size,PetscMPIInt,&ranks);CHKERRQ(ierr);
  ierr = MPI_Allgather(&flag,1,MPIU_INT,owners,1,MPIU_INT,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
  ict  = 0;
  ierr = PetscInfo2(da,"DMDAGetProcessorSubset: dim=%D, direction=%d, procs: ",dd->dim,(int)dir);CHKERRQ(ierr);
  for (i=0; i<size; i++) {
    if (owners[i]) {
      ranks[ict] = i; ict++;
      ierr       = PetscInfo1(da,"%D ",i);CHKERRQ(ierr);
    }
  }
  ierr = PetscInfo(da,"\n");CHKERRQ(ierr);
  ierr = MPI_Comm_group(PetscObjectComm((PetscObject)da),&group);CHKERRQ(ierr);
  ierr = MPI_Group_incl(group,ict,ranks,&subgroup);CHKERRQ(ierr);
  ierr = MPI_Comm_create(PetscObjectComm((PetscObject)da),subgroup,comm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&subgroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&group);CHKERRQ(ierr);
  ierr = PetscFree2(owners,ranks);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAGetProcessorSubsets"
/*@C
   DMDAGetProcessorSubsets - Returns communicators consisting only of the
   processors in a DMDA adjacent in a particular dimension,
   corresponding to a logical plane in a 3D grid or a line in a 2D grid.

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
-  dir - Cartesian direction, either DMDA_X, DMDA_Y, or DMDA_Z

   Output Parameters:
.  subcomm - new communicator

   Level: advanced

   Notes:
   This routine is useful for distributing one-dimensional data in a tensor product grid.
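
   A minimal sketch: each rank receives the subcommunicator of the ranks whose subdomains start at the same x grid point as its own.
.vb
   MPI_Comm line;
   DMDAGetProcessorSubsets(da,DMDA_X,&line);
.ve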

.keywords: distributed array, get, processor subset
@*/
PetscErrorCode  DMDAGetProcessorSubsets(DM da, DMDADirection dir, MPI_Comm *subcomm)
{
  MPI_Comm       comm;
  MPI_Group      group, subgroup;
  PetscInt       subgroupSize = 0;
  PetscInt       *firstPoints;
  PetscMPIInt    size, *subgroupRanks = NULL;
  PetscInt       xs, xm, ys, ym, zs, zm, firstPoint, p;
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da, DM_CLASSID, 1);
  comm = PetscObjectComm((PetscObject)da);
  ierr = DMDAGetCorners(da, &xs, &ys, &zs, &xm, &ym, &zm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  if (dir == DMDA_Z) {
    if (dd->dim < 3) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Z invalid for DMDA dim < 3");
    firstPoint = zs;
  } else if (dir == DMDA_Y) {
    if (dd->dim == 1) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"DMDA_Y invalid for DMDA dim = 1");
    firstPoint = ys;
  } else if (dir == DMDA_X) {
    firstPoint = xs;
  } else SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Invalid direction");

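  /* Ranks whose subdomains begin at the same grid point along dir are grouped into one subcommunicator */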
  ierr = PetscMalloc2(size, PetscInt, &firstPoints, size, PetscMPIInt, &subgroupRanks);CHKERRQ(ierr);
  ierr = MPI_Allgather(&firstPoint, 1, MPIU_INT, firstPoints, 1, MPIU_INT, comm);CHKERRQ(ierr);
  ierr = PetscInfo2(da,"DMDAGetProcessorSubsets: dim=%D, direction=%d, procs: ",dd->dim,(int)dir);CHKERRQ(ierr);
  for (p = 0; p < size; ++p) {
    if (firstPoints[p] == firstPoint) {
      subgroupRanks[subgroupSize++] = p;
      ierr = PetscInfo1(da, "%D ", p);CHKERRQ(ierr);
    }
  }
  ierr = PetscInfo(da, "\n");CHKERRQ(ierr);
  ierr = MPI_Comm_group(comm, &group);CHKERRQ(ierr);
  ierr = MPI_Group_incl(group, subgroupSize, subgroupRanks, &subgroup);CHKERRQ(ierr);
  ierr = MPI_Comm_create(comm, subgroup, subcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&subgroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&group);CHKERRQ(ierr);
  ierr = PetscFree2(firstPoints, subgroupRanks);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}