
/*
   Code for manipulating distributed regular 1d arrays in parallel.
   This file was created by Peter Mell   6/30/95
*/

#include <petsc-private/dmdaimpl.h>     /*I  "petscdmda.h"   I*/

#include <petscdraw.h>
#undef __FUNCT__
#define __FUNCT__ "DMView_DA_1d"
PetscErrorCode DMView_DA_1d(DM da,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscBool      iascii,isdraw,isbinary;
  DM_DA          *dd = (DM_DA*)da->data;
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscBool ismatlab;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank);CHKERRQ(ierr);

  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);CHKERRQ(ierr);
#endif
  if (iascii) {
    PetscViewerFormat format;

    ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
    if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) {
      DMDALocalInfo info;
      ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,dd->M,dd->m,dd->w,dd->s);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",info.xs,info.xs+info.xm);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
    } else {
      ierr = DMView_DA_VTK(da, viewer);CHKERRQ(ierr);
    }
  } else if (isdraw) {
    PetscDraw draw;
    double    ymin = -1,ymax = 1,xmin = -1,xmax = dd->M,x;
    PetscInt  base;
    char      node[10];
    PetscBool isnull;

    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);

    ierr = PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);CHKERRQ(ierr);
    ierr = PetscDrawSynchronizedClear(draw);CHKERRQ(ierr);

    /* first processor draws all node lines */
    if (!rank) {
      PetscInt xmin_tmp;
      ymin = 0.0; ymax = 0.3;

      for (xmin_tmp=0; xmin_tmp < dd->M; xmin_tmp++) {
        ierr = PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      }

      xmin = 0.0; xmax = dd->M - 1;
      ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
    }

    ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
    ierr = PetscDrawPause(draw);CHKERRQ(ierr);

    /* draw my box */
    ymin = 0; ymax = 0.3; xmin = dd->xs / dd->w; xmax = (dd->xe / dd->w)  - 1;
    ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);

    /* Put in index numbers */
    base = dd->base / dd->w;
    for (x=xmin; x<=xmax; x++) {
      sprintf(node,"%d",(int)base++);
      ierr = PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);CHKERRQ(ierr);
    }

    ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
    ierr = PetscDrawPause(draw);CHKERRQ(ierr);
  } else if (isbinary) {
    ierr = DMView_DA_Binary(da,viewer);CHKERRQ(ierr);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  } else if (ismatlab) {
    ierr = DMView_DA_Matlab(da,viewer);CHKERRQ(ierr);
#endif
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMSetUp_DA_1D"
PetscErrorCode  DMSetUp_DA_1D(DM da)
{
  DM_DA            *dd   = (DM_DA*)da->data;
  const PetscInt   M     = dd->M;
  const PetscInt   dof   = dd->w;
  const PetscInt   s     = dd->s;
  const PetscInt   sDist = s;  /* stencil distance in points */
  const PetscInt   *lx   = dd->lx;
  DMBoundaryType   bx    = dd->bx;
  MPI_Comm         comm;
  Vec              local, global;
  VecScatter       ltog, gtol;
  IS               to, from;
  PetscBool        flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt      rank, size;
  PetscInt         i,*idx,nn,left,xs,xe,x,Xs,Xe,start,end,m,IXs,IXe;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) da, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  dd->m = size;
  m     = dd->m;

  if (s > 0) {
    /* if not communicating data then should be ok to have nothing on some processes */
    if (M < m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"More processes than data points! %D %D",m,M);
    if ((M-1) < s) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %D %D",M-1,s);
  }

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    ierr = PetscMalloc1(m, &dd->lx);CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(NULL,"-da_partition_blockcomm",&flg1,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsGetBool(NULL,"-da_partition_nodes_at_end",&flg2,NULL);CHKERRQ(ierr);
    if (flg1) {      /* Block Comm type Distribution */
      xs = rank*M/m;
      x  = (rank + 1)*M/m - xs;
    } else if (flg2) { /* The leftover nodes are evenly distributed across the last processes */
      x = (M + rank)/m;
      if (M/m == x) xs = rank*x;
      else          xs = rank*(x-1) + (M+rank)%(x*m);
    } else { /* The leftover nodes are evenly distributed across the first M % m processes */
      /* Regular PETSc Distribution */
      x = M/m + ((M % m) > rank);
      if (rank >= (M % m)) xs = (rank * (PetscInt)(M/m) + M % m);
      else                 xs = rank * (PetscInt)(M/m) + rank;
    }
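    /*
       Worked example (illustration only, not part of the original source): with M = 10 grid
       points on m = 3 processes the default distribution gives x = 4,3,3 and xs = 0,4,7 on
       ranks 0,1,2, while -da_partition_nodes_at_end gives x = 3,3,4 with xs = 0,3,6 instead.
    */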
    ierr = MPI_Allgather(&xs,1,MPIU_INT,dd->lx,1,MPIU_INT,comm);CHKERRQ(ierr);
    for (i=0; i<m-1; i++) dd->lx[i] = dd->lx[i+1] - dd->lx[i];
    dd->lx[m-1] = M - dd->lx[m-1];
  } else {
    x  = lx[rank];
    xs = 0;
    for (i=0; i<rank; i++) xs += lx[i];
    /* verify that data user provided is consistent */
    left = xs;
    for (i=rank; i<size; i++) left += lx[i];
    if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M %D %D",left,M);
  }

  /*
   check if the scatter requires more than one process neighbor or wraps around
   the domain more than once
  */
  if ((x < s) && ((M > 1) || (bx == DM_BOUNDARY_PERIODIC))) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);

  xe  = xs + x;

  /* determine ghost region (Xs) and region scattered into (IXs) */
  if (xs-sDist > 0) {
    Xs  = xs - sDist;
    IXs = xs - sDist;
  } else {
    if (bx) Xs = xs - sDist;
    else Xs = 0;
    IXs = 0;
  }
  if (xe+sDist <= M) {
    Xe  = xe + sDist;
    IXe = xe + sDist;
  } else {
    if (bx) Xe = xe + sDist;
    else Xe = M;
    IXe = M;
  }

  if (bx == DM_BOUNDARY_PERIODIC || bx == DM_BOUNDARY_MIRROR) {
    Xs  = xs - sDist;
    Xe  = xe + sDist;
    IXs = xs - sDist;
    IXe = xe + sDist;
  }
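
  /*
     Illustration (example values, not part of the original source): with M = 10, s = 1, and
     DM_BOUNDARY_NONE, a rank with xs = 4 and xe = 7 (owning points 4..6) gets Xs = IXs = 3 and
     Xe = IXe = 8, while the first and last ranks are clipped to 0 and M; periodic and mirror
     boundaries keep the full ghost width on every rank.
  */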

  /* allocate the base parallel and sequential vectors */
  dd->Nlocal = dof*x;
  ierr       = VecCreateMPIWithArray(comm,dof,dd->Nlocal,PETSC_DECIDE,NULL,&global);CHKERRQ(ierr);
  dd->nlocal = dof*(Xe-Xs);
  ierr       = VecCreateSeqWithArray(PETSC_COMM_SELF,dof,dd->nlocal,NULL,&local);CHKERRQ(ierr);

  /* Create Local to Global Vector Scatter Context */
  /* local to global inserts non-ghost point region into global */
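  /* Illustration (not part of the original source): with dof = 1, xs = 4, and Xs = 3 the owned
     values begin at position xs-Xs = 1 of the ghosted local vector, so the scatter below copies
     dof*x entries starting at local index dof*(xs-Xs) into the global positions start..end-1. */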
  ierr = VecGetOwnershipRange(global,&start,&end);CHKERRQ(ierr);
  ierr = ISCreateStride(comm,dof*x,start,1,&to);CHKERRQ(ierr);
  ierr = ISCreateStride(comm,dof*x,dof*(xs-Xs),1,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(local,from,global,to,&ltog);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)da,(PetscObject)ltog);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  ierr = ISCreateStride(comm,dof*(IXe-IXs),dof*(IXs-Xs),1,&to);CHKERRQ(ierr);

  ierr = PetscMalloc1((x+2*(sDist)),&idx);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)da,(x+2*(sDist))*sizeof(PetscInt));CHKERRQ(ierr);

  for (i=0; i<IXs-Xs; i++) idx[i] = -1; /* prepend with -1s if needed for ghosted case */

  nn = IXs-Xs;
  if (bx == DM_BOUNDARY_PERIODIC) { /* Handle all cases with periodic first */
    for (i=0; i<sDist; i++) {  /* Left ghost points */
      if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
      else                 idx[nn++] = M+(xs-sDist+i);
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;  /* Non-ghost points */

    for (i=0; i<sDist; i++) { /* Right ghost points */
      if ((xe+i)<M) idx[nn++] = xe+i;
      else          idx[nn++] = (xe+i) - M;
    }
  } else if (bx == DM_BOUNDARY_MIRROR) { /* Handle the mirror case */
    for (i=0; i<(sDist); i++) {  /* Left ghost points: a negative global position -k reflects to k */
      if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
      else                 idx[nn++] = sDist - i;
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;  /* Non-ghost points */

    for (i=0; i<(sDist); i++) { /* Right ghost points: global position M-1+k reflects to M-1-k */
      if ((xe+i)<M) idx[nn++] = xe+i;
      else          idx[nn++] = M - (i + 2);
    }
  } else {      /* Now do all cases with no periodicity */
    if (0 <= xs-sDist) {
      for (i=0; i<sDist; i++) idx[nn++] = xs - sDist + i;
    } else {
      for (i=0; i<xs; i++) idx[nn++] = i;
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;

    if ((xe+sDist)<=M) {
      for (i=0; i<sDist; i++) idx[nn++] = xe+i;
    } else {
      for (i=xe; i<M; i++) idx[nn++] = i;
    }
  }
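
  /*
     Illustration (example values, not part of the original source): with M = 10 and s = 2 the
     left ghost positions -2,-1 map to global indices 8,9 for DM_BOUNDARY_PERIODIC and to 2,1 for
     DM_BOUNDARY_MIRROR, while the right ghost positions 10,11 map to 0,1 (periodic) and 8,7 (mirror).
  */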

  ierr = ISCreateBlock(comm,dof,nn-IXs+Xs,&idx[IXs-Xs],PETSC_USE_POINTER,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,local,to,&gtol);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)da,(PetscObject)gtol);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = VecDestroy(&local);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);

  dd->xs = dof*xs; dd->xe = dof*xe; dd->ys = 0; dd->ye = 1; dd->zs = 0; dd->ze = 1;
  dd->Xs = dof*Xs; dd->Xe = dof*Xe; dd->Ys = 0; dd->Ye = 1; dd->Zs = 0; dd->Ze = 1;

  dd->gtol      = gtol;
  dd->ltog      = ltog;
  dd->base      = dof*xs;
  da->ops->view = DMView_DA_1d;

  /*
     Set the local to global ordering in the global vector; this allows the use
     of VecSetValuesLocal().
  */
  for (i=0; i<Xe-IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for ghosted case */

  ierr = ISLocalToGlobalMappingCreate(comm,dof,nn,idx,PETSC_OWN_POINTER,&da->ltogmap);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)da,(PetscObject)da->ltogmap);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMDACreate1d"
/*@C
   DMDACreate1d - Creates an object that will manage the communication of one-dimensional
   regular array data that is distributed across some processors.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  bx - type of ghost cells at the boundary that the array should have, if any. Use
          DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, or DM_BOUNDARY_PERIODIC.
.  M - global dimension of the array (use -M to indicate that it may be set to a different value
            from the command line with -da_grid_x <M>)
.  dof - number of degrees of freedom per node
.  s - stencil width
-  lx - array containing the number of nodes in the X direction on each processor,
        or NULL. If non-null, it must be of length equal to the number of processes in the MPI communicator.

   Output Parameter:
.  da - the resulting distributed array object

   Options Database Keys:
+  -dm_view - Calls DMView() at the conclusion of DMDACreate1d()
.  -da_grid_x <nx> - number of grid points in x direction; can be set if M < 0
.  -da_refine_x <rx> - refinement factor
-  -da_refine <n> - refine the DMDA n times before creating it, if M < 0

   Level: beginner

   Notes:
   The array data itself is NOT stored in the DMDA; it is stored in Vec objects.
   The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
   and DMCreateLocalVector(), and calls to VecDuplicate() if more are needed.
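
   A minimal usage sketch (illustration only; the grid size, dof, stencil width, and boundary type are arbitrary):
.vb
     DM  da;
     Vec g,l;
     ierr = DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,128,1,1,NULL,&da);CHKERRQ(ierr);
     ierr = DMCreateGlobalVector(da,&g);CHKERRQ(ierr);
     ierr = DMCreateLocalVector(da,&l);CHKERRQ(ierr);
     ierr = DMGlobalToLocalBegin(da,g,INSERT_VALUES,l);CHKERRQ(ierr);
     ierr = DMGlobalToLocalEnd(da,g,INSERT_VALUES,l);CHKERRQ(ierr);
     ierr = VecDestroy(&l);CHKERRQ(ierr);
     ierr = VecDestroy(&g);CHKERRQ(ierr);
     ierr = DMDestroy(&da);CHKERRQ(ierr);
.ve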

.keywords: distributed array, create, one-dimensional

.seealso: DMDestroy(), DMView(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDASetRefinementFactor(),
          DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMLocalToLocalBegin(), DMLocalToLocalEnd(), DMDAGetRefinementFactor(),
          DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMLoad(), DMDAGetOwnershipRanges()

@*/
PetscErrorCode  DMDACreate1d(MPI_Comm comm, DMBoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = DMDACreate(comm, da);CHKERRQ(ierr);
  ierr = DMDASetDim(*da, 1);CHKERRQ(ierr);
  ierr = DMDASetSizes(*da, M, 1, 1);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(*da, bx, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE);CHKERRQ(ierr);
  ierr = DMDASetDof(*da, dof);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(*da, s);CHKERRQ(ierr);
  ierr = DMDASetOwnershipRanges(*da, lx, NULL, NULL);CHKERRQ(ierr);
  /* This violates the behavior for other classes, but right now users expect negative dimensions to be handled this way */
  ierr = DMSetFromOptions(*da);CHKERRQ(ierr);
  ierr = DMSetUp(*da);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}