xref: /petsc/src/dm/impls/da/da.c (revision 087e84ac3e51d503ac665666b40cc5b500e5d8a5)
1 #include <petsc/private/dmdaimpl.h>    /*I   "petscdmda.h"   I*/
2 
#undef __FUNCT__
#define __FUNCT__ "DMDASetSizes"
/*@
  DMDASetSizes - Sets the global sizes

  Logically Collective on DMDA

  Input Parameters:
+ da - the DMDA
. M - the global X size (or PETSC_DECIDE)
. N - the global Y size (or PETSC_DECIDE)
- P - the global Z size (or PETSC_DECIDE)

  Notes: must be called before DMSetUp(); an error is generated otherwise.

  Level: intermediate

.seealso: DMDAGetSize(), PetscSplitOwnership()
@*/
PetscErrorCode  DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da, DM_CLASSID, 1);
  /* sizes must match on every rank of the DMDA's communicator */
  PetscValidLogicalCollectiveInt(da,M,2);
  PetscValidLogicalCollectiveInt(da,N,3);
  PetscValidLogicalCollectiveInt(da,P,4);
  /* the layout is locked in by DMSetUp(); changing sizes afterwards would invalidate it */
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");

  dd->M = M;
  dd->N = N;
  dd->P = P;
  PetscFunctionReturn(0);
}
36 
37 #undef __FUNCT__
38 #define __FUNCT__ "DMDASetNumProcs"
39 /*@
40   DMDASetNumProcs - Sets the number of processes in each dimension
41 
42   Logically Collective on DMDA
43 
44   Input Parameters:
45 + da - the DMDA
46 . m - the number of X procs (or PETSC_DECIDE)
47 . n - the number of Y procs (or PETSC_DECIDE)
48 - p - the number of Z procs (or PETSC_DECIDE)
49 
50   Level: intermediate
51 
52 .seealso: DMDASetSizes(), DMDAGetSize(), PetscSplitOwnership()
53 @*/
54 PetscErrorCode  DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p)
55 {
56   DM_DA          *dd = (DM_DA*)da->data;
57   PetscErrorCode ierr;
58   PetscMPIInt    size;
59 
60   PetscFunctionBegin;
61   PetscValidHeaderSpecific(da, DM_CLASSID, 1);
62   PetscValidLogicalCollectiveInt(da,m,2);
63   PetscValidLogicalCollectiveInt(da,n,3);
64   PetscValidLogicalCollectiveInt(da,p,4);
65   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
66   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)da),&size);CHKERRQ(ierr);
67   dd->m = m;
68   dd->n = n;
69   dd->p = p;
70   if (da->dim == 2) {
71     if ((dd->m > 0) && (dd->n < 0)) {
72       dd->n = size/dd->m;
73       if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in X direction not divisible into comm size %d",m,size);
74     }
75     if ((dd->n > 0) && (dd->m < 0)) {
76       dd->m = size/dd->n;
77       if (dd->n*dd->m != size) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%D processes in Y direction not divisible into comm size %d",n,size);
78     }
79     if (m*n != size) SETERRQ3(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Number processes in x %D times y %D direction must each MPI_Comm_size() %d",m,n,size);
80   } else if (da->dim == 3) {
81     if (m*n*p != size) SETERRQ4(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Number processes in x %D times y %D time z %D direction must each MPI_Comm_size() %d",m,n,p,size);
82   }
83   PetscFunctionReturn(0);
84 }
85 
#undef __FUNCT__
#define __FUNCT__ "DMDASetBoundaryType"
/*@
  DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries.

  Logically Collective on DMDA

  Input Parameters:
+ da    - The DMDA
- bx,by,bz - One of DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_PERIODIC

  Notes: must be called before DMSetUp().

  Level: intermediate

.keywords:  distributed array, periodicity
.seealso: DMDACreate(), DMDestroy(), DMDA, DMBoundaryType
@*/
PetscErrorCode  DMDASetBoundaryType(DM da,DMBoundaryType bx,DMBoundaryType by,DMBoundaryType bz)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* boundary types must match on every rank of the DMDA's communicator */
  PetscValidLogicalCollectiveEnum(da,bx,2);
  PetscValidLogicalCollectiveEnum(da,by,3);
  PetscValidLogicalCollectiveEnum(da,bz,4);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  dd->bx = bx;
  dd->by = by;
  dd->bz = bz;
  PetscFunctionReturn(0);
}
117 
#undef __FUNCT__
#define __FUNCT__ "DMDASetDof"
/*@
  DMDASetDof - Sets the number of degrees of freedom per vertex

  Logically Collective on DMDA

  Input Parameters:
+ da  - The DMDA
- dof - Number of degrees of freedom

  Notes: must be called before DMSetUp(). Also sets the block size of the DM.

  Level: intermediate

.keywords:  distributed array, degrees of freedom
.seealso: DMDAGetDof(), DMDACreate(), DMDestroy(), DMDA
@*/
PetscErrorCode  DMDASetDof(DM da, PetscInt dof)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,dof,2);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  dd->w  = dof;
  da->bs = dof;  /* the dof per vertex doubles as the vector/matrix block size */
  PetscFunctionReturn(0);
}
146 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetDof"
/*@
  DMDAGetDof - Gets the number of degrees of freedom per vertex

  Not collective

  Input Parameter:
. da  - The DMDA

  Output Parameter:
. dof - Number of degrees of freedom

  Level: intermediate

.keywords:  distributed array, degrees of freedom
.seealso: DMDASetDof(), DMDACreate(), DMDestroy(), DMDA
@*/
PetscErrorCode DMDAGetDof(DM da, PetscInt *dof)
{
  DM_DA *dd = (DM_DA *) da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(dof,2);
  *dof = dd->w;
  PetscFunctionReturn(0);
}
175 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetOverlap"
/*@
  DMDAGetOverlap - Gets the size of the per-processor overlap.

  Not collective

  Input Parameter:
. da  - The DMDA

  Output Parameters:
+ x   - Overlap in the x direction
. y   - Overlap in the y direction
- z   - Overlap in the z direction

  Notes: pass NULL for any value not needed.

  Level: intermediate

.keywords:  distributed array, overlap, domain decomposition
.seealso: DMDACreateDomainDecomposition(), DMDASetOverlap(), DMDA
@*/
PetscErrorCode  DMDAGetOverlap(DM da,PetscInt *x,PetscInt *y,PetscInt *z)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (x) *x = dd->xol;
  if (y) *y = dd->yol;
  if (z) *z = dd->zol;
  PetscFunctionReturn(0);
}
207 
#undef __FUNCT__
#define __FUNCT__ "DMDASetOverlap"
/*@
  DMDASetOverlap - Sets the size of the per-processor overlap.

  Logically Collective on DMDA

  Input Parameters:
+ da  - The DMDA
. x   - Overlap in the x direction
. y   - Overlap in the y direction
- z   - Overlap in the z direction

  Level: intermediate

.keywords:  distributed array, overlap, domain decomposition
.seealso: DMDACreateDomainDecomposition(), DMDAGetOverlap(), DMDA
@*/
PetscErrorCode  DMDASetOverlap(DM da,PetscInt x,PetscInt y,PetscInt z)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* overlaps must match on every rank of the DMDA's communicator */
  PetscValidLogicalCollectiveInt(da,x,2);
  PetscValidLogicalCollectiveInt(da,y,3);
  PetscValidLogicalCollectiveInt(da,z,4);
  dd->xol = x;
  dd->yol = y;
  dd->zol = z;
  PetscFunctionReturn(0);
}
240 
241 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetNumLocalSubDomains"
/*@
  DMDAGetNumLocalSubDomains - Gets the number of local subdomains created upon decomposition.

  Not collective

  Input Parameter:
. da  - The DMDA

  Output Parameter:
. Nsub - Number of local subdomains created upon decomposition, or NULL if not needed

  Level: intermediate

.keywords:  distributed array, domain decomposition
.seealso: DMDACreateDomainDecomposition(), DMDASetNumLocalSubDomains(), DMDA
@*/
PetscErrorCode  DMDAGetNumLocalSubDomains(DM da,PetscInt *Nsub)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (Nsub) *Nsub = dd->Nsub;
  PetscFunctionReturn(0);
}
269 
#undef __FUNCT__
#define __FUNCT__ "DMDASetNumLocalSubDomains"
/*@
  DMDASetNumLocalSubDomains - Sets the number of local subdomains created upon decomposition.

  Logically Collective on DMDA

  Input Parameters:
+ da  - The DMDA
- Nsub - The number of local subdomains requested

  Level: intermediate

.keywords:  distributed array, domain decomposition
.seealso: DMDACreateDomainDecomposition(), DMDAGetNumLocalSubDomains(), DMDA
@*/
PetscErrorCode  DMDASetNumLocalSubDomains(DM da,PetscInt Nsub)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,Nsub,2);
  dd->Nsub = Nsub;
  PetscFunctionReturn(0);
}
296 
#undef __FUNCT__
#define __FUNCT__ "DMDASetOffset"
/*@
  DMDASetOffset - Sets the index offset of the DA.

  Collective on DA

  Input Parameters:
+ da  - The DMDA
. xo  - The offset in the x direction
. yo  - The offset in the y direction
. zo  - The offset in the z direction
. Mo  - The global size in the x direction
. No  - The global size in the y direction
- Po  - The global size in the z direction

  Level: intermediate

  Notes: This is used primarily to overlap a computation on a local DA with that on a global DA without
  changing boundary conditions or subdomain features that depend upon the global offsets.

.keywords:  distributed array, degrees of freedom
.seealso: DMDAGetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode  DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,xo,2);
  PetscValidLogicalCollectiveInt(da,yo,3);
  PetscValidLogicalCollectiveInt(da,zo,4);
  PetscValidLogicalCollectiveInt(da,Mo,5);
  PetscValidLogicalCollectiveInt(da,No,6);
  PetscValidLogicalCollectiveInt(da,Po,7);
  dd->xo = xo;
  dd->yo = yo;
  dd->zo = zo;
  dd->Mo = Mo;
  dd->No = No;
  dd->Po = Po;

  /* keep the coordinate DM (if one exists) consistent with the same offsets */
  if (da->coordinateDM) {
    ierr = DMDASetOffset(da->coordinateDM,xo,yo,zo,Mo,No,Po);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
343 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetOffset"
/*@
  DMDAGetOffset - Gets the index offset of the DA.

  Not collective

  Input Parameter:
. da  - The DMDA

  Output Parameters:
+ xo  - The offset in the x direction
. yo  - The offset in the y direction
. zo  - The offset in the z direction
. Mo  - The global size in the x direction
. No  - The global size in the y direction
- Po  - The global size in the z direction

  Notes: pass NULL for any value not needed.

  Level: intermediate

.keywords:  distributed array, degrees of freedom
.seealso: DMDASetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode  DMDAGetOffset(DM da,PetscInt *xo,PetscInt *yo,PetscInt *zo,PetscInt *Mo,PetscInt *No,PetscInt *Po)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (xo) *xo = dd->xo;
  if (yo) *yo = dd->yo;
  if (zo) *zo = dd->zo;
  if (Mo) *Mo = dd->Mo;
  if (No) *No = dd->No;
  if (Po) *Po = dd->Po;
  PetscFunctionReturn(0);
}
381 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetNonOverlappingRegion"
/*@
  DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain DM.

  Not collective

  Input Parameter:
. da  - The DMDA

  Output Parameters:
+ xs  - The start of the region in x
. ys  - The start of the region in y
. zs  - The start of the region in z
. xm  - The size of the region in x
. ym  - The size of the region in y
- zm  - The size of the region in z

  Notes: pass NULL for any value not needed.

  Level: intermediate

.keywords:  distributed array, degrees of freedom
.seealso: DMDAGetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode  DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm)
{
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (xs) *xs = dd->nonxs;
  if (ys) *ys = dd->nonys;
  if (zs) *zs = dd->nonzs;
  if (xm) *xm = dd->nonxm;
  if (ym) *ym = dd->nonym;
  if (zm) *zm = dd->nonzm;
  PetscFunctionReturn(0);
}
419 
420 
#undef __FUNCT__
#define __FUNCT__ "DMDASetNonOverlappingRegion"
/*@
  DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain DM.

  Collective on DA

  Input Parameters:
+ da  - The DMDA
. xs  - The start of the region in x
. ys  - The start of the region in y
. zs  - The start of the region in z
. xm  - The size of the region in x
. ym  - The size of the region in y
- zm  - The size of the region in z

  Level: intermediate

.keywords:  distributed array, degrees of freedom
.seealso: DMDAGetOffset(), DMDAVecGetArray()
@*/
PetscErrorCode  DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm)
{
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,xs,2);
  PetscValidLogicalCollectiveInt(da,ys,3);
  PetscValidLogicalCollectiveInt(da,zs,4);
  PetscValidLogicalCollectiveInt(da,xm,5);
  PetscValidLogicalCollectiveInt(da,ym,6);
  PetscValidLogicalCollectiveInt(da,zm,7);
  dd->nonxs = xs;
  dd->nonys = ys;
  dd->nonzs = zs;
  dd->nonxm = xm;
  dd->nonym = ym;
  dd->nonzm = zm;

  PetscFunctionReturn(0);
}
463 
#undef __FUNCT__
#define __FUNCT__ "DMDASetStencilType"
/*@
  DMDASetStencilType - Sets the type of the communication stencil

  Logically Collective on DMDA

  Input Parameters:
+ da    - The DMDA
- stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.

  Notes: must be called before DMSetUp().

  Level: intermediate

.keywords:  distributed array, stencil
.seealso: DMDACreate(), DMDestroy(), DMDA
@*/
PetscErrorCode  DMDASetStencilType(DM da, DMDAStencilType stype)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveEnum(da,stype,2);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  dd->stencil_type = stype;
  PetscFunctionReturn(0);
}
491 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetStencilType"
/*@
  DMDAGetStencilType - Gets the type of the communication stencil

  Not collective

  Input Parameter:
. da    - The DMDA

  Output Parameter:
. stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.

  Level: intermediate

.keywords:  distributed array, stencil
.seealso: DMDACreate(), DMDestroy(), DMDA
@*/
PetscErrorCode DMDAGetStencilType(DM da, DMDAStencilType *stype)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(stype,2);
  *stype = dd->stencil_type;
  PetscFunctionReturn(0);
}
520 
#undef __FUNCT__
#define __FUNCT__ "DMDASetStencilWidth"
/*@
  DMDASetStencilWidth - Sets the width of the communication stencil

  Logically Collective on DMDA

  Input Parameters:
+ da    - The DMDA
- width - The stencil width

  Notes: must be called before DMSetUp().

  Level: intermediate

.keywords:  distributed array, stencil
.seealso: DMDACreate(), DMDestroy(), DMDA
@*/
PetscErrorCode  DMDASetStencilWidth(DM da, PetscInt width)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,width,2);
  if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
  dd->s = width;
  PetscFunctionReturn(0);
}
548 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetStencilWidth"
/*@
  DMDAGetStencilWidth - Gets the width of the communication stencil

  Not collective

  Input Parameter:
. da    - The DMDA

  Output Parameter:
. width - The stencil width

  Level: intermediate

.keywords:  distributed array, stencil
.seealso: DMDACreate(), DMDestroy(), DMDA
@*/
PetscErrorCode DMDAGetStencilWidth(DM da, PetscInt *width)
{
  DM_DA *dd = (DM_DA *) da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(width,2);
  *width = dd->s;
  PetscFunctionReturn(0);
}
577 
578 #undef __FUNCT__
579 #define __FUNCT__ "DMDACheckOwnershipRanges_Private"
580 static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da,PetscInt M,PetscInt m,const PetscInt lx[])
581 {
582   PetscInt i,sum;
583 
584   PetscFunctionBegin;
585   if (M < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Global dimension not set");
586   for (i=sum=0; i<m; i++) sum += lx[i];
587   if (sum != M) SETERRQ2(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Ownership ranges sum to %D but global dimension is %D",sum,M);
588   PetscFunctionReturn(0);
589 }
590 
591 #undef __FUNCT__
592 #define __FUNCT__ "DMDASetOwnershipRanges"
593 /*@
594   DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process
595 
596   Logically Collective on DMDA
597 
598   Input Parameter:
599 + da - The DMDA
600 . lx - array containing number of nodes in the X direction on each process, or NULL. If non-null, must be of length da->m
601 . ly - array containing number of nodes in the Y direction on each process, or NULL. If non-null, must be of length da->n
602 - lz - array containing number of nodes in the Z direction on each process, or NULL. If non-null, must be of length da->p.
603 
604   Level: intermediate
605 
606   Note: these numbers are NOT multiplied by the number of dof per node.
607 
608 .keywords:  distributed array
609 .seealso: DMDACreate(), DMDestroy(), DMDA
610 @*/
611 PetscErrorCode  DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[])
612 {
613   PetscErrorCode ierr;
614   DM_DA          *dd = (DM_DA*)da->data;
615 
616   PetscFunctionBegin;
617   PetscValidHeaderSpecific(da,DM_CLASSID,1);
618   if (da->setupcalled) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()");
619   if (lx) {
620     if (dd->m < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
621     ierr = DMDACheckOwnershipRanges_Private(da,dd->M,dd->m,lx);CHKERRQ(ierr);
622     if (!dd->lx) {
623       ierr = PetscMalloc1(dd->m, &dd->lx);CHKERRQ(ierr);
624     }
625     ierr = PetscMemcpy(dd->lx, lx, dd->m*sizeof(PetscInt));CHKERRQ(ierr);
626   }
627   if (ly) {
628     if (dd->n < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
629     ierr = DMDACheckOwnershipRanges_Private(da,dd->N,dd->n,ly);CHKERRQ(ierr);
630     if (!dd->ly) {
631       ierr = PetscMalloc1(dd->n, &dd->ly);CHKERRQ(ierr);
632     }
633     ierr = PetscMemcpy(dd->ly, ly, dd->n*sizeof(PetscInt));CHKERRQ(ierr);
634   }
635   if (lz) {
636     if (dd->p < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs");
637     ierr = DMDACheckOwnershipRanges_Private(da,dd->P,dd->p,lz);CHKERRQ(ierr);
638     if (!dd->lz) {
639       ierr = PetscMalloc1(dd->p, &dd->lz);CHKERRQ(ierr);
640     }
641     ierr = PetscMemcpy(dd->lz, lz, dd->p*sizeof(PetscInt));CHKERRQ(ierr);
642   }
643   PetscFunctionReturn(0);
644 }
645 
#undef __FUNCT__
#define __FUNCT__ "DMDASetInterpolationType"
/*@
       DMDASetInterpolationType - Sets the type of interpolation that will be
          returned by DMCreateInterpolation()

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ctype - DMDA_Q1 and DMDA_Q0 are currently the only supported forms

   Level: intermediate

   Notes: you should call this on the coarser of the two DMDAs you pass to DMCreateInterpolation()

.keywords:  distributed array, interpolation

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType
@*/
PetscErrorCode  DMDASetInterpolationType(DM da,DMDAInterpolationType ctype)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveEnum(da,ctype,2);
  dd->interptype = ctype;
  PetscFunctionReturn(0);
}
676 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetInterpolationType"
/*@
       DMDAGetInterpolationType - Gets the type of interpolation that will be
          used by DMCreateInterpolation()

   Not Collective

   Input Parameter:
.  da - distributed array

   Output Parameter:
.  ctype - interpolation type (DMDA_Q1 and DMDA_Q0 are currently the only supported forms)

   Level: intermediate

.keywords:  distributed array, interpolation

.seealso: DMDA, DMDAInterpolationType, DMDASetInterpolationType(), DMCreateInterpolation()
@*/
PetscErrorCode  DMDAGetInterpolationType(DM da,DMDAInterpolationType *ctype)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(ctype,2);
  *ctype = dd->interptype;
  PetscFunctionReturn(0);
}
707 
708 #undef __FUNCT__
709 #define __FUNCT__ "DMDAGetNeighbors"
710 /*@C
711       DMDAGetNeighbors - Gets an array containing the MPI rank of all the current
712         processes neighbors.
713 
714     Not Collective
715 
716    Input Parameter:
717 .     da - the DMDA object
718 
719    Output Parameters:
720 .     ranks - the neighbors ranks, stored with the x index increasing most rapidly.
721               this process itself is in the list
722 
723    Notes: In 2d the array is of length 9, in 3d of length 27
724           Not supported in 1d
725           Do not free the array, it is freed when the DMDA is destroyed.
726 
727    Fortran Notes: In fortran you must pass in an array of the appropriate length.
728 
729    Level: intermediate
730 
731 @*/
732 PetscErrorCode  DMDAGetNeighbors(DM da,const PetscMPIInt *ranks[])
733 {
734   DM_DA *dd = (DM_DA*)da->data;
735 
736   PetscFunctionBegin;
737   PetscValidHeaderSpecific(da,DM_CLASSID,1);
738   *ranks = dd->neighbors;
739   PetscFunctionReturn(0);
740 }
741 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetOwnershipRanges"
/*@C
      DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process

    Not Collective

   Input Parameter:
.     da - the DMDA object

   Output Parameters:
+     lx - ownership along x direction (optional, pass NULL if not needed)
.     ly - ownership along y direction (optional, pass NULL if not needed)
-     lz - ownership along z direction (optional, pass NULL if not needed)

   Level: intermediate

    Note: these correspond to the optional final arguments passed to DMDACreate(), DMDACreate2d(), DMDACreate3d()

    In Fortran one must pass in arrays lx, ly, and lz that are long enough to hold the values; the sixth, seventh and
    eighth arguments from DMDAGetInfo()

     In C you should not free these arrays, nor change the values in them. They will only have valid values while the
    DMDA they came from still exists (has not been destroyed).

    These numbers are NOT multiplied by the number of dof per node.

.seealso: DMDAGetCorners(), DMDAGetGhostCorners(), DMDACreate(), DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), VecGetOwnershipRanges()
@*/
PetscErrorCode  DMDAGetOwnershipRanges(DM da,const PetscInt *lx[],const PetscInt *ly[],const PetscInt *lz[])
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (lx) *lx = dd->lx;
  if (ly) *ly = dd->ly;
  if (lz) *lz = dd->lz;
  PetscFunctionReturn(0);
}
782 
#undef __FUNCT__
#define __FUNCT__ "DMDASetRefinementFactor"
/*@
     DMDASetRefinementFactor - Set the ratios that the DMDA grid is refined

    Logically Collective on DMDA

  Input Parameters:
+    da - the DMDA object
.    refine_x - ratio of fine grid to coarse in x direction (2 by default)
.    refine_y - ratio of fine grid to coarse in y direction (2 by default)
-    refine_z - ratio of fine grid to coarse in z direction (2 by default)

  Options Database:
+  -da_refine_x - refinement ratio in x direction
.  -da_refine_y - refinement ratio in y direction
-  -da_refine_z - refinement ratio in z direction

  Level: intermediate

    Notes: Pass PETSC_IGNORE to leave a value unchanged; any non-positive value is ignored

.seealso: DMRefine(), DMDAGetRefinementFactor()
@*/
PetscErrorCode  DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y,PetscInt refine_z)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidLogicalCollectiveInt(da,refine_x,2);
  PetscValidLogicalCollectiveInt(da,refine_y,3);
  PetscValidLogicalCollectiveInt(da,refine_z,4);

  /* only positive ratios overwrite the stored values; PETSC_IGNORE (non-positive) leaves them alone */
  if (refine_x > 0) dd->refine_x = refine_x;
  if (refine_y > 0) dd->refine_y = refine_y;
  if (refine_z > 0) dd->refine_z = refine_z;
  PetscFunctionReturn(0);
}
822 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetRefinementFactor"
/*@C
     DMDAGetRefinementFactor - Gets the ratios that the DMDA grid is refined

    Not Collective

  Input Parameter:
.    da - the DMDA object

  Output Parameters:
+    refine_x - ratio of fine grid to coarse in x direction (2 by default)
.    refine_y - ratio of fine grid to coarse in y direction (2 by default)
-    refine_z - ratio of fine grid to coarse in z direction (2 by default)

  Level: intermediate

    Notes: Pass NULL for values you do not need

.seealso: DMRefine(), DMDASetRefinementFactor()
@*/
PetscErrorCode  DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y,PetscInt *refine_z)
{
  DM_DA *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (refine_x) *refine_x = dd->refine_x;
  if (refine_y) *refine_y = dd->refine_y;
  if (refine_z) *refine_z = dd->refine_z;
  PetscFunctionReturn(0);
}
855 
#undef __FUNCT__
#define __FUNCT__ "DMDASetGetMatrix"
/*@C
     DMDASetGetMatrix - Sets the routine used by the DMDA to allocate a matrix.

    Logically Collective on DMDA

  Input Parameters:
+    da - the DMDA object
-    f - the function that allocates the matrix for that specific DMDA

  Level: developer

   Notes: See DMDASetBlockFills() that provides a simple way to provide the nonzero structure for
       the diagonal and off-diagonal blocks of the matrix

.seealso: DMCreateMatrix(), DMDASetBlockFills()
@*/
PetscErrorCode  DMDASetGetMatrix(DM da,PetscErrorCode (*f)(DM, Mat*))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* installed into the DM's function table; invoked when the matrix is created */
  da->ops->creatematrix = f;
  PetscFunctionReturn(0);
}
881 
#undef __FUNCT__
#define __FUNCT__ "DMDARefineOwnershipRanges"
/*
  Creates "balanced" ownership ranges after refinement, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.

  Input:  periodic      - whether this direction is periodic (affects the fine grid size)
          stencil_width - coarse stencil width the fine boundaries must stay within
          ratio         - refinement ratio (>= 1)
          m             - number of processes in this direction
          lc[]          - coarse ownership counts, length m
  Output: lf[]          - fine ownership counts, length m
*/
static PetscErrorCode DMDARefineOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lc[],PetscInt lf[])
{
  PetscInt       i,totalc = 0,remaining,startc = 0,startf = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
  if (ratio == 1) {
    /* no refinement: the fine partition is identical to the coarse one */
    ierr = PetscMemcpy(lf,lc,m*sizeof(lc[0]));CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  for (i=0; i<m; i++) totalc += lc[i];
  /* fine grid size: non-periodic grids keep the shared boundary node, hence the (!periodic) terms */
  remaining = (!periodic) + ratio * (totalc - (!periodic));
  for (i=0; i<m; i++) {
    /* greedy target: an even share of what is left, rounded up */
    PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
    if (i == m-1) lf[i] = want;
    else {
      const PetscInt nextc = startc + lc[i];
      /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one
       * coarse stencil width of the first coarse node in the next subdomain. */
      while ((startf+want)/ratio < nextc - stencil_width) want++;
      /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one
       * coarse stencil width of the last coarse node in the current subdomain. */
      while ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width) want--;
      /* Make sure all constraints are satisfied */
      if (want < 0 || want > remaining || ((startf+want)/ratio < nextc - stencil_width)
          || ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible refined ownership range");
    }
    lf[i]      = want;
    startc    += lc[i];
    startf    += lf[i];
    remaining -= lf[i];
  }
  PetscFunctionReturn(0);
}
925 
#undef __FUNCT__
#define __FUNCT__ "DMDACoarsenOwnershipRanges"
/*
  Creates "balanced" ownership ranges after coarsening, constrained by the need for the
  fine grid boundaries to fall within one stencil width of the coarse partition.

  Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.

  Input:  periodic      - whether this direction is periodic (affects the coarse grid size)
          stencil_width - stencil width the fine boundaries must stay within
          ratio         - coarsening ratio (>= 1)
          m             - number of processes in this direction
          lf[]          - fine ownership counts, length m
  Output: lc[]          - coarse ownership counts, length m
*/
static PetscErrorCode DMDACoarsenOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lf[],PetscInt lc[])
{
  PetscInt       i,totalf,remaining,startc,startf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (ratio < 1) SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %D must be at least 1",ratio);
  if (ratio == 1) {
    /* no coarsening: the coarse partition is identical to the fine one */
    ierr = PetscMemcpy(lc,lf,m*sizeof(lf[0]));CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  for (i=0,totalf=0; i<m; i++) totalf += lf[i];
  /* coarse grid size: non-periodic grids keep the shared boundary node, hence the (!periodic) terms */
  remaining = (!periodic) + (totalf - (!periodic)) / ratio;
  for (i=0,startc=0,startf=0; i<m; i++) {
    /* greedy target: an even share of what is left, rounded up */
    PetscInt want = remaining/(m-i) + !!(remaining%(m-i));
    if (i == m-1) lc[i] = want;
    else {
      const PetscInt nextf = startf+lf[i];
      /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine
       * node is within one stencil width. */
      while (nextf/ratio < startc+want-stencil_width) want--;
      /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last
       * fine node is within one stencil width. */
      while ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width) want++;
      /* Make sure all constraints are satisfied */
      if (want < 0 || want > remaining
          || (nextf/ratio < startc+want-stencil_width) || ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible coarsened ownership range");
    }
    lc[i]      = want;
    startc    += lc[i];
    startf    += lf[i];
    remaining -= lc[i];
  }
  PetscFunctionReturn(0);
}
968 
#undef __FUNCT__
#define __FUNCT__ "DMRefine_DA"
/*
  DMRefine_DA - Implementation of DMRefine() for DMDA.

  Creates a new DMDA with refined global sizes on the same communicator and
  process grid as da, copies over user-replaced operations, fill patterns,
  per-level refinement factors, and field names, then interpolates the
  coordinates of da (if set) onto the refined grid.

  NOTE(review): the comm argument is not used here; the refined DM is created
  on PetscObjectComm((PetscObject)da).
*/
PetscErrorCode  DMRefine_DA(DM da,MPI_Comm comm,DM *daref)
{
  PetscErrorCode ierr;
  PetscInt       M,N,P,i,dim;
  DM             da2;
  DM_DA          *dd = (DM_DA*)da->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(daref,3);

  ierr = DMGetDimension(da, &dim);CHKERRQ(ierr);
  /* Refined global sizes: periodic or piecewise-constant (Q0) dimensions refine
     every cell (M -> r*M); otherwise the vertices are refined (M -> 1 + r*(M-1)).
     Dimensions beyond dim keep size 1. */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->refine_x*dd->M;
  } else {
    M = 1 + dd->refine_x*(dd->M - 1);
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->refine_y*dd->N;
    } else {
      N = 1;
    }
  } else {
    N = 1 + dd->refine_y*(dd->N - 1);
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->refine_z*dd->P;
    } else {
      P = 1;
    }
  } else {
    P = 1 + dd->refine_z*(dd->P - 1);
  }
  /* Create the refined DMDA mirroring every property of da */
  ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr);
  ierr = DMSetDimension(da2,dim);CHKERRQ(ierr);
  ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr);
  ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr);
  ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr);
  /* Choose refined ownership ranges compatible with the coarse partition, one dimension at a time */
  if (dim == 3) {
    PetscInt *lx,*ly,*lz;
    ierr = PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_z,dd->p,dd->lz,lz);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr);
    ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr);
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    ierr = PetscMalloc2(dd->m,&lx,dd->n,&ly);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr);
    ierr = PetscFree2(lx,ly);CHKERRQ(ierr);
  } else if (dim == 1) {
    PetscInt *lx;
    ierr = PetscMalloc1(dd->m,&lx);CHKERRQ(ierr);
    ierr = DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr);
    ierr = PetscFree(lx);CHKERRQ(ierr);
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones */
  da2->ops->creatematrix = da->ops->creatematrix;
  /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
  da2->ops->getcoloring = da->ops->getcoloring;
  dd2->interptype       = dd->interptype;

  /* copy fill information if given; each table holds dfill[w]+w+1 PetscInts (CSR-like layout) */
  if (dd->dfill) {
    ierr = PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->ofill) {
    ierr = PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->refine_x;
  dd2->coarsen_y = dd2->refine_y = dd->refine_y;
  dd2->coarsen_z = dd2->refine_z = dd->refine_z;

  /* If per-level refinement factors were given, select the factor for the new level
     (index levelup-leveldown+1) and the factor that undoes this refinement
     (index levelup-leveldown); out-of-range indices keep the defaults set above. */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_z_hier_n) {
      dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_z_hier_n) {
      dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown];
    }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    ierr = PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_y_hier_n) {
      dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_y_hier_n) {
      dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown];
    }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    ierr = PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_x_hier_n) {
      dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_x_hier_n) {
      dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown];
    }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    ierr = PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }


  /* copy vector type information */
  ierr = DMSetVecType(da2,da->vectype);CHKERRQ(ierr);

  /* NOTE(review): lf/lj are shared pointers (shallow copy) with the coarse DM -- confirm ownership */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  da2->leveldown = da->leveldown;
  da2->levelup   = da->levelup + 1;

  ierr = DMSetUp(da2);CHKERRQ(ierr);

  /* interpolate coordinates if they are set on the coarse grid */
  if (da->coordinates) {
    DM  cdaf,cdac;
    Vec coordsc,coordsf;
    Mat II;

    ierr = DMGetCoordinateDM(da,&cdac);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&coordsc);CHKERRQ(ierr);
    ierr = DMGetCoordinateDM(da2,&cdaf);CHKERRQ(ierr);
    /* force creation of the coordinate vector */
    ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da2,&coordsf);CHKERRQ(ierr);
    ierr = DMCreateInterpolation(cdac,cdaf,&II,NULL);CHKERRQ(ierr);
    ierr = MatInterpolate(II,coordsc,coordsf);CHKERRQ(ierr);
    ierr = MatDestroy(&II);CHKERRQ(ierr);
  }

  /* carry field names over to the refined DM */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
    ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr);
  }

  *daref = da2;
  PetscFunctionReturn(0);
}
1131 
1132 
#undef __FUNCT__
#define __FUNCT__ "DMCoarsen_DA"
/*
  DMCoarsen_DA - Implementation of DMCoarsen() for DMDA.

  Creates a new DMDA with coarsened global sizes on the same communicator and
  process grid as da, copies over user-replaced operations, fill patterns,
  per-level refinement factors, and field names, then injects the coordinates
  of da (if set) onto the coarse grid.

  NOTE(review): the comm argument is not used here; the coarse DM is created
  on PetscObjectComm((PetscObject)da).
*/
PetscErrorCode  DMCoarsen_DA(DM da, MPI_Comm comm,DM *daref)
{
  PetscErrorCode ierr;
  PetscInt       M,N,P,i,dim;
  DM             da2;
  DM_DA          *dd = (DM_DA*)da->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(daref,3);

  ierr = DMGetDimension(da, &dim);CHKERRQ(ierr);
  /* Coarsened global sizes: periodic or piecewise-constant (Q0) dimensions coarsen
     every cell (M -> M/r); otherwise the vertices are coarsened (M -> 1 + (M-1)/r).
     Dimensions beyond dim keep size 1. */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->M / dd->coarsen_x;
  } else {
    M = 1 + (dd->M - 1) / dd->coarsen_x;
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->N / dd->coarsen_y;
    } else {
      N = 1;
    }
  } else {
    N = 1 + (dd->N - 1) / dd->coarsen_y;
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->P / dd->coarsen_z;
    } else {
      P = 1;
    }
  } else {
    P = 1 + (dd->P - 1) / dd->coarsen_z;
  }
  /* Create the coarse DMDA mirroring every property of da */
  ierr = DMDACreate(PetscObjectComm((PetscObject)da),&da2);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(da2,((PetscObject)da)->prefix);CHKERRQ(ierr);
  ierr = DMSetDimension(da2,dim);CHKERRQ(ierr);
  ierr = DMDASetSizes(da2,M,N,P);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(da2,dd->m,dd->n,dd->p);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz);CHKERRQ(ierr);
  ierr = DMDASetDof(da2,dd->w);CHKERRQ(ierr);
  ierr = DMDASetStencilType(da2,dd->stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(da2,dd->s);CHKERRQ(ierr);
  /* Choose coarse ownership ranges compatible with the fine partition, one dimension at a time */
  if (dim == 3) {
    PetscInt *lx,*ly,*lz;
    ierr = PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_z,dd->p,dd->lz,lz);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,lz);CHKERRQ(ierr);
    ierr = PetscFree3(lx,ly,lz);CHKERRQ(ierr);
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    ierr = PetscMalloc2(dd->m,&lx,dd->n,&ly);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,ly,NULL);CHKERRQ(ierr);
    ierr = PetscFree2(lx,ly);CHKERRQ(ierr);
  } else if (dim == 1) {
    PetscInt *lx;
    ierr = PetscMalloc1(dd->m,&lx);CHKERRQ(ierr);
    ierr = DMDACoarsenOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx);CHKERRQ(ierr);
    ierr = DMDASetOwnershipRanges(da2,lx,NULL,NULL);CHKERRQ(ierr);
    ierr = PetscFree(lx);CHKERRQ(ierr);
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? */
  /* da2->ops->createinterpolation = da->ops->createinterpolation; copying this one causes trouble for DMSetVI */
  da2->ops->creatematrix = da->ops->creatematrix;
  da2->ops->getcoloring  = da->ops->getcoloring;
  dd2->interptype        = dd->interptype;

  /* copy fill information if given; each table holds dfill[w]+w+1 PetscInts (CSR-like layout) */
  if (dd->dfill) {
    ierr = PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->dfill,dd->dfill,(dd->dfill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->ofill) {
    ierr = PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->ofill,dd->ofill,(dd->ofill[dd->w]+dd->w+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->coarsen_x;
  dd2->coarsen_y = dd2->refine_y = dd->coarsen_y;
  dd2->coarsen_z = dd2->refine_z = dd->coarsen_z;

  /* If per-level refinement factors were given, select the factor that re-refines this
     coarse level (index levelup-leveldown-1) and the one for coarsening it further
     (index levelup-leveldown-2); out-of-range indices keep the defaults set above. */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown -1 > -1 && da->levelup - da->leveldown - 1< dd->refine_z_hier_n) {
      dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown - 1];
    }
    if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_z_hier_n) {
      dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown - 2];
    }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    ierr = PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown - 1 > -1 && da->levelup - da->leveldown - 1< dd->refine_y_hier_n) {
      dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown - 1];
    }
    if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_y_hier_n) {
      dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown - 2];
    }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    ierr = PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown - 1 > -1 && da->levelup - da->leveldown - 1 < dd->refine_x_hier_n) {
      dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown - 1];
    }
    if (da->levelup - da->leveldown - 2 > -1 && da->levelup - da->leveldown - 2 < dd->refine_x_hier_n) {
      dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown - 2];
    }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    ierr = PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier);CHKERRQ(ierr);
    ierr = PetscMemcpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n*sizeof(PetscInt));CHKERRQ(ierr);
  }

  /* copy vector type information */
  ierr = DMSetVecType(da2,da->vectype);CHKERRQ(ierr);

  /* NOTE(review): lf/lj are shared pointers (shallow copy) with the fine DM -- confirm ownership */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  da2->leveldown = da->leveldown + 1;
  da2->levelup   = da->levelup;

  ierr = DMSetUp(da2);CHKERRQ(ierr);

  /* inject coordinates if they are set on the fine grid */
  if (da->coordinates) {
    DM         cdaf,cdac;
    Vec        coordsc,coordsf;
    Mat        inject;
    VecScatter vscat;

    ierr = DMGetCoordinateDM(da,&cdaf);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da,&coordsf);CHKERRQ(ierr);
    ierr = DMGetCoordinateDM(da2,&cdac);CHKERRQ(ierr);
    /* force creation of the coordinate vector */
    ierr = DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0);CHKERRQ(ierr);
    ierr = DMGetCoordinates(da2,&coordsc);CHKERRQ(ierr);

    /* restrict fine coordinates to the coarse grid via the injection scatter */
    ierr = DMCreateInjection(cdac,cdaf,&inject);CHKERRQ(ierr);
    ierr = MatScatterGetVecScatter(inject,&vscat);CHKERRQ(ierr);
    ierr = VecScatterBegin(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = MatDestroy(&inject);CHKERRQ(ierr);
  }

  /* carry field names over to the coarse DM */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
    ierr = DMDASetFieldName(da2,i,fieldname);CHKERRQ(ierr);
  }

  *daref = da2;
  PetscFunctionReturn(0);
}
1298 
1299 #undef __FUNCT__
1300 #define __FUNCT__ "DMRefineHierarchy_DA"
1301 PetscErrorCode  DMRefineHierarchy_DA(DM da,PetscInt nlevels,DM daf[])
1302 {
1303   PetscErrorCode ierr;
1304   PetscInt       i,n,*refx,*refy,*refz;
1305 
1306   PetscFunctionBegin;
1307   PetscValidHeaderSpecific(da,DM_CLASSID,1);
1308   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1309   if (nlevels == 0) PetscFunctionReturn(0);
1310   PetscValidPointer(daf,3);
1311 
1312   /* Get refinement factors, defaults taken from the coarse DMDA */
1313   ierr = PetscMalloc3(nlevels,&refx,nlevels,&refy,nlevels,&refz);CHKERRQ(ierr);
1314   for (i=0; i<nlevels; i++) {
1315     ierr = DMDAGetRefinementFactor(da,&refx[i],&refy[i],&refz[i]);CHKERRQ(ierr);
1316   }
1317   n    = nlevels;
1318   ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_x",refx,&n,NULL);CHKERRQ(ierr);
1319   n    = nlevels;
1320   ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_y",refy,&n,NULL);CHKERRQ(ierr);
1321   n    = nlevels;
1322   ierr = PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_z",refz,&n,NULL);CHKERRQ(ierr);
1323 
1324   ierr = DMDASetRefinementFactor(da,refx[0],refy[0],refz[0]);CHKERRQ(ierr);
1325   ierr = DMRefine(da,PetscObjectComm((PetscObject)da),&daf[0]);CHKERRQ(ierr);
1326   for (i=1; i<nlevels; i++) {
1327     ierr = DMDASetRefinementFactor(daf[i-1],refx[i],refy[i],refz[i]);CHKERRQ(ierr);
1328     ierr = DMRefine(daf[i-1],PetscObjectComm((PetscObject)da),&daf[i]);CHKERRQ(ierr);
1329   }
1330   ierr = PetscFree3(refx,refy,refz);CHKERRQ(ierr);
1331   PetscFunctionReturn(0);
1332 }
1333 
1334 #undef __FUNCT__
1335 #define __FUNCT__ "DMCoarsenHierarchy_DA"
1336 PetscErrorCode  DMCoarsenHierarchy_DA(DM da,PetscInt nlevels,DM dac[])
1337 {
1338   PetscErrorCode ierr;
1339   PetscInt       i;
1340 
1341   PetscFunctionBegin;
1342   PetscValidHeaderSpecific(da,DM_CLASSID,1);
1343   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1344   if (nlevels == 0) PetscFunctionReturn(0);
1345   PetscValidPointer(dac,3);
1346   ierr = DMCoarsen(da,PetscObjectComm((PetscObject)da),&dac[0]);CHKERRQ(ierr);
1347   for (i=1; i<nlevels; i++) {
1348     ierr = DMCoarsen(dac[i-1],PetscObjectComm((PetscObject)da),&dac[i]);CHKERRQ(ierr);
1349   }
1350   PetscFunctionReturn(0);
1351 }
1352