xref: /petsc/src/dm/interface/dm.c (revision be081cd66c6344e0381e975908126fb3e880340e)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
3 PetscClassId  DM_CLASSID;
4 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "DMCreate"
8 /*@
9   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
10 
11    If you never  call DMSetType()  it will generate an
12    error when you try to use the vector.
13 
14   Collective on MPI_Comm
15 
16   Input Parameter:
17 . comm - The communicator for the DM object
18 
19   Output Parameter:
20 . dm - The DM object
21 
22   Level: beginner
23 
24 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
25 @*/
26 PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
27 {
28   DM             v;
29   PetscErrorCode ierr;
30 
31   PetscFunctionBegin;
32   PetscValidPointer(dm,2);
33   *dm = PETSC_NULL;
34 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
35   ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
36   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
37   ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
38 #endif
39 
40   ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
41   ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);
42 
43 
44   v->ltogmap      = PETSC_NULL;
45   v->ltogmapb     = PETSC_NULL;
46   v->bs           = 1;
47   v->coloringtype = IS_COLORING_GLOBAL;
48   ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
49   ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
50   v->defaultSection       = PETSC_NULL;
51   v->defaultGlobalSection = PETSC_NULL;
52   {
53     PetscInt i;
54     for (i = 0; i < 10; ++i) {
55       v->nullspaceConstructors[i] = PETSC_NULL;
56     }
57   }
58   v->numFields = 0;
59   v->fields    = PETSC_NULL;
60 
61   *dm = v;
62   PetscFunctionReturn(0);
63 }
64 
65 #undef __FUNCT__
66 #define __FUNCT__ "DMSetVecType"
67 /*@C
68        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
69 
70    Logically Collective on DMDA
71 
72    Input Parameter:
73 +  da - initial distributed array
74 .  ctype - the vector type, currently either VECSTANDARD or VECCUSP
75 
76    Options Database:
77 .   -dm_vec_type ctype
78 
79    Level: intermediate
80 
81 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
82 @*/
83 PetscErrorCode  DMSetVecType(DM da,VecType ctype)
84 {
85   PetscErrorCode ierr;
86 
87   PetscFunctionBegin;
88   PetscValidHeaderSpecific(da,DM_CLASSID,1);
89   ierr = PetscFree(da->vectype);CHKERRQ(ierr);
90   ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
91   PetscFunctionReturn(0);
92 }
93 
94 #undef __FUNCT__
95 #define __FUNCT__ "VecGetDM"
96 /*@
97   VecGetDM - Gets the DM defining the data layout of the vector
98 
99   Not collective
100 
101   Input Parameter:
102 . v - The Vec
103 
104   Output Parameter:
105 . dm - The DM
106 
107   Level: intermediate
108 
109 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
110 @*/
111 PetscErrorCode VecGetDM(Vec v, DM *dm)
112 {
113   PetscErrorCode ierr;
114 
115   PetscFunctionBegin;
116   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
117   PetscValidPointer(dm,2);
118   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
119   PetscFunctionReturn(0);
120 }
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "VecSetDM"
124 /*@
125   VecSetDM - Sets the DM defining the data layout of the vector
126 
127   Not collective
128 
129   Input Parameters:
130 + v - The Vec
131 - dm - The DM
132 
133   Level: intermediate
134 
135 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
136 @*/
137 PetscErrorCode VecSetDM(Vec v, DM dm)
138 {
139   PetscErrorCode ierr;
140 
141   PetscFunctionBegin;
142   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
143   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
144   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
145   PetscFunctionReturn(0);
146 }
147 
148 #undef __FUNCT__
149 #define __FUNCT__ "DMSetMatType"
150 /*@C
151        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
152 
153    Logically Collective on DM
154 
155    Input Parameter:
156 +  dm - the DM context
157 .  ctype - the matrix type
158 
159    Options Database:
160 .   -dm_mat_type ctype
161 
162    Level: intermediate
163 
164 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
165 @*/
166 PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
167 {
168   PetscErrorCode ierr;
169   PetscFunctionBegin;
170   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
171   ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
172   ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
173   PetscFunctionReturn(0);
174 }
175 
176 #undef __FUNCT__
177 #define __FUNCT__ "MatGetDM"
178 /*@
179   MatGetDM - Gets the DM defining the data layout of the matrix
180 
181   Not collective
182 
183   Input Parameter:
184 . A - The Mat
185 
186   Output Parameter:
187 . dm - The DM
188 
189   Level: intermediate
190 
191 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
192 @*/
193 PetscErrorCode MatGetDM(Mat A, DM *dm)
194 {
195   PetscErrorCode ierr;
196 
197   PetscFunctionBegin;
198   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
199   PetscValidPointer(dm,2);
200   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
201   PetscFunctionReturn(0);
202 }
203 
204 #undef __FUNCT__
205 #define __FUNCT__ "MatSetDM"
206 /*@
207   MatSetDM - Sets the DM defining the data layout of the matrix
208 
209   Not collective
210 
211   Input Parameters:
212 + A - The Mat
213 - dm - The DM
214 
215   Level: intermediate
216 
217 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
218 @*/
219 PetscErrorCode MatSetDM(Mat A, DM dm)
220 {
221   PetscErrorCode ierr;
222 
223   PetscFunctionBegin;
224   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
225   if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
226   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
227   PetscFunctionReturn(0);
228 }
229 
230 #undef __FUNCT__
231 #define __FUNCT__ "DMSetOptionsPrefix"
232 /*@C
233    DMSetOptionsPrefix - Sets the prefix used for searching for all
234    DMDA options in the database.
235 
236    Logically Collective on DMDA
237 
238    Input Parameter:
239 +  da - the DMDA context
240 -  prefix - the prefix to prepend to all option names
241 
242    Notes:
243    A hyphen (-) must NOT be given at the beginning of the prefix name.
244    The first character of all runtime options is AUTOMATICALLY the hyphen.
245 
246    Level: advanced
247 
248 .keywords: DMDA, set, options, prefix, database
249 
250 .seealso: DMSetFromOptions()
251 @*/
252 PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
253 {
254   PetscErrorCode ierr;
255 
256   PetscFunctionBegin;
257   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
258   ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
259   PetscFunctionReturn(0);
260 }
261 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy; set to PETSC_NULL on return

    Notes:
    Because cached vectors inside the DM hold references back to the DM
    itself, the reference count is adjusted for those circular references
    before deciding whether the object can actually be freed.

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  {cnt++;}
    if ((*dm)->globalin[i]) {cnt++;}
  }
  /* each named global vector also holds a reference back to this DM */
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  /* the cached solution vector x counts only if it points back at this DM */
  if ((*dm)->x) {
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* If references other than the circular ones remain, just drop one and return */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = PETSC_NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = PETSC_NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = PETSC_NULL;
  }
  {
    DMSubDomainHookLink link,next;
    for (link=(*dm)->subdomainhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->subdomainhook = PETSC_NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = PETSC_NULL;
  }

  /* Release solver-attached contexts (KSP/SNES/TS) */
  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  /* User-supplied context is destroyed only if a destroy callback was provided */
  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);

  /* Type-specific teardown runs last, after all generic resources are gone */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
394 
395 #undef __FUNCT__
396 #define __FUNCT__ "DMSetUp"
397 /*@
398     DMSetUp - sets up the data structures inside a DM object
399 
400     Collective on DM
401 
402     Input Parameter:
403 .   dm - the DM object to setup
404 
405     Level: developer
406 
407 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
408 
409 @*/
410 PetscErrorCode  DMSetUp(DM dm)
411 {
412   PetscErrorCode ierr;
413 
414   PetscFunctionBegin;
415   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
416   if (dm->setupcalled) PetscFunctionReturn(0);
417   if (dm->ops->setup) {
418     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
419   }
420   dm->setupcalled = PETSC_TRUE;
421   PetscFunctionReturn(0);
422 }
423 
424 #undef __FUNCT__
425 #define __FUNCT__ "DMSetFromOptions"
426 /*@
427     DMSetFromOptions - sets parameters in a DM from the options database
428 
429     Collective on DM
430 
431     Input Parameter:
432 .   dm - the DM object to set options for
433 
434     Options Database:
435 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
436 .   -dm_vec_type <type>  type of vector to create inside DM
437 .   -dm_mat_type <type>  type of matrix to create inside DM
438 -   -dm_coloring_type <global or ghosted>
439 
440     Level: developer
441 
442 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
443 
444 @*/
445 PetscErrorCode  DMSetFromOptions(DM dm)
446 {
447   PetscBool      flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg;
448   PetscErrorCode ierr;
449   char           typeName[256] = MATAIJ;
450 
451   PetscFunctionBegin;
452   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
453   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
454     ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr);
455     ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr);
456     ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr);
457     ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr);
458     ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
459     ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
460     if (flg) {
461       ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
462     }
463     ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
464     if (flg) {
465       ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
466     }
467     ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
468     if (dm->ops->setfromoptions) {
469       ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
470     }
471     /* process any options handlers added with PetscObjectAddOptionsHandler() */
472     ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
473   ierr = PetscOptionsEnd();CHKERRQ(ierr);
474   if (flg1) {
475     ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
476   }
477   if (flg2) {
478     PetscViewer viewer;
479 
480     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
481     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
482     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr);
483     ierr = DMView(dm, viewer);CHKERRQ(ierr);
484     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
485   }
486   if (flg3) {
487     PetscViewer viewer;
488 
489     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
490     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
491     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr);
492     ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr);
493     ierr = DMView(dm, viewer);CHKERRQ(ierr);
494     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
495   }
496   if (flg4) {
497     PetscViewer viewer;
498 
499     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
500     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
501     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr);
502     ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr);
503     ierr = DMView(dm, viewer);CHKERRQ(ierr);
504     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
505   }
506   PetscFunctionReturn(0);
507 }
508 
509 #undef __FUNCT__
510 #define __FUNCT__ "DMView"
511 /*@C
512     DMView - Views a vector packer or DMDA.
513 
514     Collective on DM
515 
516     Input Parameter:
517 +   dm - the DM object to view
518 -   v - the viewer
519 
520     Level: developer
521 
522 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
523 
524 @*/
525 PetscErrorCode  DMView(DM dm,PetscViewer v)
526 {
527   PetscErrorCode ierr;
528   PetscBool      isbinary;
529 
530   PetscFunctionBegin;
531   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
532   if (!v) {
533     ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
534   }
535   ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
536   if (isbinary) {
537     PetscInt         classid = DM_FILE_CLASSID;
538     MPI_Comm         comm;
539     PetscMPIInt      rank;
540     char             type[256];
541 
542     ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
543     ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
544     if (!rank) {
545       ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
546       ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
547       ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
548     }
549   }
550   if (dm->ops->view) {
551     ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
552   }
553   PetscFunctionReturn(0);
554 }
555 
556 PETSC_EXTERN PetscErrorCode VecView_Complex_Local(Vec, PetscViewer);
557 PETSC_EXTERN PetscErrorCode VecView_Complex(Vec, PetscViewer);
558 
#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector"
/*@
    DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object

    Collective on DM

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   vec - the global vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->defaultSection) {
    /* Section-based path: size and block the vector from the default (global) section */
    PetscSection gSection;
    PetscInt     localSize, blockSize = -1, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
    /* Infer the block size: if every point with dof has the same unconstrained
       dof count, use that; on the first mismatch fall back to block size 1 */
    for(p = pStart; p < pEnd; ++p) {
      PetscInt dof, cdof;

      ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
      ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr);
      if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof;
      if ((dof > 0) && (dof-cdof != blockSize)) {
        blockSize = 1;
        break;
      }
    }
    ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr);
    /* Sanity check: local storage must be divisible by the inferred block size */
    if (localSize%blockSize) SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize);
    ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr);
    ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
    /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */
    ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
    ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
    /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */
    /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */
    /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */
    /* NOTE(review): the view operation is unconditionally replaced by VecView_Complex
       here — presumably a DMComplex-specific viewer; confirm this is intended for all
       section-based DMs */
    ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex);CHKERRQ(ierr);
  } else {
    /* Otherwise delegate entirely to the type-specific implementation */
    ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
617 
#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector"
/*@
    DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   vec - the local vector (sequential, on PETSC_COMM_SELF)

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->defaultSection) {
    /* Section-based path: size the local vector from the default section,
       including constrained dofs (unlike the global vector) */
    PetscInt localSize, blockSize = -1, pStart, pEnd, p;

    ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
    /* Infer block size from a uniform per-point dof count; mismatch => 1 */
    for(p = pStart; p < pEnd; ++p) {
      PetscInt dof;

      ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
      if ((blockSize < 0) && (dof > 0)) blockSize = dof;
      if ((dof > 0) && (dof != blockSize)) {
        blockSize = 1;
        break;
      }
    }
    ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr);
    ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr);
    ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr);
    ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
    ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
    ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
    /* NOTE(review): view operation replaced with the local complex viewer —
       presumably DMComplex-specific; confirm for other section-based DM types */
    ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex_Local);CHKERRQ(ierr);
  } else {
    /* Otherwise delegate entirely to the type-specific implementation */
    ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
668 
669 #undef __FUNCT__
670 #define __FUNCT__ "DMGetLocalToGlobalMapping"
671 /*@
672    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
673 
674    Collective on DM
675 
676    Input Parameter:
677 .  dm - the DM that provides the mapping
678 
679    Output Parameter:
680 .  ltog - the mapping
681 
682    Level: intermediate
683 
684    Notes:
685    This mapping can then be used by VecSetLocalToGlobalMapping() or
686    MatSetLocalToGlobalMapping().
687 
688 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
689 @*/
690 PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
691 {
692   PetscErrorCode ierr;
693 
694   PetscFunctionBegin;
695   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
696   PetscValidPointer(ltog,2);
697   if (!dm->ltogmap) {
698     PetscSection section, sectionGlobal;
699 
700     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
701     if (section) {
702       PetscInt      *ltog;
703       PetscInt       pStart, pEnd, size, p, l;
704 
705       ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
706       ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
707       ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
708       ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
709       for (p = pStart, l = 0; p < pEnd; ++p) {
710         PetscInt dof, off, c;
711 
712         /* Should probably use constrained dofs */
713         ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
714         ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
715         for (c = 0; c < dof; ++c, ++l) {
716           ltog[l] = off+c;
717         }
718       }
719       ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
720       ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
721     } else {
722       if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
723       ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
724     }
725   }
726   *ltog = dm->ltogmap;
727   PetscFunctionReturn(0);
728 }
729 
730 #undef __FUNCT__
731 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
732 /*@
733    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
734 
735    Collective on DM
736 
737    Input Parameter:
738 .  da - the distributed array that provides the mapping
739 
740    Output Parameter:
741 .  ltog - the block mapping
742 
743    Level: intermediate
744 
745    Notes:
746    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
747    MatSetLocalToGlobalMappingBlock().
748 
749 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
750 @*/
751 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
752 {
753   PetscErrorCode ierr;
754 
755   PetscFunctionBegin;
756   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
757   PetscValidPointer(ltog,2);
758   if (!dm->ltogmapb) {
759     PetscInt bs;
760     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
761     if (bs > 1) {
762       if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
763       ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
764     } else {
765       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
766       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
767     }
768   }
769   *ltog = dm->ltogmapb;
770   PetscFunctionReturn(0);
771 }
772 
773 #undef __FUNCT__
774 #define __FUNCT__ "DMGetBlockSize"
775 /*@
776    DMGetBlockSize - Gets the inherent block size associated with a DM
777 
778    Not Collective
779 
780    Input Parameter:
781 .  dm - the DM with block structure
782 
783    Output Parameter:
784 .  bs - the block size, 1 implies no exploitable block structure
785 
786    Level: intermediate
787 
788 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
789 @*/
790 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
791 {
792   PetscFunctionBegin;
793   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
794   PetscValidPointer(bs,2);
795   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
796   *bs = dm->bs;
797   PetscFunctionReturn(0);
798 }
799 
800 #undef __FUNCT__
801 #define __FUNCT__ "DMCreateInterpolation"
802 /*@
803     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
804 
805     Collective on DM
806 
807     Input Parameter:
808 +   dm1 - the DM object
809 -   dm2 - the second, finer DM object
810 
811     Output Parameter:
812 +  mat - the interpolation
813 -  vec - the scaling (optional)
814 
815     Level: developer
816 
817     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
818         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
819 
820         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
821         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
822 
823 
824 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
825 
826 @*/
827 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
828 {
829   PetscErrorCode ierr;
830 
831   PetscFunctionBegin;
832   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
833   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
834   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
835   PetscFunctionReturn(0);
836 }
837 
838 #undef __FUNCT__
839 #define __FUNCT__ "DMCreateInjection"
840 /*@
841     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
842 
843     Collective on DM
844 
845     Input Parameter:
846 +   dm1 - the DM object
847 -   dm2 - the second, finer DM object
848 
849     Output Parameter:
850 .   ctx - the injection
851 
852     Level: developer
853 
854    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
855         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
856 
857 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
858 
859 @*/
860 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
861 {
862   PetscErrorCode ierr;
863 
864   PetscFunctionBegin;
865   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
866   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
867   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
868   PetscFunctionReturn(0);
869 }
870 
871 #undef __FUNCT__
872 #define __FUNCT__ "DMCreateColoring"
873 /*@C
874     DMCreateColoring - Gets coloring for a DMDA or DMComposite
875 
876     Collective on DM
877 
878     Input Parameter:
879 +   dm - the DM object
880 .   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
881 -   matype - either MATAIJ or MATBAIJ
882 
883     Output Parameter:
884 .   coloring - the coloring
885 
886     Level: developer
887 
888 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()
889 
890 @*/
891 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
892 {
893   PetscErrorCode ierr;
894 
895   PetscFunctionBegin;
896   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
897   if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
898   ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
899   PetscFunctionReturn(0);
900 }
901 
902 #undef __FUNCT__
903 #define __FUNCT__ "DMCreateMatrix"
904 /*@C
905     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
906 
907     Collective on DM
908 
909     Input Parameter:
910 +   dm - the DM object
911 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
912             any type which inherits from one of these (such as MATAIJ)
913 
914     Output Parameter:
915 .   mat - the empty Jacobian
916 
917     Level: beginner
918 
919     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
920        do not need to do it yourself.
921 
922        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
923        the nonzero pattern call DMDASetMatPreallocateOnly()
924 
925        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
926        internally by PETSc.
927 
928        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
929        the indices for the global numbering for DMDAs which is complicated.
930 
931 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
932 
933 @*/
934 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
935 {
936   PetscErrorCode ierr;
937 
938   PetscFunctionBegin;
939   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
940 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
941   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
942 #endif
943   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
944   PetscValidPointer(mat,3);
945   if (dm->mattype) {
946     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
947   } else {
948     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
949   }
950   PetscFunctionReturn(0);
951 }
952 
953 #undef __FUNCT__
954 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
955 /*@
956   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
957     preallocated but the nonzero structure and zero values will not be set.
958 
959   Logically Collective on DMDA
960 
961   Input Parameter:
962 + dm - the DM
963 - only - PETSC_TRUE if only want preallocation
964 
965   Level: developer
966 .seealso DMCreateMatrix()
967 @*/
968 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
969 {
970   PetscFunctionBegin;
971   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
972   dm->prealloc_only = only;
973   PetscFunctionReturn(0);
974 }
975 
976 #undef __FUNCT__
977 #define __FUNCT__ "DMGetWorkArray"
978 /*@C
979   DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
980 
981   Not Collective
982 
983   Input Parameters:
984 + dm - the DM object
. count - The minimum size
986 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
987 
988   Output Parameter:
989 . array - the work array
990 
991   Level: developer
992 
993 .seealso DMDestroy(), DMCreate()
994 @*/
995 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
996 {
997   PetscErrorCode ierr;
998   DMWorkLink link;
999   size_t size;
1000 
1001   PetscFunctionBegin;
1002   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1003   PetscValidPointer(mem,4);
1004   if (dm->workin) {
1005     link = dm->workin;
1006     dm->workin = dm->workin->next;
1007   } else {
1008     ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
1009   }
1010   ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
1011   if (size*count > link->bytes) {
1012     ierr = PetscFree(link->mem);CHKERRQ(ierr);
1013     ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
1014     link->bytes = size*count;
1015   }
1016   link->next = dm->workout;
1017   dm->workout = link;
1018   *(void**)mem = link->mem;
1019   PetscFunctionReturn(0);
1020 }
1021 
1022 #undef __FUNCT__
1023 #define __FUNCT__ "DMRestoreWorkArray"
1024 /*@C
  DMRestoreWorkArray - Restores a work array previously obtained with DMGetWorkArray()
1026 
1027   Not Collective
1028 
1029   Input Parameters:
1030 + dm - the DM object
. count - The minimum size
1032 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
1033 
1034   Output Parameter:
1035 . array - the work array
1036 
1037   Level: developer
1038 
1039 .seealso DMDestroy(), DMCreate()
1040 @*/
1041 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1042 {
1043   DMWorkLink *p,link;
1044 
1045   PetscFunctionBegin;
1046   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1047   PetscValidPointer(mem,4);
1048   for (p=&dm->workout; (link=*p); p=&link->next) {
1049     if (link->mem == *(void**)mem) {
1050       *p = link->next;
1051       link->next = dm->workin;
1052       dm->workin = link;
1053       *(void**)mem = PETSC_NULL;
1054       PetscFunctionReturn(0);
1055     }
1056   }
1057   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
1058   PetscFunctionReturn(0);
1059 }
1060 
1061 #undef __FUNCT__
1062 #define __FUNCT__ "DMSetNullSpaceConstructor"
1063 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1064 {
1065   PetscFunctionBegin;
1066   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1067   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1068   dm->nullspaceConstructors[field] = nullsp;
1069   PetscFunctionReturn(0);
1070 }
1071 
1072 #undef __FUNCT__
1073 #define __FUNCT__ "DMCreateFieldIS"
1074 /*@C
1075   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1076 
1077   Not collective
1078 
1079   Input Parameter:
1080 . dm - the DM object
1081 
1082   Output Parameters:
1083 + numFields  - The number of fields (or PETSC_NULL if not requested)
1084 . fieldNames - The name for each field (or PETSC_NULL if not requested)
1085 - fields     - The global indices for each field (or PETSC_NULL if not requested)
1086 
1087   Level: intermediate
1088 
1089   Notes:
1090   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1091   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1092   PetscFree().
1093 
1094 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1095 @*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Initialize every requested output so callers see defined values even on
     the fallback paths below */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = PETSC_NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = PETSC_NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    /* A default PetscSection is attached: derive the per-field index sets
       from the local and global sections */
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt  nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* Pass 1: count the unconstrained dofs of each field on locally owned
       points (gdof > 0 marks ownership in the global section) */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          /* constrained dofs are excluded from the global numbering */
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    for (f = 0; f < nF; ++f) {
      ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      /* reset so fieldSizes[] can serve as a per-field write cursor in pass 2 */
      fieldSizes[f] = 0;
    }
    /* Pass 2: record the global indices; goff walks the point's global
       offsets across all of its fields in order */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) {*numFields = nF;}
    if (fieldNames) {
      /* Caller frees each name and the array itself with PetscFree() */
      ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      /* PETSC_OWN_POINTER: each IS takes ownership of its index array.
         NOTE(review): when fields is PETSC_NULL the fieldIndices[f] arrays
         appear to be leaked (PetscFree2 frees only the outer arrays) — confirm */
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else {
    /* No default section: fall back to the type-specific implementation, if any */
    if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1184 
1185 #undef __FUNCT__
1186 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1187 /*@C
1188   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1189 
1190   Not Collective
1191 
1192   Input Parameters:
1193 + dm   - the DM object
1194 - name - the name of the field decomposition
1195 
1196   Output Parameter:
1197 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1198 
1199   Level: advanced
1200 
1201 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1202 @*/
1203 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1204 {
1205   PetscErrorCode ierr;
1206 
1207   PetscFunctionBegin;
1208   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1209   PetscValidCharPointer(name,2);
1210   PetscValidPointer(ddm,3);
1211   *ddm = PETSC_NULL;
1212   if (dm->ops->createfielddecompositiondm) {
1213     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1214   }
1215   PetscFunctionReturn(0);
1216 }
1217 
1218 #undef __FUNCT__
1219 #define __FUNCT__ "DMCreateFieldDecomposition"
1220 /*@C
1221   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1222                           corresponding to different fields: each IS contains the global indices of the dofs of the
1223                           corresponding field. The optional list of DMs define the DM for each subproblem.
1224                           Generalizes DMCreateFieldIS().
1225 
1226   Not collective
1227 
1228   Input Parameter:
1229 . dm - the DM object
1230 
1231   Output Parameters:
1232 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1233 . namelist  - The name for each field (or PETSC_NULL if not requested)
1234 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1235 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1236 
1237   Level: intermediate
1238 
1239   Notes:
1240   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1241   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1242   and all of the arrays should be freed with PetscFree().
1243 
1244 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1245 @*/
1246 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1247 {
1248   PetscErrorCode ierr;
1249 
1250   PetscFunctionBegin;
1251   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1252   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1253   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1254   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1255   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1256   if (!dm->ops->createfielddecomposition) {
1257     PetscSection section;
1258     PetscInt     numFields, f;
1259 
1260     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1261     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1262     if (section && numFields && dm->ops->createsubdm) {
1263       *len = numFields;
1264       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1265       for (f = 0; f < numFields; ++f) {
1266         const char *fieldName;
1267 
1268         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1269         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1270         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1271       }
1272     } else {
1273       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1274       /* By default there are no DMs associated with subproblems. */
1275       if (dmlist) *dmlist = PETSC_NULL;
1276     }
1277   }
1278   else {
1279     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr);
1280   }
1281   PetscFunctionReturn(0);
1282 }
1283 
1284 #undef __FUNCT__
1285 #define __FUNCT__ "DMCreateSubDM"
1286 /*@C
1287   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1288                   The fields are defined by DMCreateFieldIS().
1289 
1290   Not collective
1291 
  Input Parameters:
+ dm        - the DM object
. numFields - The number of fields in this subproblem
- fields    - The field numbers of the selected fields

  Output Parameters:
+ is    - The global indices for the subproblem
- subdm - The DM for the subproblem
1300 
1301   Level: intermediate
1302 
1303 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1304 @*/
1305 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1306 {
1307   PetscErrorCode ierr;
1308 
1309   PetscFunctionBegin;
1310   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1311   PetscValidPointer(fields,3);
1312   if (is) {PetscValidPointer(is,4);}
1313   if (subdm) {PetscValidPointer(subdm,5);}
1314   if (dm->ops->createsubdm) {
1315     ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr);
1316   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1317   PetscFunctionReturn(0);
1318 }
1319 
1320 #undef __FUNCT__
1321 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1322 /*@C
1323   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1324 
1325   Not Collective
1326 
1327   Input Parameters:
1328 + dm   - the DM object
1329 - name - the name of the subdomain decomposition
1330 
1331   Output Parameter:
1332 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1333 
1334   Level: advanced
1335 
1336 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1337 @*/
1338 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1339 {
1340   PetscErrorCode ierr;
1341 
1342   PetscFunctionBegin;
1343   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1344   PetscValidCharPointer(name,2);
1345   PetscValidPointer(ddm,3);
1346   *ddm = PETSC_NULL;
1347   if (dm->ops->createdomaindecompositiondm) {
1348     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1349   }
1350   PetscFunctionReturn(0);
1351 }
1352 
1353 #undef __FUNCT__
1354 #define __FUNCT__ "DMCreateDomainDecomposition"
1355 /*@C
1356   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1357                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1358                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1359                           define a nonoverlapping covering, while outer subdomains can overlap.
1360                           The optional list of DMs define the DM for each subproblem.
1361 
1362   Not collective
1363 
1364   Input Parameter:
1365 . dm - the DM object
1366 
1367   Output Parameters:
1368 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1369 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1370 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1371 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1372 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1373 
1374   Level: intermediate
1375 
1376   Notes:
1377   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1378   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1379   and all of the arrays should be freed with PetscFree().
1380 
1381 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1382 @*/
1383 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1384 {
1385   PetscErrorCode ierr;
1386   DMSubDomainHookLink link;
1387   PetscInt            i,l;
1388 
1389   PetscFunctionBegin;
1390   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1391   if (len)           {PetscValidPointer(len,2);            *len         = PETSC_NULL;}
1392   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1393   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1394   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1395   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1396   if (dm->ops->createdomaindecomposition) {
1397     ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr);
1398   }
1399   if (len) *len = l;
1400   for (i = 0; i < l; i++) {
1401     for (link=dm->subdomainhook; link; link=link->next) {
1402       if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
1403     }
1404   }
1405   PetscFunctionReturn(0);
1406 }
1407 
1408 
1409 #undef __FUNCT__
1410 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1411 /*@C
1412   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1413 
1414   Not collective
1415 
1416   Input Parameters:
1417 + dm - the DM object
1418 . n  - the number of subdomain scatters
1419 - subdms - the local subdomains
1420 
1421   Output Parameters:
1422 + n     - the number of scatters returned
1423 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1424 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1425 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1426 
1427   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1428   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1429   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1430   solution and residual data.
1431 
1432   Level: developer
1433 
1434 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1435 @*/
1436 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1437 {
1438   PetscErrorCode ierr;
1439 
1440   PetscFunctionBegin;
1441   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1442   PetscValidPointer(subdms,3);
1443   PetscValidPointer(iscat,4);
1444   PetscValidPointer(oscat,5);
1445   PetscValidPointer(gscat,6);
1446   if (dm->ops->createddscatters) {
1447     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat); CHKERRQ(ierr);
1448   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1449   PetscFunctionReturn(0);
1450 }
1451 
1452 #undef __FUNCT__
1453 #define __FUNCT__ "DMRefine"
1454 /*@
1455   DMRefine - Refines a DM object
1456 
1457   Collective on DM
1458 
1459   Input Parameter:
1460 + dm   - the DM object
1461 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1462 
1463   Output Parameter:
1464 . dmf - the refined DM, or PETSC_NULL
1465 
1466   Note: If no refinement was done, the return value is PETSC_NULL
1467 
1468   Level: developer
1469 
1470 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1471 @*/
1472 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1473 {
1474   PetscErrorCode ierr;
1475   DMRefineHookLink link;
1476 
1477   PetscFunctionBegin;
1478   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1479   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1480   if (*dmf) {
1481     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1482     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1483     (*dmf)->ctx       = dm->ctx;
1484     (*dmf)->leveldown = dm->leveldown;
1485     (*dmf)->levelup   = dm->levelup + 1;
1486     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1487     for (link=dm->refinehook; link; link=link->next) {
1488       if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
1489     }
1490   }
1491   PetscFunctionReturn(0);
1492 }
1493 
1494 #undef __FUNCT__
1495 #define __FUNCT__ "DMRefineHookAdd"
1496 /*@
1497    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1498 
1499    Logically Collective
1500 
1501    Input Arguments:
1502 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1503 .  refinehook - function to run when setting up a coarser level
1504 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1505 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1506 
1507    Calling sequence of refinehook:
1508 $    refinehook(DM coarse,DM fine,void *ctx);
1509 
1510 +  coarse - coarse level DM
1511 .  fine - fine level DM to interpolate problem to
1512 -  ctx - optional user-defined function context
1513 
1514    Calling sequence for interphook:
1515 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1516 
1517 +  coarse - coarse level DM
1518 .  interp - matrix interpolating a coarse-level solution to the finer grid
1519 .  fine - fine level DM to update
1520 -  ctx - optional user-defined function context
1521 
1522    Level: advanced
1523 
1524    Notes:
1525    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1526 
1527    If this function is called multiple times, the hooks will be run in the order they are added.
1528 
1529 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1530 @*/
1531 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1532 {
1533   PetscErrorCode ierr;
1534   DMRefineHookLink link,*p;
1535 
1536   PetscFunctionBegin;
1537   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1538   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1539   ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1540   link->refinehook = refinehook;
1541   link->interphook = interphook;
1542   link->ctx = ctx;
1543   link->next = PETSC_NULL;
1544   *p = link;
1545   PetscFunctionReturn(0);
1546 }
1547 
1548 #undef __FUNCT__
1549 #define __FUNCT__ "DMInterpolate"
1550 /*@
1551    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1552 
1553    Collective if any hooks are
1554 
1555    Input Arguments:
1556 +  coarse - coarser DM to use as a base
1557 .  restrct - interpolation matrix, apply using MatInterpolate()
1558 -  fine - finer DM to update
1559 
1560    Level: developer
1561 
1562 .seealso: DMRefineHookAdd(), MatInterpolate()
1563 @*/
1564 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1565 {
1566   PetscErrorCode ierr;
1567   DMRefineHookLink link;
1568 
1569   PetscFunctionBegin;
1570   for (link=fine->refinehook; link; link=link->next) {
1571     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1572   }
1573   PetscFunctionReturn(0);
1574 }
1575 
1576 #undef __FUNCT__
1577 #define __FUNCT__ "DMGetRefineLevel"
1578 /*@
1579     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1580 
1581     Not Collective
1582 
1583     Input Parameter:
1584 .   dm - the DM object
1585 
1586     Output Parameter:
1587 .   level - number of refinements
1588 
1589     Level: developer
1590 
1591 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1592 
1593 @*/
1594 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1595 {
1596   PetscFunctionBegin;
1597   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1598   *level = dm->levelup;
1599   PetscFunctionReturn(0);
1600 }
1601 
1602 #undef __FUNCT__
1603 #define __FUNCT__ "DMGlobalToLocalBegin"
1604 /*@
1605     DMGlobalToLocalBegin - Begins updating local vectors from global vector
1606 
1607     Neighbor-wise Collective on DM
1608 
1609     Input Parameters:
1610 +   dm - the DM object
1611 .   g - the global vector
1612 .   mode - INSERT_VALUES or ADD_VALUES
1613 -   l - the local vector
1614 
1615 
1616     Level: beginner
1617 
1618 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1619 
1620 @*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* Star-forest path: broadcast owned (global) values to the ghosted
       (local) representation.  ADD_VALUES is not supported here. */
    PetscScalar *lArray, *gArray;

    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    /* Communication is only started here; DMGlobalToLocalEnd() completes it */
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific path: the *_ALL_VALUES modes are mapped to their plain
       counterparts before dispatch */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1643 
1644 #undef __FUNCT__
1645 #define __FUNCT__ "DMGlobalToLocalEnd"
1646 /*@
1647     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1648 
1649     Neighbor-wise Collective on DM
1650 
1651     Input Parameters:
1652 +   dm - the DM object
1653 .   g - the global vector
1654 .   mode - INSERT_VALUES or ADD_VALUES
1655 -   l - the local vector
1656 
1657 
1658     Level: beginner
1659 
1660 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1661 
1662 @*/
1663 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1664 {
1665   PetscSF        sf;
1666   PetscErrorCode ierr;
1667   PetscScalar    *lArray, *gArray;
1668 
1669   PetscFunctionBegin;
1670   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1671   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1672   if (sf) {
1673   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1674 
1675     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1676     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1677     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1678     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1679     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1680   } else {
1681     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1682   }
1683   PetscFunctionReturn(0);
1684 }
1685 
1686 #undef __FUNCT__
1687 #define __FUNCT__ "DMLocalToGlobalBegin"
1688 /*@
1689     DMLocalToGlobalBegin - updates global vectors from local vectors
1690 
1691     Neighbor-wise Collective on DM
1692 
1693     Input Parameters:
1694 +   dm - the DM object
1695 .   l - the local vector
1696 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1697            base point.
-   g - the global vector
1699 
1700     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1701            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1702            global array to the final global array with VecAXPY().
1703 
1704     Level: beginner
1705 
1706 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1707 
1708 @*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* Star-forest path: reduce local (ghosted) values onto their owners.
       The insert mode selects the MPI reduction operation. */
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      /* MPI_REPLACE is an MPI-2 feature; without it INSERT cannot be done here */
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
  default:
    SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    /* Communication is only started here; DMLocalToGlobalEnd() completes it */
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific path, mapping *_ALL_VALUES to the plain modes */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1745 
1746 #undef __FUNCT__
1747 #define __FUNCT__ "DMLocalToGlobalEnd"
1748 /*@
1749     DMLocalToGlobalEnd - updates global vectors from local vectors
1750 
1751     Neighbor-wise Collective on DM
1752 
1753     Input Parameters:
1754 +   dm - the DM object
1755 .   l - the local vector
1756 .   mode - INSERT_VALUES or ADD_VALUES
1757 -   g - the global vector
1758 
1759 
1760     Level: beginner
1761 
1762 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()
1763 
1764 @*/
1765 PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
1766 {
1767   PetscSF        sf;
1768   PetscErrorCode ierr;
1769 
1770   PetscFunctionBegin;
1771   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1772   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1773   if (sf) {
1774     MPI_Op       op;
1775     PetscScalar *lArray, *gArray;
1776 
1777     switch(mode) {
1778     case INSERT_VALUES:
1779     case INSERT_ALL_VALUES:
1780 #if defined(PETSC_HAVE_MPI_REPLACE)
1781       op = MPI_REPLACE; break;
1782 #else
1783       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1784 #endif
1785     case ADD_VALUES:
1786     case ADD_ALL_VALUES:
1787       op = MPI_SUM; break;
1788     default:
1789       SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1790     }
1791     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1792     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1793     ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1794     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1795     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1796   } else {
1797     ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1798   }
1799   PetscFunctionReturn(0);
1800 }
1801 
1802 #undef __FUNCT__
1803 #define __FUNCT__ "DMCoarsen"
1804 /*@
1805     DMCoarsen - Coarsens a DM object
1806 
1807     Collective on DM
1808 
1809     Input Parameter:
1810 +   dm - the DM object
1811 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1812 
1813     Output Parameter:
1814 .   dmc - the coarsened DM
1815 
1816     Level: developer
1817 
1818 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1819 
1820 @*/
1821 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1822 {
1823   PetscErrorCode ierr;
1824   DMCoarsenHookLink link;
1825 
1826   PetscFunctionBegin;
1827   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1828   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1829   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1830   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1831   (*dmc)->ctx       = dm->ctx;
1832   (*dmc)->levelup   = dm->levelup;
1833   (*dmc)->leveldown = dm->leveldown + 1;
1834   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1835   for (link=dm->coarsenhook; link; link=link->next) {
1836     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1837   }
1838   PetscFunctionReturn(0);
1839 }
1840 
1841 #undef __FUNCT__
1842 #define __FUNCT__ "DMCoarsenHookAdd"
1843 /*@
1844    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1845 
1846    Logically Collective
1847 
1848    Input Arguments:
1849 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1850 .  coarsenhook - function to run when setting up a coarser level
1851 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1852 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1853 
1854    Calling sequence of coarsenhook:
1855 $    coarsenhook(DM fine,DM coarse,void *ctx);
1856 
1857 +  fine - fine level DM
1858 .  coarse - coarse level DM to restrict problem to
1859 -  ctx - optional user-defined function context
1860 
1861    Calling sequence for restricthook:
1862 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1863 
1864 +  fine - fine level DM
1865 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1866 .  rscale - scaling vector for restriction
1867 .  inject - matrix restricting by injection
1868 .  coarse - coarse level DM to update
1869 -  ctx - optional user-defined function context
1870 
1871    Level: advanced
1872 
1873    Notes:
1874    This function is only needed if auxiliary data needs to be set up on coarse grids.
1875 
1876    If this function is called multiple times, the hooks will be run in the order they are added.
1877 
1878    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1879    extract the finest level information from its context (instead of from the SNES).
1880 
1881 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1882 @*/
1883 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1884 {
1885   PetscErrorCode ierr;
1886   DMCoarsenHookLink link,*p;
1887 
1888   PetscFunctionBegin;
1889   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1890   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1891   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1892   link->coarsenhook = coarsenhook;
1893   link->restricthook = restricthook;
1894   link->ctx = ctx;
1895   link->next = PETSC_NULL;
1896   *p = link;
1897   PetscFunctionReturn(0);
1898 }
1899 
1900 #undef __FUNCT__
1901 #define __FUNCT__ "DMRestrict"
1902 /*@
1903    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1904 
1905    Collective if any hooks are
1906 
1907    Input Arguments:
1908 +  fine - finer DM to use as a base
1909 .  restrct - restriction matrix, apply using MatRestrict()
1910 .  inject - injection matrix, also use MatRestrict()
1911 -  coarse - coarer DM to update
1912 
1913    Level: developer
1914 
1915 .seealso: DMCoarsenHookAdd(), MatRestrict()
1916 @*/
1917 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1918 {
1919   PetscErrorCode ierr;
1920   DMCoarsenHookLink link;
1921 
1922   PetscFunctionBegin;
1923   for (link=fine->coarsenhook; link; link=link->next) {
1924     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1925   }
1926   PetscFunctionReturn(0);
1927 }
1928 
1929 #undef __FUNCT__
1930 #define __FUNCT__ "DMSubDomainHookAdd"
1931 /*@
1932    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
1933 
1934    Logically Collective
1935 
1936    Input Arguments:
1937 +  global - global DM
1938 .
1939 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
1940 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1941 
1942    Calling sequence for restricthook:
1943 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1944 
1945 +  global - global DM
1946 .  out    - scatter to the outer (with ghost and overlap points) block vector
1947 .  in     - scatter to block vector values only owned locally
1948 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
1949 -  ctx - optional user-defined function context
1950 
1951    Level: advanced
1952 
1953    Notes:
1954    This function is only needed if auxiliary data needs to be set up on coarse grids.
1955 
1956    If this function is called multiple times, the hooks will be run in the order they are added.
1957 
1958    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1959    extract the finest level information from its context (instead of from the SNES).
1960 
1961 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1962 @*/
1963 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
1964 {
1965   PetscErrorCode ierr;
1966   DMSubDomainHookLink link,*p;
1967 
1968   PetscFunctionBegin;
1969   PetscValidHeaderSpecific(global,DM_CLASSID,1);
1970   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1971   ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
1972   link->restricthook = restricthook;
1973   link->ddhook = ddhook;
1974   link->ctx = ctx;
1975   link->next = PETSC_NULL;
1976   *p = link;
1977   PetscFunctionReturn(0);
1978 }
1979 
1980 #undef __FUNCT__
1981 #define __FUNCT__ "DMSubDomainRestrict"
1982 /*@
1983    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
1984 
1985    Collective if any hooks are
1986 
1987    Input Arguments:
1988 +  fine - finer DM to use as a base
1989 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
1990 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
1991 -  coarse - coarer DM to update
1992 
1993    Level: developer
1994 
1995 .seealso: DMCoarsenHookAdd(), MatRestrict()
1996 @*/
1997 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
1998 {
1999   PetscErrorCode ierr;
2000   DMSubDomainHookLink link;
2001 
2002   PetscFunctionBegin;
2003   for (link=global->subdomainhook; link; link=link->next) {
2004     if (link->restricthook) {ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);}
2005   }
2006   PetscFunctionReturn(0);
2007 }
2008 
2009 #undef __FUNCT__
2010 #define __FUNCT__ "DMGetCoarsenLevel"
2011 /*@
2012     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2013 
2014     Not Collective
2015 
2016     Input Parameter:
2017 .   dm - the DM object
2018 
2019     Output Parameter:
2020 .   level - number of coarsenings
2021 
2022     Level: developer
2023 
2024 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2025 
2026 @*/
2027 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2028 {
2029   PetscFunctionBegin;
2030   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2031   *level = dm->leveldown;
2032   PetscFunctionReturn(0);
2033 }
2034 
2035 
2036 
2037 #undef __FUNCT__
2038 #define __FUNCT__ "DMRefineHierarchy"
2039 /*@C
2040     DMRefineHierarchy - Refines a DM object, all levels at once
2041 
2042     Collective on DM
2043 
2044     Input Parameter:
2045 +   dm - the DM object
2046 -   nlevels - the number of levels of refinement
2047 
2048     Output Parameter:
2049 .   dmf - the refined DM hierarchy
2050 
2051     Level: developer
2052 
2053 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2054 
2055 @*/
2056 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2057 {
2058   PetscErrorCode ierr;
2059 
2060   PetscFunctionBegin;
2061   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2062   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2063   if (nlevels == 0) PetscFunctionReturn(0);
2064   if (dm->ops->refinehierarchy) {
2065     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2066   } else if (dm->ops->refine) {
2067     PetscInt i;
2068 
2069     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2070     for (i=1; i<nlevels; i++) {
2071       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2072     }
2073   } else {
2074     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2075   }
2076   PetscFunctionReturn(0);
2077 }
2078 
2079 #undef __FUNCT__
2080 #define __FUNCT__ "DMCoarsenHierarchy"
2081 /*@C
2082     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2083 
2084     Collective on DM
2085 
2086     Input Parameter:
2087 +   dm - the DM object
2088 -   nlevels - the number of levels of coarsening
2089 
2090     Output Parameter:
2091 .   dmc - the coarsened DM hierarchy
2092 
2093     Level: developer
2094 
2095 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2096 
2097 @*/
2098 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2099 {
2100   PetscErrorCode ierr;
2101 
2102   PetscFunctionBegin;
2103   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2104   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2105   if (nlevels == 0) PetscFunctionReturn(0);
2106   PetscValidPointer(dmc,3);
2107   if (dm->ops->coarsenhierarchy) {
2108     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2109   } else if (dm->ops->coarsen) {
2110     PetscInt i;
2111 
2112     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2113     for (i=1; i<nlevels; i++) {
2114       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2115     }
2116   } else {
2117     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2118   }
2119   PetscFunctionReturn(0);
2120 }
2121 
2122 #undef __FUNCT__
2123 #define __FUNCT__ "DMCreateAggregates"
2124 /*@
2125    DMCreateAggregates - Gets the aggregates that map between
2126    grids associated with two DMs.
2127 
2128    Collective on DM
2129 
2130    Input Parameters:
2131 +  dmc - the coarse grid DM
2132 -  dmf - the fine grid DM
2133 
2134    Output Parameters:
2135 .  rest - the restriction matrix (transpose of the projection matrix)
2136 
2137    Level: intermediate
2138 
2139 .keywords: interpolation, restriction, multigrid
2140 
2141 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2142 @*/
2143 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2144 {
2145   PetscErrorCode ierr;
2146 
2147   PetscFunctionBegin;
2148   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2149   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2150   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2151   PetscFunctionReturn(0);
2152 }
2153 
2154 #undef __FUNCT__
2155 #define __FUNCT__ "DMSetApplicationContextDestroy"
2156 /*@C
2157     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2158 
2159     Not Collective
2160 
2161     Input Parameters:
2162 +   dm - the DM object
2163 -   destroy - the destroy function
2164 
2165     Level: intermediate
2166 
2167 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2168 
2169 @*/
2170 PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
2171 {
2172   PetscFunctionBegin;
2173   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2174   dm->ctxdestroy = destroy;
2175   PetscFunctionReturn(0);
2176 }
2177 
2178 #undef __FUNCT__
2179 #define __FUNCT__ "DMSetApplicationContext"
2180 /*@
2181     DMSetApplicationContext - Set a user context into a DM object
2182 
2183     Not Collective
2184 
2185     Input Parameters:
2186 +   dm - the DM object
2187 -   ctx - the user context
2188 
2189     Level: intermediate
2190 
2191 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2192 
2193 @*/
2194 PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
2195 {
2196   PetscFunctionBegin;
2197   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2198   dm->ctx = ctx;
2199   PetscFunctionReturn(0);
2200 }
2201 
2202 #undef __FUNCT__
2203 #define __FUNCT__ "DMGetApplicationContext"
2204 /*@
2205     DMGetApplicationContext - Gets a user context from a DM object
2206 
2207     Not Collective
2208 
2209     Input Parameter:
2210 .   dm - the DM object
2211 
2212     Output Parameter:
2213 .   ctx - the user context
2214 
2215     Level: intermediate
2216 
2217 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2218 
2219 @*/
2220 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2221 {
2222   PetscFunctionBegin;
2223   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2224   *(void**)ctx = dm->ctx;
2225   PetscFunctionReturn(0);
2226 }
2227 
2228 #undef __FUNCT__
2229 #define __FUNCT__ "DMSetVariableBounds"
2230 /*@C
2231     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2232 
2233     Logically Collective on DM
2234 
2235     Input Parameter:
2236 +   dm - the DM object
2237 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2238 
2239     Level: intermediate
2240 
2241 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2242          DMSetJacobian()
2243 
2244 @*/
2245 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2246 {
2247   PetscFunctionBegin;
2248   dm->ops->computevariablebounds = f;
2249   PetscFunctionReturn(0);
2250 }
2251 
2252 #undef __FUNCT__
2253 #define __FUNCT__ "DMHasVariableBounds"
2254 /*@
2255     DMHasVariableBounds - does the DM object have a variable bounds function?
2256 
2257     Not Collective
2258 
2259     Input Parameter:
2260 .   dm - the DM object to destroy
2261 
2262     Output Parameter:
2263 .   flg - PETSC_TRUE if the variable bounds function exists
2264 
2265     Level: developer
2266 
2267 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2268 
2269 @*/
2270 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2271 {
2272   PetscFunctionBegin;
2273   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2274   PetscFunctionReturn(0);
2275 }
2276 
2277 #undef __FUNCT__
2278 #define __FUNCT__ "DMComputeVariableBounds"
2279 /*@C
2280     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2281 
2282     Logically Collective on DM
2283 
2284     Input Parameters:
2285 +   dm - the DM object to destroy
2286 -   x  - current solution at which the bounds are computed
2287 
2288     Output parameters:
2289 +   xl - lower bound
2290 -   xu - upper bound
2291 
2292     Level: intermediate
2293 
2294 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2295 
2296 @*/
2297 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2298 {
2299   PetscErrorCode ierr;
2300   PetscFunctionBegin;
2301   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2302   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2303   if (dm->ops->computevariablebounds) {
2304     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr);
2305   }
2306   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2307   PetscFunctionReturn(0);
2308 }
2309 
2310 #undef __FUNCT__
2311 #define __FUNCT__ "DMHasColoring"
2312 /*@
2313     DMHasColoring - does the DM object have a method of providing a coloring?
2314 
2315     Not Collective
2316 
2317     Input Parameter:
2318 .   dm - the DM object
2319 
2320     Output Parameter:
2321 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2322 
2323     Level: developer
2324 
2325 .seealso DMHasFunction(), DMCreateColoring()
2326 
2327 @*/
2328 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2329 {
2330   PetscFunctionBegin;
2331   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2332   PetscFunctionReturn(0);
2333 }
2334 
2335 #undef  __FUNCT__
2336 #define __FUNCT__ "DMSetVec"
2337 /*@C
2338     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2339 
2340     Collective on DM
2341 
2342     Input Parameter:
2343 +   dm - the DM object
2344 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2345 
2346     Level: developer
2347 
2348 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2349 
2350 @*/
2351 PetscErrorCode  DMSetVec(DM dm,Vec x)
2352 {
2353   PetscErrorCode ierr;
2354   PetscFunctionBegin;
2355   if (x) {
2356     if (!dm->x) {
2357       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2358     }
2359     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2360   }
2361   else if (dm->x) {
2362     ierr = VecDestroy(&dm->x);  CHKERRQ(ierr);
2363   }
2364   PetscFunctionReturn(0);
2365 }
2366 
PetscFList DMList                       = PETSC_NULL;   /* registry of available DM types; populated by DMRegister()/DMRegisterAll() */
PetscBool  DMRegisterAllCalled          = PETSC_FALSE;  /* set once DMRegisterAll() has filled DMList, so it runs at most once */
2369 
2370 #undef __FUNCT__
2371 #define __FUNCT__ "DMSetType"
2372 /*@C
2373   DMSetType - Builds a DM, for a particular DM implementation.
2374 
2375   Collective on DM
2376 
2377   Input Parameters:
2378 + dm     - The DM object
2379 - method - The name of the DM type
2380 
2381   Options Database Key:
2382 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2383 
2384   Notes:
2385   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2386 
2387   Level: intermediate
2388 
2389 .keywords: DM, set, type
2390 .seealso: DMGetType(), DMCreate()
2391 @*/
2392 PetscErrorCode  DMSetType(DM dm, DMType method)
2393 {
2394   PetscErrorCode (*r)(DM);
2395   PetscBool      match;
2396   PetscErrorCode ierr;
2397 
2398   PetscFunctionBegin;
2399   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2400   ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
2401   if (match) PetscFunctionReturn(0);
2402 
2403   if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
2404   ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
2405   if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);
2406 
2407   if (dm->ops->destroy) {
2408     ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
2409     dm->ops->destroy = PETSC_NULL;
2410   }
2411   ierr = (*r)(dm);CHKERRQ(ierr);
2412   ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
2413   PetscFunctionReturn(0);
2414 }
2415 
2416 #undef __FUNCT__
2417 #define __FUNCT__ "DMGetType"
2418 /*@C
2419   DMGetType - Gets the DM type name (as a string) from the DM.
2420 
2421   Not Collective
2422 
2423   Input Parameter:
2424 . dm  - The DM
2425 
2426   Output Parameter:
2427 . type - The DM type name
2428 
2429   Level: intermediate
2430 
2431 .keywords: DM, get, type, name
2432 .seealso: DMSetType(), DMCreate()
2433 @*/
2434 PetscErrorCode  DMGetType(DM dm, DMType *type)
2435 {
2436   PetscErrorCode ierr;
2437 
2438   PetscFunctionBegin;
2439   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2440   PetscValidCharPointer(type,2);
2441   if (!DMRegisterAllCalled) {
2442     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2443   }
2444   *type = ((PetscObject)dm)->type_name;
2445   PetscFunctionReturn(0);
2446 }
2447 
2448 #undef __FUNCT__
2449 #define __FUNCT__ "DMConvert"
2450 /*@C
2451   DMConvert - Converts a DM to another DM, either of the same or different type.
2452 
2453   Collective on DM
2454 
2455   Input Parameters:
2456 + dm - the DM
2457 - newtype - new DM type (use "same" for the same type)
2458 
2459   Output Parameter:
2460 . M - pointer to new DM
2461 
2462   Notes:
2463   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2464   the MPI communicator of the generated DM is always the same as the communicator
2465   of the input DM.
2466 
2467   Level: intermediate
2468 
2469 .seealso: DMCreate()
2470 @*/
2471 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
2472 {
2473   DM             B;
2474   char           convname[256];
2475   PetscBool      sametype, issame;
2476   PetscErrorCode ierr;
2477 
2478   PetscFunctionBegin;
2479   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2480   PetscValidType(dm,1);
2481   PetscValidPointer(M,3);
2482   ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
2483   ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
2484   {
2485     PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;
2486 
2487     /*
2488        Order of precedence:
2489        1) See if a specialized converter is known to the current DM.
2490        2) See if a specialized converter is known to the desired DM class.
2491        3) See if a good general converter is registered for the desired class
2492        4) See if a good general converter is known for the current matrix.
2493        5) Use a really basic converter.
2494     */
2495 
2496     /* 1) See if a specialized converter is known to the current DM and the desired class */
2497     ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
2498     ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
2499     ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
2500     ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
2501     ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
2502     ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
2503     if (conv) goto foundconv;
2504 
2505     /* 2)  See if a specialized converter is known to the desired DM class. */
2506     ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
2507     ierr = DMSetType(B, newtype);CHKERRQ(ierr);
2508     ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
2509     ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
2510     ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
2511     ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
2512     ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
2513     ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
2514     if (conv) {
2515       ierr = DMDestroy(&B);CHKERRQ(ierr);
2516       goto foundconv;
2517     }
2518 
2519 #if 0
2520     /* 3) See if a good general converter is registered for the desired class */
2521     conv = B->ops->convertfrom;
2522     ierr = DMDestroy(&B);CHKERRQ(ierr);
2523     if (conv) goto foundconv;
2524 
2525     /* 4) See if a good general converter is known for the current matrix */
2526     if (dm->ops->convert) {
2527       conv = dm->ops->convert;
2528     }
2529     if (conv) goto foundconv;
2530 #endif
2531 
2532     /* 5) Use a really basic converter. */
2533     SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);
2534 
2535     foundconv:
2536     ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
2537     ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
2538     ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
2539   }
2540   ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
2541   PetscFunctionReturn(0);
2542 }
2543 
2544 /*--------------------------------------------------------------------------------------------------------------------*/
2545 
2546 #undef __FUNCT__
2547 #define __FUNCT__ "DMRegister"
2548 /*@C
2549   DMRegister - See DMRegisterDynamic()
2550 
2551   Level: advanced
2552 @*/
2553 PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
2554 {
2555   char fullname[PETSC_MAX_PATH_LEN];
2556   PetscErrorCode ierr;
2557 
2558   PetscFunctionBegin;
2559   ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
2560   ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
2561   ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
2562   ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
2563   PetscFunctionReturn(0);
2564 }
2565 
2566 
2567 /*--------------------------------------------------------------------------------------------------------------------*/
2568 #undef __FUNCT__
2569 #define __FUNCT__ "DMRegisterDestroy"
2570 /*@C
2571    DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2572 
2573    Not Collective
2574 
2575    Level: advanced
2576 
2577 .keywords: DM, register, destroy
2578 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
2579 @*/
2580 PetscErrorCode  DMRegisterDestroy(void)
2581 {
2582   PetscErrorCode ierr;
2583 
2584   PetscFunctionBegin;
2585   ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
2586   DMRegisterAllCalled = PETSC_FALSE;
2587   PetscFunctionReturn(0);
2588 }
2589 
2590 #undef __FUNCT__
2591 #define __FUNCT__ "DMLoad"
2592 /*@C
2593   DMLoad - Loads a DM that has been stored in binary  with DMView().
2594 
2595   Collective on PetscViewer
2596 
2597   Input Parameters:
2598 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2599            some related function before a call to DMLoad().
2600 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2601            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2602 
2603    Level: intermediate
2604 
2605   Notes:
2606    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2607 
2608   Notes for advanced users:
2609   Most users should not need to know the details of the binary storage
2610   format, since DMLoad() and DMView() completely hide these details.
2611   But for anyone who's interested, the standard binary matrix storage
2612   format is
2613 .vb
2614      has not yet been determined
2615 .ve
2616 
2617 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2618 @*/
2619 PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
2620 {
2621   PetscErrorCode ierr;
2622   PetscBool      isbinary;
2623   PetscInt       classid;
2624   char           type[256];
2625 
2626   PetscFunctionBegin;
2627   PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
2628   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
2629   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
2630   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
2631 
2632   ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
2633   if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
2634   ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
2635   ierr = DMSetType(newdm, type);CHKERRQ(ierr);
2636   ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
2637   PetscFunctionReturn(0);
2638 }
2639 
2640 /******************************** FEM Support **********************************/
2641 
2642 #undef __FUNCT__
2643 #define __FUNCT__ "DMPrintCellVector"
2644 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) {
2645   PetscInt       f;
2646   PetscErrorCode ierr;
2647 
2648   PetscFunctionBegin;
2649   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2650   for (f = 0; f < len; ++f) {
2651     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2652   }
2653   PetscFunctionReturn(0);
2654 }
2655 
2656 #undef __FUNCT__
2657 #define __FUNCT__ "DMPrintCellMatrix"
2658 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) {
2659   PetscInt       f, g;
2660   PetscErrorCode ierr;
2661 
2662   PetscFunctionBegin;
2663   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2664   for (f = 0; f < rows; ++f) {
2665     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2666     for (g = 0; g < cols; ++g) {
2667       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2668     }
2669     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2670   }
2671   PetscFunctionReturn(0);
2672 }
2673 
2674 #undef __FUNCT__
2675 #define __FUNCT__ "DMGetDefaultSection"
2676 /*@
2677   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2678 
2679   Input Parameter:
2680 . dm - The DM
2681 
2682   Output Parameter:
2683 . section - The PetscSection
2684 
2685   Level: intermediate
2686 
2687   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2688 
2689 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2690 @*/
2691 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
2692   PetscFunctionBegin;
2693   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2694   PetscValidPointer(section, 2);
2695   *section = dm->defaultSection;
2696   PetscFunctionReturn(0);
2697 }
2698 
2699 #undef __FUNCT__
2700 #define __FUNCT__ "DMSetDefaultSection"
2701 /*@
2702   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2703 
2704   Input Parameters:
2705 + dm - The DM
2706 - section - The PetscSection
2707 
2708   Level: intermediate
2709 
2710   Note: Any existing Section will be destroyed
2711 
2712 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2713 @*/
2714 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
2715   PetscInt       numFields;
2716   PetscInt       f;
2717   PetscErrorCode ierr;
2718 
2719   PetscFunctionBegin;
2720   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2721   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2722   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2723   dm->defaultSection = section;
2724   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2725   if (numFields) {
2726     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2727     for (f = 0; f < numFields; ++f) {
2728       const char *name;
2729 
2730       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2731       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2732     }
2733   }
2734   PetscFunctionReturn(0);
2735 }
2736 
2737 #undef __FUNCT__
2738 #define __FUNCT__ "DMGetDefaultGlobalSection"
2739 /*@
2740   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2741 
2742   Collective on DM
2743 
2744   Input Parameter:
2745 . dm - The DM
2746 
2747   Output Parameter:
2748 . section - The PetscSection
2749 
2750   Level: intermediate
2751 
2752   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2753 
2754 .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2755 @*/
2756 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
2757   PetscErrorCode ierr;
2758 
2759   PetscFunctionBegin;
2760   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2761   PetscValidPointer(section, 2);
2762   if (!dm->defaultGlobalSection) {
2763     if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
2764     ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
2765     ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
2766   }
2767   *section = dm->defaultGlobalSection;
2768   PetscFunctionReturn(0);
2769 }
2770 
2771 #undef __FUNCT__
2772 #define __FUNCT__ "DMSetDefaultGlobalSection"
2773 /*@
2774   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2775 
2776   Input Parameters:
2777 + dm - The DM
2778 - section - The PetscSection
2779 
2780   Level: intermediate
2781 
2782   Note: Any existing Section will be destroyed
2783 
2784 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2785 @*/
2786 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
2787   PetscErrorCode ierr;
2788 
2789   PetscFunctionBegin;
2790   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2791   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2792   dm->defaultGlobalSection = section;
2793   PetscFunctionReturn(0);
2794 }
2795 
2796 #undef __FUNCT__
2797 #define __FUNCT__ "DMGetDefaultSF"
2798 /*@
2799   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2800   it is created from the default PetscSection layouts in the DM.
2801 
2802   Input Parameter:
2803 . dm - The DM
2804 
2805   Output Parameter:
2806 . sf - The PetscSF
2807 
2808   Level: intermediate
2809 
2810   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2811 
2812 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2813 @*/
2814 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) {
2815   PetscInt       nroots;
2816   PetscErrorCode ierr;
2817 
2818   PetscFunctionBegin;
2819   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2820   PetscValidPointer(sf, 2);
2821   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
2822   if (nroots < 0) {
2823     PetscSection section, gSection;
2824 
2825     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2826     if (section) {
2827       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2828       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2829     } else {
2830       *sf = PETSC_NULL;
2831       PetscFunctionReturn(0);
2832     }
2833   }
2834   *sf = dm->defaultSF;
2835   PetscFunctionReturn(0);
2836 }
2837 
2838 #undef __FUNCT__
2839 #define __FUNCT__ "DMSetDefaultSF"
2840 /*@
2841   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
2842 
2843   Input Parameters:
2844 + dm - The DM
2845 - sf - The PetscSF
2846 
2847   Level: intermediate
2848 
2849   Note: Any previous SF is destroyed
2850 
2851 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
2852 @*/
2853 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) {
2854   PetscErrorCode ierr;
2855 
2856   PetscFunctionBegin;
2857   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2858   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
2859   ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
2860   dm->defaultSF = sf;
2861   PetscFunctionReturn(0);
2862 }
2863 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  /* Roots of the SF are the owned (unconstrained) global dofs; a PetscLayout
     over them gives the ownership ranges used to translate global offsets
     into (rank, index) pairs below. */
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. A negative global dof encodes a remote point
     as -(dof+1), so its true size is -(gdof+1); constrained dofs are excluded. */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill the local (leaf) indices and remote (root) locations. */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      /* Sanity check: the global size must match either the constrained or
         the unconstrained local size; in the latter case constraints are skipped. */
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local offsets of unconstrained dofs; cind is sorted, and c counts
       the constrained dofs skipped so far, hence the l+d-c leaf index. */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remote point: decode the global offset from -(goff+1) and locate its
         owner rank by binary search in the layout ranges. */
      for(d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* PetscFindInt encodes the insertion point when not found */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: the root lives on this rank. */
      for(d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of local and remote (PETSC_OWN_POINTER). */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2952 
2953 #undef __FUNCT__
2954 #define __FUNCT__ "DMGetPointSF"
2955 /*@
2956   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
2957 
2958   Input Parameter:
2959 . dm - The DM
2960 
2961   Output Parameter:
2962 . sf - The PetscSF
2963 
2964   Level: intermediate
2965 
2966   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2967 
2968 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2969 @*/
2970 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) {
2971   PetscFunctionBegin;
2972   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2973   PetscValidPointer(sf, 2);
2974   *sf = dm->sf;
2975   PetscFunctionReturn(0);
2976 }
2977 
2978 #undef __FUNCT__
2979 #define __FUNCT__ "DMSetPointSF"
2980 /*@
2981   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
2982 
2983   Input Parameters:
2984 + dm - The DM
2985 - sf - The PetscSF
2986 
2987   Level: intermediate
2988 
2989 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2990 @*/
2991 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) {
2992   PetscErrorCode ierr;
2993 
2994   PetscFunctionBegin;
2995   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2996   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
2997   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
2998   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
2999   dm->sf = sf;
3000   PetscFunctionReturn(0);
3001 }
3002 
3003 #undef __FUNCT__
3004 #define __FUNCT__ "DMGetNumFields"
3005 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3006 {
3007   PetscFunctionBegin;
3008   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3009   PetscValidPointer(numFields, 2);
3010   *numFields = dm->numFields;
3011   PetscFunctionReturn(0);
3012 }
3013 
3014 #undef __FUNCT__
3015 #define __FUNCT__ "DMSetNumFields"
3016 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3017 {
3018   PetscInt       f;
3019   PetscErrorCode ierr;
3020 
3021   PetscFunctionBegin;
3022   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3023   for (f = 0; f < dm->numFields; ++f) {
3024     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3025   }
3026   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
3027   dm->numFields = numFields;
3028   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3029   for (f = 0; f < dm->numFields; ++f) {
3030     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
3031   }
3032   PetscFunctionReturn(0);
3033 }
3034 
3035 #undef __FUNCT__
3036 #define __FUNCT__ "DMGetField"
3037 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3038 {
3039   PetscFunctionBegin;
3040   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3041   PetscValidPointer(field, 2);
3042   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3043   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3044   *field = dm->fields[f];
3045   PetscFunctionReturn(0);
3046 }
3047 
3048 #undef __FUNCT__
3049 #define __FUNCT__ "DMSetCoordinates"
3050 /*@
3051   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3052 
3053   Collective on DM
3054 
3055   Input Parameters:
3056 + dm - the DM
3057 - c - coordinate vector
3058 
3059   Note:
3060   The coordinates do include those for ghost points, which are in the local vector
3061 
3062   Level: intermediate
3063 
3064 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3065 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM()
3066 @*/
3067 PetscErrorCode DMSetCoordinates(DM dm, Vec c)
3068 {
3069   PetscErrorCode ierr;
3070 
3071   PetscFunctionBegin;
3072   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3073   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3074   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3075   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3076   dm->coordinates = c;
3077   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3078   PetscFunctionReturn(0);
3079 }
3080 
3081 #undef __FUNCT__
3082 #define __FUNCT__ "DMSetCoordinatesLocal"
3083 /*@
3084   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3085 
3086   Collective on DM
3087 
3088    Input Parameters:
3089 +  dm - the DM
3090 -  c - coordinate vector
3091 
3092   Note:
3093   The coordinates of ghost points can be set using DMSetCoordinates()
3094   followed by DMGetCoordinatesLocal(). This is intended to enable the
3095   setting of ghost coordinates outside of the domain.
3096 
3097   Level: intermediate
3098 
3099 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3100 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3101 @*/
3102 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3103 {
3104   PetscErrorCode ierr;
3105 
3106   PetscFunctionBegin;
3107   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3108   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3109   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3110   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3111   dm->coordinatesLocal = c;
3112   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3113   PetscFunctionReturn(0);
3114 }
3115 
3116 #undef __FUNCT__
3117 #define __FUNCT__ "DMGetCoordinates"
3118 /*@
3119   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3120 
3121   Not Collective
3122 
3123   Input Parameter:
3124 . dm - the DM
3125 
3126   Output Parameter:
3127 . c - global coordinate vector
3128 
3129   Note:
3130   This is a borrowed reference, so the user should NOT destroy this vector
3131 
3132   Each process has only the local coordinates (does NOT have the ghost coordinates).
3133 
3134   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3135   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3136 
3137   Level: intermediate
3138 
3139 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3140 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3141 @*/
3142 PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3143 {
3144   PetscErrorCode ierr;
3145 
3146   PetscFunctionBegin;
3147   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3148   PetscValidPointer(c,2);
3149   if (!dm->coordinates && dm->coordinatesLocal) {
3150     DM cdm = PETSC_NULL;
3151 
3152     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3153     ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
3154     ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
3155     ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3156     ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3157   }
3158   *c = dm->coordinates;
3159   PetscFunctionReturn(0);
3160 }
3161 
3162 #undef __FUNCT__
3163 #define __FUNCT__ "DMGetCoordinatesLocal"
3164 /*@
3165   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3166 
3167   Collective on DM
3168 
3169   Input Parameter:
3170 . dm - the DM
3171 
3172   Output Parameter:
3173 . c - coordinate vector
3174 
3175   Note:
3176   This is a borrowed reference, so the user should NOT destroy this vector
3177 
3178   Each process has the local and ghost coordinates
3179 
3180   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3181   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3182 
3183   Level: intermediate
3184 
3185 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3186 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3187 @*/
3188 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3189 {
3190   PetscErrorCode ierr;
3191 
3192   PetscFunctionBegin;
3193   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3194   PetscValidPointer(c,2);
3195   if (!dm->coordinatesLocal && dm->coordinates) {
3196     DM cdm = PETSC_NULL;
3197 
3198     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3199     ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
3200     ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
3201     ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3202     ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3203   }
3204   *c = dm->coordinatesLocal;
3205   PetscFunctionReturn(0);
3206 }
3207 
3208 #undef __FUNCT__
3209 #define __FUNCT__ "DMGetCoordinateDM"
3210 /*@
3211   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3212 
3213   Collective on DM
3214 
3215   Input Parameter:
3216 . dm - the DM
3217 
3218   Output Parameter:
3219 . cdm - coordinate DM
3220 
3221   Level: intermediate
3222 
3223 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3224 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3225 @*/
3226 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3227 {
3228   PetscErrorCode ierr;
3229 
3230   PetscFunctionBegin;
3231   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3232   PetscValidPointer(cdm,2);
3233   if (!dm->coordinateDM) {
3234     if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3235     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3236   }
3237   *cdm = dm->coordinateDM;
3238   PetscFunctionReturn(0);
3239 }
3240