xref: /petsc/src/dm/interface/dm.c (revision 32f0905035ffe6296383d6f3767aadb2abd5438a)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
3 PetscClassId  DM_CLASSID;
4 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "DMViewFromOptions"
8 /*
9   Processes command line options to determine if/how a dm
10   is to be viewed.
11 
12 */
13 PetscErrorCode  DMViewFromOptions(DM dm,const char optionname[])
14 {
15   PetscErrorCode ierr;
16   PetscBool      flg;
17   PetscViewer    viewer;
18 
19   PetscFunctionBegin;
20   ierr = PetscOptionsGetViewer(((PetscObject)dm)->comm,((PetscObject)dm)->prefix,optionname,&viewer,&flg);CHKERRQ(ierr);
21   if (flg) {
22     ierr = DMView(dm,viewer);CHKERRQ(ierr);
23     ierr = PetscOptionsRestoreViewer(viewer);CHKERRQ(ierr);
24   }
25   PetscFunctionReturn(0);
26 }
27 
28 
#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never call DMSetType() it will generate an
   error when you try to use the DM.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  /* With static libraries the packages must be registered explicitly before use */
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* Zero the function table; a concrete implementation fills it in via DMSetType() */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = PETSC_NULL;
  v->ltogmapb     = PETSC_NULL;
  v->bs           = 1;
  v->coloringtype = IS_COLORING_GLOBAL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = PETSC_NULL;
  v->defaultGlobalSection = PETSC_NULL;
  {
    PetscInt i;
    /* NOTE(review): 10 is presumably the maximum number of fields with
       nullspace constructors -- confirm against the array size in dmimpl.h */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = PETSC_NULL;
    }
  }
  v->numFields = 0;
  v->fields    = PETSC_NULL;

  *dm = v;
  PetscFunctionReturn(0);
}
87 
#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Release any previously stored type name, then keep a private copy of the new one */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
116 
#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
  VecGetDM - Gets the DM defining the data layout of the vector

  Not collective

  Input Parameter:
. v - The Vec

  Output Parameter:
. dm - The DM, or PETSC_NULL if no DM has been associated with the vector

  Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is attached to the Vec as a composed object; does not bump its reference */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
144 
145 #undef __FUNCT__
146 #define __FUNCT__ "VecSetDM"
147 /*@
148   VecSetDM - Sets the DM defining the data layout of the vector
149 
150   Not collective
151 
152   Input Parameters:
153 + v - The Vec
154 - dm - The DM
155 
156   Level: intermediate
157 
158 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
159 @*/
160 PetscErrorCode VecSetDM(Vec v, DM dm)
161 {
162   PetscErrorCode ierr;
163 
164   PetscFunctionBegin;
165   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
166   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
167   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
168   PetscFunctionReturn(0);
169 }
170 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Release any previously stored type name, then keep a private copy of the new one */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
198 
#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
  MatGetDM - Gets the DM defining the data layout of the matrix

  Not collective

  Input Parameter:
. A - The Mat

  Output Parameter:
. dm - The DM, or PETSC_NULL if no DM has been associated with the matrix

  Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is attached to the Mat as a composed object; does not bump its reference */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
226 
#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
  MatSetDM - Sets the DM defining the data layout of the matrix

  Not collective

  Input Parameters:
+ A - The Mat
- dm - The DM, or PETSC_NULL to remove any current association

  Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dm may be NULL; PetscObjectCompose() with NULL removes the composed object */
  if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
252 
#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DM options in the database.

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DM, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
284 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy (set to PETSC_NULL on return)

    Notes:
    The DM and the Vecs it caches reference each other, so the reference
    count is first corrected for those circular references before deciding
    whether the object can actually be freed.

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  {cnt++;}
    if ((*dm)->globalin[i]) {cnt++;}
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* If anything besides the circular references still holds the DM, only drop the caller's reference */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = PETSC_NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = PETSC_NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = PETSC_NULL;
  }
  {
    DMSubDomainHookLink link,next;
    for (link=(*dm)->subdomainhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->subdomainhook = PETSC_NULL;
  }
  {
    DMGlobalToLocalHookLink link,next;
    for (link=(*dm)->gtolhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->gtolhook = PETSC_NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = PETSC_NULL;
  }

  /* Solver contexts attached by KSP/SNES/TS */
  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);

  /* Type-specific teardown; NOTE(review): called unconditionally, so this
     assumes DMSetType() installed ops->destroy -- confirm for untyped DMs */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
425 
#undef __FUNCT__
#define __FUNCT__ "DMSetUp"
/*@
    DMSetUp - sets up the data structures inside a DM object

    Collective on DM

    Input Parameter:
.   dm - the DM object to setup

    Notes: This routine is idempotent; calling it a second time is a no-op.

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetUp(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->setupcalled) PetscFunctionReturn(0);
  if (dm->ops->setup) {
    ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
  }
  dm->setupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}
454 
455 #undef __FUNCT__
456 #define __FUNCT__ "DMSetFromOptions"
457 /*@
458     DMSetFromOptions - sets parameters in a DM from the options database
459 
460     Collective on DM
461 
462     Input Parameter:
463 .   dm - the DM object to set options for
464 
465     Options Database:
466 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
467 .   -dm_vec_type <type>  type of vector to create inside DM
468 .   -dm_mat_type <type>  type of matrix to create inside DM
469 -   -dm_coloring_type <global or ghosted>
470 
471     Level: developer
472 
473 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
474 
475 @*/
476 PetscErrorCode  DMSetFromOptions(DM dm)
477 {
478   PetscBool      flg;
479   PetscErrorCode ierr;
480 
481   PetscFunctionBegin;
482   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
483   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
484     ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
485     ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
486     if (flg) {
487       ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
488     }
489     ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
490     if (flg) {
491       ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
492     }
493     ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
494     if (dm->ops->setfromoptions) {
495       ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
496     }
497     /* process any options handlers added with PetscObjectAddOptionsHandler() */
498     ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
499   ierr = PetscOptionsEnd();CHKERRQ(ierr);
500   ierr = DMViewFromOptions(dm,"-dm_view");CHKERRQ(ierr);
501   PetscFunctionReturn(0);
502 }
503 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer, or PETSC_NULL for the default ASCII viewer on the DM's communicator

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* Binary format: classid followed by a fixed 256-byte type name record */
    PetscInt         classid = DM_FILE_CLASSID;
    char             type[256];

    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
544 
545 PETSC_EXTERN PetscErrorCode VecView_Complex_Local(Vec, PetscViewer);
546 PETSC_EXTERN PetscErrorCode VecView_Complex(Vec, PetscViewer);
547 
548 #undef __FUNCT__
549 #define __FUNCT__ "DMCreateGlobalVector"
550 /*@
551     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
552 
553     Collective on DM
554 
555     Input Parameter:
556 .   dm - the DM object
557 
558     Output Parameter:
559 .   vec - the global vector
560 
561     Level: beginner
562 
563 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
564 
565 @*/
566 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
567 {
568   PetscErrorCode ierr;
569 
570   PetscFunctionBegin;
571   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
572   if (dm->defaultSection) {
573     PetscSection gSection;
574     PetscInt     localSize, blockSize = -1, pStart, pEnd, p;
575 
576     ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
577     ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
578     for(p = pStart; p < pEnd; ++p) {
579       PetscInt dof, cdof;
580 
581       ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
582       ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr);
583       if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof;
584       if ((dof > 0) && (dof-cdof != blockSize)) {
585         blockSize = 1;
586         break;
587       }
588     }
589     ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr);
590     if (localSize%blockSize) SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize);
591     ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr);
592     ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr);
593     ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
594     /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */
595     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
596     ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
597     /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */
598     /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */
599     /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */
600     ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex);CHKERRQ(ierr);
601   } else {
602     ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
603   }
604   PetscFunctionReturn(0);
605 }
606 
607 #undef __FUNCT__
608 #define __FUNCT__ "DMCreateLocalVector"
609 /*@
610     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
611 
612     Not Collective
613 
614     Input Parameter:
615 .   dm - the DM object
616 
617     Output Parameter:
618 .   vec - the local vector
619 
620     Level: beginner
621 
622 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
623 
624 @*/
625 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
626 {
627   PetscErrorCode ierr;
628 
629   PetscFunctionBegin;
630   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
631   if (dm->defaultSection) {
632     PetscInt localSize, blockSize = -1, pStart, pEnd, p;
633 
634     ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
635     for(p = pStart; p < pEnd; ++p) {
636       PetscInt dof;
637 
638       ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
639       if ((blockSize < 0) && (dof > 0)) blockSize = dof;
640       if ((dof > 0) && (dof != blockSize)) {
641         blockSize = 1;
642         break;
643       }
644     }
645     ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr);
646     ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr);
647     ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr);
648     ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
649     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
650     ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
651     ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex_Local);CHKERRQ(ierr);
652   } else {
653     ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
654   }
655   PetscFunctionReturn(0);
656 }
657 
658 #undef __FUNCT__
659 #define __FUNCT__ "DMGetLocalToGlobalMapping"
660 /*@
661    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
662 
663    Collective on DM
664 
665    Input Parameter:
666 .  dm - the DM that provides the mapping
667 
668    Output Parameter:
669 .  ltog - the mapping
670 
671    Level: intermediate
672 
673    Notes:
674    This mapping can then be used by VecSetLocalToGlobalMapping() or
675    MatSetLocalToGlobalMapping().
676 
677 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
678 @*/
679 PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
680 {
681   PetscErrorCode ierr;
682 
683   PetscFunctionBegin;
684   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
685   PetscValidPointer(ltog,2);
686   if (!dm->ltogmap) {
687     PetscSection section, sectionGlobal;
688 
689     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
690     if (section) {
691       PetscInt      *ltog;
692       PetscInt       pStart, pEnd, size, p, l;
693 
694       ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
695       ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
696       ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
697       ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
698       for (p = pStart, l = 0; p < pEnd; ++p) {
699         PetscInt dof, off, c;
700 
701         /* Should probably use constrained dofs */
702         ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
703         ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
704         for (c = 0; c < dof; ++c, ++l) {
705           ltog[l] = off+c;
706         }
707       }
708       ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
709       ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
710     } else {
711       if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
712       ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
713     }
714   }
715   *ltog = dm->ltogmap;
716   PetscFunctionReturn(0);
717 }
718 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the block mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock(). When the DM's block size is 1 the
   unblocked mapping from DMGetLocalToGlobalMapping() is reused.

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* bs == 1: block mapping is the plain mapping; take an extra reference */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}
761 
#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.  bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* bs is initialized to 1 in DMCreate(); < 1 means an implementation reset it and never set a valid value */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}
788 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation"
/*@
    DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameters:
+  mat - the interpolation
-  vec - the scaling (optional)

    Level: developer

    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.

        For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
        EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.


.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()

@*/
PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* Delegate to the coarse DM's type-specific implementation */
  ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
826 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInjection"
/*@
    DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameter:
.   ctx - the injection

    Level: developer

   Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()

@*/
PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* Delegate to the coarse DM's type-specific implementation */
  ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
859 
#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@C
    DMCreateColoring - Gets coloring for a DMDA or DMComposite

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
-   mtype - either MATAIJ or MATBAIJ

    Output Parameter:
.   coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Not all DM implementations provide a coloring; fail cleanly rather than dereference NULL */
  if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
890 
891 #undef __FUNCT__
892 #define __FUNCT__ "DMCreateMatrix"
893 /*@C
894     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
895 
896     Collective on DM
897 
898     Input Parameter:
899 +   dm - the DM object
900 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
901             any type which inherits from one of these (such as MATAIJ)
902 
903     Output Parameter:
904 .   mat - the empty Jacobian
905 
906     Level: beginner
907 
908     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
909        do not need to do it yourself.
910 
911        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
912        the nonzero pattern call DMDASetMatPreallocateOnly()
913 
914        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
915        internally by PETSc.
916 
917        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
918        the indices for the global numbering for DMDAs which is complicated.
919 
920 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
921 
922 @*/
923 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
924 {
925   PetscErrorCode ierr;
926 
927   PetscFunctionBegin;
928   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
929 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
930   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
931 #endif
932   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
933   PetscValidPointer(mat,3);
934   if (dm->mattype) {
935     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
936   } else {
937     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
938   }
939   PetscFunctionReturn(0);
940 }
941 
942 #undef __FUNCT__
943 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
944 /*@
945   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
946     preallocated but the nonzero structure and zero values will not be set.
947 
948   Logically Collective on DMDA
949 
950   Input Parameter:
951 + dm - the DM
952 - only - PETSC_TRUE if only want preallocation
953 
954   Level: developer
955 .seealso DMCreateMatrix()
956 @*/
957 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
958 {
959   PetscFunctionBegin;
960   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
961   dm->prealloc_only = only;
962   PetscFunctionReturn(0);
963 }
964 
965 #undef __FUNCT__
966 #define __FUNCT__ "DMGetWorkArray"
967 /*@C
968   DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
969 
970   Not Collective
971 
972   Input Parameters:
973 + dm - the DM object
974 . count - The minium size
975 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
976 
977   Output Parameter:
978 . array - the work array
979 
980   Level: developer
981 
982 .seealso DMDestroy(), DMCreate()
983 @*/
984 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
985 {
986   PetscErrorCode ierr;
987   DMWorkLink link;
988   size_t size;
989 
990   PetscFunctionBegin;
991   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
992   PetscValidPointer(mem,4);
993   if (dm->workin) {
994     link = dm->workin;
995     dm->workin = dm->workin->next;
996   } else {
997     ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
998   }
999   ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
1000   if (size*count > link->bytes) {
1001     ierr = PetscFree(link->mem);CHKERRQ(ierr);
1002     ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
1003     link->bytes = size*count;
1004   }
1005   link->next = dm->workout;
1006   dm->workout = link;
1007   *(void**)mem = link->mem;
1008   PetscFunctionReturn(0);
1009 }
1010 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size (presently unused; the array is identified by its address)
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) (presently unused)

  Output Parameter:
. array - the work array; set to PETSC_NULL on return

  Level: developer

.seealso DMGetWorkArray(), DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Locate the link owning this array on the checked-out list and splice it back onto the checked-in list */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p = link->next;
      link->next = dm->workin;
      dm->workin = link;
      *(void**)mem = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1049 
1050 #undef __FUNCT__
1051 #define __FUNCT__ "DMSetNullSpaceConstructor"
1052 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1053 {
1054   PetscFunctionBegin;
1055   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1056   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1057   dm->nullspaceConstructors[field] = nullsp;
1058   PetscFunctionReturn(0);
1059 }
1060 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or PETSC_NULL if not requested)
. fieldNames - The name for each field (or PETSC_NULL if not requested)
- fields     - The global indices for each field (or PETSC_NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Initialize all requested outputs so early returns leave them in a defined state */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = PETSC_NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = PETSC_NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt  nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* Pass 1: count the unconstrained dofs owned locally for each field (gdof > 0 means
       the point is owned by this process in the global section) */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    /* Allocate per-field index arrays; fieldSizes is reset and reused as a fill cursor */
    for (f = 0; f < nF; ++f) {
      ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0;
    }
    /* Pass 2: fill in the global indices, walking fields in offset order from each
       point's global offset (goff is advanced across all fields of the point) */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) {*numFields = nF;}
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      /* ISCreateGeneral takes ownership of each index array (PETSC_OWN_POINTER) */
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else {
    /* No default section: defer to the implementation, if it provides one */
    if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1173 
1174 #undef __FUNCT__
1175 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1176 /*@C
1177   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1178 
1179   Not Collective
1180 
1181   Input Parameters:
1182 + dm   - the DM object
1183 - name - the name of the field decomposition
1184 
1185   Output Parameter:
1186 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1187 
1188   Level: advanced
1189 
1190 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1191 @*/
1192 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1193 {
1194   PetscErrorCode ierr;
1195 
1196   PetscFunctionBegin;
1197   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1198   PetscValidCharPointer(name,2);
1199   PetscValidPointer(ddm,3);
1200   *ddm = PETSC_NULL;
1201   if (dm->ops->createfielddecompositiondm) {
1202     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1203   }
1204   PetscFunctionReturn(0);
1205 }
1206 
1207 #undef __FUNCT__
1208 #define __FUNCT__ "DMCreateFieldDecomposition"
1209 /*@C
1210   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1211                           corresponding to different fields: each IS contains the global indices of the dofs of the
1212                           corresponding field. The optional list of DMs define the DM for each subproblem.
1213                           Generalizes DMCreateFieldIS().
1214 
1215   Not collective
1216 
1217   Input Parameter:
1218 . dm - the DM object
1219 
1220   Output Parameters:
1221 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1222 . namelist  - The name for each field (or PETSC_NULL if not requested)
1223 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1224 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1225 
1226   Level: intermediate
1227 
1228   Notes:
1229   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1230   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1231   and all of the arrays should be freed with PetscFree().
1232 
1233 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1234 @*/
1235 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1236 {
1237   PetscErrorCode ierr;
1238 
1239   PetscFunctionBegin;
1240   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1241   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1242   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1243   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1244   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1245   if (!dm->ops->createfielddecomposition) {
1246     PetscSection section;
1247     PetscInt     numFields, f;
1248 
1249     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1250     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1251     if (section && numFields && dm->ops->createsubdm) {
1252       *len = numFields;
1253       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1254       for (f = 0; f < numFields; ++f) {
1255         const char *fieldName;
1256 
1257         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1258         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1259         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1260       }
1261     } else {
1262       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1263       /* By default there are no DMs associated with subproblems. */
1264       if (dmlist) *dmlist = PETSC_NULL;
1265     }
1266   }
1267   else {
1268     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr);
1269   }
1270   PetscFunctionReturn(0);
1271 }
1272 
#undef __FUNCT__
#define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm - the DM object
. numFields - the number of fields in this subproblem
- fields    - the field numbers defining the subproblem

  Output Parameters:
+ is    - The global indices for the subproblem (or PETSC_NULL if not requested)
- subdm - The DM for the subproblem (or PETSC_NULL if not requested)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  /* Both outputs are optional */
  if (is) {PetscValidPointer(is,4);}
  if (subdm) {PetscValidPointer(subdm,5);}
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr);
  } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1308 
1309 #undef __FUNCT__
1310 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1311 /*@C
1312   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1313 
1314   Not Collective
1315 
1316   Input Parameters:
1317 + dm   - the DM object
1318 - name - the name of the subdomain decomposition
1319 
1320   Output Parameter:
1321 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1322 
1323   Level: advanced
1324 
1325 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1326 @*/
1327 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1328 {
1329   PetscErrorCode ierr;
1330 
1331   PetscFunctionBegin;
1332   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1333   PetscValidCharPointer(name,2);
1334   PetscValidPointer(ddm,3);
1335   *ddm = PETSC_NULL;
1336   if (dm->ops->createdomaindecompositiondm) {
1337     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1338   }
1339   PetscFunctionReturn(0);
1340 }
1341 
1342 #undef __FUNCT__
1343 #define __FUNCT__ "DMCreateDomainDecomposition"
1344 /*@C
1345   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1346                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1347                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1348                           define a nonoverlapping covering, while outer subdomains can overlap.
1349                           The optional list of DMs define the DM for each subproblem.
1350 
1351   Not collective
1352 
1353   Input Parameter:
1354 . dm - the DM object
1355 
1356   Output Parameters:
1357 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1358 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1359 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1360 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1361 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1362 
1363   Level: intermediate
1364 
1365   Notes:
1366   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1367   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1368   and all of the arrays should be freed with PetscFree().
1369 
1370 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1371 @*/
1372 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1373 {
1374   PetscErrorCode ierr;
1375   DMSubDomainHookLink link;
1376   PetscInt            i,l;
1377 
1378   PetscFunctionBegin;
1379   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1380   if (len)           {PetscValidPointer(len,2);            *len         = PETSC_NULL;}
1381   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1382   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1383   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1384   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1385   if (dm->ops->createdomaindecomposition) {
1386     ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr);
1387   } else {
1388     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DMCreateDomainDecomposition not supported for this DM type!");CHKERRQ(ierr);
1389   }
1390   if (len) *len = l;
1391   for (i = 0; i < l; i++) {
1392     for (link=dm->subdomainhook; link; link=link->next) {
1393       if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
1394     }
1395     (*dmlist)[i]->ctx = dm->ctx;
1396   }
1397   PetscFunctionReturn(0);
1398 }
1399 
1400 
1401 #undef __FUNCT__
1402 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1403 /*@C
1404   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1405 
1406   Not collective
1407 
1408   Input Parameters:
1409 + dm - the DM object
1410 . n  - the number of subdomain scatters
1411 - subdms - the local subdomains
1412 
1413   Output Parameters:
1414 + n     - the number of scatters returned
1415 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1416 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1417 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1418 
1419   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1420   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1421   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1422   solution and residual data.
1423 
1424   Level: developer
1425 
1426 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1427 @*/
1428 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1429 {
1430   PetscErrorCode ierr;
1431 
1432   PetscFunctionBegin;
1433   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1434   PetscValidPointer(subdms,3);
1435   PetscValidPointer(iscat,4);
1436   PetscValidPointer(oscat,5);
1437   PetscValidPointer(gscat,6);
1438   if (dm->ops->createddscatters) {
1439     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat); CHKERRQ(ierr);
1440   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1441   PetscFunctionReturn(0);
1442 }
1443 
1444 #undef __FUNCT__
1445 #define __FUNCT__ "DMRefine"
1446 /*@
1447   DMRefine - Refines a DM object
1448 
1449   Collective on DM
1450 
1451   Input Parameter:
1452 + dm   - the DM object
1453 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1454 
1455   Output Parameter:
1456 . dmf - the refined DM, or PETSC_NULL
1457 
1458   Note: If no refinement was done, the return value is PETSC_NULL
1459 
1460   Level: developer
1461 
1462 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1463 @*/
1464 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1465 {
1466   PetscErrorCode ierr;
1467   DMRefineHookLink link;
1468 
1469   PetscFunctionBegin;
1470   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1471   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1472   if (*dmf) {
1473     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1474     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1475     (*dmf)->ctx       = dm->ctx;
1476     (*dmf)->leveldown = dm->leveldown;
1477     (*dmf)->levelup   = dm->levelup + 1;
1478     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1479     for (link=dm->refinehook; link; link=link->next) {
1480       if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
1481     }
1482   }
1483   PetscFunctionReturn(0);
1484 }
1485 
1486 #undef __FUNCT__
1487 #define __FUNCT__ "DMRefineHookAdd"
1488 /*@
1489    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1490 
1491    Logically Collective
1492 
1493    Input Arguments:
1494 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1495 .  refinehook - function to run when setting up a coarser level
1496 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1497 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1498 
1499    Calling sequence of refinehook:
1500 $    refinehook(DM coarse,DM fine,void *ctx);
1501 
1502 +  coarse - coarse level DM
1503 .  fine - fine level DM to interpolate problem to
1504 -  ctx - optional user-defined function context
1505 
1506    Calling sequence for interphook:
1507 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1508 
1509 +  coarse - coarse level DM
1510 .  interp - matrix interpolating a coarse-level solution to the finer grid
1511 .  fine - fine level DM to update
1512 -  ctx - optional user-defined function context
1513 
1514    Level: advanced
1515 
1516    Notes:
1517    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1518 
1519    If this function is called multiple times, the hooks will be run in the order they are added.
1520 
1521 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1522 @*/
1523 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1524 {
1525   PetscErrorCode ierr;
1526   DMRefineHookLink link,*p;
1527 
1528   PetscFunctionBegin;
1529   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1530   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1531   ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1532   link->refinehook = refinehook;
1533   link->interphook = interphook;
1534   link->ctx = ctx;
1535   link->next = PETSC_NULL;
1536   *p = link;
1537   PetscFunctionReturn(0);
1538 }
1539 
1540 #undef __FUNCT__
1541 #define __FUNCT__ "DMInterpolate"
1542 /*@
1543    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1544 
1545    Collective if any hooks are
1546 
1547    Input Arguments:
1548 +  coarse - coarser DM to use as a base
1549 .  restrct - interpolation matrix, apply using MatInterpolate()
1550 -  fine - finer DM to update
1551 
1552    Level: developer
1553 
1554 .seealso: DMRefineHookAdd(), MatInterpolate()
1555 @*/
1556 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1557 {
1558   PetscErrorCode ierr;
1559   DMRefineHookLink link;
1560 
1561   PetscFunctionBegin;
1562   for (link=fine->refinehook; link; link=link->next) {
1563     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1564   }
1565   PetscFunctionReturn(0);
1566 }
1567 
1568 #undef __FUNCT__
1569 #define __FUNCT__ "DMGetRefineLevel"
1570 /*@
1571     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1572 
1573     Not Collective
1574 
1575     Input Parameter:
1576 .   dm - the DM object
1577 
1578     Output Parameter:
1579 .   level - number of refinements
1580 
1581     Level: developer
1582 
1583 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1584 
1585 @*/
1586 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1587 {
1588   PetscFunctionBegin;
1589   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1590   *level = dm->levelup;
1591   PetscFunctionReturn(0);
1592 }
1593 
1594 #undef __FUNCT__
1595 #define __FUNCT__ "DMGlobalToLocalHookAdd"
1596 /*@
1597    DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called
1598 
1599    Logically Collective
1600 
1601    Input Arguments:
1602 +  dm - the DM
1603 .  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
1604 .  endhook - function to run after DMGlobalToLocalEnd() has completed
1605 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1606 
1607    Calling sequence for beginhook:
1608 $    beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1609 
1610 +  dm - global DM
1611 .  g - global vector
1612 .  mode - mode
1613 .  l - local vector
1614 -  ctx - optional user-defined function context
1615 
1616 
1617    Calling sequence for endhook:
1618 $    beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1619 
1620 +  global - global DM
1621 -  ctx - optional user-defined function context
1622 
1623    Level: advanced
1624 
1625    Notes:
1626    This function is only needed if auxiliary data needs to be set up on coarse grids.
1627 
1628    If this function is called multiple times, the hooks will be run in the order they are added.
1629 
1630    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1631    extract the finest level information from its context (instead of from the SNES).
1632 
1633 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1634 @*/
1635 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
1636 {
1637   PetscErrorCode ierr;
1638   DMGlobalToLocalHookLink link,*p;
1639 
1640   PetscFunctionBegin;
1641   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1642   for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1643   ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
1644   link->beginhook = beginhook;
1645   link->endhook = endhook;
1646   link->ctx = ctx;
1647   link->next = PETSC_NULL;
1648   *p = link;
1649   PetscFunctionReturn(0);
1650 }
1651 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user hooks (registered with DMGlobalToLocalHookAdd()) before starting the communication */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The PetscSF broadcast path only supports insertion */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    /* Communication completes in DMGlobalToLocalEnd() via PetscSFBcastEnd() */
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* The *_ALL_VALUES modes are translated to the plain modes for the type-specific implementation */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1697 
1698 #undef __FUNCT__
1699 #define __FUNCT__ "DMGlobalToLocalEnd"
1700 /*@
1701     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1702 
1703     Neighbor-wise Collective on DM
1704 
1705     Input Parameters:
1706 +   dm - the DM object
1707 .   g - the global vector
1708 .   mode - INSERT_VALUES or ADD_VALUES
1709 -   l - the local vector
1710 
1711 
1712     Level: beginner
1713 
1714 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1715 
1716 @*/
1717 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1718 {
1719   PetscSF        sf;
1720   PetscErrorCode ierr;
1721   PetscScalar    *lArray, *gArray;
1722   DMGlobalToLocalHookLink link;
1723 
1724   PetscFunctionBegin;
1725   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1726   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1727   if (sf) {
1728   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1729 
1730     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1731     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1732     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1733     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1734     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1735   } else {
1736     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1737   }
1738   for (link=dm->gtolhook; link; link=link->next) {
1739     if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
1740   }
1741   PetscFunctionReturn(0);
1742 }
1743 
1744 #undef __FUNCT__
1745 #define __FUNCT__ "DMLocalToGlobalBegin"
1746 /*@
1747     DMLocalToGlobalBegin - updates global vectors from local vectors
1748 
1749     Neighbor-wise Collective on DM
1750 
1751     Input Parameters:
1752 +   dm - the DM object
1753 .   l - the local vector
1754 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1755            base point.
1756 - - the global vector
1757 
1758     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1759            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1760            global array to the final global array with VecAXPY().
1761 
1762     Level: beginner
1763 
1764 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1765 
1766 @*/
1767 PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1768 {
1769   PetscSF        sf;
1770   PetscErrorCode ierr;
1771 
1772   PetscFunctionBegin;
1773   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1774   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1775   if (sf) {
1776     MPI_Op       op;
1777     PetscScalar *lArray, *gArray;
1778 
1779     switch(mode) {
1780     case INSERT_VALUES:
1781     case INSERT_ALL_VALUES:
1782 #if defined(PETSC_HAVE_MPI_REPLACE)
1783       op = MPI_REPLACE; break;
1784 #else
1785       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1786 #endif
1787     case ADD_VALUES:
1788     case ADD_ALL_VALUES:
1789       op = MPI_SUM; break;
1790   default:
1791     SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1792     }
1793     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1794     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1795     ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1796     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1797     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1798   } else {
1799     ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1800   }
1801   PetscFunctionReturn(0);
1802 }
1803 
1804 #undef __FUNCT__
1805 #define __FUNCT__ "DMLocalToGlobalEnd"
1806 /*@
1807     DMLocalToGlobalEnd - updates global vectors from local vectors
1808 
1809     Neighbor-wise Collective on DM
1810 
1811     Input Parameters:
1812 +   dm - the DM object
1813 .   l - the local vector
1814 .   mode - INSERT_VALUES or ADD_VALUES
1815 -   g - the global vector
1816 
1817 
1818     Level: beginner
1819 
1820 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()
1821 
1822 @*/
1823 PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
1824 {
1825   PetscSF        sf;
1826   PetscErrorCode ierr;
1827 
1828   PetscFunctionBegin;
1829   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1830   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1831   if (sf) {
1832     MPI_Op       op;
1833     PetscScalar *lArray, *gArray;
1834 
1835     switch(mode) {
1836     case INSERT_VALUES:
1837     case INSERT_ALL_VALUES:
1838 #if defined(PETSC_HAVE_MPI_REPLACE)
1839       op = MPI_REPLACE; break;
1840 #else
1841       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1842 #endif
1843     case ADD_VALUES:
1844     case ADD_ALL_VALUES:
1845       op = MPI_SUM; break;
1846     default:
1847       SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1848     }
1849     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1850     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1851     ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1852     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1853     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1854   } else {
1855     ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1856   }
1857   PetscFunctionReturn(0);
1858 }
1859 
1860 #undef __FUNCT__
1861 #define __FUNCT__ "DMCoarsen"
1862 /*@
1863     DMCoarsen - Coarsens a DM object
1864 
1865     Collective on DM
1866 
1867     Input Parameter:
1868 +   dm - the DM object
1869 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1870 
1871     Output Parameter:
1872 .   dmc - the coarsened DM
1873 
1874     Level: developer
1875 
1876 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1877 
1878 @*/
1879 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1880 {
1881   PetscErrorCode ierr;
1882   DMCoarsenHookLink link;
1883 
1884   PetscFunctionBegin;
1885   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1886   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1887   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1888   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1889   (*dmc)->ctx       = dm->ctx;
1890   (*dmc)->levelup   = dm->levelup;
1891   (*dmc)->leveldown = dm->leveldown + 1;
1892   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1893   for (link=dm->coarsenhook; link; link=link->next) {
1894     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1895   }
1896   PetscFunctionReturn(0);
1897 }
1898 
1899 #undef __FUNCT__
1900 #define __FUNCT__ "DMCoarsenHookAdd"
1901 /*@
1902    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1903 
1904    Logically Collective
1905 
1906    Input Arguments:
1907 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1908 .  coarsenhook - function to run when setting up a coarser level
1909 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1910 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1911 
1912    Calling sequence of coarsenhook:
1913 $    coarsenhook(DM fine,DM coarse,void *ctx);
1914 
1915 +  fine - fine level DM
1916 .  coarse - coarse level DM to restrict problem to
1917 -  ctx - optional user-defined function context
1918 
1919    Calling sequence for restricthook:
1920 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1921 
1922 +  fine - fine level DM
1923 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1924 .  rscale - scaling vector for restriction
1925 .  inject - matrix restricting by injection
1926 .  coarse - coarse level DM to update
1927 -  ctx - optional user-defined function context
1928 
1929    Level: advanced
1930 
1931    Notes:
1932    This function is only needed if auxiliary data needs to be set up on coarse grids.
1933 
1934    If this function is called multiple times, the hooks will be run in the order they are added.
1935 
1936    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1937    extract the finest level information from its context (instead of from the SNES).
1938 
1939 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1940 @*/
1941 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1942 {
1943   PetscErrorCode ierr;
1944   DMCoarsenHookLink link,*p;
1945 
1946   PetscFunctionBegin;
1947   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1948   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1949   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1950   link->coarsenhook = coarsenhook;
1951   link->restricthook = restricthook;
1952   link->ctx = ctx;
1953   link->next = PETSC_NULL;
1954   *p = link;
1955   PetscFunctionReturn(0);
1956 }
1957 
1958 #undef __FUNCT__
1959 #define __FUNCT__ "DMRestrict"
1960 /*@
1961    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1962 
1963    Collective if any hooks are
1964 
1965    Input Arguments:
1966 +  fine - finer DM to use as a base
1967 .  restrct - restriction matrix, apply using MatRestrict()
1968 .  inject - injection matrix, also use MatRestrict()
1969 -  coarse - coarer DM to update
1970 
1971    Level: developer
1972 
1973 .seealso: DMCoarsenHookAdd(), MatRestrict()
1974 @*/
1975 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1976 {
1977   PetscErrorCode ierr;
1978   DMCoarsenHookLink link;
1979 
1980   PetscFunctionBegin;
1981   for (link=fine->coarsenhook; link; link=link->next) {
1982     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1983   }
1984   PetscFunctionReturn(0);
1985 }
1986 
#undef __FUNCT__
#define __FUNCT__ "DMSubDomainHookAdd"
/*@
   DMSubDomainHookAdd - adds callbacks to be run when restricting a problem to subdomain block DMs

   Logically Collective

   Input Arguments:
+  global - global DM
.  ddhook - function to run when a subdomain block DM is set up (signature (DM global,DM block,void *ctx); invoked outside this file -- TODO confirm call site)
.  restricthook - function to run to update data on block solve (at the beginning of the block solve)
-  ctx - [optional] user-defined context to provide data for the hooks (may be PETSC_NULL)

   Calling sequence for restricthook:
$    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)

+  global - global DM
.  out    - scatter to the outer (with ghost and overlap points) block vector
.  in     - scatter to block vector values only owned locally
.  block  - block DM (may just be a shell if the global DM is passed in correctly)
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on subdomain DMs.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMSubDomainHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ddhook = ddhook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
2037 
2038 #undef __FUNCT__
2039 #define __FUNCT__ "DMSubDomainRestrict"
2040 /*@
2041    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2042 
2043    Collective if any hooks are
2044 
2045    Input Arguments:
2046 +  fine - finer DM to use as a base
2047 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
2048 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
2049 -  coarse - coarer DM to update
2050 
2051    Level: developer
2052 
2053 .seealso: DMCoarsenHookAdd(), MatRestrict()
2054 @*/
2055 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2056 {
2057   PetscErrorCode ierr;
2058   DMSubDomainHookLink link;
2059 
2060   PetscFunctionBegin;
2061   for (link=global->subdomainhook; link; link=link->next) {
2062     if (link->restricthook) {ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);}
2063   }
2064   PetscFunctionReturn(0);
2065 }
2066 
2067 #undef __FUNCT__
2068 #define __FUNCT__ "DMGetCoarsenLevel"
2069 /*@
2070     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2071 
2072     Not Collective
2073 
2074     Input Parameter:
2075 .   dm - the DM object
2076 
2077     Output Parameter:
2078 .   level - number of coarsenings
2079 
2080     Level: developer
2081 
2082 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2083 
2084 @*/
2085 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2086 {
2087   PetscFunctionBegin;
2088   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2089   *level = dm->leveldown;
2090   PetscFunctionReturn(0);
2091 }
2092 
2093 
2094 
2095 #undef __FUNCT__
2096 #define __FUNCT__ "DMRefineHierarchy"
2097 /*@C
2098     DMRefineHierarchy - Refines a DM object, all levels at once
2099 
2100     Collective on DM
2101 
2102     Input Parameter:
2103 +   dm - the DM object
2104 -   nlevels - the number of levels of refinement
2105 
2106     Output Parameter:
2107 .   dmf - the refined DM hierarchy
2108 
2109     Level: developer
2110 
2111 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2112 
2113 @*/
2114 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2115 {
2116   PetscErrorCode ierr;
2117 
2118   PetscFunctionBegin;
2119   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2120   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2121   if (nlevels == 0) PetscFunctionReturn(0);
2122   if (dm->ops->refinehierarchy) {
2123     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2124   } else if (dm->ops->refine) {
2125     PetscInt i;
2126 
2127     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2128     for (i=1; i<nlevels; i++) {
2129       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2130     }
2131   } else {
2132     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2133   }
2134   PetscFunctionReturn(0);
2135 }
2136 
2137 #undef __FUNCT__
2138 #define __FUNCT__ "DMCoarsenHierarchy"
2139 /*@C
2140     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2141 
2142     Collective on DM
2143 
2144     Input Parameter:
2145 +   dm - the DM object
2146 -   nlevels - the number of levels of coarsening
2147 
2148     Output Parameter:
2149 .   dmc - the coarsened DM hierarchy
2150 
2151     Level: developer
2152 
2153 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2154 
2155 @*/
2156 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2157 {
2158   PetscErrorCode ierr;
2159 
2160   PetscFunctionBegin;
2161   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2162   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2163   if (nlevels == 0) PetscFunctionReturn(0);
2164   PetscValidPointer(dmc,3);
2165   if (dm->ops->coarsenhierarchy) {
2166     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2167   } else if (dm->ops->coarsen) {
2168     PetscInt i;
2169 
2170     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2171     for (i=1; i<nlevels; i++) {
2172       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2173     }
2174   } else {
2175     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2176   }
2177   PetscFunctionReturn(0);
2178 }
2179 
2180 #undef __FUNCT__
2181 #define __FUNCT__ "DMCreateAggregates"
2182 /*@
2183    DMCreateAggregates - Gets the aggregates that map between
2184    grids associated with two DMs.
2185 
2186    Collective on DM
2187 
2188    Input Parameters:
2189 +  dmc - the coarse grid DM
2190 -  dmf - the fine grid DM
2191 
2192    Output Parameters:
2193 .  rest - the restriction matrix (transpose of the projection matrix)
2194 
2195    Level: intermediate
2196 
2197 .keywords: interpolation, restriction, multigrid
2198 
2199 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2200 @*/
2201 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2202 {
2203   PetscErrorCode ierr;
2204 
2205   PetscFunctionBegin;
2206   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2207   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2208   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2209   PetscFunctionReturn(0);
2210 }
2211 
2212 #undef __FUNCT__
2213 #define __FUNCT__ "DMSetApplicationContextDestroy"
2214 /*@C
2215     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2216 
2217     Not Collective
2218 
2219     Input Parameters:
2220 +   dm - the DM object
2221 -   destroy - the destroy function
2222 
2223     Level: intermediate
2224 
2225 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2226 
2227 @*/
2228 PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
2229 {
2230   PetscFunctionBegin;
2231   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2232   dm->ctxdestroy = destroy;
2233   PetscFunctionReturn(0);
2234 }
2235 
2236 #undef __FUNCT__
2237 #define __FUNCT__ "DMSetApplicationContext"
2238 /*@
2239     DMSetApplicationContext - Set a user context into a DM object
2240 
2241     Not Collective
2242 
2243     Input Parameters:
2244 +   dm - the DM object
2245 -   ctx - the user context
2246 
2247     Level: intermediate
2248 
2249 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2250 
2251 @*/
2252 PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
2253 {
2254   PetscFunctionBegin;
2255   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2256   dm->ctx = ctx;
2257   PetscFunctionReturn(0);
2258 }
2259 
2260 #undef __FUNCT__
2261 #define __FUNCT__ "DMGetApplicationContext"
2262 /*@
2263     DMGetApplicationContext - Gets a user context from a DM object
2264 
2265     Not Collective
2266 
2267     Input Parameter:
2268 .   dm - the DM object
2269 
2270     Output Parameter:
2271 .   ctx - the user context
2272 
2273     Level: intermediate
2274 
2275 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2276 
2277 @*/
2278 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2279 {
2280   PetscFunctionBegin;
2281   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2282   *(void**)ctx = dm->ctx;
2283   PetscFunctionReturn(0);
2284 }
2285 
2286 #undef __FUNCT__
2287 #define __FUNCT__ "DMSetVariableBounds"
2288 /*@C
2289     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2290 
2291     Logically Collective on DM
2292 
2293     Input Parameter:
2294 +   dm - the DM object
2295 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2296 
2297     Level: intermediate
2298 
2299 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2300          DMSetJacobian()
2301 
2302 @*/
2303 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2304 {
2305   PetscFunctionBegin;
2306   dm->ops->computevariablebounds = f;
2307   PetscFunctionReturn(0);
2308 }
2309 
2310 #undef __FUNCT__
2311 #define __FUNCT__ "DMHasVariableBounds"
2312 /*@
2313     DMHasVariableBounds - does the DM object have a variable bounds function?
2314 
2315     Not Collective
2316 
2317     Input Parameter:
2318 .   dm - the DM object to destroy
2319 
2320     Output Parameter:
2321 .   flg - PETSC_TRUE if the variable bounds function exists
2322 
2323     Level: developer
2324 
2325 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2326 
2327 @*/
2328 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2329 {
2330   PetscFunctionBegin;
2331   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2332   PetscFunctionReturn(0);
2333 }
2334 
2335 #undef __FUNCT__
2336 #define __FUNCT__ "DMComputeVariableBounds"
2337 /*@C
2338     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2339 
2340     Logically Collective on DM
2341 
2342     Input Parameters:
2343 +   dm - the DM object to destroy
2344 -   x  - current solution at which the bounds are computed
2345 
2346     Output parameters:
2347 +   xl - lower bound
2348 -   xu - upper bound
2349 
2350     Level: intermediate
2351 
2352 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2353 
2354 @*/
2355 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2356 {
2357   PetscErrorCode ierr;
2358   PetscFunctionBegin;
2359   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2360   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2361   if (dm->ops->computevariablebounds) {
2362     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr);
2363   }
2364   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2365   PetscFunctionReturn(0);
2366 }
2367 
2368 #undef __FUNCT__
2369 #define __FUNCT__ "DMHasColoring"
2370 /*@
2371     DMHasColoring - does the DM object have a method of providing a coloring?
2372 
2373     Not Collective
2374 
2375     Input Parameter:
2376 .   dm - the DM object
2377 
2378     Output Parameter:
2379 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2380 
2381     Level: developer
2382 
2383 .seealso DMHasFunction(), DMCreateColoring()
2384 
2385 @*/
2386 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2387 {
2388   PetscFunctionBegin;
2389   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2390   PetscFunctionReturn(0);
2391 }
2392 
2393 #undef  __FUNCT__
2394 #define __FUNCT__ "DMSetVec"
2395 /*@C
2396     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2397 
2398     Collective on DM
2399 
2400     Input Parameter:
2401 +   dm - the DM object
2402 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2403 
2404     Level: developer
2405 
2406 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2407 
2408 @*/
2409 PetscErrorCode  DMSetVec(DM dm,Vec x)
2410 {
2411   PetscErrorCode ierr;
2412   PetscFunctionBegin;
2413   if (x) {
2414     if (!dm->x) {
2415       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2416     }
2417     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2418   }
2419   else if (dm->x) {
2420     ierr = VecDestroy(&dm->x);  CHKERRQ(ierr);
2421   }
2422   PetscFunctionReturn(0);
2423 }
2424 
/* Registry of available DM implementations, populated by DMRegister()/DMRegisterAll() */
PetscFList DMList                       = PETSC_NULL;
/* Guards against repeated registration; reset by DMRegisterDestroy() */
PetscBool  DMRegisterAllCalled          = PETSC_FALSE;
2427 
#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA, DMSLICED, or DMCOMPOSITE).

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* No-op if the DM already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Look up the constructor for the requested type in the registry */
  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
  ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down any previous implementation before rebuilding as the new type */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = PETSC_NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2473 
2474 #undef __FUNCT__
2475 #define __FUNCT__ "DMGetType"
2476 /*@C
2477   DMGetType - Gets the DM type name (as a string) from the DM.
2478 
2479   Not Collective
2480 
2481   Input Parameter:
2482 . dm  - The DM
2483 
2484   Output Parameter:
2485 . type - The DM type name
2486 
2487   Level: intermediate
2488 
2489 .keywords: DM, get, type, name
2490 .seealso: DMSetType(), DMCreate()
2491 @*/
2492 PetscErrorCode  DMGetType(DM dm, DMType *type)
2493 {
2494   PetscErrorCode ierr;
2495 
2496   PetscFunctionBegin;
2497   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2498   PetscValidCharPointer(type,2);
2499   if (!DMRegisterAllCalled) {
2500     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2501   }
2502   *type = ((PetscObject)dm)->type_name;
2503   PetscFunctionReturn(0);
2504 }
2505 
#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  /* NOTE(review): sametype/issame are computed but never consulted below; conversion to the
     same type still goes through the registered converters -- confirm this is intended */
  {
    PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      /* the temporary DM of the target type is only needed for the function query */
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2601 
2602 /*--------------------------------------------------------------------------------------------------------------------*/
2603 
2604 #undef __FUNCT__
2605 #define __FUNCT__ "DMRegister"
2606 /*@C
2607   DMRegister - See DMRegisterDynamic()
2608 
2609   Level: advanced
2610 @*/
2611 PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
2612 {
2613   char fullname[PETSC_MAX_PATH_LEN];
2614   PetscErrorCode ierr;
2615 
2616   PetscFunctionBegin;
2617   ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
2618   ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
2619   ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
2620   ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
2621   PetscFunctionReturn(0);
2622 }
2623 
2624 
2625 /*--------------------------------------------------------------------------------------------------------------------*/
2626 #undef __FUNCT__
2627 #define __FUNCT__ "DMRegisterDestroy"
2628 /*@C
2629    DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2630 
2631    Not Collective
2632 
2633    Level: advanced
2634 
2635 .keywords: DM, register, destroy
2636 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
2637 @*/
2638 PetscErrorCode  DMRegisterDestroy(void)
2639 {
2640   PetscErrorCode ierr;
2641 
2642   PetscFunctionBegin;
2643   ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
2644   DMRegisterAllCalled = PETSC_FALSE;
2645   PetscFunctionReturn(0);
2646 }
2647 
2648 #undef __FUNCT__
2649 #define __FUNCT__ "DMLoad"
2650 /*@C
2651   DMLoad - Loads a DM that has been stored in binary  with DMView().
2652 
2653   Collective on PetscViewer
2654 
2655   Input Parameters:
2656 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2657            some related function before a call to DMLoad().
2658 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2659            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2660 
2661    Level: intermediate
2662 
2663   Notes:
2664    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2665 
2666   Notes for advanced users:
2667   Most users should not need to know the details of the binary storage
2668   format, since DMLoad() and DMView() completely hide these details.
2669   But for anyone who's interested, the standard binary matrix storage
2670   format is
2671 .vb
2672      has not yet been determined
2673 .ve
2674 
2675 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2676 @*/
2677 PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
2678 {
2679   PetscErrorCode ierr;
2680   PetscBool      isbinary;
2681   PetscInt       classid;
2682   char           type[256];
2683 
2684   PetscFunctionBegin;
2685   PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
2686   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
2687   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
2688   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
2689 
2690   ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
2691   if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
2692   ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
2693   ierr = DMSetType(newdm, type);CHKERRQ(ierr);
2694   if (newdm->ops->load) {
2695     ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
2696   }
2697   PetscFunctionReturn(0);
2698 }
2699 
2700 /******************************** FEM Support **********************************/
2701 
2702 #undef __FUNCT__
2703 #define __FUNCT__ "DMPrintCellVector"
2704 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) {
2705   PetscInt       f;
2706   PetscErrorCode ierr;
2707 
2708   PetscFunctionBegin;
2709   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2710   for (f = 0; f < len; ++f) {
2711     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2712   }
2713   PetscFunctionReturn(0);
2714 }
2715 
2716 #undef __FUNCT__
2717 #define __FUNCT__ "DMPrintCellMatrix"
2718 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) {
2719   PetscInt       f, g;
2720   PetscErrorCode ierr;
2721 
2722   PetscFunctionBegin;
2723   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2724   for (f = 0; f < rows; ++f) {
2725     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2726     for (g = 0; g < cols; ++g) {
2727       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2728     }
2729     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2730   }
2731   PetscFunctionReturn(0);
2732 }
2733 
2734 #undef __FUNCT__
2735 #define __FUNCT__ "DMGetDefaultSection"
2736 /*@
2737   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2738 
2739   Input Parameter:
2740 . dm - The DM
2741 
2742   Output Parameter:
2743 . section - The PetscSection
2744 
2745   Level: intermediate
2746 
2747   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2748 
2749 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2750 @*/
2751 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
2752   PetscFunctionBegin;
2753   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2754   PetscValidPointer(section, 2);
2755   *section = dm->defaultSection;
2756   PetscFunctionReturn(0);
2757 }
2758 
2759 #undef __FUNCT__
2760 #define __FUNCT__ "DMSetDefaultSection"
2761 /*@
2762   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2763 
2764   Input Parameters:
2765 + dm - The DM
2766 - section - The PetscSection
2767 
2768   Level: intermediate
2769 
2770   Note: Any existing Section will be destroyed
2771 
2772 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2773 @*/
2774 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
2775   PetscInt       numFields;
2776   PetscInt       f;
2777   PetscErrorCode ierr;
2778 
2779   PetscFunctionBegin;
2780   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2781   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2782   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2783   dm->defaultSection = section;
2784   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2785   if (numFields) {
2786     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2787     for (f = 0; f < numFields; ++f) {
2788       const char *name;
2789 
2790       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2791       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2792     }
2793   }
2794   PetscFunctionReturn(0);
2795 }
2796 
2797 #undef __FUNCT__
2798 #define __FUNCT__ "DMGetDefaultGlobalSection"
2799 /*@
2800   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2801 
2802   Collective on DM
2803 
2804   Input Parameter:
2805 . dm - The DM
2806 
2807   Output Parameter:
2808 . section - The PetscSection
2809 
2810   Level: intermediate
2811 
2812   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2813 
2814 .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2815 @*/
2816 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
2817   PetscErrorCode ierr;
2818 
2819   PetscFunctionBegin;
2820   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2821   PetscValidPointer(section, 2);
2822   if (!dm->defaultGlobalSection) {
2823     if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
2824     ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
2825     ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
2826   }
2827   *section = dm->defaultGlobalSection;
2828   PetscFunctionReturn(0);
2829 }
2830 
2831 #undef __FUNCT__
2832 #define __FUNCT__ "DMSetDefaultGlobalSection"
2833 /*@
2834   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2835 
2836   Input Parameters:
2837 + dm - The DM
2838 - section - The PetscSection
2839 
2840   Level: intermediate
2841 
2842   Note: Any existing Section will be destroyed
2843 
2844 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2845 @*/
2846 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
2847   PetscErrorCode ierr;
2848 
2849   PetscFunctionBegin;
2850   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2851   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2852   dm->defaultGlobalSection = section;
2853   PetscFunctionReturn(0);
2854 }
2855 
2856 #undef __FUNCT__
2857 #define __FUNCT__ "DMGetDefaultSF"
2858 /*@
2859   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2860   it is created from the default PetscSection layouts in the DM.
2861 
2862   Input Parameter:
2863 . dm - The DM
2864 
2865   Output Parameter:
2866 . sf - The PetscSF
2867 
2868   Level: intermediate
2869 
2870   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2871 
2872 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2873 @*/
2874 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) {
2875   PetscInt       nroots;
2876   PetscErrorCode ierr;
2877 
2878   PetscFunctionBegin;
2879   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2880   PetscValidPointer(sf, 2);
2881   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
2882   if (nroots < 0) {
2883     PetscSection section, gSection;
2884 
2885     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2886     if (section) {
2887       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2888       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2889     } else {
2890       *sf = PETSC_NULL;
2891       PetscFunctionReturn(0);
2892     }
2893   }
2894   *sf = dm->defaultSF;
2895   PetscFunctionReturn(0);
2896 }
2897 
2898 #undef __FUNCT__
2899 #define __FUNCT__ "DMSetDefaultSF"
2900 /*@
2901   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
2902 
2903   Input Parameters:
2904 + dm - The DM
2905 - sf - The PetscSF
2906 
2907   Level: intermediate
2908 
2909   Note: Any previous SF is destroyed
2910 
2911 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
2912 @*/
2913 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) {
2914   PetscErrorCode ierr;
2915 
2916   PetscFunctionBegin;
2917   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2918   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
2919   ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
2920   dm->defaultSF = sf;
2921   PetscFunctionReturn(0);
2922 }
2923 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

  Note: The result is stored directly into dm->defaultSF. Leaves are the local dofs
  (offsets in the local section) and roots are the locally owned global dofs.

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Layout of the locally owned dofs across the communicator: the range array lets us
     map a global dof number to the rank that owns it */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves = unconstrained dofs over all points in the chart.
     A negative gdof encodes -(dof+1) for a point whose dofs are owned remotely. */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: for every unconstrained dof record its local offset (leaf index)
     and its owning (rank, index) pair (root) */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      /* The global size must match the local size either with or without constraints applied */
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Leaf indices: local offsets of this point's dofs, skipping constrained ones
       (cind is assumed sorted ascending -- TODO confirm against PetscSectionSetConstraintIndices) */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned point: goff encodes -(globalOffset+1); locate the owner rank in ranges */
      for(d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* no exact match: convert the encoded insertion point to the owning rank */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: the roots live on this rank */
      for(d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of both arrays (PETSC_OWN_POINTER); do not free them here */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3012 
3013 #undef __FUNCT__
3014 #define __FUNCT__ "DMGetPointSF"
3015 /*@
3016   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3017 
3018   Input Parameter:
3019 . dm - The DM
3020 
3021   Output Parameter:
3022 . sf - The PetscSF
3023 
3024   Level: intermediate
3025 
3026   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3027 
3028 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3029 @*/
3030 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) {
3031   PetscFunctionBegin;
3032   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3033   PetscValidPointer(sf, 2);
3034   *sf = dm->sf;
3035   PetscFunctionReturn(0);
3036 }
3037 
3038 #undef __FUNCT__
3039 #define __FUNCT__ "DMSetPointSF"
3040 /*@
3041   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3042 
3043   Input Parameters:
3044 + dm - The DM
3045 - sf - The PetscSF
3046 
3047   Level: intermediate
3048 
3049 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3050 @*/
3051 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) {
3052   PetscErrorCode ierr;
3053 
3054   PetscFunctionBegin;
3055   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3056   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3057   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3058   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3059   dm->sf = sf;
3060   PetscFunctionReturn(0);
3061 }
3062 
#undef __FUNCT__
#define __FUNCT__ "DMGetNumFields"
/* DMGetNumFields - Returns the number of fields attached to the DM (dm->numFields). */
PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(numFields, 2);
  *numFields = dm->numFields;
  PetscFunctionReturn(0);
}
3073 
3074 #undef __FUNCT__
3075 #define __FUNCT__ "DMSetNumFields"
3076 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3077 {
3078   PetscInt       f;
3079   PetscErrorCode ierr;
3080 
3081   PetscFunctionBegin;
3082   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3083   for (f = 0; f < dm->numFields; ++f) {
3084     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3085   }
3086   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
3087   dm->numFields = numFields;
3088   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3089   for (f = 0; f < dm->numFields; ++f) {
3090     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
3091   }
3092   PetscFunctionReturn(0);
3093 }
3094 
3095 #undef __FUNCT__
3096 #define __FUNCT__ "DMGetField"
3097 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3098 {
3099   PetscFunctionBegin;
3100   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3101   PetscValidPointer(field, 2);
3102   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3103   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3104   *field = dm->fields[f];
3105   PetscFunctionReturn(0);
3106 }
3107 
3108 #undef __FUNCT__
3109 #define __FUNCT__ "DMSetCoordinates"
3110 /*@
3111   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3112 
3113   Collective on DM
3114 
3115   Input Parameters:
3116 + dm - the DM
3117 - c - coordinate vector
3118 
3119   Note:
3120   The coordinates do include those for ghost points, which are in the local vector
3121 
3122   Level: intermediate
3123 
3124 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3125 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM()
3126 @*/
3127 PetscErrorCode DMSetCoordinates(DM dm, Vec c)
3128 {
3129   PetscErrorCode ierr;
3130 
3131   PetscFunctionBegin;
3132   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3133   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3134   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3135   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3136   dm->coordinates = c;
3137   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3138   PetscFunctionReturn(0);
3139 }
3140 
3141 #undef __FUNCT__
3142 #define __FUNCT__ "DMSetCoordinatesLocal"
3143 /*@
3144   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3145 
3146   Collective on DM
3147 
3148    Input Parameters:
3149 +  dm - the DM
3150 -  c - coordinate vector
3151 
3152   Note:
3153   The coordinates of ghost points can be set using DMSetCoordinates()
3154   followed by DMGetCoordinatesLocal(). This is intended to enable the
3155   setting of ghost coordinates outside of the domain.
3156 
3157   Level: intermediate
3158 
3159 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3160 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3161 @*/
3162 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3163 {
3164   PetscErrorCode ierr;
3165 
3166   PetscFunctionBegin;
3167   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3168   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3169   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3170   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3171   dm->coordinatesLocal = c;
3172   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3173   PetscFunctionReturn(0);
3174 }
3175 
3176 #undef __FUNCT__
3177 #define __FUNCT__ "DMGetCoordinates"
3178 /*@
3179   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3180 
3181   Not Collective
3182 
3183   Input Parameter:
3184 . dm - the DM
3185 
3186   Output Parameter:
3187 . c - global coordinate vector
3188 
3189   Note:
3190   This is a borrowed reference, so the user should NOT destroy this vector
3191 
3192   Each process has only the local coordinates (does NOT have the ghost coordinates).
3193 
3194   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3195   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3196 
3197   Level: intermediate
3198 
3199 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3200 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3201 @*/
3202 PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3203 {
3204   PetscErrorCode ierr;
3205 
3206   PetscFunctionBegin;
3207   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3208   PetscValidPointer(c,2);
3209   if (!dm->coordinates && dm->coordinatesLocal) {
3210     DM cdm = PETSC_NULL;
3211 
3212     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3213     ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
3214     ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
3215     ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3216     ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3217   }
3218   *c = dm->coordinates;
3219   PetscFunctionReturn(0);
3220 }
3221 
3222 #undef __FUNCT__
3223 #define __FUNCT__ "DMGetCoordinatesLocal"
3224 /*@
3225   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3226 
3227   Collective on DM
3228 
3229   Input Parameter:
3230 . dm - the DM
3231 
3232   Output Parameter:
3233 . c - coordinate vector
3234 
3235   Note:
3236   This is a borrowed reference, so the user should NOT destroy this vector
3237 
3238   Each process has the local and ghost coordinates
3239 
3240   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3241   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3242 
3243   Level: intermediate
3244 
3245 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3246 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3247 @*/
3248 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3249 {
3250   PetscErrorCode ierr;
3251 
3252   PetscFunctionBegin;
3253   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3254   PetscValidPointer(c,2);
3255   if (!dm->coordinatesLocal && dm->coordinates) {
3256     DM cdm = PETSC_NULL;
3257 
3258     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3259     ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
3260     ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
3261     ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3262     ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3263   }
3264   *c = dm->coordinatesLocal;
3265   PetscFunctionReturn(0);
3266 }
3267 
3268 #undef __FUNCT__
3269 #define __FUNCT__ "DMGetCoordinateDM"
3270 /*@
3271   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3272 
3273   Collective on DM
3274 
3275   Input Parameter:
3276 . dm - the DM
3277 
3278   Output Parameter:
3279 . cdm - coordinate DM
3280 
3281   Level: intermediate
3282 
3283 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3284 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3285 @*/
3286 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3287 {
3288   PetscErrorCode ierr;
3289 
3290   PetscFunctionBegin;
3291   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3292   PetscValidPointer(cdm,2);
3293   if (!dm->coordinateDM) {
3294     if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3295     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3296   }
3297   *cdm = dm->coordinateDM;
3298   PetscFunctionReturn(0);
3299 }
3300