xref: /petsc/src/dm/interface/dm.c (revision 297d2bf4710edef2c214acbb5eb670dee89ebd78)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 #include <petscsf.h>
3 
4 PetscClassId  DM_CLASSID;
5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal;
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "DMViewFromOptions"
9 /*
10   DMViewFromOptions - Processes command line options to determine if/how a DM is to be viewed.
11 
12   Collective on Vec
13 
14   Input Parameters:
15 + dm   - the DM
16 . prefix - prefix to use for viewing, or NULL to use prefix of 'dm'
17 - optionname - option to activate viewing
18 
19   Level: intermediate
20 
21 .keywords: DM, view, options, database
22 .seealso: VecViewFromOptions(), MatViewFromOptions()
23 */
24 PetscErrorCode  DMViewFromOptions(DM dm,const char prefix[],const char optionname[])
25 {
26   PetscErrorCode    ierr;
27   PetscBool         flg;
28   PetscViewer       viewer;
29   PetscViewerFormat format;
30 
31   PetscFunctionBegin;
32   if (prefix) {
33     ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
34   } else {
35     ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
36   }
37   if (flg) {
38     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
39     ierr = DMView(dm,viewer);CHKERRQ(ierr);
40     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
41     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
42   }
43   PetscFunctionReturn(0);
44 }
45 
46 
47 #undef __FUNCT__
48 #define __FUNCT__ "DMCreate"
49 /*@
50   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
51 
52    If you never  call DMSetType()  it will generate an
53    error when you try to use the vector.
54 
55   Collective on MPI_Comm
56 
57   Input Parameter:
58 . comm - The communicator for the DM object
59 
60   Output Parameter:
61 . dm - The DM object
62 
63   Level: beginner
64 
65 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
66 @*/
67 PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
68 {
69   DM             v;
70   PetscErrorCode ierr;
71 
72   PetscFunctionBegin;
73   PetscValidPointer(dm,2);
74   *dm = NULL;
75 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
76   ierr = PetscSysInitializePackage();CHKERRQ(ierr);
77   ierr = VecInitializePackage();CHKERRQ(ierr);
78   ierr = MatInitializePackage();CHKERRQ(ierr);
79   ierr = DMInitializePackage();CHKERRQ(ierr);
80 #endif
81 
82   ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
83   ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);
84 
85 
86   v->ltogmap              = NULL;
87   v->ltogmapb             = NULL;
88   v->bs                   = 1;
89   v->coloringtype         = IS_COLORING_GLOBAL;
90   ierr                    = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
91   ierr                    = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
92   v->defaultSection       = NULL;
93   v->defaultGlobalSection = NULL;
94   {
95     PetscInt i;
96     for (i = 0; i < 10; ++i) {
97       v->nullspaceConstructors[i] = NULL;
98     }
99   }
100   v->numFields = 0;
101   v->fields    = NULL;
102 
103   *dm = v;
104   PetscFunctionReturn(0);
105 }
106 
107 #undef __FUNCT__
108 #define __FUNCT__ "DMClone"
109 /*@
110   DMClone - Creates a DM object with the same topology as the original.
111 
112   Collective on MPI_Comm
113 
114   Input Parameter:
115 . dm - The original DM object
116 
117   Output Parameter:
118 . newdm  - The new DM object
119 
120   Level: beginner
121 
122 .keywords: DM, topology, create
123 @*/
124 PetscErrorCode DMClone(DM dm, DM *newdm)
125 {
126   PetscSF        sf;
127   Vec            coords;
128   void          *ctx;
129   PetscErrorCode ierr;
130 
131   PetscFunctionBegin;
132   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
133   PetscValidPointer(newdm,2);
134   ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr);
135   if (dm->ops->clone) {
136     ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr);
137   }
138   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
139   ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr);
140   ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr);
141   ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr);
142   ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr);
143   if (coords) {
144     ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr);
145   } else {
146     ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr);
147     if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);}
148   }
149   PetscFunctionReturn(0);
150 }
151 
152 #undef __FUNCT__
153 #define __FUNCT__ "DMSetVecType"
154 /*@C
155        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
156 
157    Logically Collective on DMDA
158 
159    Input Parameter:
160 +  da - initial distributed array
161 .  ctype - the vector type, currently either VECSTANDARD or VECCUSP
162 
163    Options Database:
164 .   -dm_vec_type ctype
165 
166    Level: intermediate
167 
168 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
169 @*/
170 PetscErrorCode  DMSetVecType(DM da,VecType ctype)
171 {
172   PetscErrorCode ierr;
173 
174   PetscFunctionBegin;
175   PetscValidHeaderSpecific(da,DM_CLASSID,1);
176   ierr = PetscFree(da->vectype);CHKERRQ(ierr);
177   ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
178   PetscFunctionReturn(0);
179 }
180 
181 #undef __FUNCT__
182 #define __FUNCT__ "VecGetDM"
183 /*@
184   VecGetDM - Gets the DM defining the data layout of the vector
185 
186   Not collective
187 
188   Input Parameter:
189 . v - The Vec
190 
191   Output Parameter:
192 . dm - The DM
193 
194   Level: intermediate
195 
196 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
197 @*/
198 PetscErrorCode VecGetDM(Vec v, DM *dm)
199 {
200   PetscErrorCode ierr;
201 
202   PetscFunctionBegin;
203   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
204   PetscValidPointer(dm,2);
205   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
206   PetscFunctionReturn(0);
207 }
208 
209 #undef __FUNCT__
210 #define __FUNCT__ "VecSetDM"
211 /*@
212   VecSetDM - Sets the DM defining the data layout of the vector
213 
214   Not collective
215 
216   Input Parameters:
217 + v - The Vec
218 - dm - The DM
219 
220   Level: intermediate
221 
222 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
223 @*/
224 PetscErrorCode VecSetDM(Vec v, DM dm)
225 {
226   PetscErrorCode ierr;
227 
228   PetscFunctionBegin;
229   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
230   if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
231   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
232   PetscFunctionReturn(0);
233 }
234 
235 #undef __FUNCT__
236 #define __FUNCT__ "DMSetMatType"
237 /*@C
238        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
239 
240    Logically Collective on DM
241 
242    Input Parameter:
243 +  dm - the DM context
244 .  ctype - the matrix type
245 
246    Options Database:
247 .   -dm_mat_type ctype
248 
249    Level: intermediate
250 
251 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
252 @*/
253 PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
254 {
255   PetscErrorCode ierr;
256 
257   PetscFunctionBegin;
258   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
259   ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
260   ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
261   PetscFunctionReturn(0);
262 }
263 
264 #undef __FUNCT__
265 #define __FUNCT__ "MatGetDM"
266 /*@
267   MatGetDM - Gets the DM defining the data layout of the matrix
268 
269   Not collective
270 
271   Input Parameter:
272 . A - The Mat
273 
274   Output Parameter:
275 . dm - The DM
276 
277   Level: intermediate
278 
279 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
280 @*/
281 PetscErrorCode MatGetDM(Mat A, DM *dm)
282 {
283   PetscErrorCode ierr;
284 
285   PetscFunctionBegin;
286   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
287   PetscValidPointer(dm,2);
288   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
289   PetscFunctionReturn(0);
290 }
291 
292 #undef __FUNCT__
293 #define __FUNCT__ "MatSetDM"
294 /*@
295   MatSetDM - Sets the DM defining the data layout of the matrix
296 
297   Not collective
298 
299   Input Parameters:
300 + A - The Mat
301 - dm - The DM
302 
303   Level: intermediate
304 
305 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
306 @*/
307 PetscErrorCode MatSetDM(Mat A, DM dm)
308 {
309   PetscErrorCode ierr;
310 
311   PetscFunctionBegin;
312   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
313   if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
314   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
315   PetscFunctionReturn(0);
316 }
317 
318 #undef __FUNCT__
319 #define __FUNCT__ "DMSetOptionsPrefix"
320 /*@C
321    DMSetOptionsPrefix - Sets the prefix used for searching for all
322    DMDA options in the database.
323 
324    Logically Collective on DMDA
325 
326    Input Parameter:
327 +  da - the DMDA context
328 -  prefix - the prefix to prepend to all option names
329 
330    Notes:
331    A hyphen (-) must NOT be given at the beginning of the prefix name.
332    The first character of all runtime options is AUTOMATICALLY the hyphen.
333 
334    Level: advanced
335 
336 .keywords: DMDA, set, options, prefix, database
337 
338 .seealso: DMSetFromOptions()
339 @*/
340 PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
341 {
342   PetscErrorCode ierr;
343 
344   PetscFunctionBegin;
345   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
346   ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
347   PetscFunctionReturn(0);
348 }
349 
350 #undef __FUNCT__
351 #define __FUNCT__ "DMDestroy"
352 /*@
353     DMDestroy - Destroys a vector packer or DMDA.
354 
355     Collective on DM
356 
357     Input Parameter:
358 .   dm - the DM object to destroy
359 
360     Level: developer
361 
362 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
363 
364 @*/
365 PetscErrorCode  DMDestroy(DM *dm)
366 {
367   PetscInt       i, cnt = 0, f;
368   DMNamedVecLink nlink,nnext;
369   PetscErrorCode ierr;
370 
371   PetscFunctionBegin;
372   if (!*dm) PetscFunctionReturn(0);
373   PetscValidHeaderSpecific((*dm),DM_CLASSID,1);
374 
375   /* count all the circular references of DM and its contained Vecs */
376   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
377     if ((*dm)->localin[i])  cnt++;
378     if ((*dm)->globalin[i]) cnt++;
379   }
380   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
381   if ((*dm)->x) {
382     DM obj;
383     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
384     if (obj == *dm) cnt++;
385   }
386   for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
387   if ((*dm)->x) {
388     DM obj;
389     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
390     if (obj == *dm) cnt++;
391   }
392 
393   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
394   /*
395      Need this test because the dm references the vectors that
396      reference the dm, so destroying the dm calls destroy on the
397      vectors that cause another destroy on the dm
398   */
399   if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
400   ((PetscObject) (*dm))->refct = 0;
401   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
402     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
403     ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
404   }
405   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
406     nnext = nlink->next;
407     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
408     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
409     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
410     ierr = PetscFree(nlink);CHKERRQ(ierr);
411   }
412   (*dm)->namedglobal = NULL;
413 
414   for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
415     nnext = nlink->next;
416     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
417     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
418     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
419     ierr = PetscFree(nlink);CHKERRQ(ierr);
420   }
421   (*dm)->namedlocal = NULL;
422 
423   /* Destroy the list of hooks */
424   {
425     DMCoarsenHookLink link,next;
426     for (link=(*dm)->coarsenhook; link; link=next) {
427       next = link->next;
428       ierr = PetscFree(link);CHKERRQ(ierr);
429     }
430     (*dm)->coarsenhook = NULL;
431   }
432   {
433     DMRefineHookLink link,next;
434     for (link=(*dm)->refinehook; link; link=next) {
435       next = link->next;
436       ierr = PetscFree(link);CHKERRQ(ierr);
437     }
438     (*dm)->refinehook = NULL;
439   }
440   {
441     DMSubDomainHookLink link,next;
442     for (link=(*dm)->subdomainhook; link; link=next) {
443       next = link->next;
444       ierr = PetscFree(link);CHKERRQ(ierr);
445     }
446     (*dm)->subdomainhook = NULL;
447   }
448   {
449     DMGlobalToLocalHookLink link,next;
450     for (link=(*dm)->gtolhook; link; link=next) {
451       next = link->next;
452       ierr = PetscFree(link);CHKERRQ(ierr);
453     }
454     (*dm)->gtolhook = NULL;
455   }
456   /* Destroy the work arrays */
457   {
458     DMWorkLink link,next;
459     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
460     for (link=(*dm)->workin; link; link=next) {
461       next = link->next;
462       ierr = PetscFree(link->mem);CHKERRQ(ierr);
463       ierr = PetscFree(link);CHKERRQ(ierr);
464     }
465     (*dm)->workin = NULL;
466   }
467 
468   ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
469   ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
470   ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);
471 
472   if ((*dm)->ctx && (*dm)->ctxdestroy) {
473     ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
474   }
475   ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
476   ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
477   ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
478   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
479   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
480   ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
481   ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);
482 
483   ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
484   ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
485   ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
486   ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
487   ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);
488 
489   ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
490   ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
491   ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);
492 
493   for (f = 0; f < (*dm)->numFields; ++f) {
494     ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
495   }
496   ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
497   /* if memory was published with AMS then destroy it */
498   ierr = PetscObjectAMSViewOff((PetscObject)*dm);CHKERRQ(ierr);
499 
500   ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
501   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
502   ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
503   PetscFunctionReturn(0);
504 }
505 
506 #undef __FUNCT__
507 #define __FUNCT__ "DMSetUp"
508 /*@
509     DMSetUp - sets up the data structures inside a DM object
510 
511     Collective on DM
512 
513     Input Parameter:
514 .   dm - the DM object to setup
515 
516     Level: developer
517 
518 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
519 
520 @*/
521 PetscErrorCode  DMSetUp(DM dm)
522 {
523   PetscErrorCode ierr;
524 
525   PetscFunctionBegin;
526   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
527   if (dm->setupcalled) PetscFunctionReturn(0);
528   if (dm->ops->setup) {
529     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
530   }
531   dm->setupcalled = PETSC_TRUE;
532   PetscFunctionReturn(0);
533 }
534 
535 #undef __FUNCT__
536 #define __FUNCT__ "DMSetFromOptions"
537 /*@
538     DMSetFromOptions - sets parameters in a DM from the options database
539 
540     Collective on DM
541 
542     Input Parameter:
543 .   dm - the DM object to set options for
544 
545     Options Database:
546 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
547 .   -dm_vec_type <type>  type of vector to create inside DM
548 .   -dm_mat_type <type>  type of matrix to create inside DM
549 -   -dm_coloring_type <global or ghosted>
550 
551     Level: developer
552 
553 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
554 
555 @*/
556 PetscErrorCode  DMSetFromOptions(DM dm)
557 {
558   char           typeName[256] = MATAIJ;
559   PetscBool      flg;
560   PetscErrorCode ierr;
561 
562   PetscFunctionBegin;
563   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
564   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
565   ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr);
566   ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
567   if (flg) {
568     ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
569   }
570   ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
571   if (flg) {
572     ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
573   }
574   ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr);
575   if (dm->ops->setfromoptions) {
576     ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
577   }
578   /* process any options handlers added with PetscObjectAddOptionsHandler() */
579   ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
580   ierr = PetscOptionsEnd();CHKERRQ(ierr);
581   ierr = DMViewFromOptions(dm,NULL,"-dm_view");CHKERRQ(ierr);
582   PetscFunctionReturn(0);
583 }
584 
585 #undef __FUNCT__
586 #define __FUNCT__ "DMView"
587 /*@C
588     DMView - Views a vector packer or DMDA.
589 
590     Collective on DM
591 
592     Input Parameter:
593 +   dm - the DM object to view
594 -   v - the viewer
595 
596     Level: developer
597 
598 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
599 
600 @*/
601 PetscErrorCode  DMView(DM dm,PetscViewer v)
602 {
603   PetscErrorCode ierr;
604   PetscBool      isbinary;
605 
606   PetscFunctionBegin;
607   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
608   if (!v) {
609     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr);
610   }
611   ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
612   if (isbinary) {
613     PetscInt classid = DM_FILE_CLASSID;
614     char     type[256];
615 
616     ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
617     ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
618     ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
619   }
620   if (dm->ops->view) {
621     ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
622   }
623   PetscFunctionReturn(0);
624 }
625 
626 #undef __FUNCT__
627 #define __FUNCT__ "DMCreateGlobalVector"
628 /*@
629     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
630 
631     Collective on DM
632 
633     Input Parameter:
634 .   dm - the DM object
635 
636     Output Parameter:
637 .   vec - the global vector
638 
639     Level: beginner
640 
641 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
642 
643 @*/
644 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
645 {
646   PetscErrorCode ierr;
647 
648   PetscFunctionBegin;
649   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
650   ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
651   PetscFunctionReturn(0);
652 }
653 
654 #undef __FUNCT__
655 #define __FUNCT__ "DMCreateLocalVector"
656 /*@
657     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
658 
659     Not Collective
660 
661     Input Parameter:
662 .   dm - the DM object
663 
664     Output Parameter:
665 .   vec - the local vector
666 
667     Level: beginner
668 
669 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
670 
671 @*/
672 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
673 {
674   PetscErrorCode ierr;
675 
676   PetscFunctionBegin;
677   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
678   ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
679   PetscFunctionReturn(0);
680 }
681 
682 #undef __FUNCT__
683 #define __FUNCT__ "DMGetLocalToGlobalMapping"
684 /*@
685    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
686 
687    Collective on DM
688 
689    Input Parameter:
690 .  dm - the DM that provides the mapping
691 
692    Output Parameter:
693 .  ltog - the mapping
694 
695    Level: intermediate
696 
697    Notes:
698    This mapping can then be used by VecSetLocalToGlobalMapping() or
699    MatSetLocalToGlobalMapping().
700 
701 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
702 @*/
703 PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
704 {
705   PetscErrorCode ierr;
706 
707   PetscFunctionBegin;
708   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
709   PetscValidPointer(ltog,2);
710   if (!dm->ltogmap) {
711     PetscSection section, sectionGlobal;
712 
713     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
714     if (section) {
715       PetscInt *ltog;
716       PetscInt pStart, pEnd, size, p, l;
717 
718       ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
719       ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
720       ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
721       ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
722       for (p = pStart, l = 0; p < pEnd; ++p) {
723         PetscInt dof, off, c;
724 
725         /* Should probably use constrained dofs */
726         ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
727         ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
728         for (c = 0; c < dof; ++c, ++l) {
729           ltog[l] = off+c;
730         }
731       }
732       ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
733       ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr);
734     } else {
735       if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
736       ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr);
737     }
738   }
739   *ltog = dm->ltogmap;
740   PetscFunctionReturn(0);
741 }
742 
743 #undef __FUNCT__
744 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
745 /*@
746    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
747 
748    Collective on DM
749 
750    Input Parameter:
751 .  da - the distributed array that provides the mapping
752 
753    Output Parameter:
754 .  ltog - the block mapping
755 
756    Level: intermediate
757 
758    Notes:
759    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
760    MatSetLocalToGlobalMappingBlock().
761 
762 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
763 @*/
764 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
765 {
766   PetscErrorCode ierr;
767 
768   PetscFunctionBegin;
769   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
770   PetscValidPointer(ltog,2);
771   if (!dm->ltogmapb) {
772     PetscInt bs;
773     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
774     if (bs > 1) {
775       if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
776       ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
777     } else {
778       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
779       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
780     }
781   }
782   *ltog = dm->ltogmapb;
783   PetscFunctionReturn(0);
784 }
785 
786 #undef __FUNCT__
787 #define __FUNCT__ "DMGetBlockSize"
788 /*@
789    DMGetBlockSize - Gets the inherent block size associated with a DM
790 
791    Not Collective
792 
793    Input Parameter:
794 .  dm - the DM with block structure
795 
796    Output Parameter:
797 .  bs - the block size, 1 implies no exploitable block structure
798 
799    Level: intermediate
800 
801 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
802 @*/
803 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
804 {
805   PetscFunctionBegin;
806   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
807   PetscValidPointer(bs,2);
808   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
809   *bs = dm->bs;
810   PetscFunctionReturn(0);
811 }
812 
813 #undef __FUNCT__
814 #define __FUNCT__ "DMCreateInterpolation"
815 /*@
816     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
817 
818     Collective on DM
819 
820     Input Parameter:
821 +   dm1 - the DM object
822 -   dm2 - the second, finer DM object
823 
824     Output Parameter:
825 +  mat - the interpolation
826 -  vec - the scaling (optional)
827 
828     Level: developer
829 
830     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
831         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
832 
833         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
834         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
835 
836 
837 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
838 
839 @*/
840 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
841 {
842   PetscErrorCode ierr;
843 
844   PetscFunctionBegin;
845   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
846   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
847   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
848   PetscFunctionReturn(0);
849 }
850 
851 #undef __FUNCT__
852 #define __FUNCT__ "DMCreateInjection"
853 /*@
854     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
855 
856     Collective on DM
857 
858     Input Parameter:
859 +   dm1 - the DM object
860 -   dm2 - the second, finer DM object
861 
862     Output Parameter:
863 .   ctx - the injection
864 
865     Level: developer
866 
867    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
868         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
869 
870 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
871 
872 @*/
873 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
874 {
875   PetscErrorCode ierr;
876 
877   PetscFunctionBegin;
878   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
879   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
880   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
881   PetscFunctionReturn(0);
882 }
883 
884 #undef __FUNCT__
885 #define __FUNCT__ "DMCreateColoring"
886 /*@C
887     DMCreateColoring - Gets coloring for a DMDA or DMComposite
888 
889     Collective on DM
890 
891     Input Parameter:
892 +   dm - the DM object
893 .   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
894 -   matype - either MATAIJ or MATBAIJ
895 
896     Output Parameter:
897 .   coloring - the coloring
898 
899     Level: developer
900 
901 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()
902 
903 @*/
904 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
905 {
906   PetscErrorCode ierr;
907 
908   PetscFunctionBegin;
909   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
910   if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
911   ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
912   PetscFunctionReturn(0);
913 }
914 
915 #undef __FUNCT__
916 #define __FUNCT__ "DMCreateMatrix"
917 /*@C
918     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
919 
920     Collective on DM
921 
922     Input Parameter:
923 +   dm - the DM object
924 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
925             any type which inherits from one of these (such as MATAIJ)
926 
927     Output Parameter:
928 .   mat - the empty Jacobian
929 
930     Level: beginner
931 
932     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
933        do not need to do it yourself.
934 
935        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
936        the nonzero pattern call DMDASetMatPreallocateOnly()
937 
938        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
939        internally by PETSc.
940 
941        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
942        the indices for the global numbering for DMDAs which is complicated.
943 
944 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
945 
946 @*/
947 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
948 {
949   PetscErrorCode ierr;
950 
951   PetscFunctionBegin;
952   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
953 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
954   ierr = MatInitializePackage();CHKERRQ(ierr);
955 #endif
956   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
957   PetscValidPointer(mat,3);
958   if (dm->mattype) {
959     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
960   } else {
961     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
962   }
963   PetscFunctionReturn(0);
964 }
965 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatrixPreallocateOnly"
/*@
  DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
    preallocated but the nonzero structure and zero values will not be set.

  Logically Collective on DM

  Input Parameters:
+ dm - the DM
- only - PETSC_TRUE if only want preallocation

  Level: developer

.seealso DMCreateMatrix()
@*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* only record the flag here; it is consulted later when the matrix is created */
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
988 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of a pointer that receives the work array

  Level: developer

.seealso DMDestroy(), DMCreate(), DMRestoreWorkArray()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink     link;
  size_t         size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Reuse a link from the checked-in (free) list if available, otherwise allocate a fresh one */
  if (dm->workin) {
    link       = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  /* Grow the buffer when the cached allocation is too small for count items of dtype */
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  if (size*count > link->bytes) {
    ierr        = PetscFree(link->mem);CHKERRQ(ierr);
    ierr        = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Move the link onto the checked-out list and hand the raw buffer to the caller */
  link->next   = dm->workout;
  dm->workout  = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
1034 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size (not used here; kept to mirror the DMGetWorkArray() signature)
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of the work array pointer; set to NULL on return

  Level: developer

.seealso DMDestroy(), DMCreate(), DMGetWorkArray()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Find the checked-out link owning this buffer and move it back to the checked-in list */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p           = link->next;
      link->next   = dm->workin;
      dm->workin   = link;
      *(void**)mem = NULL;
      PetscFunctionReturn(0);
    }
  }
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0); /* not reached; presumably kept to quiet compilers about a missing return */
}
1073 
1074 #undef __FUNCT__
1075 #define __FUNCT__ "DMSetNullSpaceConstructor"
1076 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1077 {
1078   PetscFunctionBegin;
1079   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1080   if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1081   dm->nullspaceConstructors[field] = nullsp;
1082   PetscFunctionReturn(0);
1083 }
1084 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or NULL if not requested)
. fieldNames - The name for each field (or NULL if not requested)
- fields     - The global indices for each field (or NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Initialize every requested output so callers see well-defined values on all paths */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = NULL;
  }
  /* If the DM carries a PetscSection, build the field ISes from it directly;
     otherwise fall back to the type-specific implementation (if any) */
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* First pass: count the unconstrained dofs per field over locally-owned points
       (gdof > 0 means this process owns point p in the global section) */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr           = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr           = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    for (f = 0; f < nF; ++f) {
      ierr          = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0; /* reset; reused as a fill cursor in the second pass */
    }
    /* Second pass: record the global offsets, walking fields in the same order
       so goff advances through each point's contiguous global dof range */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) *numFields = nF;
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        /* The IS takes ownership of fieldIndices[f] (PETSC_OWN_POINTER) */
        ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else if (dm->ops->createfieldis) {
    ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1197 
1198 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldDecomposition"
/*@C
  DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
                          corresponding to different fields: each IS contains the global indices of the dofs of the
                          corresponding field. The optional list of DMs define the DM for each subproblem.
                          Generalizes DMCreateFieldIS().

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ len       - The number of subproblems in the field decomposition (or NULL if not requested)
. namelist  - The name for each field (or NULL if not requested)
. islist    - The global indices for each field (or NULL if not requested)
- dmlist    - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
  and all of the arrays should be freed with PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Initialize every requested output so callers see well-defined values on all paths */
  if (len) {
    PetscValidPointer(len,2);
    *len = 0;
  }
  if (namelist) {
    PetscValidPointer(namelist,3);
    *namelist = 0;
  }
  if (islist) {
    PetscValidPointer(islist,4);
    *islist = 0;
  }
  if (dmlist) {
    PetscValidPointer(dmlist,5);
    *dmlist = 0;
  }
  /*
   Is it a good idea to apply the following check across all impls?
   Perhaps some impls can have a well-defined decomposition before DMSetUp?
   This, however, follows the general principle that accessors are not well-behaved until the object is set up.
   */
  if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
  if (!dm->ops->createfielddecomposition) {
    PetscSection section;
    PetscInt     numFields, f;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
    if (section && numFields && dm->ops->createsubdm) {
      /* NOTE(review): this branch writes through len, namelist, islist, and dmlist
         unconditionally, although the documentation above says each may be NULL --
         a NULL argument here would be dereferenced. Confirm callers always request
         all four outputs when the DM has a section with fields, or add guards. */
      *len = numFields;
      ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
      for (f = 0; f < numFields; ++f) {
        const char *fieldName;

        ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr);
      }
    } else {
      ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
      /* By default there are no DMs associated with subproblems. */
      if (dmlist) *dmlist = NULL;
    }
  } else {
    ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1281 
1282 #undef __FUNCT__
1283 #define __FUNCT__ "DMCreateSubDM"
1284 /*@C
1285   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1286                   The fields are defined by DMCreateFieldIS().
1287 
1288   Not collective
1289 
1290   Input Parameters:
1291 + dm - the DM object
1292 . numFields - number of fields in this subproblem
1293 - len       - The number of subproblems in the decomposition (or NULL if not requested)
1294 
1295   Output Parameters:
1296 . is - The global indices for the subproblem
1297 - dm - The DM for the subproblem
1298 
1299   Level: intermediate
1300 
1301 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1302 @*/
1303 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1304 {
1305   PetscErrorCode ierr;
1306 
1307   PetscFunctionBegin;
1308   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1309   PetscValidPointer(fields,3);
1310   if (is) PetscValidPointer(is,4);
1311   if (subdm) PetscValidPointer(subdm,5);
1312   if (dm->ops->createsubdm) {
1313     ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
1314   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1315   PetscFunctionReturn(0);
1316 }
1317 
1318 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecomposition"
/*@C
  DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
                          corresponding to restrictions to pairs nested subdomains: each IS contains the global
                          indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
                          define a nonoverlapping covering, while outer subdomains can overlap.
                          The optional list of DMs define the DM for each subproblem.

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ len         - The number of subproblems in the domain decomposition (or NULL if not requested)
. namelist    - The name for each subdomain (or NULL if not requested)
. innerislist - The global indices for each inner subdomain (or NULL, if not requested)
. outerislist - The global indices for each outer subdomain (or NULL, if not requested)
- dmlist      - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
  and all of the arrays should be freed with PetscFree().

  If the DM type provides no decomposition, the outputs are left zeroed/NULL and no error is raised.

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
@*/
PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;
  PetscInt            i,l;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Initialize every requested output so callers see well-defined values on all paths */
  if (len)           {PetscValidPointer(len,2);            *len         = 0;}
  if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = NULL;}
  if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = NULL;}
  if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = NULL;}
  if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = NULL;}
  /*
   Is it a good idea to apply the following check across all impls?
   Perhaps some impls can have a well-defined decomposition before DMSetUp?
   This, however, follows the general principle that accessors are not well-behaved until the object is set up.
   */
  if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
  if (dm->ops->createdomaindecomposition) {
    /* l is only assigned here, inside this branch; *len stays 0 when there is no impl */
    ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
    /* copy subdomain hooks and context over to the subdomain DMs */
    if (dmlist) {
      for (i = 0; i < l; i++) {
        for (link=dm->subdomainhook; link; link=link->next) {
          if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
        }
        (*dmlist)[i]->ctx = dm->ctx;
      }
    }
    if (len) *len = l;
  }
  PetscFunctionReturn(0);
}
1383 
1384 
1385 #undef __FUNCT__
1386 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1387 /*@C
1388   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1389 
1390   Not collective
1391 
1392   Input Parameters:
1393 + dm - the DM object
1394 . n  - the number of subdomain scatters
1395 - subdms - the local subdomains
1396 
1397   Output Parameters:
1398 + n     - the number of scatters returned
1399 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1400 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1401 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1402 
1403   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1404   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1405   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1406   solution and residual data.
1407 
1408   Level: developer
1409 
1410 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1411 @*/
1412 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1413 {
1414   PetscErrorCode ierr;
1415 
1416   PetscFunctionBegin;
1417   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1418   PetscValidPointer(subdms,3);
1419   if (dm->ops->createddscatters) {
1420     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1421   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1422   PetscFunctionReturn(0);
1423 }
1424 
1425 #undef __FUNCT__
1426 #define __FUNCT__ "DMRefine"
1427 /*@
1428   DMRefine - Refines a DM object
1429 
1430   Collective on DM
1431 
1432   Input Parameter:
1433 + dm   - the DM object
1434 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1435 
1436   Output Parameter:
1437 . dmf - the refined DM, or NULL
1438 
1439   Note: If no refinement was done, the return value is NULL
1440 
1441   Level: developer
1442 
1443 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1444 @*/
1445 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1446 {
1447   PetscErrorCode   ierr;
1448   DMRefineHookLink link;
1449 
1450   PetscFunctionBegin;
1451   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1452   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1453   if (*dmf) {
1454     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1455 
1456     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1457 
1458     (*dmf)->ctx       = dm->ctx;
1459     (*dmf)->leveldown = dm->leveldown;
1460     (*dmf)->levelup   = dm->levelup + 1;
1461 
1462     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1463     for (link=dm->refinehook; link; link=link->next) {
1464       if (link->refinehook) {
1465         ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);
1466       }
1467     }
1468   }
1469   PetscFunctionReturn(0);
1470 }
1471 
#undef __FUNCT__
#define __FUNCT__ "DMRefineHookAdd"
/*@C
   DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid

   Logically Collective

   Input Arguments:
+  coarse - DM on which to run a hook when interpolating to a finer level
.  refinehook - function to run when setting up a finer level
.  interphook - function to run to update data on finer levels (once per SNESSolve())
-  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)

   Calling sequence of refinehook:
$    refinehook(DM coarse,DM fine,void *ctx);

+  coarse - coarse level DM
.  fine - fine level DM to interpolate problem to
-  ctx - optional user-defined function context

   Calling sequence for interphook:
$    interphook(DM coarse,Mat interp,DM fine,void *ctx)

+  coarse - coarse level DM
.  interp - matrix interpolating a coarse-level solution to the finer grid
.  fine - fine level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing

   If this function is called multiple times, the hooks will be run in the order they are added.

   This function is currently not available from Fortran.

.seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
{
  PetscErrorCode   ierr;
  DMRefineHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
  for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr             = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
  link->refinehook = refinehook;
  link->interphook = interphook;
  link->ctx        = ctx;
  link->next       = NULL;
  *p               = link; /* append so hooks run in registration order */
  PetscFunctionReturn(0);
}
1527 
1528 #undef __FUNCT__
1529 #define __FUNCT__ "DMInterpolate"
1530 /*@
1531    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1532 
1533    Collective if any hooks are
1534 
1535    Input Arguments:
1536 +  coarse - coarser DM to use as a base
1537 .  restrct - interpolation matrix, apply using MatInterpolate()
1538 -  fine - finer DM to update
1539 
1540    Level: developer
1541 
1542 .seealso: DMRefineHookAdd(), MatInterpolate()
1543 @*/
1544 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1545 {
1546   PetscErrorCode   ierr;
1547   DMRefineHookLink link;
1548 
1549   PetscFunctionBegin;
1550   for (link=fine->refinehook; link; link=link->next) {
1551     if (link->interphook) {
1552       ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
1553     }
1554   }
1555   PetscFunctionReturn(0);
1556 }
1557 
1558 #undef __FUNCT__
1559 #define __FUNCT__ "DMGetRefineLevel"
1560 /*@
1561     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1562 
1563     Not Collective
1564 
1565     Input Parameter:
1566 .   dm - the DM object
1567 
1568     Output Parameter:
1569 .   level - number of refinements
1570 
1571     Level: developer
1572 
1573 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1574 
1575 @*/
1576 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1577 {
1578   PetscFunctionBegin;
1579   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1580   *level = dm->levelup;
1581   PetscFunctionReturn(0);
1582 }
1583 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@C
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr            = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook   = endhook;
  link->ctx       = ctx;
  link->next      = NULL;
  *p              = link; /* append so hooks run in registration order */
  PetscFunctionReturn(0);
}
1633 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user hooks registered with DMGlobalToLocalHookAdd() before communication starts */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  /* Prefer the star-forest communication path when the DM provides a default SF */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The SF path only supports insertion (broadcast of global values to local) */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific implementations handle only INSERT_VALUES/ADD_VALUES,
       so translate the *_ALL_VALUES variants before dispatching */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1681 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
    DMGlobalToLocalEnd - Ends updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalBegin(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  PetscScalar             *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Must take the same path (SF or type-specific) chosen in DMGlobalToLocalBegin() */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* The SF path only supports insertion (broadcast of global values to local) */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Translate *_ALL_VALUES as in DMGlobalToLocalBegin() */
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* Run user end-hooks registered with DMGlobalToLocalHookAdd() after communication completes */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1727 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
    DMLocalToGlobalBegin - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
           base point.
-   g - the global vector

    Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
           array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
           global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Prefer the star-forest communication path when the DM provides a default SF */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Map the insertion mode onto the MPI reduction used by the SF reduce */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific implementations handle only INSERT_VALUES/ADD_VALUES,
       so translate the *_ALL_VALUES variants before dispatching */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1783 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()

@*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* Finish the leaf(local)-to-root(global) reduction started in DMLocalToGlobalBegin() */
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Must recompute the same MPI_Op chosen in DMLocalToGlobalBegin() for the matching ReduceEnd */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the type-specific implementation; *_ALL_VALUES modes are translated to the plain modes it understands */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1835 
1836 #undef __FUNCT__
1837 #define __FUNCT__ "DMLocalToLocalBegin"
1838 /*@
1839    DMLocalToLocalBegin - Maps from a local vector (including ghost points
1840    that contain irrelevant values) to another local vector where the ghost
1841    points in the second are set correctly. Must be followed by DMLocalToLocalEnd().
1842 
1843    Neighbor-wise Collective on DM and Vec
1844 
1845    Input Parameters:
1846 +  dm - the DM object
1847 .  g - the original local vector
1848 -  mode - one of INSERT_VALUES or ADD_VALUES
1849 
1850    Output Parameter:
1851 .  l  - the local vector with correct ghost values
1852 
1853    Level: intermediate
1854 
1855    Notes:
1856    The local vectors used here need not be the same as those
1857    obtained from DMCreateLocalVector(), BUT they
1858    must have the same parallel data layout; they could, for example, be
1859    obtained with VecDuplicate() from the DM originating vectors.
1860 
1861 .keywords: DM, local-to-local, begin
1862 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1863 
1864 @*/
1865 PetscErrorCode  DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1866 {
1867   PetscErrorCode          ierr;
1868 
1869   PetscFunctionBegin;
1870   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1871   ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1872   PetscFunctionReturn(0);
1873 }
1874 
1875 #undef __FUNCT__
1876 #define __FUNCT__ "DMLocalToLocalEnd"
1877 /*@
1878    DMLocalToLocalEnd - Maps from a local vector (including ghost points
1879    that contain irrelevant values) to another local vector where the ghost
1880    points in the second are set correctly. Must be preceded by DMLocalToLocalBegin().
1881 
1882    Neighbor-wise Collective on DM and Vec
1883 
1884    Input Parameters:
1885 +  da - the DM object
1886 .  g - the original local vector
1887 -  mode - one of INSERT_VALUES or ADD_VALUES
1888 
1889    Output Parameter:
1890 .  l  - the local vector with correct ghost values
1891 
1892    Level: intermediate
1893 
1894    Notes:
1895    The local vectors used here need not be the same as those
1896    obtained from DMCreateLocalVector(), BUT they
1897    must have the same parallel data layout; they could, for example, be
1898    obtained with VecDuplicate() from the DM originating vectors.
1899 
1900 .keywords: DM, local-to-local, end
1901 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1902 
1903 @*/
1904 PetscErrorCode  DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1905 {
1906   PetscErrorCode          ierr;
1907 
1908   PetscFunctionBegin;
1909   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1910   ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1911   PetscFunctionReturn(0);
1912 }
1913 
1914 
1915 #undef __FUNCT__
1916 #define __FUNCT__ "DMCoarsen"
1917 /*@
1918     DMCoarsen - Coarsens a DM object
1919 
1920     Collective on DM
1921 
1922     Input Parameter:
1923 +   dm - the DM object
1924 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1925 
1926     Output Parameter:
1927 .   dmc - the coarsened DM
1928 
1929     Level: developer
1930 
1931 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1932 
1933 @*/
1934 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1935 {
1936   PetscErrorCode    ierr;
1937   DMCoarsenHookLink link;
1938 
1939   PetscFunctionBegin;
1940   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1941   ierr                      = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1942   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1943   ierr                      = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1944   (*dmc)->ctx               = dm->ctx;
1945   (*dmc)->levelup           = dm->levelup;
1946   (*dmc)->leveldown         = dm->leveldown + 1;
1947   ierr                      = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1948   for (link=dm->coarsenhook; link; link=link->next) {
1949     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1950   }
1951   PetscFunctionReturn(0);
1952 }
1953 
1954 #undef __FUNCT__
1955 #define __FUNCT__ "DMCoarsenHookAdd"
1956 /*@C
1957    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1958 
1959    Logically Collective
1960 
1961    Input Arguments:
1962 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1963 .  coarsenhook - function to run when setting up a coarser level
1964 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1965 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1966 
1967    Calling sequence of coarsenhook:
1968 $    coarsenhook(DM fine,DM coarse,void *ctx);
1969 
1970 +  fine - fine level DM
1971 .  coarse - coarse level DM to restrict problem to
1972 -  ctx - optional user-defined function context
1973 
1974    Calling sequence for restricthook:
1975 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1976 
1977 +  fine - fine level DM
1978 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1979 .  rscale - scaling vector for restriction
1980 .  inject - matrix restricting by injection
1981 .  coarse - coarse level DM to update
1982 -  ctx - optional user-defined function context
1983 
1984    Level: advanced
1985 
1986    Notes:
1987    This function is only needed if auxiliary data needs to be set up on coarse grids.
1988 
1989    If this function is called multiple times, the hooks will be run in the order they are added.
1990 
1991    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1992    extract the finest level information from its context (instead of from the SNES).
1993 
1994    This function is currently not available from Fortran.
1995 
1996 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1997 @*/
1998 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1999 {
2000   PetscErrorCode    ierr;
2001   DMCoarsenHookLink link,*p;
2002 
2003   PetscFunctionBegin;
2004   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
2005   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2006   ierr               = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
2007   link->coarsenhook  = coarsenhook;
2008   link->restricthook = restricthook;
2009   link->ctx          = ctx;
2010   link->next         = NULL;
2011   *p                 = link;
2012   PetscFunctionReturn(0);
2013 }
2014 
2015 #undef __FUNCT__
2016 #define __FUNCT__ "DMRestrict"
2017 /*@
2018    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
2019 
2020    Collective if any hooks are
2021 
2022    Input Arguments:
2023 +  fine - finer DM to use as a base
2024 .  restrct - restriction matrix, apply using MatRestrict()
2025 .  inject - injection matrix, also use MatRestrict()
2026 -  coarse - coarer DM to update
2027 
2028    Level: developer
2029 
2030 .seealso: DMCoarsenHookAdd(), MatRestrict()
2031 @*/
2032 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
2033 {
2034   PetscErrorCode    ierr;
2035   DMCoarsenHookLink link;
2036 
2037   PetscFunctionBegin;
2038   for (link=fine->coarsenhook; link; link=link->next) {
2039     if (link->restricthook) {
2040       ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
2041     }
2042   }
2043   PetscFunctionReturn(0);
2044 }
2045 
2046 #undef __FUNCT__
2047 #define __FUNCT__ "DMSubDomainHookAdd"
2048 /*@C
2049    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
2050 
2051    Logically Collective
2052 
2053    Input Arguments:
2054 +  global - global DM
2055 .  ddhook - function to run to pass data to the decomposition DM upon its creation
2056 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
2057 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
2058 
2059 
2060    Calling sequence for ddhook:
2061 $    ddhook(DM global,DM block,void *ctx)
2062 
2063 +  global - global DM
2064 .  block  - block DM
2065 -  ctx - optional user-defined function context
2066 
2067    Calling sequence for restricthook:
2068 $    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)
2069 
2070 +  global - global DM
2071 .  out    - scatter to the outer (with ghost and overlap points) block vector
2072 .  in     - scatter to block vector values only owned locally
2073 .  block  - block DM
2074 -  ctx - optional user-defined function context
2075 
2076    Level: advanced
2077 
2078    Notes:
2079    This function is only needed if auxiliary data needs to be set up on subdomain DMs.
2080 
2081    If this function is called multiple times, the hooks will be run in the order they are added.
2082 
2083    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2084    extract the global information from its context (instead of from the SNES).
2085 
2086    This function is currently not available from Fortran.
2087 
2088 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2089 @*/
2090 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2091 {
2092   PetscErrorCode      ierr;
2093   DMSubDomainHookLink link,*p;
2094 
2095   PetscFunctionBegin;
2096   PetscValidHeaderSpecific(global,DM_CLASSID,1);
2097   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2098   ierr               = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
2099   link->restricthook = restricthook;
2100   link->ddhook       = ddhook;
2101   link->ctx          = ctx;
2102   link->next         = NULL;
2103   *p                 = link;
2104   PetscFunctionReturn(0);
2105 }
2106 
#undef __FUNCT__
#define __FUNCT__ "DMSubDomainRestrict"
/*@
   DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()

   Collective if any hooks are

   Input Arguments:
+  global - the global DM to use as a base
.  oscatter - scatter from domain global vector filling subdomain global vector with overlap
.  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
-  subdm - block DM to update

   Level: developer

.seealso: DMCoarsenHookAdd(), MatRestrict()
@*/
PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;

  PetscFunctionBegin;
  /* Run every subdomain restriction hook registered on the global DM, in registration order */
  for (link=global->subdomainhook; link; link=link->next) {
    if (link->restricthook) {
      ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
2137 
2138 #undef __FUNCT__
2139 #define __FUNCT__ "DMGetCoarsenLevel"
2140 /*@
2141     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2142 
2143     Not Collective
2144 
2145     Input Parameter:
2146 .   dm - the DM object
2147 
2148     Output Parameter:
2149 .   level - number of coarsenings
2150 
2151     Level: developer
2152 
2153 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2154 
2155 @*/
2156 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2157 {
2158   PetscFunctionBegin;
2159   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2160   *level = dm->leveldown;
2161   PetscFunctionReturn(0);
2162 }
2163 
2164 
2165 
2166 #undef __FUNCT__
2167 #define __FUNCT__ "DMRefineHierarchy"
2168 /*@C
2169     DMRefineHierarchy - Refines a DM object, all levels at once
2170 
2171     Collective on DM
2172 
2173     Input Parameter:
2174 +   dm - the DM object
2175 -   nlevels - the number of levels of refinement
2176 
2177     Output Parameter:
2178 .   dmf - the refined DM hierarchy
2179 
2180     Level: developer
2181 
2182 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2183 
2184 @*/
2185 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2186 {
2187   PetscErrorCode ierr;
2188 
2189   PetscFunctionBegin;
2190   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2191   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2192   if (nlevels == 0) PetscFunctionReturn(0);
2193   if (dm->ops->refinehierarchy) {
2194     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2195   } else if (dm->ops->refine) {
2196     PetscInt i;
2197 
2198     ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr);
2199     for (i=1; i<nlevels; i++) {
2200       ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr);
2201     }
2202   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2203   PetscFunctionReturn(0);
2204 }
2205 
2206 #undef __FUNCT__
2207 #define __FUNCT__ "DMCoarsenHierarchy"
2208 /*@C
2209     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2210 
2211     Collective on DM
2212 
2213     Input Parameter:
2214 +   dm - the DM object
2215 -   nlevels - the number of levels of coarsening
2216 
2217     Output Parameter:
2218 .   dmc - the coarsened DM hierarchy
2219 
2220     Level: developer
2221 
2222 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2223 
2224 @*/
2225 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2226 {
2227   PetscErrorCode ierr;
2228 
2229   PetscFunctionBegin;
2230   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2231   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2232   if (nlevels == 0) PetscFunctionReturn(0);
2233   PetscValidPointer(dmc,3);
2234   if (dm->ops->coarsenhierarchy) {
2235     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2236   } else if (dm->ops->coarsen) {
2237     PetscInt i;
2238 
2239     ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
2240     for (i=1; i<nlevels; i++) {
2241       ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
2242     }
2243   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2244   PetscFunctionReturn(0);
2245 }
2246 
2247 #undef __FUNCT__
2248 #define __FUNCT__ "DMCreateAggregates"
2249 /*@
2250    DMCreateAggregates - Gets the aggregates that map between
2251    grids associated with two DMs.
2252 
2253    Collective on DM
2254 
2255    Input Parameters:
2256 +  dmc - the coarse grid DM
2257 -  dmf - the fine grid DM
2258 
2259    Output Parameters:
2260 .  rest - the restriction matrix (transpose of the projection matrix)
2261 
2262    Level: intermediate
2263 
2264 .keywords: interpolation, restriction, multigrid
2265 
2266 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2267 @*/
2268 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2269 {
2270   PetscErrorCode ierr;
2271 
2272   PetscFunctionBegin;
2273   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2274   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2275   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2276   PetscFunctionReturn(0);
2277 }
2278 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
    DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

    Not Collective

    Input Parameters:
+   dm - the DM object
-   destroy - the destroy function

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Only the function pointer is stored; it is invoked (if set) on dm->ctx when the DM is destroyed */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2302 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
    DMSetApplicationContext - Set a user context into a DM object

    Not Collective

    Input Parameters:
+   dm - the DM object
-   ctx - the user context

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The DM does not copy or take ownership of ctx unless a destroy function is set
     with DMSetApplicationContextDestroy() */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2326 
2327 #undef __FUNCT__
2328 #define __FUNCT__ "DMGetApplicationContext"
2329 /*@
2330     DMGetApplicationContext - Gets a user context from a DM object
2331 
2332     Not Collective
2333 
2334     Input Parameter:
2335 .   dm - the DM object
2336 
2337     Output Parameter:
2338 .   ctx - the user context
2339 
2340     Level: intermediate
2341 
2342 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2343 
2344 @*/
2345 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2346 {
2347   PetscFunctionBegin;
2348   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2349   *(void**)ctx = dm->ctx;
2350   PetscFunctionReturn(0);
2351 }
2352 
2353 #undef __FUNCT__
2354 #define __FUNCT__ "DMSetVariableBounds"
2355 /*@C
2356     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2357 
2358     Logically Collective on DM
2359 
2360     Input Parameter:
2361 +   dm - the DM object
2362 -   f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)
2363 
2364     Level: intermediate
2365 
2366 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2367          DMSetJacobian()
2368 
2369 @*/
2370 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2371 {
2372   PetscFunctionBegin;
2373   dm->ops->computevariablebounds = f;
2374   PetscFunctionReturn(0);
2375 }
2376 
2377 #undef __FUNCT__
2378 #define __FUNCT__ "DMHasVariableBounds"
2379 /*@
2380     DMHasVariableBounds - does the DM object have a variable bounds function?
2381 
2382     Not Collective
2383 
2384     Input Parameter:
2385 .   dm - the DM object to destroy
2386 
2387     Output Parameter:
2388 .   flg - PETSC_TRUE if the variable bounds function exists
2389 
2390     Level: developer
2391 
2392 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2393 
2394 @*/
2395 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2396 {
2397   PetscFunctionBegin;
2398   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2399   PetscFunctionReturn(0);
2400 }
2401 
2402 #undef __FUNCT__
2403 #define __FUNCT__ "DMComputeVariableBounds"
2404 /*@C
2405     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2406 
2407     Logically Collective on DM
2408 
2409     Input Parameters:
2410 +   dm - the DM object to destroy
2411 -   x  - current solution at which the bounds are computed
2412 
2413     Output parameters:
2414 +   xl - lower bound
2415 -   xu - upper bound
2416 
2417     Level: intermediate
2418 
2419 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2420 
2421 @*/
2422 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2423 {
2424   PetscErrorCode ierr;
2425 
2426   PetscFunctionBegin;
2427   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2428   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2429   if (dm->ops->computevariablebounds) {
2430     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2431   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2432   PetscFunctionReturn(0);
2433 }
2434 
2435 #undef __FUNCT__
2436 #define __FUNCT__ "DMHasColoring"
2437 /*@
2438     DMHasColoring - does the DM object have a method of providing a coloring?
2439 
2440     Not Collective
2441 
2442     Input Parameter:
2443 .   dm - the DM object
2444 
2445     Output Parameter:
2446 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2447 
2448     Level: developer
2449 
2450 .seealso DMHasFunction(), DMCreateColoring()
2451 
2452 @*/
2453 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2454 {
2455   PetscFunctionBegin;
2456   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2457   PetscFunctionReturn(0);
2458 }
2459 
2460 #undef  __FUNCT__
2461 #define __FUNCT__ "DMSetVec"
2462 /*@C
2463     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2464 
2465     Collective on DM
2466 
2467     Input Parameter:
2468 +   dm - the DM object
2469 -   x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.
2470 
2471     Level: developer
2472 
2473 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2474 
2475 @*/
2476 PetscErrorCode  DMSetVec(DM dm,Vec x)
2477 {
2478   PetscErrorCode ierr;
2479 
2480   PetscFunctionBegin;
2481   if (x) {
2482     if (!dm->x) {
2483       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2484     }
2485     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2486   } else if (dm->x) {
2487     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2488   }
2489   PetscFunctionReturn(0);
2490 }
2491 
/* Registry of available DM type constructors, populated by DMRegister()/DMRegisterAll() */
PetscFunctionList DMList              = NULL;
/* Set to PETSC_TRUE once DMRegisterAll() has filled DMList with the built-in types */
PetscBool         DMRegisterAllCalled = PETSC_FALSE;
2494 
#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA or DMPLEX).

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* No-op when the DM already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down the previous implementation's data before constructing the new one */
  if (dm->ops->destroy) {
    ierr             = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = NULL;
  }
  /* Run the type-specific constructor, then record the new type name on the object */
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2540 
2541 #undef __FUNCT__
2542 #define __FUNCT__ "DMGetType"
2543 /*@C
2544   DMGetType - Gets the DM type name (as a string) from the DM.
2545 
2546   Not Collective
2547 
2548   Input Parameter:
2549 . dm  - The DM
2550 
2551   Output Parameter:
2552 . type - The DM type name
2553 
2554   Level: intermediate
2555 
2556 .keywords: DM, get, type, name
2557 .seealso: DMSetType(), DMCreate()
2558 @*/
2559 PetscErrorCode  DMGetType(DM dm, DMType *type)
2560 {
2561   PetscErrorCode ierr;
2562 
2563   PetscFunctionBegin;
2564   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2565   PetscValidCharPointer(type,2);
2566   if (!DMRegisterAllCalled) {
2567     ierr = DMRegisterAll();CHKERRQ(ierr);
2568   }
2569   *type = ((PetscObject)dm)->type_name;
2570   PetscFunctionReturn(0);
2571 }
2572 
#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype/issame are computed but not used below — presumably a planned
     fast path for same-type conversion; confirm before relying on them */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class;
       the composed-function name has the form DMConvert_<fromtype>_<totype>_C */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class.
       A temporary DM of the target type is created just to query its composed functions */
    ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* NOTE(review): the temporary B created in step 2 is not destroyed on this error path —
       verify whether the leak matters given that SETERRQ aborts the call */
    SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2668 
2669 /*--------------------------------------------------------------------------------------------------------------------*/
2670 
#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
  DMRegister -  Adds a new DM component implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  DMRegister() may be called multiple times to add several user-defined DMs


  Sample usage:
.vb
    DMRegister("my_da", MyDMCreate);
.ve

  Then, your DM type can be chosen with the procedural interface via
.vb
    DMCreate(MPI_Comm, DM *);
    DMSetType(DM,"my_da");
.ve
   or at runtime via the option
.vb
    -da_type my_da
.ve

  Level: advanced

.keywords: DM, register
.seealso: DMRegisterAll(), DMRegisterDestroy()

@*/
PetscErrorCode  DMRegister(const char sname[],PetscErrorCode (*function)(DM))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the (name, creation routine) pair in the global DM implementation list */
  ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2715 
#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary  with DMView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen().
           (Only binary viewers are accepted by this implementation; any other
           viewer type produces an error.)

   Level: intermediate

  Notes:
   The type is determined by the data in the file, any type set into the DM before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* The file header is the DM classid followed by the type name as a fixed 256-char field */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  /* Delegate the remainder of the load to the type-specific implementation, if any */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2767 
2768 /******************************** FEM Support **********************************/
2769 
2770 #undef __FUNCT__
2771 #define __FUNCT__ "DMPrintCellVector"
2772 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2773 {
2774   PetscInt       f;
2775   PetscErrorCode ierr;
2776 
2777   PetscFunctionBegin;
2778   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2779   for (f = 0; f < len; ++f) {
2780     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2781   }
2782   PetscFunctionReturn(0);
2783 }
2784 
2785 #undef __FUNCT__
2786 #define __FUNCT__ "DMPrintCellMatrix"
2787 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2788 {
2789   PetscInt       f, g;
2790   PetscErrorCode ierr;
2791 
2792   PetscFunctionBegin;
2793   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2794   for (f = 0; f < rows; ++f) {
2795     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2796     for (g = 0; g < cols; ++g) {
2797       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2798     }
2799     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2800   }
2801   PetscFunctionReturn(0);
2802 }
2803 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
  The result may be NULL if no section has been set with DMSetDefaultSection().

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Plain accessor: no lazy creation here (contrast DMGetDefaultGlobalSection()) */
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}
2829 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSection"
/*@
  DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Note: Any existing Section will be destroyed, and the cached global section is
  invalidated so it will be rebuilt on the next DMGetDefaultGlobalSection() call.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroy so that re-setting the currently-held section is safe */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Mirror the section's field structure onto the DM and copy over the field names */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2871 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    /* Built lazily; requires both the local (default) section and the point SF */
    if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Rebuild the value layout (dm->map) to match the freshly created global section */
    ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2907 
2908 #undef __FUNCT__
2909 #define __FUNCT__ "DMSetDefaultGlobalSection"
2910 /*@
2911   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2912 
2913   Input Parameters:
2914 + dm - The DM
2915 - section - The PetscSection, or NULL
2916 
2917   Level: intermediate
2918 
2919   Note: Any existing Section will be destroyed
2920 
2921 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2922 @*/
2923 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
2924 {
2925   PetscErrorCode ierr;
2926 
2927   PetscFunctionBegin;
2928   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2929   if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
2930   ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
2931   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2932   dm->defaultGlobalSection = section;
2933   PetscFunctionReturn(0);
2934 }
2935 
2936 #undef __FUNCT__
2937 #define __FUNCT__ "DMGetDefaultSF"
2938 /*@
2939   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2940   it is created from the default PetscSection layouts in the DM.
2941 
2942   Input Parameter:
2943 . dm - The DM
2944 
2945   Output Parameter:
2946 . sf - The PetscSF
2947 
2948   Level: intermediate
2949 
2950   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2951 
2952 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2953 @*/
2954 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
2955 {
2956   PetscInt       nroots;
2957   PetscErrorCode ierr;
2958 
2959   PetscFunctionBegin;
2960   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2961   PetscValidPointer(sf, 2);
2962   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
2963   if (nroots < 0) {
2964     PetscSection section, gSection;
2965 
2966     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2967     if (section) {
2968       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2969       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2970     } else {
2971       *sf = NULL;
2972       PetscFunctionReturn(0);
2973     }
2974   }
2975   *sf = dm->defaultSF;
2976   PetscFunctionReturn(0);
2977 }
2978 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSF"
/*@
  DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM

  Input Parameters:
+ dm - The DM
- sf - The PetscSF

  Level: intermediate

  Note: Any previous SF is destroyed

  NOTE(review): unlike DMSetPointSF(), DMSetDefaultSection() and DMSetCoordinates()
  in this file, this routine does NOT take a reference on sf, i.e. it silently takes
  ownership and the caller must not destroy sf afterwards. Confirm this asymmetry is
  intentional before relying on it.

.seealso: DMGetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  ierr          = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}
3005 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

  Note: The resulting graph is stored directly into dm->defaultSF, with the
  local and remote arrays handed over (PETSC_OWN_POINTER).

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm       comm;
  PetscLayout    layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt       pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt    size, rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  /* Build a 1-dof-per-entry layout over the locally owned (constrained) storage;
     its ranges let us map a global dof offset to its owner rank below */
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. A negative gdof encodes a point owned by another
     process; its true dof count is -(gdof+1). Constrained dofs are excluded. */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr     = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr     = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill local (leaf) indices and remote (root) locations */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Local indices of the unconstrained dofs of this point, skipping cind entries */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned: -(goff+1) recovers the true global offset; find the
         owner rank by searching the layout ranges */
      for (d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* PetscFindInt encodes "not found" insert position as -(pos+1); owner is the range below */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned: root lives on this rank at the rank-local offset */
      for (d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3095 
#undef __FUNCT__
#define __FUNCT__ "DMGetPointSF"
/*@
  DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
  The result may be NULL if no point SF has been set with DMSetPointSF().

.seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* Plain accessor: no lazy creation (contrast DMGetDefaultSF()) */
  *sf = dm->sf;
  PetscFunctionReturn(0);
}
3121 
3122 #undef __FUNCT__
3123 #define __FUNCT__ "DMSetPointSF"
3124 /*@
3125   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3126 
3127   Input Parameters:
3128 + dm - The DM
3129 - sf - The PetscSF
3130 
3131   Level: intermediate
3132 
3133 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3134 @*/
3135 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3136 {
3137   PetscErrorCode ierr;
3138 
3139   PetscFunctionBegin;
3140   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3141   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3142   ierr   = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3143   ierr   = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3144   dm->sf = sf;
3145   PetscFunctionReturn(0);
3146 }
3147 
#undef __FUNCT__
#define __FUNCT__ "DMGetNumFields"
/* Return the number of fields attached to the DM (as set by DMSetNumFields()
   or implied by DMSetDefaultSection()). */
PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(numFields, 2);
  *numFields = dm->numFields;
  PetscFunctionReturn(0);
}
3158 
3159 #undef __FUNCT__
3160 #define __FUNCT__ "DMSetNumFields"
3161 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3162 {
3163   PetscInt       f;
3164   PetscErrorCode ierr;
3165 
3166   PetscFunctionBegin;
3167   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3168   for (f = 0; f < dm->numFields; ++f) {
3169     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3170   }
3171   ierr          = PetscFree(dm->fields);CHKERRQ(ierr);
3172   dm->numFields = numFields;
3173   ierr          = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3174   for (f = 0; f < dm->numFields; ++f) {
3175     ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr);
3176   }
3177   PetscFunctionReturn(0);
3178 }
3179 
#undef __FUNCT__
#define __FUNCT__ "DMGetField"
/* Return field f of the DM as a borrowed reference. Errors if DMSetNumFields()
   has not been called or if f is outside [0, numFields). */
PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(field, 2);
  if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
  if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
  *field = dm->fields[f];
  PetscFunctionReturn(0);
}
3192 
#undef __FUNCT__
#define __FUNCT__ "DMRestrictHook_Coordinates"
/* Coarsen hook: when the fine DM has coordinates but the coarse DM does not,
   inject the fine coordinates into a freshly created coarse coordinate vector
   and install it on the coarse DM. No-op otherwise. */
PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx)
{
  DM dm_coord,dmc_coord;
  PetscErrorCode ierr;
  Vec coords,ccoords;
  VecScatter scat;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr);
  if (coords && !ccoords) {
    /* Injection maps fine-grid values onto the coarse grid */
    ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr);
    ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* DMSetCoordinates() takes its own reference, so the local ccoords can be destroyed */
    ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3217 
#undef __FUNCT__
#define __FUNCT__ "DMSubDomainHook_Coordinates"
/* Subdomain hook: when the parent DM has coordinates but the subdomain DM does
   not, scatter both global and local coordinate vectors onto the subdomain and
   install them. scat_i/scat_g come from DMCreateDomainDecompositionScatters();
   presumably scat_i targets the global and scat_g the ghosted local layout --
   TODO(review) confirm against that routine's contract. */
static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx)
{
  DM dm_coord,subdm_coord;
  PetscErrorCode ierr;
  Vec coords,ccoords,clcoords;
  VecScatter *scat_i,*scat_g;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr);
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr);
    ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* The DM references the vectors internally, so the local handles are released below */
    ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
    ierr = VecDestroy(&clcoords);CHKERRQ(ierr);
    ierr = PetscFree(scat_i);CHKERRQ(ierr);
    ierr = PetscFree(scat_g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3250 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do include those for ghost points, which are in the local vector.
  Any cached local coordinate vector is invalidated; it will be rebuilt on the next
  DMGetCoordinatesLocal() call.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so that re-setting the currently-held vector is safe */
  ierr            = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr            = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the cached local coordinates; they no longer match */
  ierr            = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  /* Propagate coordinates automatically through coarsening and domain decomposition */
  ierr            = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  ierr            = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3285 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinatesLocal"
/*@
  DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates

  Collective on DM

   Input Parameters:
+  dm - the DM
-  c - coordinate vector

  Note:
  The coordinates of ghost points can be set using DMSetCoordinates()
  followed by DMGetCoordinatesLocal(). This is intended to enable the
  setting of ghost coordinates outside of the domain.
  Any cached global coordinate vector is invalidated; it will be rebuilt on
  the next DMGetCoordinates() call.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so that re-setting the currently-held vector is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);

  dm->coordinatesLocal = c;

  /* Invalidate the cached global coordinates; they no longer match */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3322 
3323 #undef __FUNCT__
3324 #define __FUNCT__ "DMGetCoordinates"
3325 /*@
3326   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3327 
3328   Not Collective
3329 
3330   Input Parameter:
3331 . dm - the DM
3332 
3333   Output Parameter:
3334 . c - global coordinate vector
3335 
3336   Note:
3337   This is a borrowed reference, so the user should NOT destroy this vector
3338 
3339   Each process has only the local coordinates (does NOT have the ghost coordinates).
3340 
3341   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3342   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3343 
3344   Level: intermediate
3345 
3346 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3347 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3348 @*/
3349 PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3350 {
3351   PetscErrorCode ierr;
3352 
3353   PetscFunctionBegin;
3354   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3355   PetscValidPointer(c,2);
3356   if (!dm->coordinates && dm->coordinatesLocal) {
3357     DM cdm = NULL;
3358 
3359     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3360     ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
3361     ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
3362     ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3363     ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3364   }
3365   *c = dm->coordinates;
3366   PetscFunctionReturn(0);
3367 }
3368 
3369 #undef __FUNCT__
3370 #define __FUNCT__ "DMGetCoordinatesLocal"
3371 /*@
3372   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3373 
3374   Collective on DM
3375 
3376   Input Parameter:
3377 . dm - the DM
3378 
3379   Output Parameter:
3380 . c - coordinate vector
3381 
3382   Note:
3383   This is a borrowed reference, so the user should NOT destroy this vector
3384 
3385   Each process has the local and ghost coordinates
3386 
3387   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3388   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3389 
3390   Level: intermediate
3391 
3392 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3393 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3394 @*/
3395 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3396 {
3397   PetscErrorCode ierr;
3398 
3399   PetscFunctionBegin;
3400   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3401   PetscValidPointer(c,2);
3402   if (!dm->coordinatesLocal && dm->coordinates) {
3403     DM cdm = NULL;
3404 
3405     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3406     ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
3407     ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
3408     ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3409     ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3410   }
3411   *c = dm->coordinatesLocal;
3412   PetscFunctionReturn(0);
3413 }
3414 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinateDM"
/*@
  DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. cdm - coordinate DM (borrowed reference; cached on the DM after first creation)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
@*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  if (!dm->coordinateDM) {
    /* Created lazily by the type-specific implementation; errors if unsupported */
    if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}
3447 
3448 #undef __FUNCT__
3449 #define __FUNCT__ "DMLocatePoints"
3450 /*@
3451   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3452 
3453   Not collective
3454 
3455   Input Parameters:
3456 + dm - The DM
3457 - v - The Vec of points
3458 
3459   Output Parameter:
3460 . cells - The local cell numbers for cells which contain the points
3461 
3462   Level: developer
3463 
3464 .keywords: point location, mesh
3465 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3466 @*/
3467 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3468 {
3469   PetscErrorCode ierr;
3470 
3471   PetscFunctionBegin;
3472   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3473   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3474   PetscValidPointer(cells,3);
3475   if (dm->ops->locatepoints) {
3476     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3477   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
3478   PetscFunctionReturn(0);
3479 }
3480