xref: /petsc/src/dm/interface/dm.c (revision b412c318f95038d6b0c0d53c83ad7d4018d14d81)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 #include <petscsf.h>
3 
4 PetscClassId  DM_CLASSID;
5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal;
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "DMViewFromOptions"
9 /*
10   DMViewFromOptions - Processes command line options to determine if/how a DM is to be viewed.
11 
12   Collective on Vec
13 
14   Input Parameters:
15 + dm   - the DM
16 . prefix - prefix to use for viewing, or NULL to use prefix of 'dm'
17 - optionname - option to activate viewing
18 
19   Level: intermediate
20 
21 .keywords: DM, view, options, database
22 .seealso: VecViewFromOptions(), MatViewFromOptions()
23 */
24 PetscErrorCode  DMViewFromOptions(DM dm,const char prefix[],const char optionname[])
25 {
26   PetscErrorCode    ierr;
27   PetscBool         flg;
28   PetscViewer       viewer;
29   PetscViewerFormat format;
30 
31   PetscFunctionBegin;
32   if (prefix) {
33     ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
34   } else {
35     ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
36   }
37   if (flg) {
38     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
39     ierr = DMView(dm,viewer);CHKERRQ(ierr);
40     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
41     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
42   }
43   PetscFunctionReturn(0);
44 }
45 
46 
47 #undef __FUNCT__
48 #define __FUNCT__ "DMCreate"
49 /*@
50   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
51 
52    If you never  call DMSetType()  it will generate an
53    error when you try to use the vector.
54 
55   Collective on MPI_Comm
56 
57   Input Parameter:
58 . comm - The communicator for the DM object
59 
60   Output Parameter:
61 . dm - The DM object
62 
63   Level: beginner
64 
65 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
66 @*/
67 PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
68 {
69   DM             v;
70   PetscErrorCode ierr;
71 
72   PetscFunctionBegin;
73   PetscValidPointer(dm,2);
74   *dm = NULL;
75 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
76   ierr = VecInitializePackage();CHKERRQ(ierr);
77   ierr = MatInitializePackage();CHKERRQ(ierr);
78   ierr = DMInitializePackage();CHKERRQ(ierr);
79 #endif
80 
81   ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
82   ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);
83 
84 
85   v->ltogmap              = NULL;
86   v->ltogmapb             = NULL;
87   v->bs                   = 1;
88   v->coloringtype         = IS_COLORING_GLOBAL;
89   ierr                    = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
90   ierr                    = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
91   v->defaultSection       = NULL;
92   v->defaultGlobalSection = NULL;
93   {
94     PetscInt i;
95     for (i = 0; i < 10; ++i) {
96       v->nullspaceConstructors[i] = NULL;
97     }
98   }
99   v->numFields = 0;
100   v->fields    = NULL;
101   ierr = DMSetMatType(v,MATAIJ);CHKERRQ(ierr);
102   *dm = v;
103   PetscFunctionReturn(0);
104 }
105 
106 #undef __FUNCT__
107 #define __FUNCT__ "DMClone"
108 /*@
109   DMClone - Creates a DM object with the same topology as the original.
110 
111   Collective on MPI_Comm
112 
113   Input Parameter:
114 . dm - The original DM object
115 
116   Output Parameter:
117 . newdm  - The new DM object
118 
119   Level: beginner
120 
121 .keywords: DM, topology, create
122 @*/
123 PetscErrorCode DMClone(DM dm, DM *newdm)
124 {
125   PetscSF        sf;
126   Vec            coords;
127   void          *ctx;
128   PetscErrorCode ierr;
129 
130   PetscFunctionBegin;
131   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
132   PetscValidPointer(newdm,2);
133   ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr);
134   if (dm->ops->clone) {
135     ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr);
136   }
137   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
138   ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr);
139   ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr);
140   ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr);
141   ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr);
142   if (coords) {
143     ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr);
144   } else {
145     ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr);
146     if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);}
147   }
148   PetscFunctionReturn(0);
149 }
150 
151 #undef __FUNCT__
152 #define __FUNCT__ "DMSetVecType"
153 /*@C
154        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
155 
156    Logically Collective on DMDA
157 
158    Input Parameter:
159 +  da - initial distributed array
160 .  ctype - the vector type, currently either VECSTANDARD or VECCUSP
161 
162    Options Database:
163 .   -dm_vec_type ctype
164 
165    Level: intermediate
166 
167 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
168 @*/
169 PetscErrorCode  DMSetVecType(DM da,VecType ctype)
170 {
171   PetscErrorCode ierr;
172 
173   PetscFunctionBegin;
174   PetscValidHeaderSpecific(da,DM_CLASSID,1);
175   ierr = PetscFree(da->vectype);CHKERRQ(ierr);
176   ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
177   PetscFunctionReturn(0);
178 }
179 
180 #undef __FUNCT__
181 #define __FUNCT__ "VecGetDM"
182 /*@
183   VecGetDM - Gets the DM defining the data layout of the vector
184 
185   Not collective
186 
187   Input Parameter:
188 . v - The Vec
189 
190   Output Parameter:
191 . dm - The DM
192 
193   Level: intermediate
194 
195 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
196 @*/
197 PetscErrorCode VecGetDM(Vec v, DM *dm)
198 {
199   PetscErrorCode ierr;
200 
201   PetscFunctionBegin;
202   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
203   PetscValidPointer(dm,2);
204   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
205   PetscFunctionReturn(0);
206 }
207 
208 #undef __FUNCT__
209 #define __FUNCT__ "VecSetDM"
210 /*@
211   VecSetDM - Sets the DM defining the data layout of the vector
212 
213   Not collective
214 
215   Input Parameters:
216 + v - The Vec
217 - dm - The DM
218 
219   Level: intermediate
220 
221 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
222 @*/
223 PetscErrorCode VecSetDM(Vec v, DM dm)
224 {
225   PetscErrorCode ierr;
226 
227   PetscFunctionBegin;
228   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
229   if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
230   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
231   PetscFunctionReturn(0);
232 }
233 
234 #undef __FUNCT__
235 #define __FUNCT__ "DMSetMatType"
236 /*@C
237        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
238 
239    Logically Collective on DM
240 
241    Input Parameter:
242 +  dm - the DM context
243 .  ctype - the matrix type
244 
245    Options Database:
246 .   -dm_mat_type ctype
247 
248    Level: intermediate
249 
250 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
251 @*/
252 PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
253 {
254   PetscErrorCode ierr;
255 
256   PetscFunctionBegin;
257   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
258   ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
259   ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
260   PetscFunctionReturn(0);
261 }
262 
263 #undef __FUNCT__
264 #define __FUNCT__ "MatGetDM"
265 /*@
266   MatGetDM - Gets the DM defining the data layout of the matrix
267 
268   Not collective
269 
270   Input Parameter:
271 . A - The Mat
272 
273   Output Parameter:
274 . dm - The DM
275 
276   Level: intermediate
277 
278 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
279 @*/
280 PetscErrorCode MatGetDM(Mat A, DM *dm)
281 {
282   PetscErrorCode ierr;
283 
284   PetscFunctionBegin;
285   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
286   PetscValidPointer(dm,2);
287   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
288   PetscFunctionReturn(0);
289 }
290 
291 #undef __FUNCT__
292 #define __FUNCT__ "MatSetDM"
293 /*@
294   MatSetDM - Sets the DM defining the data layout of the matrix
295 
296   Not collective
297 
298   Input Parameters:
299 + A - The Mat
300 - dm - The DM
301 
302   Level: intermediate
303 
304 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
305 @*/
306 PetscErrorCode MatSetDM(Mat A, DM dm)
307 {
308   PetscErrorCode ierr;
309 
310   PetscFunctionBegin;
311   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
312   if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
313   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
314   PetscFunctionReturn(0);
315 }
316 
317 #undef __FUNCT__
318 #define __FUNCT__ "DMSetOptionsPrefix"
319 /*@C
320    DMSetOptionsPrefix - Sets the prefix used for searching for all
321    DMDA options in the database.
322 
323    Logically Collective on DMDA
324 
325    Input Parameter:
326 +  da - the DMDA context
327 -  prefix - the prefix to prepend to all option names
328 
329    Notes:
330    A hyphen (-) must NOT be given at the beginning of the prefix name.
331    The first character of all runtime options is AUTOMATICALLY the hyphen.
332 
333    Level: advanced
334 
335 .keywords: DMDA, set, options, prefix, database
336 
337 .seealso: DMSetFromOptions()
338 @*/
339 PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
340 {
341   PetscErrorCode ierr;
342 
343   PetscFunctionBegin;
344   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
345   ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
346   PetscFunctionReturn(0);
347 }
348 
349 #undef __FUNCT__
350 #define __FUNCT__ "DMDestroy"
351 /*@
352     DMDestroy - Destroys a vector packer or DMDA.
353 
354     Collective on DM
355 
356     Input Parameter:
357 .   dm - the DM object to destroy
358 
359     Level: developer
360 
361 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
362 
363 @*/
364 PetscErrorCode  DMDestroy(DM *dm)
365 {
366   PetscInt       i, cnt = 0, f;
367   DMNamedVecLink nlink,nnext;
368   PetscErrorCode ierr;
369 
370   PetscFunctionBegin;
371   if (!*dm) PetscFunctionReturn(0);
372   PetscValidHeaderSpecific((*dm),DM_CLASSID,1);
373 
374   /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */
375   for (f = 0; f < (*dm)->numFields; ++f) {
376     ierr = PetscObjectCompose((*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr);
377     ierr = PetscObjectCompose((*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr);
378     ierr = PetscObjectCompose((*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr);
379   }
380   /* count all the circular references of DM and its contained Vecs */
381   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
382     if ((*dm)->localin[i])  cnt++;
383     if ((*dm)->globalin[i]) cnt++;
384   }
385   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
386   if ((*dm)->x) {
387     DM obj;
388     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
389     if (obj == *dm) cnt++;
390   }
391   for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
392   if ((*dm)->x) {
393     DM obj;
394     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
395     if (obj == *dm) cnt++;
396   }
397 
398   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
399   /*
400      Need this test because the dm references the vectors that
401      reference the dm, so destroying the dm calls destroy on the
402      vectors that cause another destroy on the dm
403   */
404   if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
405   ((PetscObject) (*dm))->refct = 0;
406   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
407     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
408     ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
409   }
410   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
411     nnext = nlink->next;
412     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
413     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
414     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
415     ierr = PetscFree(nlink);CHKERRQ(ierr);
416   }
417   (*dm)->namedglobal = NULL;
418 
419   for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
420     nnext = nlink->next;
421     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
422     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
423     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
424     ierr = PetscFree(nlink);CHKERRQ(ierr);
425   }
426   (*dm)->namedlocal = NULL;
427 
428   /* Destroy the list of hooks */
429   {
430     DMCoarsenHookLink link,next;
431     for (link=(*dm)->coarsenhook; link; link=next) {
432       next = link->next;
433       ierr = PetscFree(link);CHKERRQ(ierr);
434     }
435     (*dm)->coarsenhook = NULL;
436   }
437   {
438     DMRefineHookLink link,next;
439     for (link=(*dm)->refinehook; link; link=next) {
440       next = link->next;
441       ierr = PetscFree(link);CHKERRQ(ierr);
442     }
443     (*dm)->refinehook = NULL;
444   }
445   {
446     DMSubDomainHookLink link,next;
447     for (link=(*dm)->subdomainhook; link; link=next) {
448       next = link->next;
449       ierr = PetscFree(link);CHKERRQ(ierr);
450     }
451     (*dm)->subdomainhook = NULL;
452   }
453   {
454     DMGlobalToLocalHookLink link,next;
455     for (link=(*dm)->gtolhook; link; link=next) {
456       next = link->next;
457       ierr = PetscFree(link);CHKERRQ(ierr);
458     }
459     (*dm)->gtolhook = NULL;
460   }
461   /* Destroy the work arrays */
462   {
463     DMWorkLink link,next;
464     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
465     for (link=(*dm)->workin; link; link=next) {
466       next = link->next;
467       ierr = PetscFree(link->mem);CHKERRQ(ierr);
468       ierr = PetscFree(link);CHKERRQ(ierr);
469     }
470     (*dm)->workin = NULL;
471   }
472 
473   ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
474   ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
475   ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);
476 
477   if ((*dm)->ctx && (*dm)->ctxdestroy) {
478     ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
479   }
480   ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
481   ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
482   ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
483   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
484   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
485   ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
486   ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);
487 
488   ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
489   ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
490   ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
491   ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
492   ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);
493 
494   ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
495   ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
496   ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);
497 
498   for (f = 0; f < (*dm)->numFields; ++f) {
499     ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
500   }
501   ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
502   /* if memory was published with AMS then destroy it */
503   ierr = PetscObjectAMSViewOff((PetscObject)*dm);CHKERRQ(ierr);
504 
505   ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
506   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
507   ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
508   PetscFunctionReturn(0);
509 }
510 
511 #undef __FUNCT__
512 #define __FUNCT__ "DMSetUp"
513 /*@
514     DMSetUp - sets up the data structures inside a DM object
515 
516     Collective on DM
517 
518     Input Parameter:
519 .   dm - the DM object to setup
520 
521     Level: developer
522 
523 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
524 
525 @*/
526 PetscErrorCode  DMSetUp(DM dm)
527 {
528   PetscErrorCode ierr;
529 
530   PetscFunctionBegin;
531   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
532   if (dm->setupcalled) PetscFunctionReturn(0);
533   if (dm->ops->setup) {
534     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
535   }
536   dm->setupcalled = PETSC_TRUE;
537   PetscFunctionReturn(0);
538 }
539 
540 #undef __FUNCT__
541 #define __FUNCT__ "DMSetFromOptions"
542 /*@
543     DMSetFromOptions - sets parameters in a DM from the options database
544 
545     Collective on DM
546 
547     Input Parameter:
548 .   dm - the DM object to set options for
549 
550     Options Database:
551 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
552 .   -dm_vec_type <type>  type of vector to create inside DM
553 .   -dm_mat_type <type>  type of matrix to create inside DM
554 -   -dm_coloring_type <global or ghosted>
555 
556     Level: developer
557 
558 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
559 
560 @*/
561 PetscErrorCode  DMSetFromOptions(DM dm)
562 {
563   char           typeName[256] = MATAIJ;
564   PetscBool      flg;
565   PetscErrorCode ierr;
566 
567   PetscFunctionBegin;
568   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
569   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
570   ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr);
571   ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
572   if (flg) {
573     ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
574   }
575   ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
576   if (flg) {
577     ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
578   }
579   ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr);
580   if (dm->ops->setfromoptions) {
581     ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
582   }
583   /* process any options handlers added with PetscObjectAddOptionsHandler() */
584   ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
585   ierr = PetscOptionsEnd();CHKERRQ(ierr);
586   ierr = DMViewFromOptions(dm,NULL,"-dm_view");CHKERRQ(ierr);
587   PetscFunctionReturn(0);
588 }
589 
590 #undef __FUNCT__
591 #define __FUNCT__ "DMView"
592 /*@C
593     DMView - Views a vector packer or DMDA.
594 
595     Collective on DM
596 
597     Input Parameter:
598 +   dm - the DM object to view
599 -   v - the viewer
600 
601     Level: developer
602 
603 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
604 
605 @*/
606 PetscErrorCode  DMView(DM dm,PetscViewer v)
607 {
608   PetscErrorCode ierr;
609   PetscBool      isbinary;
610 
611   PetscFunctionBegin;
612   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
613   if (!v) {
614     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr);
615   }
616   ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
617   if (isbinary) {
618     PetscInt classid = DM_FILE_CLASSID;
619     char     type[256];
620 
621     ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
622     ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
623     ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
624   }
625   if (dm->ops->view) {
626     ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
627   }
628   PetscFunctionReturn(0);
629 }
630 
631 #undef __FUNCT__
632 #define __FUNCT__ "DMCreateGlobalVector"
633 /*@
634     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
635 
636     Collective on DM
637 
638     Input Parameter:
639 .   dm - the DM object
640 
641     Output Parameter:
642 .   vec - the global vector
643 
644     Level: beginner
645 
646 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
647 
648 @*/
649 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
650 {
651   PetscErrorCode ierr;
652 
653   PetscFunctionBegin;
654   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
655   ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
656   PetscFunctionReturn(0);
657 }
658 
659 #undef __FUNCT__
660 #define __FUNCT__ "DMCreateLocalVector"
661 /*@
662     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
663 
664     Not Collective
665 
666     Input Parameter:
667 .   dm - the DM object
668 
669     Output Parameter:
670 .   vec - the local vector
671 
672     Level: beginner
673 
674 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
675 
676 @*/
677 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
678 {
679   PetscErrorCode ierr;
680 
681   PetscFunctionBegin;
682   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
683   ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
684   PetscFunctionReturn(0);
685 }
686 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* The mapping is built lazily on first request and cached in dm->ltogmap;
     subsequent calls return the cached mapping without rebuilding it */
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    /* If the DM carries a default PetscSection, derive the mapping from it;
       otherwise delegate to the type-specific implementation */
    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      PetscInt *ltog;       /* global index for each local dof, indexed by local offset */
      PetscInt pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        /* dof count comes from the LOCAL section, but the offset from the GLOBAL
           section: consecutive global indices off..off+dof-1 are assigned to the
           dof local slots of point p. NOTE(review): for points whose global offset
           is encoded specially (e.g. unowned points), this produces those encoded
           values verbatim — confirm against PetscSectionGetOffset() semantics. */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* The mapping takes ownership of the ltog array (PETSC_OWN_POINTER) */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr);
    } else {
      if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      /* The implementation is expected to fill in dm->ltogmap itself */
      ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
747 
748 #undef __FUNCT__
749 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
750 /*@
751    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
752 
753    Collective on DM
754 
755    Input Parameter:
756 .  da - the distributed array that provides the mapping
757 
758    Output Parameter:
759 .  ltog - the block mapping
760 
761    Level: intermediate
762 
763    Notes:
764    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
765    MatSetLocalToGlobalMappingBlock().
766 
767 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
768 @*/
769 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
770 {
771   PetscErrorCode ierr;
772 
773   PetscFunctionBegin;
774   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
775   PetscValidPointer(ltog,2);
776   if (!dm->ltogmapb) {
777     PetscInt bs;
778     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
779     if (bs > 1) {
780       if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
781       ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
782     } else {
783       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
784       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
785     }
786   }
787   *ltog = dm->ltogmapb;
788   PetscFunctionReturn(0);
789 }
790 
791 #undef __FUNCT__
792 #define __FUNCT__ "DMGetBlockSize"
793 /*@
794    DMGetBlockSize - Gets the inherent block size associated with a DM
795 
796    Not Collective
797 
798    Input Parameter:
799 .  dm - the DM with block structure
800 
801    Output Parameter:
802 .  bs - the block size, 1 implies no exploitable block structure
803 
804    Level: intermediate
805 
806 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
807 @*/
808 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
809 {
810   PetscFunctionBegin;
811   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
812   PetscValidPointer(bs,2);
813   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
814   *bs = dm->bs;
815   PetscFunctionReturn(0);
816 }
817 
818 #undef __FUNCT__
819 #define __FUNCT__ "DMCreateInterpolation"
820 /*@
821     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
822 
823     Collective on DM
824 
825     Input Parameter:
826 +   dm1 - the DM object
827 -   dm2 - the second, finer DM object
828 
829     Output Parameter:
830 +  mat - the interpolation
831 -  vec - the scaling (optional)
832 
833     Level: developer
834 
835     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
836         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
837 
838         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
839         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
840 
841 
842 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
843 
844 @*/
845 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
846 {
847   PetscErrorCode ierr;
848 
849   PetscFunctionBegin;
850   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
851   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
852   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
853   PetscFunctionReturn(0);
854 }
855 
856 #undef __FUNCT__
857 #define __FUNCT__ "DMCreateInjection"
858 /*@
859     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
860 
861     Collective on DM
862 
863     Input Parameter:
864 +   dm1 - the DM object
865 -   dm2 - the second, finer DM object
866 
867     Output Parameter:
868 .   ctx - the injection
869 
870     Level: developer
871 
872    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
873         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
874 
875 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
876 
877 @*/
878 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
879 {
880   PetscErrorCode ierr;
881 
882   PetscFunctionBegin;
883   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
884   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
885   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
886   PetscFunctionReturn(0);
887 }
888 
889 #undef __FUNCT__
890 #define __FUNCT__ "DMCreateColoring"
891 /*@
892     DMCreateColoring - Gets coloring for a DM
893 
894     Collective on DM
895 
896     Input Parameter:
897 +   dm - the DM object
898 -   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
899 
900     Output Parameter:
901 .   coloring - the coloring
902 
903     Level: developer
904 
905 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType()
906 
907 @*/
908 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring)
909 {
910   PetscErrorCode ierr;
911 
912   PetscFunctionBegin;
913   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
914   if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
915   ierr = (*dm->ops->getcoloring)(dm,ctype,coloring);CHKERRQ(ierr);
916   PetscFunctionReturn(0);
917 }
918 
919 #undef __FUNCT__
920 #define __FUNCT__ "DMCreateMatrix"
921 /*@
922     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
923 
924     Collective on DM
925 
926     Input Parameter:
927 .   dm - the DM object
928 
929     Output Parameter:
930 .   mat - the empty Jacobian
931 
932     Level: beginner
933 
934     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
935        do not need to do it yourself.
936 
937        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
938        the nonzero pattern call DMDASetMatPreallocateOnly()
939 
940        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
941        internally by PETSc.
942 
943        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
944        the indices for the global numbering for DMDAs which is complicated.
945 
946 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType()
947 
948 @*/
949 PetscErrorCode  DMCreateMatrix(DM dm,Mat *mat)
950 {
951   PetscErrorCode ierr;
952 
953   PetscFunctionBegin;
954   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
955 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
956   ierr = MatInitializePackage();CHKERRQ(ierr);
957 #endif
958   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
959   PetscValidPointer(mat,3);
960   ierr = (*dm->ops->creatematrix)(dm,mat);CHKERRQ(ierr);
961   PetscFunctionReturn(0);
962 }
963 
964 #undef __FUNCT__
965 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
966 /*@
967   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
968     preallocated but the nonzero structure and zero values will not be set.
969 
970   Logically Collective on DMDA
971 
972   Input Parameter:
973 + dm - the DM
974 - only - PETSC_TRUE if only want preallocation
975 
976   Level: developer
977 .seealso DMCreateMatrix()
978 @*/
979 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
980 {
981   PetscFunctionBegin;
982   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
983   dm->prealloc_only = only;
984   PetscFunctionReturn(0);
985 }
986 
987 #undef __FUNCT__
988 #define __FUNCT__ "DMGetWorkArray"
989 /*@C
990   DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
991 
992   Not Collective
993 
994   Input Parameters:
995 + dm - the DM object
. count - The minimum size
997 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
998 
999   Output Parameter:
1000 . array - the work array
1001 
1002   Level: developer
1003 
1004 .seealso DMDestroy(), DMCreate()
1005 @*/
1006 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1007 {
1008   PetscErrorCode ierr;
1009   DMWorkLink     link;
1010   size_t         size;
1011 
1012   PetscFunctionBegin;
1013   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1014   PetscValidPointer(mem,4);
1015   if (dm->workin) {
1016     link       = dm->workin;
1017     dm->workin = dm->workin->next;
1018   } else {
1019     ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
1020   }
1021   ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
1022   if (size*count > link->bytes) {
1023     ierr        = PetscFree(link->mem);CHKERRQ(ierr);
1024     ierr        = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
1025     link->bytes = size*count;
1026   }
1027   link->next   = dm->workout;
1028   dm->workout  = link;
1029   *(void**)mem = link->mem;
1030   PetscFunctionReturn(0);
1031 }
1032 
1033 #undef __FUNCT__
1034 #define __FUNCT__ "DMRestoreWorkArray"
1035 /*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()
1037 
1038   Not Collective
1039 
1040   Input Parameters:
1041 + dm - the DM object
. count - The minimum size
1043 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
1044 
1045   Output Parameter:
1046 . array - the work array
1047 
1048   Level: developer
1049 
1050 .seealso DMDestroy(), DMCreate()
1051 @*/
/* Returns a work array obtained from DMGetWorkArray() to the DM's free list.
   The count and dtype arguments are accepted for symmetry with DMGetWorkArray()
   but are not consulted here; the link is identified by its buffer address. */
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Scan the checked-out list (workout) for the link whose buffer matches *mem */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p           = link->next;  /* unlink from the checked-out list */
      link->next   = dm->workin;  /* push onto the free list for reuse */
      dm->workin   = link;
      *(void**)mem = NULL;        /* poison the caller's pointer against reuse */
      PetscFunctionReturn(0);
    }
  }
  /* Reaching here means the pointer was never handed out by DMGetWorkArray() */
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1071 
1072 #undef __FUNCT__
1073 #define __FUNCT__ "DMSetNullSpaceConstructor"
1074 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1075 {
1076   PetscFunctionBegin;
1077   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1078   if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1079   dm->nullspaceConstructors[field] = nullsp;
1080   PetscFunctionReturn(0);
1081 }
1082 
1083 #undef __FUNCT__
1084 #define __FUNCT__ "DMCreateFieldIS"
1085 /*@C
1086   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1087 
1088   Not collective
1089 
1090   Input Parameter:
1091 . dm - the DM object
1092 
1093   Output Parameters:
1094 + numFields  - The number of fields (or NULL if not requested)
1095 . fieldNames - The name for each field (or NULL if not requested)
1096 - fields     - The global indices for each field (or NULL if not requested)
1097 
1098   Level: intermediate
1099 
1100   Notes:
1101   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1102   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1103   PetscFree().
1104 
1105 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1106 @*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All outputs are optional; initialize whichever ones the caller requested */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = NULL;
  }
  /* Prefer the PetscSection description of the layout when one is attached;
     otherwise fall back to the type-specific implementation (if any) */
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* Pass 1: count the locally owned (gdof > 0), unconstrained dofs per field */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr           = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr           = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    /* Allocate per-field index arrays and reset the counters for the fill pass */
    for (f = 0; f < nF; ++f) {
      ierr          = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0;
    }
    /* Pass 2: record global indices; fields are stored consecutively at each
       point, so goff advances across the per-field inner loop */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) *numFields = nF;
    if (fieldNames) {
      /* Caller frees each name with PetscFree() and the array with PetscFree() */
      ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      /* PETSC_OWN_POINTER hands the index arrays to the ISs, so they are not
         freed here; only the bookkeeping arrays are released below */
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else if (dm->ops->createfieldis) {
    ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);
  }
  /* NOTE(review): with no section and no implementation this silently returns
     zero fields rather than erroring — presumably intentional; confirm */
  PetscFunctionReturn(0);
}
1195 
1196 
1197 #undef __FUNCT__
1198 #define __FUNCT__ "DMCreateFieldDecomposition"
1199 /*@C
1200   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1201                           corresponding to different fields: each IS contains the global indices of the dofs of the
1202                           corresponding field. The optional list of DMs define the DM for each subproblem.
1203                           Generalizes DMCreateFieldIS().
1204 
1205   Not collective
1206 
1207   Input Parameter:
1208 . dm - the DM object
1209 
1210   Output Parameters:
1211 + len       - The number of subproblems in the field decomposition (or NULL if not requested)
1212 . namelist  - The name for each field (or NULL if not requested)
1213 . islist    - The global indices for each field (or NULL if not requested)
1214 - dmlist    - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1215 
1216   Level: intermediate
1217 
1218   Notes:
1219   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1220   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1221   and all of the arrays should be freed with PetscFree().
1222 
1223 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1224 @*/
1225 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1226 {
1227   PetscErrorCode ierr;
1228 
1229   PetscFunctionBegin;
1230   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1231   if (len) {
1232     PetscValidPointer(len,2);
1233     *len = 0;
1234   }
1235   if (namelist) {
1236     PetscValidPointer(namelist,3);
1237     *namelist = 0;
1238   }
1239   if (islist) {
1240     PetscValidPointer(islist,4);
1241     *islist = 0;
1242   }
1243   if (dmlist) {
1244     PetscValidPointer(dmlist,5);
1245     *dmlist = 0;
1246   }
1247   /*
1248    Is it a good idea to apply the following check across all impls?
1249    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1250    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1251    */
1252   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1253   if (!dm->ops->createfielddecomposition) {
1254     PetscSection section;
1255     PetscInt     numFields, f;
1256 
1257     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1258     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1259     if (section && numFields && dm->ops->createsubdm) {
1260       *len = numFields;
1261       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1262       for (f = 0; f < numFields; ++f) {
1263         const char *fieldName;
1264 
1265         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1266         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1267         ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr);
1268       }
1269     } else {
1270       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1271       /* By default there are no DMs associated with subproblems. */
1272       if (dmlist) *dmlist = NULL;
1273     }
1274   } else {
1275     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1276   }
1277   PetscFunctionReturn(0);
1278 }
1279 
1280 #undef __FUNCT__
1281 #define __FUNCT__ "DMCreateSubDM"
1282 /*@C
1283   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1284                   The fields are defined by DMCreateFieldIS().
1285 
1286   Not collective
1287 
1288   Input Parameters:
1289 + dm - the DM object
1290 . numFields - number of fields in this subproblem
- fields    - The field numbers of the selected fields
1292 
1293   Output Parameters:
+ is - The global indices for the subproblem
- subdm - The DM for the subproblem
1296 
1297   Level: intermediate
1298 
1299 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1300 @*/
1301 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1302 {
1303   PetscErrorCode ierr;
1304 
1305   PetscFunctionBegin;
1306   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1307   PetscValidPointer(fields,3);
1308   if (is) PetscValidPointer(is,4);
1309   if (subdm) PetscValidPointer(subdm,5);
1310   if (dm->ops->createsubdm) {
1311     ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
1312   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1313   PetscFunctionReturn(0);
1314 }
1315 
1316 
1317 #undef __FUNCT__
1318 #define __FUNCT__ "DMCreateDomainDecomposition"
1319 /*@C
1320   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1321                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1322                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1323                           define a nonoverlapping covering, while outer subdomains can overlap.
1324                           The optional list of DMs define the DM for each subproblem.
1325 
1326   Not collective
1327 
1328   Input Parameter:
1329 . dm - the DM object
1330 
1331   Output Parameters:
1332 + len         - The number of subproblems in the domain decomposition (or NULL if not requested)
1333 . namelist    - The name for each subdomain (or NULL if not requested)
1334 . innerislist - The global indices for each inner subdomain (or NULL, if not requested)
1335 . outerislist - The global indices for each outer subdomain (or NULL, if not requested)
1336 - dmlist      - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1337 
1338   Level: intermediate
1339 
1340   Notes:
1341   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1342   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1343   and all of the arrays should be freed with PetscFree().
1344 
1345 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1346 @*/
PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;
  PetscInt            i,l;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All outputs are optional; initialize only those the caller requested */
  if (len)           {PetscValidPointer(len,2);            *len         = 0;}
  if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = NULL;}
  if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = NULL;}
  if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = NULL;}
  if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = NULL;}
  /*
   Is it a good idea to apply the following check across all impls?
   Perhaps some impls can have a well-defined decomposition before DMSetUp?
   This, however, follows the general principle that accessors are not well-behaved until the object is set up.
   */
  if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
  /* Without an implementation this silently leaves all outputs at zero/NULL */
  if (dm->ops->createdomaindecomposition) {
    ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
    /* copy subdomain hooks and context over to the subdomain DMs */
    if (dmlist) {
      for (i = 0; i < l; i++) {
        for (link=dm->subdomainhook; link; link=link->next) {
          if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
        }
        /* each subdomain DM shares the parent's user context */
        (*dmlist)[i]->ctx = dm->ctx;
      }
    }
    if (len) *len = l;
  }
  PetscFunctionReturn(0);
}
1381 
1382 
1383 #undef __FUNCT__
1384 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1385 /*@C
1386   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1387 
1388   Not collective
1389 
1390   Input Parameters:
1391 + dm - the DM object
1392 . n  - the number of subdomain scatters
1393 - subdms - the local subdomains
1394 
1395   Output Parameters:
1396 + n     - the number of scatters returned
1397 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1398 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1399 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1400 
1401   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1402   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1403   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1404   solution and residual data.
1405 
1406   Level: developer
1407 
1408 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1409 @*/
1410 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1411 {
1412   PetscErrorCode ierr;
1413 
1414   PetscFunctionBegin;
1415   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1416   PetscValidPointer(subdms,3);
1417   if (dm->ops->createddscatters) {
1418     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1419   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1420   PetscFunctionReturn(0);
1421 }
1422 
1423 #undef __FUNCT__
1424 #define __FUNCT__ "DMRefine"
1425 /*@
1426   DMRefine - Refines a DM object
1427 
1428   Collective on DM
1429 
1430   Input Parameter:
1431 + dm   - the DM object
1432 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1433 
1434   Output Parameter:
1435 . dmf - the refined DM, or NULL
1436 
1437   Note: If no refinement was done, the return value is NULL
1438 
1439   Level: developer
1440 
1441 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1442 @*/
1443 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1444 {
1445   PetscErrorCode   ierr;
1446   DMRefineHookLink link;
1447 
1448   PetscFunctionBegin;
1449   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1450   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1451   if (*dmf) {
1452     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1453 
1454     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1455 
1456     (*dmf)->ctx       = dm->ctx;
1457     (*dmf)->leveldown = dm->leveldown;
1458     (*dmf)->levelup   = dm->levelup + 1;
1459 
1460     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1461     for (link=dm->refinehook; link; link=link->next) {
1462       if (link->refinehook) {
1463         ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);
1464       }
1465     }
1466   }
1467   PetscFunctionReturn(0);
1468 }
1469 
1470 #undef __FUNCT__
1471 #define __FUNCT__ "DMRefineHookAdd"
1472 /*@C
1473    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1474 
1475    Logically Collective
1476 
1477    Input Arguments:
+  coarse - DM on which to run a hook when interpolating to a finer level
.  refinehook - function to run when setting up the finer level
1480 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1481 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1482 
1483    Calling sequence of refinehook:
1484 $    refinehook(DM coarse,DM fine,void *ctx);
1485 
1486 +  coarse - coarse level DM
1487 .  fine - fine level DM to interpolate problem to
1488 -  ctx - optional user-defined function context
1489 
1490    Calling sequence for interphook:
1491 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1492 
1493 +  coarse - coarse level DM
1494 .  interp - matrix interpolating a coarse-level solution to the finer grid
1495 .  fine - fine level DM to update
1496 -  ctx - optional user-defined function context
1497 
1498    Level: advanced
1499 
1500    Notes:
1501    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1502 
1503    If this function is called multiple times, the hooks will be run in the order they are added.
1504 
1505    This function is currently not available from Fortran.
1506 
1507 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1508 @*/
1509 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1510 {
1511   PetscErrorCode   ierr;
1512   DMRefineHookLink link,*p;
1513 
1514   PetscFunctionBegin;
1515   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1516   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1517   ierr             = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1518   link->refinehook = refinehook;
1519   link->interphook = interphook;
1520   link->ctx        = ctx;
1521   link->next       = NULL;
1522   *p               = link;
1523   PetscFunctionReturn(0);
1524 }
1525 
1526 #undef __FUNCT__
1527 #define __FUNCT__ "DMInterpolate"
1528 /*@
1529    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1530 
1531    Collective if any hooks are
1532 
1533    Input Arguments:
1534 +  coarse - coarser DM to use as a base
.  interp - interpolation matrix, apply using MatInterpolate()
1536 -  fine - finer DM to update
1537 
1538    Level: developer
1539 
1540 .seealso: DMRefineHookAdd(), MatInterpolate()
1541 @*/
1542 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1543 {
1544   PetscErrorCode   ierr;
1545   DMRefineHookLink link;
1546 
1547   PetscFunctionBegin;
1548   for (link=fine->refinehook; link; link=link->next) {
1549     if (link->interphook) {
1550       ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
1551     }
1552   }
1553   PetscFunctionReturn(0);
1554 }
1555 
1556 #undef __FUNCT__
1557 #define __FUNCT__ "DMGetRefineLevel"
1558 /*@
    DMGetRefineLevel - Gets the number of refinements that have generated this DM.
1560 
1561     Not Collective
1562 
1563     Input Parameter:
1564 .   dm - the DM object
1565 
1566     Output Parameter:
1567 .   level - number of refinements
1568 
1569     Level: developer
1570 
1571 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1572 
1573 @*/
1574 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1575 {
1576   PetscFunctionBegin;
1577   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1578   *level = dm->levelup;
1579   PetscFunctionReturn(0);
1580 }
1581 
1582 #undef __FUNCT__
1583 #define __FUNCT__ "DMGlobalToLocalHookAdd"
1584 /*@C
1585    DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called
1586 
1587    Logically Collective
1588 
1589    Input Arguments:
1590 +  dm - the DM
1591 .  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
1592 .  endhook - function to run after DMGlobalToLocalEnd() has completed
1593 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1594 
   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - insertion mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - insertion mode
.  l - local vector
-  ctx - optional user-defined function context
1610 
1611    Level: advanced
1612 
1613 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1614 @*/
1615 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
1616 {
1617   PetscErrorCode          ierr;
1618   DMGlobalToLocalHookLink link,*p;
1619 
1620   PetscFunctionBegin;
1621   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1622   for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1623   ierr            = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
1624   link->beginhook = beginhook;
1625   link->endhook   = endhook;
1626   link->ctx       = ctx;
1627   link->next      = NULL;
1628   *p              = link;
1629   PetscFunctionReturn(0);
1630 }
1631 
1632 #undef __FUNCT__
1633 #define __FUNCT__ "DMGlobalToLocalBegin"
1634 /*@
1635     DMGlobalToLocalBegin - Begins updating local vectors from global vector
1636 
1637     Neighbor-wise Collective on DM
1638 
1639     Input Parameters:
1640 +   dm - the DM object
1641 .   g - the global vector
1642 .   mode - INSERT_VALUES or ADD_VALUES
1643 -   l - the local vector
1644 
1645 
1646     Level: beginner
1647 
1648 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1649 
1650 @*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user begin-hooks registered via DMGlobalToLocalHookAdd() first */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  /* If a default star forest is attached, use it for the communication;
     otherwise fall back to the type-specific implementation */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The SF path only broadcasts (insert); additive ghost accumulation is unsupported */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Implementations understand only INSERT_VALUES/ADD_VALUES, so map the *_ALL_* modes down */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1679 
1680 #undef __FUNCT__
1681 #define __FUNCT__ "DMGlobalToLocalEnd"
1682 /*@
1683     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1684 
1685     Neighbor-wise Collective on DM
1686 
1687     Input Parameters:
1688 +   dm - the DM object
1689 .   g - the global vector
1690 .   mode - INSERT_VALUES or ADD_VALUES
1691 -   l - the local vector
1692 
1693 
1694     Level: beginner
1695 
1696 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1697 
1698 @*/
PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  PetscScalar             *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Complete the communication started by DMGlobalToLocalBegin(): the SF path
     must mirror the begin call, otherwise defer to the implementation */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* The SF path only broadcasts (insert); additive ghost accumulation is unsupported */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Implementations understand only INSERT_VALUES/ADD_VALUES, so map the *_ALL_* modes down */
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* Run user end-hooks after the data has arrived */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1725 
1726 #undef __FUNCT__
1727 #define __FUNCT__ "DMLocalToGlobalBegin"
1728 /*@
1729     DMLocalToGlobalBegin - updates global vectors from local vectors
1730 
1731     Neighbor-wise Collective on DM
1732 
1733     Input Parameters:
1734 +   dm - the DM object
1735 .   l - the local vector
1736 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1737            base point.
-   g - the global vector
1739 
1740     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1741            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1742            global array to the final global array with VecAXPY().
1743 
1744     Level: beginner
1745 
1746 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1747 
1748 @*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* If a default star forest is attached, reduce local values onto their
     owners through it; otherwise defer to the type-specific implementation */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Map the insertion mode onto the matching MPI reduction operation */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Implementations understand only INSERT_VALUES/ADD_VALUES, so map the *_ALL_* modes down */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1781 
1782 #undef __FUNCT__
1783 #define __FUNCT__ "DMLocalToGlobalEnd"
1784 /*@
1785     DMLocalToGlobalEnd - updates global vectors from local vectors
1786 
1787     Neighbor-wise Collective on DM
1788 
1789     Input Parameters:
1790 +   dm - the DM object
1791 .   l - the local vector
1792 .   mode - INSERT_VALUES or ADD_VALUES
1793 -   g - the global vector
1794 
1795 
1796     Level: beginner
1797 
.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalBegin(), DMGlobalToLocalEnd()
1799 
1800 @*/
1801 PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
1802 {
1803   PetscSF        sf;
1804   PetscErrorCode ierr;
1805 
1806   PetscFunctionBegin;
1807   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1808   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1809   if (sf) {
1810     MPI_Op      op;
1811     PetscScalar *lArray, *gArray;
1812 
1813     switch (mode) {
1814     case INSERT_VALUES:
1815     case INSERT_ALL_VALUES:
1816       op = MPIU_REPLACE; break;
1817     case ADD_VALUES:
1818     case ADD_ALL_VALUES:
1819       op = MPI_SUM; break;
1820     default:
1821       SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1822     }
1823     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1824     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1825     ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1826     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1827     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1828   } else {
1829     ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1830   }
1831   PetscFunctionReturn(0);
1832 }
1833 
1834 #undef __FUNCT__
1835 #define __FUNCT__ "DMLocalToLocalBegin"
1836 /*@
1837    DMLocalToLocalBegin - Maps from a local vector (including ghost points
1838    that contain irrelevant values) to another local vector where the ghost
1839    points in the second are set correctly. Must be followed by DMLocalToLocalEnd().
1840 
1841    Neighbor-wise Collective on DM and Vec
1842 
1843    Input Parameters:
1844 +  dm - the DM object
1845 .  g - the original local vector
1846 -  mode - one of INSERT_VALUES or ADD_VALUES
1847 
1848    Output Parameter:
1849 .  l  - the local vector with correct ghost values
1850 
1851    Level: intermediate
1852 
1853    Notes:
1854    The local vectors used here need not be the same as those
1855    obtained from DMCreateLocalVector(), BUT they
1856    must have the same parallel data layout; they could, for example, be
1857    obtained with VecDuplicate() from the DM originating vectors.
1858 
1859 .keywords: DM, local-to-local, begin
1860 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1861 
1862 @*/
1863 PetscErrorCode  DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1864 {
1865   PetscErrorCode          ierr;
1866 
1867   PetscFunctionBegin;
1868   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1869   ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1870   PetscFunctionReturn(0);
1871 }
1872 
1873 #undef __FUNCT__
1874 #define __FUNCT__ "DMLocalToLocalEnd"
1875 /*@
1876    DMLocalToLocalEnd - Maps from a local vector (including ghost points
1877    that contain irrelevant values) to another local vector where the ghost
1878    points in the second are set correctly. Must be preceded by DMLocalToLocalBegin().
1879 
1880    Neighbor-wise Collective on DM and Vec
1881 
1882    Input Parameters:
+  dm - the DM object
1884 .  g - the original local vector
1885 -  mode - one of INSERT_VALUES or ADD_VALUES
1886 
1887    Output Parameter:
1888 .  l  - the local vector with correct ghost values
1889 
1890    Level: intermediate
1891 
1892    Notes:
1893    The local vectors used here need not be the same as those
1894    obtained from DMCreateLocalVector(), BUT they
1895    must have the same parallel data layout; they could, for example, be
1896    obtained with VecDuplicate() from the DM originating vectors.
1897 
1898 .keywords: DM, local-to-local, end
1899 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1900 
1901 @*/
1902 PetscErrorCode  DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1903 {
1904   PetscErrorCode          ierr;
1905 
1906   PetscFunctionBegin;
1907   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1908   ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1909   PetscFunctionReturn(0);
1910 }
1911 
1912 
1913 #undef __FUNCT__
1914 #define __FUNCT__ "DMCoarsen"
1915 /*@
1916     DMCoarsen - Coarsens a DM object
1917 
1918     Collective on DM
1919 
1920     Input Parameter:
1921 +   dm - the DM object
1922 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1923 
1924     Output Parameter:
1925 .   dmc - the coarsened DM
1926 
1927     Level: developer
1928 
1929 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1930 
1931 @*/
1932 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1933 {
1934   PetscErrorCode    ierr;
1935   DMCoarsenHookLink link;
1936 
1937   PetscFunctionBegin;
1938   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1939   ierr                      = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1940   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1941   ierr                      = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1942   (*dmc)->ctx               = dm->ctx;
1943   (*dmc)->levelup           = dm->levelup;
1944   (*dmc)->leveldown         = dm->leveldown + 1;
1945   ierr                      = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1946   for (link=dm->coarsenhook; link; link=link->next) {
1947     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1948   }
1949   PetscFunctionReturn(0);
1950 }
1951 
1952 #undef __FUNCT__
1953 #define __FUNCT__ "DMCoarsenHookAdd"
1954 /*@C
1955    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1956 
1957    Logically Collective
1958 
1959    Input Arguments:
1960 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1961 .  coarsenhook - function to run when setting up a coarser level
1962 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1963 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1964 
1965    Calling sequence of coarsenhook:
1966 $    coarsenhook(DM fine,DM coarse,void *ctx);
1967 
1968 +  fine - fine level DM
1969 .  coarse - coarse level DM to restrict problem to
1970 -  ctx - optional user-defined function context
1971 
1972    Calling sequence for restricthook:
1973 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1974 
1975 +  fine - fine level DM
1976 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1977 .  rscale - scaling vector for restriction
1978 .  inject - matrix restricting by injection
1979 .  coarse - coarse level DM to update
1980 -  ctx - optional user-defined function context
1981 
1982    Level: advanced
1983 
1984    Notes:
1985    This function is only needed if auxiliary data needs to be set up on coarse grids.
1986 
1987    If this function is called multiple times, the hooks will be run in the order they are added.
1988 
1989    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1990    extract the finest level information from its context (instead of from the SNES).
1991 
1992    This function is currently not available from Fortran.
1993 
1994 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1995 @*/
1996 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1997 {
1998   PetscErrorCode    ierr;
1999   DMCoarsenHookLink link,*p;
2000 
2001   PetscFunctionBegin;
2002   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
2003   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2004   ierr               = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
2005   link->coarsenhook  = coarsenhook;
2006   link->restricthook = restricthook;
2007   link->ctx          = ctx;
2008   link->next         = NULL;
2009   *p                 = link;
2010   PetscFunctionReturn(0);
2011 }
2012 
2013 #undef __FUNCT__
2014 #define __FUNCT__ "DMRestrict"
2015 /*@
2016    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
2017 
2018    Collective if any hooks are
2019 
2020    Input Arguments:
2021 +  fine - finer DM to use as a base
2022 .  restrct - restriction matrix, apply using MatRestrict()
2023 .  inject - injection matrix, also use MatRestrict()
-  coarse - coarser DM to update
2025 
2026    Level: developer
2027 
2028 .seealso: DMCoarsenHookAdd(), MatRestrict()
2029 @*/
2030 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
2031 {
2032   PetscErrorCode    ierr;
2033   DMCoarsenHookLink link;
2034 
2035   PetscFunctionBegin;
2036   for (link=fine->coarsenhook; link; link=link->next) {
2037     if (link->restricthook) {
2038       ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
2039     }
2040   }
2041   PetscFunctionReturn(0);
2042 }
2043 
2044 #undef __FUNCT__
2045 #define __FUNCT__ "DMSubDomainHookAdd"
2046 /*@C
2047    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
2048 
2049    Logically Collective
2050 
2051    Input Arguments:
2052 +  global - global DM
2053 .  ddhook - function to run to pass data to the decomposition DM upon its creation
2054 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
2055 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
2056 
2057 
2058    Calling sequence for ddhook:
2059 $    ddhook(DM global,DM block,void *ctx)
2060 
2061 +  global - global DM
2062 .  block  - block DM
2063 -  ctx - optional user-defined function context
2064 
2065    Calling sequence for restricthook:
2066 $    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)
2067 
2068 +  global - global DM
2069 .  out    - scatter to the outer (with ghost and overlap points) block vector
2070 .  in     - scatter to block vector values only owned locally
2071 .  block  - block DM
2072 -  ctx - optional user-defined function context
2073 
2074    Level: advanced
2075 
2076    Notes:
2077    This function is only needed if auxiliary data needs to be set up on subdomain DMs.
2078 
2079    If this function is called multiple times, the hooks will be run in the order they are added.
2080 
2081    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2082    extract the global information from its context (instead of from the SNES).
2083 
2084    This function is currently not available from Fortran.
2085 
2086 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2087 @*/
2088 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2089 {
2090   PetscErrorCode      ierr;
2091   DMSubDomainHookLink link,*p;
2092 
2093   PetscFunctionBegin;
2094   PetscValidHeaderSpecific(global,DM_CLASSID,1);
2095   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2096   ierr               = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
2097   link->restricthook = restricthook;
2098   link->ddhook       = ddhook;
2099   link->ctx          = ctx;
2100   link->next         = NULL;
2101   *p                 = link;
2102   PetscFunctionReturn(0);
2103 }
2104 
2105 #undef __FUNCT__
2106 #define __FUNCT__ "DMSubDomainRestrict"
2107 /*@
2108    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2109 
2110    Collective if any hooks are
2111 
2112    Input Arguments:
+  global - the global DM to use as a base
.  oscatter - scatter from domain global vector filling subdomain global vector with overlap
.  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
-  subdm - the subdomain DM to update
2117 
2118    Level: developer
2119 
2120 .seealso: DMCoarsenHookAdd(), MatRestrict()
2121 @*/
2122 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2123 {
2124   PetscErrorCode      ierr;
2125   DMSubDomainHookLink link;
2126 
2127   PetscFunctionBegin;
2128   for (link=global->subdomainhook; link; link=link->next) {
2129     if (link->restricthook) {
2130       ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);
2131     }
2132   }
2133   PetscFunctionReturn(0);
2134 }
2135 
2136 #undef __FUNCT__
2137 #define __FUNCT__ "DMGetCoarsenLevel"
2138 /*@
    DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.
2140 
2141     Not Collective
2142 
2143     Input Parameter:
2144 .   dm - the DM object
2145 
2146     Output Parameter:
2147 .   level - number of coarsenings
2148 
2149     Level: developer
2150 
2151 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2152 
2153 @*/
2154 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2155 {
2156   PetscFunctionBegin;
2157   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2158   *level = dm->leveldown;
2159   PetscFunctionReturn(0);
2160 }
2161 
2162 
2163 
2164 #undef __FUNCT__
2165 #define __FUNCT__ "DMRefineHierarchy"
2166 /*@C
2167     DMRefineHierarchy - Refines a DM object, all levels at once
2168 
2169     Collective on DM
2170 
2171     Input Parameter:
2172 +   dm - the DM object
2173 -   nlevels - the number of levels of refinement
2174 
2175     Output Parameter:
2176 .   dmf - the refined DM hierarchy
2177 
2178     Level: developer
2179 
2180 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2181 
2182 @*/
2183 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2184 {
2185   PetscErrorCode ierr;
2186 
2187   PetscFunctionBegin;
2188   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2189   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2190   if (nlevels == 0) PetscFunctionReturn(0);
2191   if (dm->ops->refinehierarchy) {
2192     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2193   } else if (dm->ops->refine) {
2194     PetscInt i;
2195 
2196     ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr);
2197     for (i=1; i<nlevels; i++) {
2198       ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr);
2199     }
2200   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2201   PetscFunctionReturn(0);
2202 }
2203 
2204 #undef __FUNCT__
2205 #define __FUNCT__ "DMCoarsenHierarchy"
2206 /*@C
2207     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2208 
2209     Collective on DM
2210 
2211     Input Parameter:
2212 +   dm - the DM object
2213 -   nlevels - the number of levels of coarsening
2214 
2215     Output Parameter:
2216 .   dmc - the coarsened DM hierarchy
2217 
2218     Level: developer
2219 
2220 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2221 
2222 @*/
2223 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2224 {
2225   PetscErrorCode ierr;
2226 
2227   PetscFunctionBegin;
2228   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2229   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2230   if (nlevels == 0) PetscFunctionReturn(0);
2231   PetscValidPointer(dmc,3);
2232   if (dm->ops->coarsenhierarchy) {
2233     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2234   } else if (dm->ops->coarsen) {
2235     PetscInt i;
2236 
2237     ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
2238     for (i=1; i<nlevels; i++) {
2239       ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
2240     }
2241   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2242   PetscFunctionReturn(0);
2243 }
2244 
2245 #undef __FUNCT__
2246 #define __FUNCT__ "DMCreateAggregates"
2247 /*@
2248    DMCreateAggregates - Gets the aggregates that map between
2249    grids associated with two DMs.
2250 
2251    Collective on DM
2252 
2253    Input Parameters:
2254 +  dmc - the coarse grid DM
2255 -  dmf - the fine grid DM
2256 
2257    Output Parameters:
2258 .  rest - the restriction matrix (transpose of the projection matrix)
2259 
2260    Level: intermediate
2261 
2262 .keywords: interpolation, restriction, multigrid
2263 
2264 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2265 @*/
2266 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2267 {
2268   PetscErrorCode ierr;
2269 
2270   PetscFunctionBegin;
2271   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2272   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2273   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2274   PetscFunctionReturn(0);
2275 }
2276 
2277 #undef __FUNCT__
2278 #define __FUNCT__ "DMSetApplicationContextDestroy"
2279 /*@C
2280     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2281 
2282     Not Collective
2283 
2284     Input Parameters:
2285 +   dm - the DM object
2286 -   destroy - the destroy function
2287 
2288     Level: intermediate
2289 
2290 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2291 
2292 @*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Record the callback only; presumably it is invoked on dm->ctx when the DM is
     destroyed (the destroy path is outside this view) — TODO confirm */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2300 
2301 #undef __FUNCT__
2302 #define __FUNCT__ "DMSetApplicationContext"
2303 /*@
2304     DMSetApplicationContext - Set a user context into a DM object
2305 
2306     Not Collective
2307 
2308     Input Parameters:
2309 +   dm - the DM object
2310 -   ctx - the user context
2311 
2312     Level: intermediate
2313 
2314 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2315 
2316 @*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The DM stores the pointer only; ownership stays with the caller unless a
     destroy callback was registered via DMSetApplicationContextDestroy() */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2324 
2325 #undef __FUNCT__
2326 #define __FUNCT__ "DMGetApplicationContext"
2327 /*@
2328     DMGetApplicationContext - Gets a user context from a DM object
2329 
2330     Not Collective
2331 
2332     Input Parameter:
2333 .   dm - the DM object
2334 
2335     Output Parameter:
2336 .   ctx - the user context
2337 
2338     Level: intermediate
2339 
2340 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2341 
2342 @*/
PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* ctx is really a void** destination in disguise: callers pass the address of
     their context pointer and it is filled in here */
  *(void**)ctx = dm->ctx;
  PetscFunctionReturn(0);
}
2350 
2351 #undef __FUNCT__
2352 #define __FUNCT__ "DMSetVariableBounds"
2353 /*@C
    DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2355 
2356     Logically Collective on DM
2357 
2358     Input Parameter:
2359 +   dm - the DM object
2360 -   f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)
2361 
2362     Level: intermediate
2363 
2364 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2365          DMSetJacobian()
2366 
2367 @*/
2368 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2369 {
2370   PetscFunctionBegin;
2371   dm->ops->computevariablebounds = f;
2372   PetscFunctionReturn(0);
2373 }
2374 
2375 #undef __FUNCT__
2376 #define __FUNCT__ "DMHasVariableBounds"
2377 /*@
2378     DMHasVariableBounds - does the DM object have a variable bounds function?
2379 
2380     Not Collective
2381 
2382     Input Parameter:
.   dm - the DM object
2384 
2385     Output Parameter:
2386 .   flg - PETSC_TRUE if the variable bounds function exists
2387 
2388     Level: developer
2389 
2390 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2391 
2392 @*/
2393 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2394 {
2395   PetscFunctionBegin;
2396   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2397   PetscFunctionReturn(0);
2398 }
2399 
2400 #undef __FUNCT__
2401 #define __FUNCT__ "DMComputeVariableBounds"
2402 /*@C
2403     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2404 
2405     Logically Collective on DM
2406 
2407     Input Parameters:
+   dm - the DM object
2409 -   x  - current solution at which the bounds are computed
2410 
2411     Output parameters:
2412 +   xl - lower bound
2413 -   xu - upper bound
2414 
2415     Level: intermediate
2416 
2417 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2418 
2419 @*/
2420 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2421 {
2422   PetscErrorCode ierr;
2423 
2424   PetscFunctionBegin;
2425   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2426   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2427   if (dm->ops->computevariablebounds) {
2428     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2429   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2430   PetscFunctionReturn(0);
2431 }
2432 
2433 #undef __FUNCT__
2434 #define __FUNCT__ "DMHasColoring"
2435 /*@
2436     DMHasColoring - does the DM object have a method of providing a coloring?
2437 
2438     Not Collective
2439 
2440     Input Parameter:
2441 .   dm - the DM object
2442 
2443     Output Parameter:
2444 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2445 
2446     Level: developer
2447 
2448 .seealso DMHasFunction(), DMCreateColoring()
2449 
2450 @*/
2451 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2452 {
2453   PetscFunctionBegin;
2454   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2455   PetscFunctionReturn(0);
2456 }
2457 
2458 #undef  __FUNCT__
2459 #define __FUNCT__ "DMSetVec"
2460 /*@C
2461     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2462 
2463     Collective on DM
2464 
2465     Input Parameter:
2466 +   dm - the DM object
2467 -   x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.
2468 
2469     Level: developer
2470 
2471 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2472 
2473 @*/
2474 PetscErrorCode  DMSetVec(DM dm,Vec x)
2475 {
2476   PetscErrorCode ierr;
2477 
2478   PetscFunctionBegin;
2479   if (x) {
2480     if (!dm->x) {
2481       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2482     }
2483     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2484   } else if (dm->x) {
2485     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2486   }
2487   PetscFunctionReturn(0);
2488 }
2489 
2490 PetscFunctionList DMList              = NULL;
2491 PetscBool         DMRegisterAllCalled = PETSC_FALSE;
2492 
2493 #undef __FUNCT__
2494 #define __FUNCT__ "DMSetType"
2495 /*@C
2496   DMSetType - Builds a DM, for a particular DM implementation.
2497 
2498   Collective on DM
2499 
2500   Input Parameters:
2501 + dm     - The DM object
2502 - method - The name of the DM type
2503 
2504   Options Database Key:
2505 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2506 
2507   Notes:
2508   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2509 
2510   Level: intermediate
2511 
2512 .keywords: DM, set, type
2513 .seealso: DMGetType(), DMCreate()
2514 @*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* Nothing to do if the DM already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Make sure all built-in types are registered, then look up the constructor */
  if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down any implementation-specific data from the previous type before
     running the new type's constructor */
  if (dm->ops->destroy) {
    ierr             = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2538 
2539 #undef __FUNCT__
2540 #define __FUNCT__ "DMGetType"
2541 /*@C
2542   DMGetType - Gets the DM type name (as a string) from the DM.
2543 
2544   Not Collective
2545 
2546   Input Parameter:
2547 . dm  - The DM
2548 
2549   Output Parameter:
2550 . type - The DM type name
2551 
2552   Level: intermediate
2553 
2554 .keywords: DM, get, type, name
2555 .seealso: DMSetType(), DMCreate()
2556 @*/
2557 PetscErrorCode  DMGetType(DM dm, DMType *type)
2558 {
2559   PetscErrorCode ierr;
2560 
2561   PetscFunctionBegin;
2562   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2563   PetscValidCharPointer(type,2);
2564   if (!DMRegisterAllCalled) {
2565     ierr = DMRegisterAll();CHKERRQ(ierr);
2566   }
2567   *type = ((PetscObject)dm)->type_name;
2568   PetscFunctionReturn(0);
2569 }
2570 
2571 #undef __FUNCT__
2572 #define __FUNCT__ "DMConvert"
2573 /*@C
2574   DMConvert - Converts a DM to another DM, either of the same or different type.
2575 
2576   Collective on DM
2577 
2578   Input Parameters:
2579 + dm - the DM
2580 - newtype - new DM type (use "same" for the same type)
2581 
2582   Output Parameter:
2583 . M - pointer to new DM
2584 
2585   Notes:
2586   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2587   the MPI communicator of the generated DM is always the same as the communicator
2588   of the input DM.
2589 
2590   Level: intermediate
2591 
2592 .seealso: DMCreate()
2593 @*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype/issame are computed here but never consulted below —
     a same-type fast path appears to be intended but is not implemented; verify */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* Build the composed-function name "DMConvert_<fromtype>_<totype>_C" and query it on dm */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    /* A throwaway DM of the target type is created solely to query its composed functions */
    ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* No basic converter exists yet, so failing to find one above is an error */
    SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2666 
2667 /*--------------------------------------------------------------------------------------------------------------------*/
2668 
2669 #undef __FUNCT__
2670 #define __FUNCT__ "DMRegister"
2671 /*@C
2672   DMRegister -  Adds a new DM component implementation
2673 
2674   Not Collective
2675 
2676   Input Parameters:
2677 + name        - The name of a new user-defined creation routine
2678 - create_func - The creation routine itself
2679 
2680   Notes:
2681   DMRegister() may be called multiple times to add several user-defined DMs
2682 
2683 
2684   Sample usage:
2685 .vb
2686     DMRegister("my_da", MyDMCreate);
2687 .ve
2688 
2689   Then, your DM type can be chosen with the procedural interface via
2690 .vb
2691     DMCreate(MPI_Comm, DM *);
2692     DMSetType(DM,"my_da");
2693 .ve
2694    or at runtime via the option
2695 .vb
2696     -da_type my_da
2697 .ve
2698 
2699   Level: advanced
2700 
2701 .keywords: DM, register
2702 .seealso: DMRegisterAll(), DMRegisterDestroy()
2703 
2704 @*/
PetscErrorCode  DMRegister(const char sname[],PetscErrorCode (*function)(DM))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the creation routine under the given type name in the global DM type registry,
     making it selectable via DMSetType() or the -dm_type option */
  ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2713 
2714 #undef __FUNCT__
2715 #define __FUNCT__ "DMLoad"
2716 /*@C
2717   DMLoad - Loads a DM that has been stored in binary  with DMView().
2718 
2719   Collective on PetscViewer
2720 
2721   Input Parameters:
2722 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2723            some related function before a call to DMLoad().
2724 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2725            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2726 
2727    Level: intermediate
2728 
2729   Notes:
2730    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2731 
2732   Notes for advanced users:
2733   Most users should not need to know the details of the binary storage
2734   format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary DM storage
2736   format is
2737 .vb
2738      has not yet been determined
2739 .ve
2740 
2741 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2742 @*/
2743 PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
2744 {
2745   PetscErrorCode ierr;
2746   PetscBool      isbinary;
2747   PetscInt       classid;
2748   char           type[256];
2749 
2750   PetscFunctionBegin;
2751   PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
2752   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
2753   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
2754   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
2755 
2756   ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
2757   if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file");
2758   ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
2759   ierr = DMSetType(newdm, type);CHKERRQ(ierr);
2760   if (newdm->ops->load) {
2761     ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
2762   }
2763   PetscFunctionReturn(0);
2764 }
2765 
2766 /******************************** FEM Support **********************************/
2767 
2768 #undef __FUNCT__
2769 #define __FUNCT__ "DMPrintCellVector"
2770 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2771 {
2772   PetscInt       f;
2773   PetscErrorCode ierr;
2774 
2775   PetscFunctionBegin;
2776   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2777   for (f = 0; f < len; ++f) {
2778     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2779   }
2780   PetscFunctionReturn(0);
2781 }
2782 
2783 #undef __FUNCT__
2784 #define __FUNCT__ "DMPrintCellMatrix"
2785 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2786 {
2787   PetscInt       f, g;
2788   PetscErrorCode ierr;
2789 
2790   PetscFunctionBegin;
2791   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2792   for (f = 0; f < rows; ++f) {
2793     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2794     for (g = 0; g < cols; ++g) {
2795       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2796     }
2797     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2798   }
2799   PetscFunctionReturn(0);
2800 }
2801 
2802 #undef __FUNCT__
2803 #define __FUNCT__ "DMGetDefaultSection"
2804 /*@
2805   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2806 
2807   Input Parameter:
2808 . dm - The DM
2809 
2810   Output Parameter:
2811 . section - The PetscSection
2812 
2813   Level: intermediate
2814 
2815   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2816 
2817 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2818 @*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Borrowed reference: the DM retains ownership (may be NULL if never set) */
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}
2827 
2828 #undef __FUNCT__
2829 #define __FUNCT__ "DMSetDefaultSection"
2830 /*@
2831   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2832 
2833   Input Parameters:
2834 + dm - The DM
2835 - section - The PetscSection
2836 
2837   Level: intermediate
2838 
2839   Note: Any existing Section will be destroyed
2840 
2841 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2842 @*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroy so that passing the currently-held section is safe */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Mirror the section's field layout onto the DM's field objects, copying field names */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2869 
2870 #undef __FUNCT__
2871 #define __FUNCT__ "DMGetDefaultGlobalSection"
2872 /*@
2873   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2874 
2875   Collective on DM
2876 
2877   Input Parameter:
2878 . dm - The DM
2879 
2880   Output Parameter:
2881 . section - The PetscSection
2882 
2883   Level: intermediate
2884 
2885   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2886 
2887 .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2888 @*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Lazily build the global section from the local section and the point SF on first use */
  if (!dm->defaultGlobalSection) {
    if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Rebuild the cached value layout to match the newly created global section */
    ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  /* Borrowed reference: the DM retains ownership */
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2905 
2906 #undef __FUNCT__
2907 #define __FUNCT__ "DMSetDefaultGlobalSection"
2908 /*@
2909   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2910 
2911   Input Parameters:
2912 + dm - The DM
2913 - section - The PetscSection, or NULL
2914 
2915   Level: intermediate
2916 
2917   Note: Any existing Section will be destroyed
2918 
2919 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2920 @*/
2921 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
2922 {
2923   PetscErrorCode ierr;
2924 
2925   PetscFunctionBegin;
2926   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2927   if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
2928   ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
2929   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2930   dm->defaultGlobalSection = section;
2931   PetscFunctionReturn(0);
2932 }
2933 
2934 #undef __FUNCT__
2935 #define __FUNCT__ "DMGetDefaultSF"
2936 /*@
2937   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2938   it is created from the default PetscSection layouts in the DM.
2939 
2940   Input Parameter:
2941 . dm - The DM
2942 
2943   Output Parameter:
2944 . sf - The PetscSF
2945 
2946   Level: intermediate
2947 
2948   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2949 
2950 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2951 @*/
PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
{
  PetscInt       nroots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* A negative root count means the SF's graph has not been set yet */
  ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  if (nroots < 0) {
    PetscSection section, gSection;

    /* Lazily build the dof SF from the default local and global sections */
    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
      ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
    } else {
      /* No layout information available: return NULL rather than an empty SF */
      *sf = NULL;
      PetscFunctionReturn(0);
    }
  }
  /* Borrowed reference: the DM retains ownership */
  *sf = dm->defaultSF;
  PetscFunctionReturn(0);
}
2976 
2977 #undef __FUNCT__
2978 #define __FUNCT__ "DMSetDefaultSF"
2979 /*@
2980   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
2981 
2982   Input Parameters:
2983 + dm - The DM
2984 - sf - The PetscSF
2985 
2986   Level: intermediate
2987 
2988   Note: Any previous SF is destroyed
2989 
2990 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
2991 @*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  /* NOTE(review): unlike DMSetPointSF()/DMSetDefaultSection(), no PetscObjectReference()
     is taken here, so the DM effectively steals the caller's reference to sf — confirm
     whether this ownership transfer is intended before changing it */
  ierr          = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}
3003 
3004 #undef __FUNCT__
3005 #define __FUNCT__ "DMCreateDefaultSF"
3006 /*@C
3007   DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
3008   describing the data layout.
3009 
3010   Input Parameters:
3011 + dm - The DM
3012 . localSection - PetscSection describing the local data layout
3013 - globalSection - PetscSection describing the global data layout
3014 
3015   Level: intermediate
3016 
3017 .seealso: DMGetDefaultSF(), DMSetDefaultSF()
3018 @*/
3019 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
3020 {
3021   MPI_Comm       comm;
3022   PetscLayout    layout;
3023   const PetscInt *ranges;
3024   PetscInt       *local;
3025   PetscSFNode    *remote;
3026   PetscInt       pStart, pEnd, p, nroots, nleaves = 0, l;
3027   PetscMPIInt    size, rank;
3028   PetscErrorCode ierr;
3029 
3030   PetscFunctionBegin;
3031   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
3032   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3033   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
3034   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
3035   ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
3036   ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
3037   ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
3038   ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
3039   ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
3040   ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
3041   ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
3042   for (p = pStart; p < pEnd; ++p) {
3043     PetscInt gdof, gcdof;
3044 
3045     ierr     = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
3046     ierr     = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
3047     nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
3048   }
3049   ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
3050   ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
3051   for (p = pStart, l = 0; p < pEnd; ++p) {
3052     const PetscInt *cind;
3053     PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;
3054 
3055     ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
3056     ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
3057     ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
3058     ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
3059     ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
3060     ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
3061     ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
3062     if (!gdof) continue; /* Censored point */
3063     gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
3064     if (gsize != dof-cdof) {
3065       if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
3066       cdof = 0; /* Ignore constraints */
3067     }
3068     for (d = 0, c = 0; d < dof; ++d) {
3069       if ((c < cdof) && (cind[c] == d)) {++c; continue;}
3070       local[l+d-c] = off+d;
3071     }
3072     if (gdof < 0) {
3073       for (d = 0; d < gsize; ++d, ++l) {
3074         PetscInt offset = -(goff+1) + d, r;
3075 
3076         ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
3077         if (r < 0) r = -(r+2);
3078         remote[l].rank  = r;
3079         remote[l].index = offset - ranges[r];
3080       }
3081     } else {
3082       for (d = 0; d < gsize; ++d, ++l) {
3083         remote[l].rank  = rank;
3084         remote[l].index = goff+d - ranges[rank];
3085       }
3086     }
3087   }
3088   if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
3089   ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
3090   ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
3091   PetscFunctionReturn(0);
3092 }
3093 
3094 #undef __FUNCT__
3095 #define __FUNCT__ "DMGetPointSF"
3096 /*@
3097   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3098 
3099   Input Parameter:
3100 . dm - The DM
3101 
3102   Output Parameter:
3103 . sf - The PetscSF
3104 
3105   Level: intermediate
3106 
3107   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3108 
3109 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3110 @*/
PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* Borrowed reference: the DM retains ownership of the point SF */
  *sf = dm->sf;
  PetscFunctionReturn(0);
}
3119 
3120 #undef __FUNCT__
3121 #define __FUNCT__ "DMSetPointSF"
3122 /*@
3123   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3124 
3125   Input Parameters:
3126 + dm - The DM
3127 - sf - The PetscSF
3128 
3129   Level: intermediate
3130 
3131 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3132 @*/
3133 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3134 {
3135   PetscErrorCode ierr;
3136 
3137   PetscFunctionBegin;
3138   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3139   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3140   ierr   = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3141   ierr   = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3142   dm->sf = sf;
3143   PetscFunctionReturn(0);
3144 }
3145 
3146 #undef __FUNCT__
3147 #define __FUNCT__ "DMGetNumFields"
/* Return the number of fields previously set with DMSetNumFields(). */
PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(numFields, 2);
  *numFields = dm->numFields;
  PetscFunctionReturn(0);
}
3156 
3157 #undef __FUNCT__
3158 #define __FUNCT__ "DMSetNumFields"
/* Resize the DM's field array, destroying any existing field objects and creating
   empty PetscContainer placeholders for each new field. */
PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
{
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Release the old field objects before reallocating the array */
  for (f = 0; f < dm->numFields; ++f) {
    ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
  }
  ierr          = PetscFree(dm->fields);CHKERRQ(ierr);
  dm->numFields = numFields;
  ierr          = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
  /* Each field starts as an empty container; DMSetDefaultSection() later assigns names
     from the section's field names */
  for (f = 0; f < dm->numFields; ++f) {
    ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3177 
3178 #undef __FUNCT__
3179 #define __FUNCT__ "DMGetField"
/* Return field f's object (borrowed reference) after checking f is in range. */
PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(field, 2);
  if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
  if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
  *field = dm->fields[f];
  PetscFunctionReturn(0);
}
3190 
3191 #undef __FUNCT__
3192 #define __FUNCT__ "DMRestrictHook_Coordinates"
/* Coarsen hook: when the fine DM has coordinates but the coarse DM does not,
   build coarse coordinates by injecting the fine coordinate vector. */
PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx)
{
  DM dm_coord,dmc_coord;
  PetscErrorCode ierr;
  Vec coords,ccoords;
  VecScatter scat;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr);
  if (coords && !ccoords) {
    /* Inject (sample) the fine-grid coordinates onto the coarse grid */
    ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr);
    ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr);
    /* DMSetCoordinates() took its own reference to ccoords, so release ours */
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3215 
3216 #undef __FUNCT__
3217 #define __FUNCT__ "DMSubDomainHook_Coordinates"
/* Subdomain hook: when the parent DM has coordinates but the subdomain DM does not,
   scatter both the global and the local (ghosted) coordinates onto the subdomain. */
static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx)
{
  DM dm_coord,subdm_coord;
  PetscErrorCode ierr;
  Vec coords,ccoords,clcoords;
  VecScatter *scat_i,*scat_g;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr);
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr);
    /* As used below, scat_i fills the subdomain's global vector and scat_g its
       local (ghosted) vector */
    ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr);
    /* The subdomain DM now holds its own references; release ours and the scatters */
    ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
    ierr = VecDestroy(&clcoords);CHKERRQ(ierr);
    ierr = PetscFree(scat_i);CHKERRQ(ierr);
    ierr = PetscFree(scat_g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3248 
3249 #undef __FUNCT__
3250 #define __FUNCT__ "DMSetCoordinates"
3251 /*@
3252   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3253 
3254   Collective on DM
3255 
3256   Input Parameters:
3257 + dm - the DM
3258 - c - coordinate vector
3259 
3260   Note:
3261   The coordinates do include those for ghost points, which are in the local vector
3262 
3263   Level: intermediate
3264 
3265 .keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3267 @*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so re-setting the currently-held vector is safe */
  ierr            = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr            = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the cached local form; DMGetCoordinatesLocal() rebuilds it on demand */
  ierr            = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  /* Keep coordinates consistent under coarsening and domain decomposition */
  ierr            = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  ierr            = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3283 
3284 #undef __FUNCT__
3285 #define __FUNCT__ "DMSetCoordinatesLocal"
3286 /*@
3287   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3288 
3289   Collective on DM
3290 
3291    Input Parameters:
3292 +  dm - the DM
3293 -  c - coordinate vector
3294 
3295   Note:
3296   The coordinates of ghost points can be set using DMSetCoordinates()
3297   followed by DMGetCoordinatesLocal(). This is intended to enable the
3298   setting of ghost coordinates outside of the domain.
3299 
3300   Level: intermediate
3301 
3302 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3303 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3304 @*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so re-setting the currently-held vector is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);

  dm->coordinatesLocal = c;

  /* Invalidate the cached global form; DMGetCoordinates() rebuilds it on demand */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3320 
3321 #undef __FUNCT__
3322 #define __FUNCT__ "DMGetCoordinates"
3323 /*@
3324   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3325 
3326   Not Collective
3327 
3328   Input Parameter:
3329 . dm - the DM
3330 
3331   Output Parameter:
3332 . c - global coordinate vector
3333 
3334   Note:
3335   This is a borrowed reference, so the user should NOT destroy this vector
3336 
3337   Each process has only the local coordinates (does NOT have the ghost coordinates).
3338 
3339   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3340   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3341 
3342   Level: intermediate
3343 
3344 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3345 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3346 @*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the global coordinate vector from the local (ghosted) one */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  /* Borrowed reference; NULL if no coordinates were ever set */
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3366 
3367 #undef __FUNCT__
3368 #define __FUNCT__ "DMGetCoordinatesLocal"
3369 /*@
3370   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3371 
3372   Collective on DM
3373 
3374   Input Parameter:
3375 . dm - the DM
3376 
3377   Output Parameter:
3378 . c - coordinate vector
3379 
3380   Note:
3381   This is a borrowed reference, so the user should NOT destroy this vector
3382 
3383   Each process has the local and ghost coordinates
3384 
3385   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3386   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3387 
3388   Level: intermediate
3389 
3390 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3391 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3392 @*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the local (ghosted) coordinate vector from the global one */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  /* Borrowed reference; NULL if no coordinates were ever set */
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}
3412 
3413 #undef __FUNCT__
3414 #define __FUNCT__ "DMGetCoordinateDM"
3415 /*@
3416   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3417 
3418   Collective on DM
3419 
3420   Input Parameter:
3421 . dm - the DM
3422 
3423   Output Parameter:
3424 . cdm - coordinate DM
3425 
3426   Level: intermediate
3427 
3428 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3429 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3430 @*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  /* Lazily create and cache the coordinate DM via the type-specific hook */
  if (!dm->coordinateDM) {
    if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  /* Borrowed reference: the DM retains ownership of the coordinate DM */
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}
3445 
3446 #undef __FUNCT__
3447 #define __FUNCT__ "DMLocatePoints"
3448 /*@
3449   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3450 
3451   Not collective
3452 
3453   Input Parameters:
3454 + dm - The DM
3455 - v - The Vec of points
3456 
3457   Output Parameter:
3458 . cells - The local cell numbers for cells which contain the points
3459 
3460   Level: developer
3461 
3462 .keywords: point location, mesh
3463 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3464 @*/
3465 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3466 {
3467   PetscErrorCode ierr;
3468 
3469   PetscFunctionBegin;
3470   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3471   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3472   PetscValidPointer(cells,3);
3473   if (dm->ops->locatepoints) {
3474     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3475   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
3476   PetscFunctionReturn(0);
3477 }
3478