xref: /petsc/src/dm/interface/dm.c (revision c7a10e08591f5a3773572f14db2d1653e42f2f38)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
3 PetscClassId  DM_CLASSID;
4 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never call DMSetType() it will generate an
   error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  /* With static libraries the Vec, Mat, and DM packages must be registered
     explicitly before any of their objects can be created */
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  /* Create the PETSc object header; DMDestroy()/DMView() are recorded as the
     generic destroy/view functions for this class */
  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* All type-specific operations start out unset; DMSetType() fills them in */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = PETSC_NULL;
  v->ltogmapb     = PETSC_NULL;
  v->bs           = 1;                  /* default: no exploitable block structure */
  v->coloringtype = IS_COLORING_GLOBAL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = PETSC_NULL;
  v->defaultGlobalSection = PETSC_NULL;
  {
    PetscInt i;
    /* NOTE(review): 10 presumably matches the declared size of
       nullspaceConstructors[] in dmimpl.h -- confirm and prefer a named constant */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = PETSC_NULL;
    }
  }
  v->numFields = 0;
  v->fields    = PETSC_NULL;

  *dm = v;
  PetscFunctionReturn(0);
}
64 
#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Release any previously set type string, then store a private copy of ctype */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
93 
#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
  VecGetDM - Gets the DM defining the data layout of the vector

  Not collective

  Input Parameter:
. v - The Vec

  Output Parameter:
. dm - The DM, or PETSC_NULL if no DM was attached with VecSetDM()

  Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored on the Vec under the composed-object key "__PETSc_dm" */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "VecSetDM"
124 /*@
125   VecSetDM - Sets the DM defining the data layout of the vector
126 
127   Not collective
128 
129   Input Parameters:
130 + v - The Vec
131 - dm - The DM
132 
133   Level: intermediate
134 
135 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
136 @*/
137 PetscErrorCode VecSetDM(Vec v, DM dm)
138 {
139   PetscErrorCode ierr;
140 
141   PetscFunctionBegin;
142   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
143   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
144   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
145   PetscFunctionReturn(0);
146 }
147 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Release any previously set type string, then store a private copy of ctype */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
175 
#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
  MatGetDM - Gets the DM defining the data layout of the matrix

  Not collective

  Input Parameter:
. A - The Mat

  Output Parameter:
. dm - The DM, or PETSC_NULL if no DM was attached with MatSetDM()

  Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored on the Mat under the composed-object key "__PETSc_dm" */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
203 
#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
  MatSetDM - Sets the DM defining the data layout of the matrix

  Not collective

  Input Parameters:
+ A - The Mat
- dm - The DM, or PETSC_NULL to remove any current association

  Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dm may be PETSC_NULL, which detaches any previously composed DM */
  if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
229 
#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DM options in the database.

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DM, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Delegates to the generic PetscObject prefix mechanism */
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
261 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy (set to PETSC_NULL on return)

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  {cnt++;}
    if ((*dm)->globalin[i]) {cnt++;}
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    /* the cached solution vector may point back at this DM via VecSetDM() */
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* If references other than the circular ones remain, only drop the caller's
     reference and return without destroying anything */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  /* Destroy cached local work vectors; it is an error if any are still checked out */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = PETSC_NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = PETSC_NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = PETSC_NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = PETSC_NULL;
  }

  /* Attached solver contexts (KSP/SNES/TS) */
  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  /* User context is destroyed only if a destroy callback was provided */
  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);

  /* NOTE(review): ops->destroy is called unconditionally; it appears a DM whose
     type was never set would crash here -- confirm DMSetType() is always required */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
386 
#undef __FUNCT__
#define __FUNCT__ "DMSetUp"
/*@
    DMSetUp - sets up the data structures inside a DM object

    Collective on DM

    Input Parameter:
.   dm - the DM object to setup

    Notes: This routine is idempotent; calls after the first are no-ops.

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetUp(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->setupcalled) PetscFunctionReturn(0);
  /* Type-specific setup is optional; a DM type need not provide one */
  if (dm->ops->setup) {
    ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
  }
  dm->setupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}
415 
416 #undef __FUNCT__
417 #define __FUNCT__ "DMSetFromOptions"
418 /*@
419     DMSetFromOptions - sets parameters in a DM from the options database
420 
421     Collective on DM
422 
423     Input Parameter:
424 .   dm - the DM object to set options for
425 
426     Options Database:
427 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
428 .   -dm_vec_type <type>  type of vector to create inside DM
429 .   -dm_mat_type <type>  type of matrix to create inside DM
430 -   -dm_coloring_type <global or ghosted>
431 
432     Level: developer
433 
434 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
435 
436 @*/
437 PetscErrorCode  DMSetFromOptions(DM dm)
438 {
439   PetscBool      flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg;
440   PetscErrorCode ierr;
441   char           typeName[256] = MATAIJ;
442 
443   PetscFunctionBegin;
444   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
445   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
446     ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr);
447     ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr);
448     ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr);
449     ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr);
450     ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
451     ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
452     if (flg) {
453       ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
454     }
455     ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
456     if (flg) {
457       ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
458     }
459     ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
460     if (dm->ops->setfromoptions) {
461       ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
462     }
463     /* process any options handlers added with PetscObjectAddOptionsHandler() */
464     ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
465   ierr = PetscOptionsEnd();CHKERRQ(ierr);
466   if (flg1) {
467     ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
468   }
469   if (flg2) {
470     PetscViewer viewer;
471 
472     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
473     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
474     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr);
475     ierr = DMView(dm, viewer);CHKERRQ(ierr);
476     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
477   }
478   if (flg3) {
479     PetscViewer viewer;
480 
481     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
482     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
483     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr);
484     ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr);
485     ierr = DMView(dm, viewer);CHKERRQ(ierr);
486     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
487   }
488   if (flg4) {
489     PetscViewer viewer;
490 
491     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
492     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
493     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr);
494     ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr);
495     ierr = DMView(dm, viewer);CHKERRQ(ierr);
496     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
497   }
498   PetscFunctionReturn(0);
499 }
500 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer, or PETSC_NULL for the communicator's stdout viewer

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* Binary format: write a class id followed by the fixed-width (256 byte)
       type name so DMLoad() can reconstruct the correct DM type */
    PetscInt         classid = DM_FILE_CLASSID;
    char             type[256];

    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  /* Type-specific viewing is optional */
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
541 
542 PETSC_EXTERN PetscErrorCode VecView_Complex_Local(Vec, PetscViewer);
543 PETSC_EXTERN PetscErrorCode VecView_Complex(Vec, PetscViewer);
544 
#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector"
/*@
    DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object

    Collective on DM

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   vec - the global vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->defaultSection) {
    /* A default PetscSection is set: build the vector from the section layout
       instead of delegating to the type-specific implementation */
    PetscSection gSection;
    PetscInt     localSize, blockSize = -1, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
    /* Infer a uniform block size (dof minus constrained dof per point);
       fall back to 1 as soon as two points disagree */
    for(p = pStart; p < pEnd; ++p) {
      PetscInt dof, cdof;

      ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
      ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr);
      if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof;
      if ((dof > 0) && (dof-cdof != blockSize)) {
        blockSize = 1;
        break;
      }
    }
    /* NOTE(review): if no point has dof > 0, blockSize stays -1 and is passed
       to VecSetBlockSize() -- confirm this case cannot occur here */
    ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr);
    if (localSize%blockSize) SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize);
    ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr);
    ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
    /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */
    ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
    ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
    /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */
    /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */
    /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */
    ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex);CHKERRQ(ierr);
  } else {
    ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
603 
#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector"
/*@
    DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   vec - the local vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->defaultSection) {
    /* A default PetscSection is set: build a sequential vector from the
       section layout instead of delegating to the type-specific routine */
    PetscInt localSize, blockSize = -1, pStart, pEnd, p;

    ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
    /* Infer a uniform block size from the per-point dof counts; fall back to 1
       as soon as two points disagree (unlike the global case, constraints are
       not subtracted for the local vector) */
    for(p = pStart; p < pEnd; ++p) {
      PetscInt dof;

      ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
      if ((blockSize < 0) && (dof > 0)) blockSize = dof;
      if ((dof > 0) && (dof != blockSize)) {
        blockSize = 1;
        break;
      }
    }
    ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr);
    ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr);
    ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr);
    ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
    ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
    ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
    ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex_Local);CHKERRQ(ierr);
  } else {
    ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
654 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping (owned by the DM; do not destroy)

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* Lazily build and cache the mapping on first use */
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* Build the mapping from the default section: for each point, map its
         local dofs to consecutive entries starting at the global offset.
         NOTE(review): this inner `ltog` array shadows the output parameter */
      PetscInt      *ltog;
      PetscInt       pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the mapping takes ownership of the ltog array */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
    } else {
      if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
715 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the block mapping (owned by the DM; do not destroy)

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* Lazily build and cache the blocked mapping on first use */
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* Block size 1: the blocked mapping is just the pointwise mapping;
         take an extra reference since both slots will be destroyed */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}
758 
#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.  bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* bs defaults to 1 in DMCreate(); a value < 1 means the implementation reset
     it and has not yet determined the block structure */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}
785 
786 #undef __FUNCT__
787 #define __FUNCT__ "DMCreateInterpolation"
788 /*@
789     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
790 
791     Collective on DM
792 
793     Input Parameter:
794 +   dm1 - the DM object
795 -   dm2 - the second, finer DM object
796 
797     Output Parameter:
798 +  mat - the interpolation
799 -  vec - the scaling (optional)
800 
801     Level: developer
802 
803     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
804         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
805 
806         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
807         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
808 
809 
810 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
811 
812 @*/
813 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
814 {
815   PetscErrorCode ierr;
816 
817   PetscFunctionBegin;
818   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
819   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
820   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
821   PetscFunctionReturn(0);
822 }
823 
824 #undef __FUNCT__
825 #define __FUNCT__ "DMCreateInjection"
826 /*@
827     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
828 
829     Collective on DM
830 
831     Input Parameter:
832 +   dm1 - the DM object
833 -   dm2 - the second, finer DM object
834 
835     Output Parameter:
836 .   ctx - the injection
837 
838     Level: developer
839 
840    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
841         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
842 
843 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
844 
845 @*/
846 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
847 {
848   PetscErrorCode ierr;
849 
850   PetscFunctionBegin;
851   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
852   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
853   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
854   PetscFunctionReturn(0);
855 }
856 
#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@C
    DMCreateColoring - Gets coloring for a DMDA or DMComposite

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
-   mtype - either MATAIJ or MATBAIJ

    Output Parameter:
.   coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Coloring is optional for DM implementations; fail cleanly if unsupported */
  if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
887 
888 #undef __FUNCT__
889 #define __FUNCT__ "DMCreateMatrix"
890 /*@C
891     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
892 
893     Collective on DM
894 
895     Input Parameter:
896 +   dm - the DM object
897 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
898             any type which inherits from one of these (such as MATAIJ)
899 
900     Output Parameter:
901 .   mat - the empty Jacobian
902 
903     Level: beginner
904 
905     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
906        do not need to do it yourself.
907 
908        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
909        the nonzero pattern call DMDASetMatPreallocateOnly()
910 
911        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
912        internally by PETSc.
913 
914        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
915        the indices for the global numbering for DMDAs which is complicated.
916 
917 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
918 
919 @*/
920 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
921 {
922   PetscErrorCode ierr;
923 
924   PetscFunctionBegin;
925   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
926 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
927   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
928 #endif
929   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
930   PetscValidPointer(mat,3);
931   if (dm->mattype) {
932     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
933   } else {
934     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
935   }
936   PetscFunctionReturn(0);
937 }
938 
939 #undef __FUNCT__
940 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
941 /*@
942   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
943     preallocated but the nonzero structure and zero values will not be set.
944 
945   Logically Collective on DMDA
946 
947   Input Parameter:
948 + dm - the DM
949 - only - PETSC_TRUE if only want preallocation
950 
951   Level: developer
952 .seealso DMCreateMatrix()
953 @*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Flag consulted when DMCreateMatrix() builds the matrix: skip setting the
     nonzero structure/zero entries, only preallocate (see header comment above) */
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
961 
962 #undef __FUNCT__
963 #define __FUNCT__ "DMGetWorkArray"
964 /*@C
965   DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
966 
967   Not Collective
968 
969   Input Parameters:
970 + dm - the DM object
. count - The minimum size
972 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
973 
974   Output Parameter:
975 . array - the work array
976 
977   Level: developer
978 
979 .seealso DMDestroy(), DMCreate()
980 @*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink link;
  size_t size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Reuse a link from the free list (workin) if one exists, otherwise allocate a new one */
  if (dm->workin) {
    link = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* Grow the link's buffer if the request exceeds its current capacity */
  if (size*count > link->bytes) {
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Push the link onto the checked-out list (workout); DMRestoreWorkArray() moves it back */
  link->next = dm->workout;
  dm->workout = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
1007 
1008 #undef __FUNCT__
1009 #define __FUNCT__ "DMRestoreWorkArray"
1010 /*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()
1012 
1013   Not Collective
1014 
1015   Input Parameters:
1016 + dm - the DM object
. count - The minimum size
1018 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
1019 
1020   Output Parameter:
1021 . array - the work array
1022 
1023   Level: developer
1024 
1025 .seealso DMDestroy(), DMCreate()
1026 @*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Find the checked-out link owning this array and move it back to the free list (workin) */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p = link->next;          /* unlink from the workout list */
      link->next = dm->workin;
      dm->workin = link;
      *(void**)mem = PETSC_NULL; /* invalidate caller's pointer to prevent further use */
      PetscFunctionReturn(0);
    }
  }
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1046 
1047 #undef __FUNCT__
1048 #define __FUNCT__ "DMSetNullSpaceConstructor"
1049 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1050 {
1051   PetscFunctionBegin;
1052   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1053   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1054   dm->nullspaceConstructors[field] = nullsp;
1055   PetscFunctionReturn(0);
1056 }
1057 
1058 #undef __FUNCT__
1059 #define __FUNCT__ "DMCreateFieldIS"
1060 /*@C
1061   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1062 
1063   Not collective
1064 
1065   Input Parameter:
1066 . dm - the DM object
1067 
1068   Output Parameters:
1069 + numFields  - The number of fields (or PETSC_NULL if not requested)
1070 . fieldNames - The name for each field (or PETSC_NULL if not requested)
1071 - fields     - The global indices for each field (or PETSC_NULL if not requested)
1072 
1073   Level: intermediate
1074 
1075   Notes:
1076   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1077   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1078   PetscFree().
1079 
1080 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1081 @*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All three outputs are optional; initialize the requested ones to empty */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = PETSC_NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = PETSC_NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt  nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    /* Pass 1: count each field's unconstrained dofs over points with positive global dof */
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof; /* constrained dofs are excluded from the IS */
        }
      }
    }
    /* Pass 2: fill the index arrays, reusing fieldSizes[] as per-field write cursors */
    for (f = 0; f < nF; ++f) {
      ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0;
    }
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          /* A point's global dofs are numbered contiguously from its global offset */
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) {*numFields = nF;}
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        /* Each IS takes ownership of its index array (PETSC_OWN_POINTER) */
        ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else {
    /* No default section: fall back to the implementation, if it provides one */
    if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1170 
1171 #undef __FUNCT__
1172 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1173 /*@C
1174   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1175 
1176   Not Collective
1177 
1178   Input Parameters:
1179 + dm   - the DM object
1180 - name - the name of the field decomposition
1181 
1182   Output Parameter:
1183 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1184 
1185   Level: advanced
1186 
1187 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1188 @*/
1189 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1190 {
1191   PetscErrorCode ierr;
1192 
1193   PetscFunctionBegin;
1194   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1195   PetscValidCharPointer(name,2);
1196   PetscValidPointer(ddm,3);
1197   *ddm = PETSC_NULL;
1198   if (dm->ops->createfielddecompositiondm) {
1199     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1200   }
1201   PetscFunctionReturn(0);
1202 }
1203 
1204 #undef __FUNCT__
1205 #define __FUNCT__ "DMCreateFieldDecomposition"
1206 /*@C
1207   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1208                           corresponding to different fields: each IS contains the global indices of the dofs of the
1209                           corresponding field. The optional list of DMs define the DM for each subproblem.
1210                           Generalizes DMCreateFieldIS().
1211 
1212   Not collective
1213 
1214   Input Parameter:
1215 . dm - the DM object
1216 
1217   Output Parameters:
1218 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1219 . namelist  - The name for each field (or PETSC_NULL if not requested)
1220 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1221 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1222 
1223   Level: intermediate
1224 
1225   Notes:
1226   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1227   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1228   and all of the arrays should be freed with PetscFree().
1229 
1230 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1231 @*/
1232 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1233 {
1234   PetscErrorCode ierr;
1235 
1236   PetscFunctionBegin;
1237   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1238   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1239   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1240   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1241   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1242   if (!dm->ops->createfielddecomposition) {
1243     PetscSection section;
1244     PetscInt     numFields, f;
1245 
1246     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1247     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1248     if (section && numFields && dm->ops->createsubdm) {
1249       *len = numFields;
1250       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1251       for (f = 0; f < numFields; ++f) {
1252         const char *fieldName;
1253 
1254         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1255         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1256         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1257       }
1258     } else {
1259       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1260       /* By default there are no DMs associated with subproblems. */
1261       if (dmlist) *dmlist = PETSC_NULL;
1262     }
1263   }
1264   else {
1265     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr);
1266   }
1267   PetscFunctionReturn(0);
1268 }
1269 
1270 #undef __FUNCT__
1271 #define __FUNCT__ "DMCreateSubDM"
1272 /*@C
1273   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1274                   The fields are defined by DMCreateFieldIS().
1275 
1276   Not collective
1277 
1278   Input Parameters:
1279 + dm - the DM object
1280 . numFields - number of fields in this subproblem
- fields    - The field numbers of the selected fields
1282 
1283   Output Parameters:
+ is - The global indices for the subproblem
- subdm - The DM for the subproblem
1286 
1287   Level: intermediate
1288 
1289 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1290 @*/
1291 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1292 {
1293   PetscErrorCode ierr;
1294 
1295   PetscFunctionBegin;
1296   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1297   PetscValidPointer(fields,3);
1298   if (is) {PetscValidPointer(is,4);}
1299   if (subdm) {PetscValidPointer(subdm,5);}
1300   if (dm->ops->createsubdm) {
1301     ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr);
1302   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1303   PetscFunctionReturn(0);
1304 }
1305 
1306 #undef __FUNCT__
1307 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1308 /*@C
1309   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1310 
1311   Not Collective
1312 
1313   Input Parameters:
1314 + dm   - the DM object
1315 - name - the name of the subdomain decomposition
1316 
1317   Output Parameter:
1318 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1319 
1320   Level: advanced
1321 
1322 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1323 @*/
1324 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1325 {
1326   PetscErrorCode ierr;
1327 
1328   PetscFunctionBegin;
1329   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1330   PetscValidCharPointer(name,2);
1331   PetscValidPointer(ddm,3);
1332   *ddm = PETSC_NULL;
1333   if (dm->ops->createdomaindecompositiondm) {
1334     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1335   }
1336   PetscFunctionReturn(0);
1337 }
1338 
1339 #undef __FUNCT__
1340 #define __FUNCT__ "DMCreateDomainDecomposition"
1341 /*@C
1342   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1343                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1344                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1345                           define a nonoverlapping covering, while outer subdomains can overlap.
1346                           The optional list of DMs define the DM for each subproblem.
1347 
1348   Not collective
1349 
1350   Input Parameter:
1351 . dm - the DM object
1352 
1353   Output Parameters:
1354 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1355 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1356 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1357 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1358 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1359 
1360   Level: intermediate
1361 
1362   Notes:
1363   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1364   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1365   and all of the arrays should be freed with PetscFree().
1366 
1367 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1368 @*/
1369 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1370 {
1371   PetscErrorCode ierr;
1372 
1373   PetscFunctionBegin;
1374   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1375   if (len)           {PetscValidPointer(len,2);            *len         = PETSC_NULL;}
1376   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1377   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1378   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1379   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1380   if (dm->ops->createdomaindecomposition) {
1381     ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr);
1382   }
1383   PetscFunctionReturn(0);
1384 }
1385 
1386 #undef __FUNCT__
1387 #define __FUNCT__ "DMRefine"
1388 /*@
1389   DMRefine - Refines a DM object
1390 
1391   Collective on DM
1392 
1393   Input Parameter:
1394 + dm   - the DM object
1395 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1396 
1397   Output Parameter:
1398 . dmf - the refined DM, or PETSC_NULL
1399 
1400   Note: If no refinement was done, the return value is PETSC_NULL
1401 
1402   Level: developer
1403 
1404 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1405 @*/
1406 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1407 {
1408   PetscErrorCode ierr;
1409   DMRefineHookLink link;
1410 
1411   PetscFunctionBegin;
1412   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1413   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1414   if (*dmf) {
1415     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1416     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1417     (*dmf)->ctx       = dm->ctx;
1418     (*dmf)->leveldown = dm->leveldown;
1419     (*dmf)->levelup   = dm->levelup + 1;
1420     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1421     for (link=dm->refinehook; link; link=link->next) {
1422       if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
1423     }
1424   }
1425   PetscFunctionReturn(0);
1426 }
1427 
1428 #undef __FUNCT__
1429 #define __FUNCT__ "DMRefineHookAdd"
1430 /*@
1431    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1432 
1433    Logically Collective
1434 
1435    Input Arguments:
1436 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1437 .  refinehook - function to run when setting up a coarser level
1438 .  interphook - function to run to update data on finer levels (once per SNESSolve())
-  ctx - [optional] user-defined context providing data for the hooks (may be PETSC_NULL)
1440 
1441    Calling sequence of refinehook:
1442 $    refinehook(DM coarse,DM fine,void *ctx);
1443 
1444 +  coarse - coarse level DM
1445 .  fine - fine level DM to interpolate problem to
1446 -  ctx - optional user-defined function context
1447 
1448    Calling sequence for interphook:
1449 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1450 
1451 +  coarse - coarse level DM
1452 .  interp - matrix interpolating a coarse-level solution to the finer grid
1453 .  fine - fine level DM to update
1454 -  ctx - optional user-defined function context
1455 
1456    Level: advanced
1457 
1458    Notes:
1459    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1460 
1461    If this function is called multiple times, the hooks will be run in the order they are added.
1462 
1463 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1464 @*/
1465 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1466 {
1467   PetscErrorCode ierr;
1468   DMRefineHookLink link,*p;
1469 
1470   PetscFunctionBegin;
1471   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1472   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1473   ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1474   link->refinehook = refinehook;
1475   link->interphook = interphook;
1476   link->ctx = ctx;
1477   link->next = PETSC_NULL;
1478   *p = link;
1479   PetscFunctionReturn(0);
1480 }
1481 
1482 #undef __FUNCT__
1483 #define __FUNCT__ "DMInterpolate"
1484 /*@
1485    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1486 
1487    Collective if any hooks are
1488 
1489    Input Arguments:
1490 +  coarse - coarser DM to use as a base
.  interp - interpolation matrix, apply using MatInterpolate()
1492 -  fine - finer DM to update
1493 
1494    Level: developer
1495 
1496 .seealso: DMRefineHookAdd(), MatInterpolate()
1497 @*/
1498 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1499 {
1500   PetscErrorCode ierr;
1501   DMRefineHookLink link;
1502 
1503   PetscFunctionBegin;
1504   for (link=fine->refinehook; link; link=link->next) {
1505     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1506   }
1507   PetscFunctionReturn(0);
1508 }
1509 
1510 #undef __FUNCT__
1511 #define __FUNCT__ "DMGetRefineLevel"
1512 /*@
    DMGetRefineLevel - Gets the number of refinements that have generated this DM.
1514 
1515     Not Collective
1516 
1517     Input Parameter:
1518 .   dm - the DM object
1519 
1520     Output Parameter:
1521 .   level - number of refinements
1522 
1523     Level: developer
1524 
1525 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1526 
1527 @*/
1528 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1529 {
1530   PetscFunctionBegin;
1531   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1532   *level = dm->levelup;
1533   PetscFunctionReturn(0);
1534 }
1535 
1536 #undef __FUNCT__
1537 #define __FUNCT__ "DMGlobalToLocalBegin"
1538 /*@
1539     DMGlobalToLocalBegin - Begins updating local vectors from global vector
1540 
1541     Neighbor-wise Collective on DM
1542 
1543     Input Parameters:
1544 +   dm - the DM object
1545 .   g - the global vector
1546 .   mode - INSERT_VALUES or ADD_VALUES
1547 -   l - the local vector
1548 
1549 
1550     Level: beginner
1551 
1552 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1553 
1554 @*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The star-forest path only supports insertion modes */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    /* Start broadcasting global values into the local vector; completed by DMGlobalToLocalEnd() */
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* No default SF: delegate to the implementation, mapping *_ALL_VALUES to the plain modes */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1577 
1578 #undef __FUNCT__
1579 #define __FUNCT__ "DMGlobalToLocalEnd"
1580 /*@
1581     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1582 
1583     Neighbor-wise Collective on DM
1584 
1585     Input Parameters:
1586 +   dm - the DM object
1587 .   g - the global vector
1588 .   mode - INSERT_VALUES or ADD_VALUES
1589 -   l - the local vector
1590 
1591 
1592     Level: beginner
1593 
1594 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1595 
1596 @*/
1597 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1598 {
1599   PetscSF        sf;
1600   PetscErrorCode ierr;
1601   PetscScalar    *lArray, *gArray;
1602 
1603   PetscFunctionBegin;
1604   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1605   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1606   if (sf) {
1607   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1608 
1609     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1610     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1611     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1612     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1613     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1614   } else {
1615     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1616   }
1617   PetscFunctionReturn(0);
1618 }
1619 
1620 #undef __FUNCT__
1621 #define __FUNCT__ "DMLocalToGlobalBegin"
1622 /*@
1623     DMLocalToGlobalBegin - updates global vectors from local vectors
1624 
1625     Neighbor-wise Collective on DM
1626 
1627     Input Parameters:
1628 +   dm - the DM object
1629 .   l - the local vector
1630 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1631            base point.
-   g - the global vector
1633 
1634     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1635            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1636            global array to the final global array with VecAXPY().
1637 
1638     Level: beginner
1639 
1640 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1641 
1642 @*/
1643 PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1644 {
1645   PetscSF        sf;
1646   PetscErrorCode ierr;
1647 
1648   PetscFunctionBegin;
1649   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1650   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1651   if (sf) {
1652     MPI_Op       op;
1653     PetscScalar *lArray, *gArray;
1654 
1655     switch(mode) {
1656     case INSERT_VALUES:
1657     case INSERT_ALL_VALUES:
1658 #if defined(PETSC_HAVE_MPI_REPLACE)
1659       op = MPI_REPLACE; break;
1660 #else
1661       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1662 #endif
1663     case ADD_VALUES:
1664     case ADD_ALL_VALUES:
1665       op = MPI_SUM; break;
1666   default:
1667     SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1668     }
1669     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1670     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1671     ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1672     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1673     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1674   } else {
1675     ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1676   }
1677   PetscFunctionReturn(0);
1678 }
1679 
1680 #undef __FUNCT__
1681 #define __FUNCT__ "DMLocalToGlobalEnd"
1682 /*@
1683     DMLocalToGlobalEnd - updates global vectors from local vectors
1684 
1685     Neighbor-wise Collective on DM
1686 
1687     Input Parameters:
1688 +   dm - the DM object
1689 .   l - the local vector
1690 .   mode - INSERT_VALUES or ADD_VALUES
1691 -   g - the global vector
1692 
1693 
1694     Level: beginner
1695 
1696 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()
1697 
1698 @*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* PetscSection/SF path: finish the reduction started by DMLocalToGlobalBegin().
       Map the InsertMode onto the MPI_Op used by the star-forest reduce. */
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      /* MPI_REPLACE is an MPI-2 feature; without it INSERT cannot be expressed as a reduce */
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    /* The same array pointers must be supplied here as were given to PetscSFReduceBegin() */
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Implementation-provided path; the *_ALL_VALUES modes are collapsed to their plain forms */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1735 
1736 #undef __FUNCT__
1737 #define __FUNCT__ "DMCoarsen"
1738 /*@
1739     DMCoarsen - Coarsens a DM object
1740 
1741     Collective on DM
1742 
1743     Input Parameter:
1744 +   dm - the DM object
1745 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1746 
1747     Output Parameter:
1748 .   dmc - the coarsened DM
1749 
1750     Level: developer
1751 
1752 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1753 
1754 @*/
1755 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1756 {
1757   PetscErrorCode ierr;
1758   DMCoarsenHookLink link;
1759 
1760   PetscFunctionBegin;
1761   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1762   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1763   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1764   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1765   (*dmc)->ctx       = dm->ctx;
1766   (*dmc)->levelup   = dm->levelup;
1767   (*dmc)->leveldown = dm->leveldown + 1;
1768   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1769   for (link=dm->coarsenhook; link; link=link->next) {
1770     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1771   }
1772   PetscFunctionReturn(0);
1773 }
1774 
1775 #undef __FUNCT__
1776 #define __FUNCT__ "DMCoarsenHookAdd"
1777 /*@
1778    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1779 
1780    Logically Collective
1781 
1782    Input Arguments:
+  fine - DM on which to run a hook when restricting to a coarser level
1784 .  coarsenhook - function to run when setting up a coarser level
1785 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1786 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1787 
1788    Calling sequence of coarsenhook:
1789 $    coarsenhook(DM fine,DM coarse,void *ctx);
1790 
1791 +  fine - fine level DM
1792 .  coarse - coarse level DM to restrict problem to
1793 -  ctx - optional user-defined function context
1794 
1795    Calling sequence for restricthook:
1796 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1797 
1798 +  fine - fine level DM
1799 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1800 .  rscale - scaling vector for restriction
1801 .  inject - matrix restricting by injection
1802 .  coarse - coarse level DM to update
1803 -  ctx - optional user-defined function context
1804 
1805    Level: advanced
1806 
1807    Notes:
1808    This function is only needed if auxiliary data needs to be set up on coarse grids.
1809 
1810    If this function is called multiple times, the hooks will be run in the order they are added.
1811 
1812    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1813    extract the finest level information from its context (instead of from the SNES).
1814 
1815 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1816 @*/
1817 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1818 {
1819   PetscErrorCode ierr;
1820   DMCoarsenHookLink link,*p;
1821 
1822   PetscFunctionBegin;
1823   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1824   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1825   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1826   link->coarsenhook = coarsenhook;
1827   link->restricthook = restricthook;
1828   link->ctx = ctx;
1829   link->next = PETSC_NULL;
1830   *p = link;
1831   PetscFunctionReturn(0);
1832 }
1833 
1834 #undef __FUNCT__
1835 #define __FUNCT__ "DMRestrict"
1836 /*@
1837    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1838 
1839    Collective if any hooks are
1840 
1841    Input Arguments:
+  fine - finer DM to use as a base
.  restrct - restriction matrix, apply using MatRestrict()
.  rscale - scaling vector for restriction
.  inject - injection matrix, also use MatRestrict()
-  coarse - coarser DM to update
1846 
1847    Level: developer
1848 
1849 .seealso: DMCoarsenHookAdd(), MatRestrict()
1850 @*/
1851 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1852 {
1853   PetscErrorCode ierr;
1854   DMCoarsenHookLink link;
1855 
1856   PetscFunctionBegin;
1857   for (link=fine->coarsenhook; link; link=link->next) {
1858     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1859   }
1860   PetscFunctionReturn(0);
1861 }
1862 
1863 #undef __FUNCT__
1864 #define __FUNCT__ "DMBlockRestrictHookAdd"
1865 /*@
   DMBlockRestrictHookAdd - adds a callback to be run when restricting problem data to a block DM
1867 
1868    Logically Collective
1869 
1870    Input Arguments:
1871 +  global - global DM
1872 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
1873 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1874 
1875    Calling sequence for restricthook:
1876 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1877 
1878 +  global - global DM
1879 .  out    - scatter to the outer (with ghost and overlap points) block vector
1880 .  in     - scatter to block vector values only owned locally
1881 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
1882 -  ctx - optional user-defined function context
1883 
1884    Level: advanced
1885 
1886    Notes:
1887    This function is only needed if auxiliary data needs to be set up on coarse grids.
1888 
1889    If this function is called multiple times, the hooks will be run in the order they are added.
1890 
1891    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1892    extract the finest level information from its context (instead of from the SNES).
1893 
1894 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1895 @*/
1896 PetscErrorCode DMBlockRestrictHookAdd(DM global,PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
1897 {
1898   PetscErrorCode ierr;
1899   DMBlockRestrictHookLink link,*p;
1900 
1901   PetscFunctionBegin;
1902   PetscValidHeaderSpecific(global,DM_CLASSID,1);
1903   for (p=&global->blockrestricthook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1904   ierr = PetscMalloc(sizeof(struct _DMBlockRestrictHookLink),&link);CHKERRQ(ierr);
1905   link->restricthook = restricthook;
1906   link->ctx = ctx;
1907   link->next = PETSC_NULL;
1908   *p = link;
1909   PetscFunctionReturn(0);
1910 }
1911 
1912 #undef __FUNCT__
1913 #define __FUNCT__ "DMBlockRestrict"
1914 /*@
1915    DMBlockRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMBlockRestrictHookAdd()
1916 
1917    Collective if any hooks are
1918 
1919    Input Arguments:
+  global - global DM to use as a base
.  in     - scatter to block vector values only owned locally
.  out    - scatter to the outer (with ghost and overlap points) block vector
-  block  - block DM to update
1924 
1925    Level: developer
1926 
1927 .seealso: DMCoarsenHookAdd(), MatRestrict()
1928 @*/
1929 PetscErrorCode DMBlockRestrict(DM global,VecScatter in,VecScatter out,DM block)
1930 {
1931   PetscErrorCode ierr;
1932   DMBlockRestrictHookLink link;
1933 
1934   PetscFunctionBegin;
1935   for (link=global->blockrestricthook; link; link=link->next) {
1936     if (link->restricthook) {ierr = (*link->restricthook)(global,in,out,block,link->ctx);CHKERRQ(ierr);}
1937   }
1938   PetscFunctionReturn(0);
1939 }
1940 
1941 #undef __FUNCT__
1942 #define __FUNCT__ "DMGetCoarsenLevel"
1943 /*@
    DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.
1945 
1946     Not Collective
1947 
1948     Input Parameter:
1949 .   dm - the DM object
1950 
1951     Output Parameter:
1952 .   level - number of coarsenings
1953 
1954     Level: developer
1955 
1956 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1957 
1958 @*/
1959 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
1960 {
1961   PetscFunctionBegin;
1962   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1963   *level = dm->leveldown;
1964   PetscFunctionReturn(0);
1965 }
1966 
1967 
1968 
1969 #undef __FUNCT__
1970 #define __FUNCT__ "DMRefineHierarchy"
1971 /*@C
1972     DMRefineHierarchy - Refines a DM object, all levels at once
1973 
1974     Collective on DM
1975 
1976     Input Parameter:
1977 +   dm - the DM object
1978 -   nlevels - the number of levels of refinement
1979 
1980     Output Parameter:
1981 .   dmf - the refined DM hierarchy
1982 
1983     Level: developer
1984 
1985 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1986 
1987 @*/
1988 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
1989 {
1990   PetscErrorCode ierr;
1991 
1992   PetscFunctionBegin;
1993   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1994   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1995   if (nlevels == 0) PetscFunctionReturn(0);
1996   if (dm->ops->refinehierarchy) {
1997     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
1998   } else if (dm->ops->refine) {
1999     PetscInt i;
2000 
2001     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2002     for (i=1; i<nlevels; i++) {
2003       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2004     }
2005   } else {
2006     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2007   }
2008   PetscFunctionReturn(0);
2009 }
2010 
2011 #undef __FUNCT__
2012 #define __FUNCT__ "DMCoarsenHierarchy"
2013 /*@C
2014     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2015 
2016     Collective on DM
2017 
2018     Input Parameter:
2019 +   dm - the DM object
2020 -   nlevels - the number of levels of coarsening
2021 
2022     Output Parameter:
2023 .   dmc - the coarsened DM hierarchy
2024 
2025     Level: developer
2026 
2027 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2028 
2029 @*/
2030 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2031 {
2032   PetscErrorCode ierr;
2033 
2034   PetscFunctionBegin;
2035   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2036   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2037   if (nlevels == 0) PetscFunctionReturn(0);
2038   PetscValidPointer(dmc,3);
2039   if (dm->ops->coarsenhierarchy) {
2040     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2041   } else if (dm->ops->coarsen) {
2042     PetscInt i;
2043 
2044     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2045     for (i=1; i<nlevels; i++) {
2046       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2047     }
2048   } else {
2049     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2050   }
2051   PetscFunctionReturn(0);
2052 }
2053 
2054 #undef __FUNCT__
2055 #define __FUNCT__ "DMCreateAggregates"
2056 /*@
2057    DMCreateAggregates - Gets the aggregates that map between
2058    grids associated with two DMs.
2059 
2060    Collective on DM
2061 
2062    Input Parameters:
2063 +  dmc - the coarse grid DM
2064 -  dmf - the fine grid DM
2065 
2066    Output Parameters:
2067 .  rest - the restriction matrix (transpose of the projection matrix)
2068 
2069    Level: intermediate
2070 
2071 .keywords: interpolation, restriction, multigrid
2072 
2073 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2074 @*/
2075 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2076 {
2077   PetscErrorCode ierr;
2078 
2079   PetscFunctionBegin;
2080   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2081   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2082   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2083   PetscFunctionReturn(0);
2084 }
2085 
2086 #undef __FUNCT__
2087 #define __FUNCT__ "DMSetApplicationContextDestroy"
2088 /*@C
2089     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2090 
2091     Not Collective
2092 
2093     Input Parameters:
2094 +   dm - the DM object
2095 -   destroy - the destroy function
2096 
2097     Level: intermediate
2098 
2099 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2100 
2101 @*/
2102 PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
2103 {
2104   PetscFunctionBegin;
2105   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2106   dm->ctxdestroy = destroy;
2107   PetscFunctionReturn(0);
2108 }
2109 
2110 #undef __FUNCT__
2111 #define __FUNCT__ "DMSetApplicationContext"
2112 /*@
2113     DMSetApplicationContext - Set a user context into a DM object
2114 
2115     Not Collective
2116 
2117     Input Parameters:
2118 +   dm - the DM object
2119 -   ctx - the user context
2120 
2121     Level: intermediate
2122 
2123 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2124 
2125 @*/
2126 PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
2127 {
2128   PetscFunctionBegin;
2129   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2130   dm->ctx = ctx;
2131   PetscFunctionReturn(0);
2132 }
2133 
2134 #undef __FUNCT__
2135 #define __FUNCT__ "DMGetApplicationContext"
2136 /*@
2137     DMGetApplicationContext - Gets a user context from a DM object
2138 
2139     Not Collective
2140 
2141     Input Parameter:
2142 .   dm - the DM object
2143 
2144     Output Parameter:
2145 .   ctx - the user context
2146 
2147     Level: intermediate
2148 
2149 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2150 
2151 @*/
2152 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2153 {
2154   PetscFunctionBegin;
2155   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2156   *(void**)ctx = dm->ctx;
2157   PetscFunctionReturn(0);
2158 }
2159 
2160 #undef __FUNCT__
2161 #define __FUNCT__ "DMSetVariableBounds"
2162 /*@C
    DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2164 
2165     Logically Collective on DM
2166 
2167     Input Parameter:
2168 +   dm - the DM object
2169 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2170 
2171     Level: intermediate
2172 
2173 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2174          DMSetJacobian()
2175 
2176 @*/
2177 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2178 {
2179   PetscFunctionBegin;
2180   dm->ops->computevariablebounds = f;
2181   PetscFunctionReturn(0);
2182 }
2183 
2184 #undef __FUNCT__
2185 #define __FUNCT__ "DMHasVariableBounds"
2186 /*@
2187     DMHasVariableBounds - does the DM object have a variable bounds function?
2188 
2189     Not Collective
2190 
2191     Input Parameter:
.   dm - the DM object
2193 
2194     Output Parameter:
2195 .   flg - PETSC_TRUE if the variable bounds function exists
2196 
2197     Level: developer
2198 
2199 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2200 
2201 @*/
2202 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2203 {
2204   PetscFunctionBegin;
2205   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2206   PetscFunctionReturn(0);
2207 }
2208 
2209 #undef __FUNCT__
2210 #define __FUNCT__ "DMComputeVariableBounds"
2211 /*@C
2212     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2213 
2214     Logically Collective on DM
2215 
2216     Input Parameters:
+   dm - the DM object
2218 -   x  - current solution at which the bounds are computed
2219 
2220     Output parameters:
2221 +   xl - lower bound
2222 -   xu - upper bound
2223 
2224     Level: intermediate
2225 
2226 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2227 
2228 @*/
2229 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2230 {
2231   PetscErrorCode ierr;
2232   PetscFunctionBegin;
2233   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2234   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2235   if (dm->ops->computevariablebounds) {
2236     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr);
2237   }
2238   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2239   PetscFunctionReturn(0);
2240 }
2241 
2242 #undef __FUNCT__
2243 #define __FUNCT__ "DMHasColoring"
2244 /*@
2245     DMHasColoring - does the DM object have a method of providing a coloring?
2246 
2247     Not Collective
2248 
2249     Input Parameter:
2250 .   dm - the DM object
2251 
2252     Output Parameter:
2253 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2254 
2255     Level: developer
2256 
2257 .seealso DMHasFunction(), DMCreateColoring()
2258 
2259 @*/
2260 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2261 {
2262   PetscFunctionBegin;
2263   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2264   PetscFunctionReturn(0);
2265 }
2266 
2267 #undef  __FUNCT__
2268 #define __FUNCT__ "DMSetVec"
2269 /*@C
2270     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2271 
2272     Collective on DM
2273 
2274     Input Parameter:
2275 +   dm - the DM object
2276 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2277 
2278     Level: developer
2279 
2280 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2281 
2282 @*/
2283 PetscErrorCode  DMSetVec(DM dm,Vec x)
2284 {
2285   PetscErrorCode ierr;
2286   PetscFunctionBegin;
2287   if (x) {
2288     if (!dm->x) {
2289       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2290     }
2291     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2292   }
2293   else if (dm->x) {
2294     ierr = VecDestroy(&dm->x);  CHKERRQ(ierr);
2295   }
2296   PetscFunctionReturn(0);
2297 }
2298 
/* Registry of available DM implementations, populated by DMRegister()/DMRegisterAll() */
PetscFList DMList                       = PETSC_NULL;
/* Guards the one-time registration of the built-in DM types */
PetscBool  DMRegisterAllCalled          = PETSC_FALSE;
2301 
2302 #undef __FUNCT__
2303 #define __FUNCT__ "DMSetType"
2304 /*@C
2305   DMSetType - Builds a DM, for a particular DM implementation.
2306 
2307   Collective on DM
2308 
2309   Input Parameters:
2310 + dm     - The DM object
2311 - method - The name of the DM type
2312 
2313   Options Database Key:
2314 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2315 
2316   Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA or DMCOMPOSITE).
2318 
2319   Level: intermediate
2320 
2321 .keywords: DM, set, type
2322 .seealso: DMGetType(), DMCreate()
2323 @*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* No-op when the DM already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Make sure the built-in implementations are registered, then look up the constructor */
  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
  ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down any previous implementation before the new constructor fills in dm->ops */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = PETSC_NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2347 
2348 #undef __FUNCT__
2349 #define __FUNCT__ "DMGetType"
2350 /*@C
2351   DMGetType - Gets the DM type name (as a string) from the DM.
2352 
2353   Not Collective
2354 
2355   Input Parameter:
2356 . dm  - The DM
2357 
2358   Output Parameter:
2359 . type - The DM type name
2360 
2361   Level: intermediate
2362 
2363 .keywords: DM, get, type, name
2364 .seealso: DMSetType(), DMCreate()
2365 @*/
2366 PetscErrorCode  DMGetType(DM dm, DMType *type)
2367 {
2368   PetscErrorCode ierr;
2369 
2370   PetscFunctionBegin;
2371   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2372   PetscValidCharPointer(type,2);
2373   if (!DMRegisterAllCalled) {
2374     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2375   }
2376   *type = ((PetscObject)dm)->type_name;
2377   PetscFunctionReturn(0);
2378 }
2379 
2380 #undef __FUNCT__
2381 #define __FUNCT__ "DMConvert"
2382 /*@C
2383   DMConvert - Converts a DM to another DM, either of the same or different type.
2384 
2385   Collective on DM
2386 
2387   Input Parameters:
2388 + dm - the DM
2389 - newtype - new DM type (use "same" for the same type)
2390 
2391   Output Parameter:
2392 . M - pointer to new DM
2393 
2394   Notes:
2395   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2396   the MPI communicator of the generated DM is always the same as the communicator
2397   of the input DM.
2398 
2399   Level: intermediate
2400 
2401 .seealso: DMCreate()
2402 @*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class.
       Converters are composed functions named "DMConvert_<from>_<to>_C". */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class.
       A throwaway DM of the target type is created just to query its composed functions. */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* NOTE(review): with the #if 0 block disabled, B from step 2 is not destroyed on this
       error path -- looks like a leak; confirm before re-enabling steps 3/4 */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2475 
2476 /*--------------------------------------------------------------------------------------------------------------------*/
2477 
2478 #undef __FUNCT__
2479 #define __FUNCT__ "DMRegister"
2480 /*@C
2481   DMRegister - See DMRegisterDynamic()
2482 
2483   Level: advanced
2484 @*/
2485 PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
2486 {
2487   char fullname[PETSC_MAX_PATH_LEN];
2488   PetscErrorCode ierr;
2489 
2490   PetscFunctionBegin;
2491   ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
2492   ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
2493   ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
2494   ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
2495   PetscFunctionReturn(0);
2496 }
2497 
2498 
2499 /*--------------------------------------------------------------------------------------------------------------------*/
2500 #undef __FUNCT__
2501 #define __FUNCT__ "DMRegisterDestroy"
2502 /*@C
2503    DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2504 
2505    Not Collective
2506 
2507    Level: advanced
2508 
2509 .keywords: DM, register, destroy
2510 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
2511 @*/
2512 PetscErrorCode  DMRegisterDestroy(void)
2513 {
2514   PetscErrorCode ierr;
2515 
2516   PetscFunctionBegin;
2517   ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
2518   DMRegisterAllCalled = PETSC_FALSE;
2519   PetscFunctionReturn(0);
2520 }
2521 
2522 #undef __FUNCT__
2523 #define __FUNCT__ "DMLoad"
2524 /*@C
2525   DMLoad - Loads a DM that has been stored in binary  with DMView().
2526 
2527   Collective on PetscViewer
2528 
2529   Input Parameters:
2530 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2531            some related function before a call to DMLoad().
2532 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2533            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2534 
2535    Level: intermediate
2536 
2537   Notes:
2538    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2539 
2540   Notes for advanced users:
2541   Most users should not need to know the details of the binary storage
2542   format, since DMLoad() and DMView() completely hide these details.
2543   But for anyone who's interested, the standard binary matrix storage
2544   format is
2545 .vb
2546      has not yet been determined
2547 .ve
2548 
2549 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2550 @*/
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* Only the binary viewer format is handled here */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* File format: a DM_FILE_CLASSID marker, then a 256-char type name, then type-specific data */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  /* The type stored in the file overrides any type previously set on newdm */
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2573 
2574 /******************************** FEM Support **********************************/
2575 
2576 #undef __FUNCT__
2577 #define __FUNCT__ "DMPrintCellVector"
2578 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) {
2579   PetscInt       f;
2580   PetscErrorCode ierr;
2581 
2582   PetscFunctionBegin;
2583   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2584   for (f = 0; f < len; ++f) {
2585     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2586   }
2587   PetscFunctionReturn(0);
2588 }
2589 
2590 #undef __FUNCT__
2591 #define __FUNCT__ "DMPrintCellMatrix"
2592 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) {
2593   PetscInt       f, g;
2594   PetscErrorCode ierr;
2595 
2596   PetscFunctionBegin;
2597   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2598   for (f = 0; f < rows; ++f) {
2599     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2600     for (g = 0; g < cols; ++g) {
2601       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2602     }
2603     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2604   }
2605   PetscFunctionReturn(0);
2606 }
2607 
2608 #undef __FUNCT__
2609 #define __FUNCT__ "DMGetDefaultSection"
2610 /*@
2611   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2612 
2613   Input Parameter:
2614 . dm - The DM
2615 
2616   Output Parameter:
2617 . section - The PetscSection
2618 
2619   Level: intermediate
2620 
2621   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2622 
2623 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2624 @*/
2625 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
2626   PetscFunctionBegin;
2627   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2628   PetscValidPointer(section, 2);
2629   *section = dm->defaultSection;
2630   PetscFunctionReturn(0);
2631 }
2632 
2633 #undef __FUNCT__
2634 #define __FUNCT__ "DMSetDefaultSection"
2635 /*@
2636   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2637 
2638   Input Parameters:
2639 + dm - The DM
2640 - section - The PetscSection
2641 
2642   Level: intermediate
2643 
2644   Note: Any existing Section will be destroyed
2645 
2646 .seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
2647 @*/
2648 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
2649   PetscInt       numFields;
2650   PetscInt       f;
2651   PetscErrorCode ierr;
2652 
2653   PetscFunctionBegin;
2654   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2655   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2656   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2657   dm->defaultSection = section;
2658   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2659   if (numFields) {
2660     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2661     for (f = 0; f < numFields; ++f) {
2662       const char *name;
2663 
2664       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2665       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2666     }
2667   }
2668   PetscFunctionReturn(0);
2669 }
2670 
2671 #undef __FUNCT__
2672 #define __FUNCT__ "DMGetDefaultGlobalSection"
2673 /*@
2674   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2675 
2676   Collective on DM
2677 
2678   Input Parameter:
2679 . dm - The DM
2680 
2681   Output Parameter:
2682 . section - The PetscSection
2683 
2684   Level: intermediate
2685 
2686   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2687 
2688 .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2689 @*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Lazily derive the global section from the local section and the point SF */
  if (!dm->defaultGlobalSection) {
    if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Side effect: also fills in dm->map from the new global section.
       NOTE(review): dm->map is overwritten without a destroy here — verify no
       leak if a previous layout existed */
    ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  /* Borrowed reference: the caller must NOT destroy the returned section */
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2704 
2705 #undef __FUNCT__
2706 #define __FUNCT__ "DMSetDefaultGlobalSection"
2707 /*@
2708   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2709 
2710   Input Parameters:
2711 + dm - The DM
2712 - section - The PetscSection
2713 
2714   Level: intermediate
2715 
2716   Note: Any existing Section will be destroyed
2717 
2718 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2719 @*/
2720 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
2721   PetscErrorCode ierr;
2722 
2723   PetscFunctionBegin;
2724   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2725   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2726   dm->defaultGlobalSection = section;
2727   PetscFunctionReturn(0);
2728 }
2729 
2730 #undef __FUNCT__
2731 #define __FUNCT__ "DMGetDefaultSF"
2732 /*@
2733   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2734   it is created from the default PetscSection layouts in the DM.
2735 
2736   Input Parameter:
2737 . dm - The DM
2738 
2739   Output Parameter:
2740 . sf - The PetscSF
2741 
2742   Level: intermediate
2743 
2744   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2745 
2746 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2747 @*/
2748 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) {
2749   PetscInt       nroots;
2750   PetscErrorCode ierr;
2751 
2752   PetscFunctionBegin;
2753   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2754   PetscValidPointer(sf, 2);
2755   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
2756   if (nroots < 0) {
2757     PetscSection section, gSection;
2758 
2759     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2760     if (section) {
2761       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2762       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2763     } else {
2764       *sf = PETSC_NULL;
2765       PetscFunctionReturn(0);
2766     }
2767   }
2768   *sf = dm->defaultSF;
2769   PetscFunctionReturn(0);
2770 }
2771 
2772 #undef __FUNCT__
2773 #define __FUNCT__ "DMSetDefaultSF"
2774 /*@
2775   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
2776 
2777   Input Parameters:
2778 + dm - The DM
2779 - sf - The PetscSF
2780 
2781   Level: intermediate
2782 
2783   Note: Any previous SF is destroyed
2784 
2785 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
2786 @*/
2787 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) {
2788   PetscErrorCode ierr;
2789 
2790   PetscFunctionBegin;
2791   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2792   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
2793   ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
2794   dm->defaultSF = sf;
2795   PetscFunctionReturn(0);
2796 }
2797 
2798 #undef __FUNCT__
2799 #define __FUNCT__ "DMCreateDefaultSF"
2800 /*@C
2801   DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
2802   describing the data layout.
2803 
2804   Input Parameters:
2805 + dm - The DM
2806 . localSection - PetscSection describing the local data layout
2807 - globalSection - PetscSection describing the global data layout
2808 
2809   Level: intermediate
2810 
2811 .seealso: DMGetDefaultSF(), DMSetDefaultSF()
2812 @*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;   /* parallel layout of owned dofs; translates global offsets to ranks */
  const PetscInt *ranges;   /* per-rank starting offsets from the layout */
  PetscInt       *local;    /* leaf array: local dof indices */
  PetscSFNode    *remote;   /* root array: (rank, index) of the owning dof */
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  /* Roots of the SF are the locally owned, unconstrained dofs */
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout over the owned dofs so a global offset can be mapped back
     to its owning rank via the ranges array */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* Pass 1: count leaves.  In the global section a negative dof count encodes
     a remotely owned point as -(gdof+1); constrained dofs are excluded */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Pass 2: fill leaf (local index) and root (rank, remote index) arrays */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local indices of unconstrained dofs; dofs listed in cind are
       skipped, so the d-th kept dof lands in slot l+d-c */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned point: recover the true global offset from the
         -(goff+1) encoding, then locate the owning rank in ranges */
      for(d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        /* A negative result encodes an insertion point; the owner is the
           rank whose range precedes it */
        if (r < 0) r = -(r+2);
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: roots live on this rank */
      for(d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of local and remote (PETSC_OWN_POINTER): no frees here */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2886 
2887 #undef __FUNCT__
2888 #define __FUNCT__ "DMGetPointSF"
2889 /*@
2890   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
2891 
2892   Input Parameter:
2893 . dm - The DM
2894 
2895   Output Parameter:
2896 . sf - The PetscSF
2897 
2898   Level: intermediate
2899 
2900   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2901 
2902 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2903 @*/
2904 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) {
2905   PetscFunctionBegin;
2906   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2907   PetscValidPointer(sf, 2);
2908   *sf = dm->sf;
2909   PetscFunctionReturn(0);
2910 }
2911 
2912 #undef __FUNCT__
2913 #define __FUNCT__ "DMSetPointSF"
2914 /*@
2915   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
2916 
2917   Input Parameters:
2918 + dm - The DM
2919 - sf - The PetscSF
2920 
2921   Level: intermediate
2922 
2923 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2924 @*/
2925 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) {
2926   PetscErrorCode ierr;
2927 
2928   PetscFunctionBegin;
2929   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2930   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
2931   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
2932   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
2933   dm->sf = sf;
2934   PetscFunctionReturn(0);
2935 }
2936 
2937 #undef __FUNCT__
2938 #define __FUNCT__ "DMGetNumFields"
2939 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
2940 {
2941   PetscFunctionBegin;
2942   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2943   PetscValidPointer(numFields, 2);
2944   *numFields = dm->numFields;
2945   PetscFunctionReturn(0);
2946 }
2947 
2948 #undef __FUNCT__
2949 #define __FUNCT__ "DMSetNumFields"
2950 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
2951 {
2952   PetscInt       f;
2953   PetscErrorCode ierr;
2954 
2955   PetscFunctionBegin;
2956   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2957   for (f = 0; f < dm->numFields; ++f) {
2958     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
2959   }
2960   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
2961   dm->numFields = numFields;
2962   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
2963   for (f = 0; f < dm->numFields; ++f) {
2964     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
2965   }
2966   PetscFunctionReturn(0);
2967 }
2968 
2969 #undef __FUNCT__
2970 #define __FUNCT__ "DMGetField"
2971 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
2972 {
2973   PetscFunctionBegin;
2974   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2975   PetscValidPointer(field, 2);
2976   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
2977   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
2978   *field = dm->fields[f];
2979   PetscFunctionReturn(0);
2980 }
2981 
2982 #undef __FUNCT__
2983 #define __FUNCT__ "DMSetCoordinates"
2984 /*@
2985   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
2986 
2987   Collective on DM
2988 
2989   Input Parameters:
2990 + dm - the DM
2991 - c - coordinate vector
2992 
2993   Note:
2994   The coordinates do include those for ghost points, which are in the local vector
2995 
2996   Level: intermediate
2997 
2998 .keywords: distributed array, get, corners, nodes, local indices, coordinates
2999 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3000 @*/
3001 PetscErrorCode DMSetCoordinates(DM dm, Vec c)
3002 {
3003   PetscErrorCode ierr;
3004 
3005   PetscFunctionBegin;
3006   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3007   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3008   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3009   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3010   dm->coordinates = c;
3011   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3012   PetscFunctionReturn(0);
3013 }
3014 
3015 #undef __FUNCT__
3016 #define __FUNCT__ "DMSetCoordinatesLocal"
3017 /*@
3018   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3019 
3020   Collective on DM
3021 
3022    Input Parameters:
3023 +  dm - the DM
3024 -  c - coordinate vector
3025 
3026   Note:
3027   The coordinates of ghost points can be set using DMSetCoordinates()
3028   followed by DMGetCoordinatesLocal(). This is intended to enable the
3029   setting of ghost coordinates outside of the domain.
3030 
3031   Level: intermediate
3032 
3033 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3034 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3035 @*/
3036 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3037 {
3038   PetscErrorCode ierr;
3039 
3040   PetscFunctionBegin;
3041   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3042   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3043   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3044   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3045   dm->coordinatesLocal = c;
3046   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3047   PetscFunctionReturn(0);
3048 }
3049 
3050 #undef __FUNCT__
3051 #define __FUNCT__ "DMGetCoordinates"
3052 /*@
3053   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3054 
3055   Not Collective
3056 
3057   Input Parameter:
3058 . dm - the DM
3059 
3060   Output Parameter:
3061 . c - global coordinate vector
3062 
3063   Note:
3064   This is a borrowed reference, so the user should NOT destroy this vector
3065 
3066   Each process has only the local coordinates (does NOT have the ghost coordinates).
3067 
3068   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3069   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3070 
3071   Level: intermediate
3072 
3073 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3074 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3075 @*/
3076 PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3077 {
3078   PetscErrorCode ierr;
3079 
3080   PetscFunctionBegin;
3081   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3082   PetscValidPointer(c,2);
3083   if (!dm->coordinates && dm->coordinatesLocal) {
3084     DM cdm;
3085 
3086     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3087     ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
3088     ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
3089     ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3090     ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3091   }
3092   *c = dm->coordinates;
3093   PetscFunctionReturn(0);
3094 }
3095 
3096 #undef __FUNCT__
3097 #define __FUNCT__ "DMGetCoordinatesLocal"
3098 /*@
3099   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3100 
3101   Collective on DM
3102 
3103   Input Parameter:
3104 . dm - the DM
3105 
3106   Output Parameter:
3107 . c - coordinate vector
3108 
3109   Note:
3110   This is a borrowed reference, so the user should NOT destroy this vector
3111 
3112   Each process has the local and ghost coordinates
3113 
3114   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3115   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3116 
3117   Level: intermediate
3118 
3119 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3120 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3121 @*/
3122 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3123 {
3124   PetscErrorCode ierr;
3125 
3126   PetscFunctionBegin;
3127   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3128   PetscValidPointer(c,2);
3129   if (!dm->coordinatesLocal && dm->coordinates) {
3130     DM cdm;
3131 
3132     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3133     ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
3134     ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
3135     ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3136     ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3137   }
3138   *c = dm->coordinatesLocal;
3139   PetscFunctionReturn(0);
3140 }
3141 
3142 #undef __FUNCT__
3143 #define __FUNCT__ "DMGetCoordinateDM"
3144 /*@
3145   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3146 
3147   Collective on DM
3148 
3149   Input Parameter:
3150 . dm - the DM
3151 
3152   Output Parameter:
3153 . cdm - coordinate DM
3154 
3155   Level: intermediate
3156 
3157 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3158 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3159 @*/
3160 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3161 {
3162   PetscErrorCode ierr;
3163 
3164   PetscFunctionBegin;
3165   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3166   PetscValidPointer(cdm,2);
3167   if (!dm->coordinateDM) {
3168     if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3169     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3170   }
3171   *cdm = dm->coordinateDM;
3172   PetscFunctionReturn(0);
3173 }
3174