xref: /petsc/src/dm/interface/dm.c (revision 90c77898206983775cf1b53006bd96c4fc8033d6)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
PetscClassId  DM_CLASSID;                                      /* class id for DM objects; registered by DMInitializePackage() — TODO confirm registration site */
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;  /* profiling events for DM operations */
5 
#undef __FUNCT__
#define __FUNCT__ "DMViewFromOptions"
/*
  Processes command line options to determine if/how a dm
  is to be viewed.

  Looks up optionname in the options database using the DM's options
  prefix; when a viewer was requested, pushes the requested format,
  views the DM, and pops the format again.
*/
PetscErrorCode  DMViewFromOptions(DM dm,const char optionname[])
{
  PetscErrorCode    ierr;
  PetscBool         flg;      /* set if the option was found in the database */
  PetscViewer       viewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
    ierr = DMView(dm,viewer);CHKERRQ(ierr);
    ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    /* the viewer was created for us by PetscOptionsGetViewer(), so destroy it here */
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
30 
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "DMCreate"
34 /*@
35   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
36 
37    If you never  call DMSetType()  it will generate an
38    error when you try to use the vector.
39 
40   Collective on MPI_Comm
41 
42   Input Parameter:
43 . comm - The communicator for the DM object
44 
45   Output Parameter:
46 . dm - The DM object
47 
48   Level: beginner
49 
50 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
51 @*/
52 PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
53 {
54   DM             v;
55   PetscErrorCode ierr;
56 
57   PetscFunctionBegin;
58   PetscValidPointer(dm,2);
59   *dm = NULL;
60 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
61   ierr = VecInitializePackage(NULL);CHKERRQ(ierr);
62   ierr = MatInitializePackage(NULL);CHKERRQ(ierr);
63   ierr = DMInitializePackage(NULL);CHKERRQ(ierr);
64 #endif
65 
66   ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
67   ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);
68 
69 
70   v->ltogmap              = NULL;
71   v->ltogmapb             = NULL;
72   v->bs                   = 1;
73   v->coloringtype         = IS_COLORING_GLOBAL;
74   ierr                    = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
75   ierr                    = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
76   v->defaultSection       = NULL;
77   v->defaultGlobalSection = NULL;
78   {
79     PetscInt i;
80     for (i = 0; i < 10; ++i) {
81       v->nullspaceConstructors[i] = NULL;
82     }
83   }
84   v->numFields = 0;
85   v->fields    = NULL;
86 
87   *dm = v;
88   PetscFunctionReturn(0);
89 }
90 
#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* free any previously set type string before storing a copy of the new one */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
119 
#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
  VecGetDM - Gets the DM defining the data layout of the vector

  Not collective

  Input Parameter:
. v - The Vec

  Output Parameter:
. dm - The DM (presumably NULL if no DM was composed with the vector — behavior of PetscObjectQuery(); verify)

  Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* the DM is stored as a composed object under the key "__PETSc_dm" (set by VecSetDM()) */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
147 
148 #undef __FUNCT__
149 #define __FUNCT__ "VecSetDM"
150 /*@
151   VecSetDM - Sets the DM defining the data layout of the vector
152 
153   Not collective
154 
155   Input Parameters:
156 + v - The Vec
157 - dm - The DM
158 
159   Level: intermediate
160 
161 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
162 @*/
163 PetscErrorCode VecSetDM(Vec v, DM dm)
164 {
165   PetscErrorCode ierr;
166 
167   PetscFunctionBegin;
168   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
169   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
170   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
171   PetscFunctionReturn(0);
172 }
173 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* free any previously set type string before storing a copy of the new one */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
202 
#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
  MatGetDM - Gets the DM defining the data layout of the matrix

  Not collective

  Input Parameter:
. A - The Mat

  Output Parameter:
. dm - The DM (presumably NULL if no DM was composed with the matrix — behavior of PetscObjectQuery(); verify)

  Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* the DM is stored as a composed object under the key "__PETSc_dm" (set by MatSetDM()) */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
230 
#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
  MatSetDM - Sets the DM defining the data layout of the matrix

  Not collective

  Input Parameters:
+ A - The Mat
- dm - The DM, may be NULL to remove any previously set DM

  Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dm may be NULL; PetscObjectCompose() with NULL detaches the composed object */
  if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
256 
#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DM options in the database.

   Logically Collective on DM

   Input Parameters:
+  da - the DM context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DM, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* delegate to the generic PetscObject prefix mechanism */
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
288 
289 #undef __FUNCT__
290 #define __FUNCT__ "DMDestroy"
291 /*@
292     DMDestroy - Destroys a vector packer or DMDA.
293 
294     Collective on DM
295 
296     Input Parameter:
297 .   dm - the DM object to destroy
298 
299     Level: developer
300 
301 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
302 
303 @*/
304 PetscErrorCode  DMDestroy(DM *dm)
305 {
306   PetscInt       i, cnt = 0, f;
307   DMNamedVecLink nlink,nnext;
308   PetscErrorCode ierr;
309 
310   PetscFunctionBegin;
311   if (!*dm) PetscFunctionReturn(0);
312   PetscValidHeaderSpecific((*dm),DM_CLASSID,1);
313 
314   /* count all the circular references of DM and its contained Vecs */
315   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
316     if ((*dm)->localin[i])  cnt++;
317     if ((*dm)->globalin[i]) cnt++;
318   }
319   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
320   if ((*dm)->x) {
321     DM obj;
322     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
323     if (obj == *dm) cnt++;
324   }
325   for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
326   if ((*dm)->x) {
327     DM obj;
328     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
329     if (obj == *dm) cnt++;
330   }
331 
332   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
333   /*
334      Need this test because the dm references the vectors that
335      reference the dm, so destroying the dm calls destroy on the
336      vectors that cause another destroy on the dm
337   */
338   if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
339   ((PetscObject) (*dm))->refct = 0;
340   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
341     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
342     ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
343   }
344   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
345     nnext = nlink->next;
346     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
347     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
348     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
349     ierr = PetscFree(nlink);CHKERRQ(ierr);
350   }
351   (*dm)->namedglobal = NULL;
352 
353   for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
354     nnext = nlink->next;
355     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
356     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
357     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
358     ierr = PetscFree(nlink);CHKERRQ(ierr);
359   }
360   (*dm)->namedlocal = NULL;
361 
362   /* Destroy the list of hooks */
363   {
364     DMCoarsenHookLink link,next;
365     for (link=(*dm)->coarsenhook; link; link=next) {
366       next = link->next;
367       ierr = PetscFree(link);CHKERRQ(ierr);
368     }
369     (*dm)->coarsenhook = NULL;
370   }
371   {
372     DMRefineHookLink link,next;
373     for (link=(*dm)->refinehook; link; link=next) {
374       next = link->next;
375       ierr = PetscFree(link);CHKERRQ(ierr);
376     }
377     (*dm)->refinehook = NULL;
378   }
379   {
380     DMSubDomainHookLink link,next;
381     for (link=(*dm)->subdomainhook; link; link=next) {
382       next = link->next;
383       ierr = PetscFree(link);CHKERRQ(ierr);
384     }
385     (*dm)->subdomainhook = NULL;
386   }
387   {
388     DMGlobalToLocalHookLink link,next;
389     for (link=(*dm)->gtolhook; link; link=next) {
390       next = link->next;
391       ierr = PetscFree(link);CHKERRQ(ierr);
392     }
393     (*dm)->gtolhook = NULL;
394   }
395   /* Destroy the work arrays */
396   {
397     DMWorkLink link,next;
398     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
399     for (link=(*dm)->workin; link; link=next) {
400       next = link->next;
401       ierr = PetscFree(link->mem);CHKERRQ(ierr);
402       ierr = PetscFree(link);CHKERRQ(ierr);
403     }
404     (*dm)->workin = NULL;
405   }
406 
407   ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
408   ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
409   ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);
410 
411   if ((*dm)->ctx && (*dm)->ctxdestroy) {
412     ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
413   }
414   ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
415   ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
416   ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
417   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
418   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
419   ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
420   ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);
421 
422   ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
423   ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
424   ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
425   ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
426   ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);
427 
428   ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
429   ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
430   ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);
431 
432   for (f = 0; f < (*dm)->numFields; ++f) {
433     ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
434   }
435   ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
436   /* if memory was published with AMS then destroy it */
437   ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);
438 
439   ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
440   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
441   ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
442   PetscFunctionReturn(0);
443 }
444 
#undef __FUNCT__
#define __FUNCT__ "DMSetUp"
/*@
    DMSetUp - sets up the data structures inside a DM object

    Collective on DM

    Input Parameter:
.   dm - the DM object to setup

    Notes: This routine is idempotent; calling it a second time is a no-op.

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetUp(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->setupcalled) PetscFunctionReturn(0);
  /* the type-specific setup is optional */
  if (dm->ops->setup) {
    ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
  }
  dm->setupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}
473 
#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
    DMSetFromOptions - sets parameters in a DM from the options database

    Collective on DM

    Input Parameter:
.   dm - the DM object to set options for

    Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.   -dm_mat_type <type>  type of matrix to create inside DM
-   -dm_coloring_type <global or ghosted>

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetFromOptions(DM dm)
{
  char           typeName[256] = MATAIJ;  /* default matrix type when none was set */
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
  }
  /* default shown to the user is the previously set mat type, falling back to MATAIJ */
  ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr);
  /* let the DM implementation process its own options */
  if (dm->ops->setfromoptions) {
    ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
  }
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = DMViewFromOptions(dm,"-dm_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
523 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer; if NULL the standard output ASCII viewer on the DM's communicator is used

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* binary format starts with a class id followed by a fixed 256-byte type name */
    PetscInt classid = DM_FILE_CLASSID;
    char     type[256];

    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  /* the implementation-specific view is optional */
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
564 
565 #undef __FUNCT__
566 #define __FUNCT__ "DMCreateGlobalVector"
567 /*@
568     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
569 
570     Collective on DM
571 
572     Input Parameter:
573 .   dm - the DM object
574 
575     Output Parameter:
576 .   vec - the global vector
577 
578     Level: beginner
579 
580 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
581 
582 @*/
583 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
584 {
585   PetscErrorCode ierr;
586 
587   PetscFunctionBegin;
588   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
589   ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
590   PetscFunctionReturn(0);
591 }
592 
593 #undef __FUNCT__
594 #define __FUNCT__ "DMCreateLocalVector"
595 /*@
596     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
597 
598     Not Collective
599 
600     Input Parameter:
601 .   dm - the DM object
602 
603     Output Parameter:
604 .   vec - the local vector
605 
606     Level: beginner
607 
608 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
609 
610 @*/
611 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
612 {
613   PetscErrorCode ierr;
614 
615   PetscFunctionBegin;
616   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
617   ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
618   PetscFunctionReturn(0);
619 }
620 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping (owned by the DM; do not destroy)

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* lazily build and cache the mapping on first use */
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* NOTE(review): this local `ltog` shadows the output parameter of the same name */
      PetscInt *ltog;
      PetscInt pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the mapping takes ownership of the ltog array */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
    } else {
      /* no default section; fall back to the implementation-provided construction */
      if (!dm->ops->createlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
681 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  da - the distributed array that provides the mapping

   Output Parameter:
.  ltog - the block mapping (owned by the DM; do not destroy)

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* lazily build and cache the blocked mapping on first use */
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* block size 1: the blocked mapping is identical to the point mapping, so share it */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}
724 
725 #undef __FUNCT__
726 #define __FUNCT__ "DMGetBlockSize"
727 /*@
728    DMGetBlockSize - Gets the inherent block size associated with a DM
729 
730    Not Collective
731 
732    Input Parameter:
733 .  dm - the DM with block structure
734 
735    Output Parameter:
736 .  bs - the block size, 1 implies no exploitable block structure
737 
738    Level: intermediate
739 
740 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
741 @*/
742 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
743 {
744   PetscFunctionBegin;
745   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
746   PetscValidPointer(bs,2);
747   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
748   *bs = dm->bs;
749   PetscFunctionReturn(0);
750 }
751 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation"
/*@
    DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameters:
+  mat - the interpolation
-  vec - the scaling (optional)

    Level: developer

    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.

        For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
        EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.


.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()

@*/
PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* the coarse DM (dm1) provides the implementation */
  ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
789 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInjection"
/*@
    DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameter:
.   ctx - the injection

    Level: developer

   Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()

@*/
PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* the coarse DM (dm1) provides the implementation */
  ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
822 
#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@C
    DMCreateColoring - Gets coloring for a DMDA or DMComposite

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
-   mtype - either MATAIJ or MATBAIJ

    Output Parameter:
.   coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* not all DM implementations provide a coloring */
  if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
853 
854 #undef __FUNCT__
855 #define __FUNCT__ "DMCreateMatrix"
856 /*@C
857     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
858 
859     Collective on DM
860 
861     Input Parameter:
862 +   dm - the DM object
863 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
864             any type which inherits from one of these (such as MATAIJ)
865 
866     Output Parameter:
867 .   mat - the empty Jacobian
868 
869     Level: beginner
870 
871     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
872        do not need to do it yourself.
873 
874        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
875        the nonzero pattern call DMDASetMatPreallocateOnly()
876 
877        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
878        internally by PETSc.
879 
880        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
881        the indices for the global numbering for DMDAs which is complicated.
882 
883 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
884 
885 @*/
886 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
887 {
888   PetscErrorCode ierr;
889 
890   PetscFunctionBegin;
891   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
892 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
893   ierr = MatInitializePackage(NULL);CHKERRQ(ierr);
894 #endif
895   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
896   PetscValidPointer(mat,3);
897   if (dm->mattype) {
898     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
899   } else {
900     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
901   }
902   PetscFunctionReturn(0);
903 }
904 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatrixPreallocateOnly"
/*@
  DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
    preallocated but the nonzero structure and zero values will not be set.

  Logically Collective on DM

  Input Parameters:
+ dm - the DM
- only - PETSC_TRUE if only want preallocation

  Level: developer
.seealso DMCreateMatrix()
@*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* simply record the flag; it is consulted when matrices are created */
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
927 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minium size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - the work array; the parameter is declared void* but is written through as void**

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink     link;
  size_t         size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* reuse a free-list entry if one is available, otherwise allocate a new link */
  if (dm->workin) {
    link       = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* grow the buffer if the cached one is too small for count elements of dtype */
  if (size*count > link->bytes) {
    ierr        = PetscFree(link->mem);CHKERRQ(ierr);
    ierr        = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* move the link onto the checked-out list; DMRestoreWorkArray() moves it back */
  link->next   = dm->workout;
  dm->workout  = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
973 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. array - the work array

  Level: developer

  Notes:
  The array pointer is matched by address against the DM's checked-out list; it is an
  error to pass an array that was not obtained from this DM via DMGetWorkArray().

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Walk the checked-out list via a pointer-to-pointer so the matching link can be unlinked in place */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p           = link->next;     /* unlink from the checked-out list */
      link->next   = dm->workin;     /* push back onto the free list for reuse */
      dm->workin   = link;
      *(void**)mem = NULL;           /* defend the caller against use-after-return */
      PetscFunctionReturn(0);
    }
  }
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1012 
1013 #undef __FUNCT__
1014 #define __FUNCT__ "DMSetNullSpaceConstructor"
1015 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1016 {
1017   PetscFunctionBegin;
1018   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1019   if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1020   dm->nullspaceConstructors[field] = nullsp;
1021   PetscFunctionReturn(0);
1022 }
1023 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or NULL if not requested)
. fieldNames - The name for each field (or NULL if not requested)
- fields     - The global indices for each field (or NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All outputs are optional; initialize the requested ones so early error paths leave them defined */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    /* Section-based path: derive the field ISs from the global section in two passes */
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* Pass 1: count the locally-owned (gdof > 0), unconstrained dofs per field */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr           = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr           = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    for (f = 0; f < nF; ++f) {
      ierr          = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0; /* reset so the counters can double as fill cursors in pass 2 */
    }
    /* Pass 2: record global indices, consuming offsets field-by-field at each point.
       NOTE(review): this assumes a point's global dofs are laid out contiguously in
       field order starting at goff -- confirm against the global section layout. */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) *numFields = nF;
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        /* Copy the name so the caller owns it independently of the section */
        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        /* PETSC_OWN_POINTER: the IS takes ownership of fieldIndices[f], so it is not freed below */
        ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else if (dm->ops->createfieldis) {
    /* No default section: defer entirely to the implementation, if it provides one */
    ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1136 
1137 
1138 #undef __FUNCT__
1139 #define __FUNCT__ "DMCreateFieldDecomposition"
1140 /*@C
1141   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1142                           corresponding to different fields: each IS contains the global indices of the dofs of the
1143                           corresponding field. The optional list of DMs define the DM for each subproblem.
1144                           Generalizes DMCreateFieldIS().
1145 
1146   Not collective
1147 
1148   Input Parameter:
1149 . dm - the DM object
1150 
1151   Output Parameters:
1152 + len       - The number of subproblems in the field decomposition (or NULL if not requested)
1153 . namelist  - The name for each field (or NULL if not requested)
1154 . islist    - The global indices for each field (or NULL if not requested)
1155 - dmlist    - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1156 
1157   Level: intermediate
1158 
1159   Notes:
1160   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1161   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1162   and all of the arrays should be freed with PetscFree().
1163 
1164 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1165 @*/
1166 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1167 {
1168   PetscErrorCode ierr;
1169 
1170   PetscFunctionBegin;
1171   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1172   if (len) {
1173     PetscValidPointer(len,2);
1174     *len = 0;
1175   }
1176   if (namelist) {
1177     PetscValidPointer(namelist,3);
1178     *namelist = 0;
1179   }
1180   if (islist) {
1181     PetscValidPointer(islist,4);
1182     *islist = 0;
1183   }
1184   if (dmlist) {
1185     PetscValidPointer(dmlist,5);
1186     *dmlist = 0;
1187   }
1188   /*
1189    Is it a good idea to apply the following check across all impls?
1190    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1191    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1192    */
1193   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1194   if (!dm->ops->createfielddecomposition) {
1195     PetscSection section;
1196     PetscInt     numFields, f;
1197 
1198     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1199     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1200     if (section && numFields && dm->ops->createsubdm) {
1201       *len = numFields;
1202       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1203       for (f = 0; f < numFields; ++f) {
1204         const char *fieldName;
1205 
1206         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1207         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1208         ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr);
1209       }
1210     } else {
1211       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1212       /* By default there are no DMs associated with subproblems. */
1213       if (dmlist) *dmlist = NULL;
1214     }
1215   } else {
1216     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1217   }
1218   PetscFunctionReturn(0);
1219 }
1220 
1221 #undef __FUNCT__
1222 #define __FUNCT__ "DMCreateSubDM"
1223 /*@C
1224   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1225                   The fields are defined by DMCreateFieldIS().
1226 
1227   Not collective
1228 
1229   Input Parameters:
1230 + dm - the DM object
1231 . numFields - number of fields in this subproblem
1232 - len       - The number of subproblems in the decomposition (or NULL if not requested)
1233 
1234   Output Parameters:
1235 . is - The global indices for the subproblem
1236 - dm - The DM for the subproblem
1237 
1238   Level: intermediate
1239 
1240 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1241 @*/
1242 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1243 {
1244   PetscErrorCode ierr;
1245 
1246   PetscFunctionBegin;
1247   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1248   PetscValidPointer(fields,3);
1249   if (is) PetscValidPointer(is,4);
1250   if (subdm) PetscValidPointer(subdm,5);
1251   if (dm->ops->createsubdm) {
1252     ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
1253   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1254   PetscFunctionReturn(0);
1255 }
1256 
1257 
1258 #undef __FUNCT__
1259 #define __FUNCT__ "DMCreateDomainDecomposition"
1260 /*@C
1261   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1262                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1263                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1264                           define a nonoverlapping covering, while outer subdomains can overlap.
1265                           The optional list of DMs define the DM for each subproblem.
1266 
1267   Not collective
1268 
1269   Input Parameter:
1270 . dm - the DM object
1271 
1272   Output Parameters:
1273 + len         - The number of subproblems in the domain decomposition (or NULL if not requested)
1274 . namelist    - The name for each subdomain (or NULL if not requested)
1275 . innerislist - The global indices for each inner subdomain (or NULL, if not requested)
1276 . outerislist - The global indices for each outer subdomain (or NULL, if not requested)
1277 - dmlist      - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1278 
1279   Level: intermediate
1280 
1281   Notes:
1282   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1283   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1284   and all of the arrays should be freed with PetscFree().
1285 
1286 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1287 @*/
1288 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1289 {
1290   PetscErrorCode      ierr;
1291   DMSubDomainHookLink link;
1292   PetscInt            i,l;
1293 
1294   PetscFunctionBegin;
1295   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1296   if (len)           {PetscValidPointer(len,2);            *len         = 0;}
1297   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = NULL;}
1298   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = NULL;}
1299   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = NULL;}
1300   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = NULL;}
1301   /*
1302    Is it a good idea to apply the following check across all impls?
1303    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1304    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1305    */
1306   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1307   if (dm->ops->createdomaindecomposition) {
1308     ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
1309     /* copy subdomain hooks and context over to the subdomain DMs */
1310     if (dmlist) {
1311       for (i = 0; i < l; i++) {
1312         for (link=dm->subdomainhook; link; link=link->next) {
1313           if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
1314         }
1315         (*dmlist)[i]->ctx = dm->ctx;
1316       }
1317     }
1318     if (len) *len = l;
1319   }
1320   PetscFunctionReturn(0);
1321 }
1322 
1323 
1324 #undef __FUNCT__
1325 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1326 /*@C
1327   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1328 
1329   Not collective
1330 
1331   Input Parameters:
1332 + dm - the DM object
1333 . n  - the number of subdomain scatters
1334 - subdms - the local subdomains
1335 
1336   Output Parameters:
1337 + n     - the number of scatters returned
1338 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1339 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1340 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1341 
1342   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1343   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1344   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1345   solution and residual data.
1346 
1347   Level: developer
1348 
1349 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1350 @*/
1351 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1352 {
1353   PetscErrorCode ierr;
1354 
1355   PetscFunctionBegin;
1356   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1357   PetscValidPointer(subdms,3);
1358   if (dm->ops->createddscatters) {
1359     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1360   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1361   PetscFunctionReturn(0);
1362 }
1363 
1364 #undef __FUNCT__
1365 #define __FUNCT__ "DMRefine"
1366 /*@
1367   DMRefine - Refines a DM object
1368 
1369   Collective on DM
1370 
1371   Input Parameter:
1372 + dm   - the DM object
1373 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1374 
1375   Output Parameter:
1376 . dmf - the refined DM, or NULL
1377 
1378   Note: If no refinement was done, the return value is NULL
1379 
1380   Level: developer
1381 
1382 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1383 @*/
1384 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1385 {
1386   PetscErrorCode   ierr;
1387   DMRefineHookLink link;
1388 
1389   PetscFunctionBegin;
1390   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1391   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1392   if (*dmf) {
1393     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1394 
1395     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1396 
1397     (*dmf)->ctx       = dm->ctx;
1398     (*dmf)->leveldown = dm->leveldown;
1399     (*dmf)->levelup   = dm->levelup + 1;
1400 
1401     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1402     for (link=dm->refinehook; link; link=link->next) {
1403       if (link->refinehook) {
1404         ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);
1405       }
1406     }
1407   }
1408   PetscFunctionReturn(0);
1409 }
1410 
1411 #undef __FUNCT__
1412 #define __FUNCT__ "DMRefineHookAdd"
1413 /*@
1414    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1415 
1416    Logically Collective
1417 
1418    Input Arguments:
1419 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1420 .  refinehook - function to run when setting up a coarser level
1421 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1422 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1423 
1424    Calling sequence of refinehook:
1425 $    refinehook(DM coarse,DM fine,void *ctx);
1426 
1427 +  coarse - coarse level DM
1428 .  fine - fine level DM to interpolate problem to
1429 -  ctx - optional user-defined function context
1430 
1431    Calling sequence for interphook:
1432 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1433 
1434 +  coarse - coarse level DM
1435 .  interp - matrix interpolating a coarse-level solution to the finer grid
1436 .  fine - fine level DM to update
1437 -  ctx - optional user-defined function context
1438 
1439    Level: advanced
1440 
1441    Notes:
1442    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1443 
1444    If this function is called multiple times, the hooks will be run in the order they are added.
1445 
1446 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1447 @*/
1448 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1449 {
1450   PetscErrorCode   ierr;
1451   DMRefineHookLink link,*p;
1452 
1453   PetscFunctionBegin;
1454   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1455   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1456   ierr             = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1457   link->refinehook = refinehook;
1458   link->interphook = interphook;
1459   link->ctx        = ctx;
1460   link->next       = NULL;
1461   *p               = link;
1462   PetscFunctionReturn(0);
1463 }
1464 
1465 #undef __FUNCT__
1466 #define __FUNCT__ "DMInterpolate"
1467 /*@
1468    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1469 
1470    Collective if any hooks are
1471 
1472    Input Arguments:
1473 +  coarse - coarser DM to use as a base
1474 .  restrct - interpolation matrix, apply using MatInterpolate()
1475 -  fine - finer DM to update
1476 
1477    Level: developer
1478 
1479 .seealso: DMRefineHookAdd(), MatInterpolate()
1480 @*/
1481 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1482 {
1483   PetscErrorCode   ierr;
1484   DMRefineHookLink link;
1485 
1486   PetscFunctionBegin;
1487   for (link=fine->refinehook; link; link=link->next) {
1488     if (link->interphook) {
1489       ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
1490     }
1491   }
1492   PetscFunctionReturn(0);
1493 }
1494 
1495 #undef __FUNCT__
1496 #define __FUNCT__ "DMGetRefineLevel"
1497 /*@
1498     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1499 
1500     Not Collective
1501 
1502     Input Parameter:
1503 .   dm - the DM object
1504 
1505     Output Parameter:
1506 .   level - number of refinements
1507 
1508     Level: developer
1509 
1510 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1511 
1512 @*/
1513 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1514 {
1515   PetscFunctionBegin;
1516   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1517   *level = dm->levelup;
1518   PetscFunctionReturn(0);
1519 }
1520 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr            = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook   = endhook;
  link->ctx       = ctx;
  link->next      = NULL;
  *p              = link;
  PetscFunctionReturn(0);
}
1578 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user begin-hooks (registered with DMGlobalToLocalHookAdd()) before any communication */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The SF path only supports insertion modes */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    /* NOTE(review): arrays are restored while the broadcast is still in flight; this
       appears to rely on DMGlobalToLocalEnd() re-acquiring the same buffers -- confirm */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the implementation; *_ALL_VALUES modes are mapped to their plain counterparts */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1626 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
    DMGlobalToLocalEnd - Ends updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalBegin(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  PetscScalar             *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* The SF path only supports insertion modes; must mirror DMGlobalToLocalBegin() */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    /* Completes the broadcast started in DMGlobalToLocalBegin() */
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the implementation; *_ALL_VALUES modes are mapped to their plain counterparts */
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* Run user end-hooks after the data has arrived */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1672 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
    DMLocalToGlobalBegin - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
           base point.
-   g - the global vector

    Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
           array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
           global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Map the insert mode onto the MPI reduction used by the SF reduce */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      /* SETERRQ returns from this function, so there is no fallthrough to the ADD cases */
      SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    /* NOTE(review): arrays are restored while the reduce is still in flight; this
       appears to rely on DMLocalToGlobalEnd() re-acquiring the same buffers -- confirm */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the implementation; *_ALL_VALUES modes are mapped to their plain counterparts */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1732 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Recompute the MPI reduction op; must match the one chosen in DMLocalToGlobalBegin() */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      /* SETERRQ returns from this function, so there is no fallthrough to the ADD cases */
      SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    /* Completes the reduction started in DMLocalToGlobalBegin() */
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the implementation; *_ALL_VALUES modes are mapped to their plain counterparts */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1788 
1789 #undef __FUNCT__
1790 #define __FUNCT__ "DMCoarsen"
1791 /*@
1792     DMCoarsen - Coarsens a DM object
1793 
1794     Collective on DM
1795 
1796     Input Parameter:
1797 +   dm - the DM object
1798 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1799 
1800     Output Parameter:
1801 .   dmc - the coarsened DM
1802 
1803     Level: developer
1804 
1805 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1806 
1807 @*/
1808 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1809 {
1810   PetscErrorCode    ierr;
1811   DMCoarsenHookLink link;
1812 
1813   PetscFunctionBegin;
1814   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1815   ierr                      = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1816   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1817   ierr                      = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1818   (*dmc)->ctx               = dm->ctx;
1819   (*dmc)->levelup           = dm->levelup;
1820   (*dmc)->leveldown         = dm->leveldown + 1;
1821   ierr                      = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1822   for (link=dm->coarsenhook; link; link=link->next) {
1823     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1824   }
1825   PetscFunctionReturn(0);
1826 }
1827 
1828 #undef __FUNCT__
1829 #define __FUNCT__ "DMCoarsenHookAdd"
1830 /*@
1831    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1832 
1833    Logically Collective
1834 
1835    Input Arguments:
1836 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1837 .  coarsenhook - function to run when setting up a coarser level
1838 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1839 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1840 
1841    Calling sequence of coarsenhook:
1842 $    coarsenhook(DM fine,DM coarse,void *ctx);
1843 
1844 +  fine - fine level DM
1845 .  coarse - coarse level DM to restrict problem to
1846 -  ctx - optional user-defined function context
1847 
1848    Calling sequence for restricthook:
1849 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1850 
1851 +  fine - fine level DM
1852 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1853 .  rscale - scaling vector for restriction
1854 .  inject - matrix restricting by injection
1855 .  coarse - coarse level DM to update
1856 -  ctx - optional user-defined function context
1857 
1858    Level: advanced
1859 
1860    Notes:
1861    This function is only needed if auxiliary data needs to be set up on coarse grids.
1862 
1863    If this function is called multiple times, the hooks will be run in the order they are added.
1864 
1865    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1866    extract the finest level information from its context (instead of from the SNES).
1867 
1868 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1869 @*/
1870 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1871 {
1872   PetscErrorCode    ierr;
1873   DMCoarsenHookLink link,*p;
1874 
1875   PetscFunctionBegin;
1876   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1877   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1878   ierr               = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1879   link->coarsenhook  = coarsenhook;
1880   link->restricthook = restricthook;
1881   link->ctx          = ctx;
1882   link->next         = NULL;
1883   *p                 = link;
1884   PetscFunctionReturn(0);
1885 }
1886 
1887 #undef __FUNCT__
1888 #define __FUNCT__ "DMRestrict"
1889 /*@
1890    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1891 
1892    Collective if any hooks are
1893 
1894    Input Arguments:
1895 +  fine - finer DM to use as a base
1896 .  restrct - restriction matrix, apply using MatRestrict()
1897 .  inject - injection matrix, also use MatRestrict()
1898 -  coarse - coarer DM to update
1899 
1900    Level: developer
1901 
1902 .seealso: DMCoarsenHookAdd(), MatRestrict()
1903 @*/
1904 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1905 {
1906   PetscErrorCode    ierr;
1907   DMCoarsenHookLink link;
1908 
1909   PetscFunctionBegin;
1910   for (link=fine->coarsenhook; link; link=link->next) {
1911     if (link->restricthook) {
1912       ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
1913     }
1914   }
1915   PetscFunctionReturn(0);
1916 }
1917 
#undef __FUNCT__
#define __FUNCT__ "DMSubDomainHookAdd"
/*@
   DMSubDomainHookAdd - adds callbacks to be run when a problem is restricted to subdomain (block) DMs

   Logically Collective

   Input Arguments:
+  global - global DM
.  ddhook - function to run when a subdomain (block) DM is created (may be NULL)
.  restricthook - function to run to update data on block solve (at the beginning of the block solve; may be NULL)
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

   Calling sequence for restricthook:
$    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)

+  global - global DM
.  out    - scatter to the outer (with ghost and overlap points) block vector
.  in     - scatter to block vector values only owned locally
.  block  - block DM (may just be a shell if the global DM is passed in correctly)
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on subdomain DMs.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

.seealso: DMCoarsenHookAdd(), DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks so hooks run in registration order */
  ierr               = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ddhook       = ddhook;
  link->ctx          = ctx;
  link->next         = NULL;
  *p                 = link;
  PetscFunctionReturn(0);
}
1967 
1968 #undef __FUNCT__
1969 #define __FUNCT__ "DMSubDomainRestrict"
1970 /*@
1971    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
1972 
1973    Collective if any hooks are
1974 
1975    Input Arguments:
1976 +  fine - finer DM to use as a base
1977 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
1978 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
1979 -  coarse - coarer DM to update
1980 
1981    Level: developer
1982 
1983 .seealso: DMCoarsenHookAdd(), MatRestrict()
1984 @*/
1985 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
1986 {
1987   PetscErrorCode      ierr;
1988   DMSubDomainHookLink link;
1989 
1990   PetscFunctionBegin;
1991   for (link=global->subdomainhook; link; link=link->next) {
1992     if (link->restricthook) {
1993       ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);
1994     }
1995   }
1996   PetscFunctionReturn(0);
1997 }
1998 
1999 #undef __FUNCT__
2000 #define __FUNCT__ "DMGetCoarsenLevel"
2001 /*@
2002     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2003 
2004     Not Collective
2005 
2006     Input Parameter:
2007 .   dm - the DM object
2008 
2009     Output Parameter:
2010 .   level - number of coarsenings
2011 
2012     Level: developer
2013 
2014 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2015 
2016 @*/
2017 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2018 {
2019   PetscFunctionBegin;
2020   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2021   *level = dm->leveldown;
2022   PetscFunctionReturn(0);
2023 }
2024 
2025 
2026 
2027 #undef __FUNCT__
2028 #define __FUNCT__ "DMRefineHierarchy"
2029 /*@C
2030     DMRefineHierarchy - Refines a DM object, all levels at once
2031 
2032     Collective on DM
2033 
2034     Input Parameter:
2035 +   dm - the DM object
2036 -   nlevels - the number of levels of refinement
2037 
2038     Output Parameter:
2039 .   dmf - the refined DM hierarchy
2040 
2041     Level: developer
2042 
2043 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2044 
2045 @*/
2046 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2047 {
2048   PetscErrorCode ierr;
2049 
2050   PetscFunctionBegin;
2051   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2052   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2053   if (nlevels == 0) PetscFunctionReturn(0);
2054   if (dm->ops->refinehierarchy) {
2055     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2056   } else if (dm->ops->refine) {
2057     PetscInt i;
2058 
2059     ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr);
2060     for (i=1; i<nlevels; i++) {
2061       ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr);
2062     }
2063   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2064   PetscFunctionReturn(0);
2065 }
2066 
2067 #undef __FUNCT__
2068 #define __FUNCT__ "DMCoarsenHierarchy"
2069 /*@C
2070     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2071 
2072     Collective on DM
2073 
2074     Input Parameter:
2075 +   dm - the DM object
2076 -   nlevels - the number of levels of coarsening
2077 
2078     Output Parameter:
2079 .   dmc - the coarsened DM hierarchy
2080 
2081     Level: developer
2082 
2083 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2084 
2085 @*/
2086 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2087 {
2088   PetscErrorCode ierr;
2089 
2090   PetscFunctionBegin;
2091   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2092   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2093   if (nlevels == 0) PetscFunctionReturn(0);
2094   PetscValidPointer(dmc,3);
2095   if (dm->ops->coarsenhierarchy) {
2096     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2097   } else if (dm->ops->coarsen) {
2098     PetscInt i;
2099 
2100     ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
2101     for (i=1; i<nlevels; i++) {
2102       ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
2103     }
2104   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2105   PetscFunctionReturn(0);
2106 }
2107 
2108 #undef __FUNCT__
2109 #define __FUNCT__ "DMCreateAggregates"
2110 /*@
2111    DMCreateAggregates - Gets the aggregates that map between
2112    grids associated with two DMs.
2113 
2114    Collective on DM
2115 
2116    Input Parameters:
2117 +  dmc - the coarse grid DM
2118 -  dmf - the fine grid DM
2119 
2120    Output Parameters:
2121 .  rest - the restriction matrix (transpose of the projection matrix)
2122 
2123    Level: intermediate
2124 
2125 .keywords: interpolation, restriction, multigrid
2126 
2127 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2128 @*/
2129 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2130 {
2131   PetscErrorCode ierr;
2132 
2133   PetscFunctionBegin;
2134   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2135   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2136   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2137   PetscFunctionReturn(0);
2138 }
2139 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
    DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

    Not Collective

    Input Parameters:
+   dm - the DM object
-   destroy - the destroy function

    Level: intermediate

    Note: only the function pointer is stored here; it is presumably invoked on
    the context during DM destruction -- confirm in DMDestroy().

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2163 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
    DMSetApplicationContext - Set a user context into a DM object

    Not Collective

    Input Parameters:
+   dm - the DM object
-   ctx - the user context

    Level: intermediate

    Note: the DM stores only the pointer; it does not copy or take ownership of
    the context (but see DMSetApplicationContextDestroy()).

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2187 
2188 #undef __FUNCT__
2189 #define __FUNCT__ "DMGetApplicationContext"
2190 /*@
2191     DMGetApplicationContext - Gets a user context from a DM object
2192 
2193     Not Collective
2194 
2195     Input Parameter:
2196 .   dm - the DM object
2197 
2198     Output Parameter:
2199 .   ctx - the user context
2200 
2201     Level: intermediate
2202 
2203 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2204 
2205 @*/
2206 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2207 {
2208   PetscFunctionBegin;
2209   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2210   *(void**)ctx = dm->ctx;
2211   PetscFunctionReturn(0);
2212 }
2213 
2214 #undef __FUNCT__
2215 #define __FUNCT__ "DMSetVariableBounds"
2216 /*@C
2217     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2218 
2219     Logically Collective on DM
2220 
2221     Input Parameter:
2222 +   dm - the DM object
2223 -   f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)
2224 
2225     Level: intermediate
2226 
2227 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2228          DMSetJacobian()
2229 
2230 @*/
2231 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2232 {
2233   PetscFunctionBegin;
2234   dm->ops->computevariablebounds = f;
2235   PetscFunctionReturn(0);
2236 }
2237 
2238 #undef __FUNCT__
2239 #define __FUNCT__ "DMHasVariableBounds"
2240 /*@
2241     DMHasVariableBounds - does the DM object have a variable bounds function?
2242 
2243     Not Collective
2244 
2245     Input Parameter:
2246 .   dm - the DM object to destroy
2247 
2248     Output Parameter:
2249 .   flg - PETSC_TRUE if the variable bounds function exists
2250 
2251     Level: developer
2252 
2253 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2254 
2255 @*/
2256 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2257 {
2258   PetscFunctionBegin;
2259   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2260   PetscFunctionReturn(0);
2261 }
2262 
2263 #undef __FUNCT__
2264 #define __FUNCT__ "DMComputeVariableBounds"
2265 /*@C
2266     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2267 
2268     Logically Collective on DM
2269 
2270     Input Parameters:
2271 +   dm - the DM object to destroy
2272 -   x  - current solution at which the bounds are computed
2273 
2274     Output parameters:
2275 +   xl - lower bound
2276 -   xu - upper bound
2277 
2278     Level: intermediate
2279 
2280 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2281 
2282 @*/
2283 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2284 {
2285   PetscErrorCode ierr;
2286 
2287   PetscFunctionBegin;
2288   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2289   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2290   if (dm->ops->computevariablebounds) {
2291     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2292   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2293   PetscFunctionReturn(0);
2294 }
2295 
2296 #undef __FUNCT__
2297 #define __FUNCT__ "DMHasColoring"
2298 /*@
2299     DMHasColoring - does the DM object have a method of providing a coloring?
2300 
2301     Not Collective
2302 
2303     Input Parameter:
2304 .   dm - the DM object
2305 
2306     Output Parameter:
2307 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2308 
2309     Level: developer
2310 
2311 .seealso DMHasFunction(), DMCreateColoring()
2312 
2313 @*/
2314 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2315 {
2316   PetscFunctionBegin;
2317   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2318   PetscFunctionReturn(0);
2319 }
2320 
2321 #undef  __FUNCT__
2322 #define __FUNCT__ "DMSetVec"
2323 /*@C
2324     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2325 
2326     Collective on DM
2327 
2328     Input Parameter:
2329 +   dm - the DM object
2330 -   x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.
2331 
2332     Level: developer
2333 
2334 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2335 
2336 @*/
2337 PetscErrorCode  DMSetVec(DM dm,Vec x)
2338 {
2339   PetscErrorCode ierr;
2340 
2341   PetscFunctionBegin;
2342   if (x) {
2343     if (!dm->x) {
2344       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2345     }
2346     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2347   } else if (dm->x) {
2348     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2349   }
2350   PetscFunctionReturn(0);
2351 }
2352 
PetscFunctionList DMList              = NULL;        /* registry of DM type constructors, filled by DMRegister() */
PetscBool         DMRegisterAllCalled = PETSC_FALSE; /* checked before calling DMRegisterAll(); reset by DMRegisterDestroy() */
2355 
#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA, DMSLICED, or DMCOMPOSITE).

  If dm already has a type, its implementation data is destroyed before the
  constructor of the new type is run.

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0); /* already the requested type: nothing to do */

  /* Make sure the registry of DM constructors has been populated before searching it */
  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(NULL);CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(PetscObjectComm((PetscObject)dm), DMList, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down the previous implementation before installing the new one */
  if (dm->ops->destroy) {
    ierr             = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2401 
2402 #undef __FUNCT__
2403 #define __FUNCT__ "DMGetType"
2404 /*@C
2405   DMGetType - Gets the DM type name (as a string) from the DM.
2406 
2407   Not Collective
2408 
2409   Input Parameter:
2410 . dm  - The DM
2411 
2412   Output Parameter:
2413 . type - The DM type name
2414 
2415   Level: intermediate
2416 
2417 .keywords: DM, get, type, name
2418 .seealso: DMSetType(), DMCreate()
2419 @*/
2420 PetscErrorCode  DMGetType(DM dm, DMType *type)
2421 {
2422   PetscErrorCode ierr;
2423 
2424   PetscFunctionBegin;
2425   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2426   PetscValidCharPointer(type,2);
2427   if (!DMRegisterAllCalled) {
2428     ierr = DMRegisterAll(NULL);CHKERRQ(ierr);
2429   }
2430   *type = ((PetscObject)dm)->type_name;
2431   PetscFunctionReturn(0);
2432 }
2433 
2434 #undef __FUNCT__
2435 #define __FUNCT__ "DMConvert"
2436 /*@C
2437   DMConvert - Converts a DM to another DM, either of the same or different type.
2438 
2439   Collective on DM
2440 
2441   Input Parameters:
2442 + dm - the DM
2443 - newtype - new DM type (use "same" for the same type)
2444 
2445   Output Parameter:
2446 . M - pointer to new DM
2447 
2448   Notes:
2449   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2450   the MPI communicator of the generated DM is always the same as the communicator
2451   of the input DM.
2452 
2453   Level: intermediate
2454 
2455 .seealso: DMCreate()
2456 @*/
2457 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
2458 {
2459   DM             B;
2460   char           convname[256];
2461   PetscBool      sametype, issame;
2462   PetscErrorCode ierr;
2463 
2464   PetscFunctionBegin;
2465   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2466   PetscValidType(dm,1);
2467   PetscValidPointer(M,3);
2468   ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
2469   ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
2470   {
2471     PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;
2472 
2473     /*
2474        Order of precedence:
2475        1) See if a specialized converter is known to the current DM.
2476        2) See if a specialized converter is known to the desired DM class.
2477        3) See if a good general converter is registered for the desired class
2478        4) See if a good general converter is known for the current matrix.
2479        5) Use a really basic converter.
2480     */
2481 
2482     /* 1) See if a specialized converter is known to the current DM and the desired class */
2483     ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
2484     ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
2485     ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
2486     ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
2487     ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
2488     ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
2489     if (conv) goto foundconv;
2490 
2491     /* 2)  See if a specialized converter is known to the desired DM class. */
2492     ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
2493     ierr = DMSetType(B, newtype);CHKERRQ(ierr);
2494     ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
2495     ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
2496     ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
2497     ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
2498     ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
2499     ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
2500     if (conv) {
2501       ierr = DMDestroy(&B);CHKERRQ(ierr);
2502       goto foundconv;
2503     }
2504 
2505 #if 0
2506     /* 3) See if a good general converter is registered for the desired class */
2507     conv = B->ops->convertfrom;
2508     ierr = DMDestroy(&B);CHKERRQ(ierr);
2509     if (conv) goto foundconv;
2510 
2511     /* 4) See if a good general converter is known for the current matrix */
2512     if (dm->ops->convert) {
2513       conv = dm->ops->convert;
2514     }
2515     if (conv) goto foundconv;
2516 #endif
2517 
2518     /* 5) Use a really basic converter. */
2519     SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);
2520 
2521 foundconv:
2522     ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
2523     ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
2524     ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
2525   }
2526   ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
2527   PetscFunctionReturn(0);
2528 }
2529 
2530 /*--------------------------------------------------------------------------------------------------------------------*/
2531 
#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
  DMRegister - Registers a DM type constructor under a name so it can be selected with DMSetType(). See DMRegisterDynamic()

  Input Parameters:
+ sname    - name of the new DM type
. path     - path to the dynamic library containing the constructor (may be empty)
. name     - name of the constructor routine within the library
- function - the constructor routine itself (may be NULL for purely dynamic registration)

  Level: advanced

.seealso: DMRegisterAll(), DMRegisterDestroy(), DMSetType()
@*/
PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
{
  char           fullname[PETSC_MAX_PATH_LEN];
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* The registry stores "path:name" so dynamic libraries can be located later */
  ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(PETSC_COMM_WORLD,&DMList, sname, fullname, (void (*)(void))function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2551 
2552 
2553 /*--------------------------------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMRegisterDestroy"
/*@C
   DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().

   Not Collective

   Level: advanced

.keywords: DM, register, destroy
.seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
@*/
PetscErrorCode  DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr                = PetscFunctionListDestroy(&DMList);CHKERRQ(ierr);
  /* Reset the guard so a later DMSetType()/DMGetType() re-runs DMRegisterAll() */
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}
2575 
#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary  with DMView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen(); note that
           this routine currently rejects any non-binary viewer (HDF5 viewers
           are NOT yet supported here)

   Level: intermediate

  Notes:
   The type is determined by the data in the file, any type set into the DM before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* The file starts with a class id marker followed by the DM type name */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  /* Delegate the implementation-specific part of the load, if the type provides one */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2627 
2628 /******************************** FEM Support **********************************/
2629 
2630 #undef __FUNCT__
2631 #define __FUNCT__ "DMPrintCellVector"
2632 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2633 {
2634   PetscInt       f;
2635   PetscErrorCode ierr;
2636 
2637   PetscFunctionBegin;
2638   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2639   for (f = 0; f < len; ++f) {
2640     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2641   }
2642   PetscFunctionReturn(0);
2643 }
2644 
2645 #undef __FUNCT__
2646 #define __FUNCT__ "DMPrintCellMatrix"
2647 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2648 {
2649   PetscInt       f, g;
2650   PetscErrorCode ierr;
2651 
2652   PetscFunctionBegin;
2653   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2654   for (f = 0; f < rows; ++f) {
2655     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2656     for (g = 0; g < cols; ++g) {
2657       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2658     }
2659     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2660   }
2661   PetscFunctionReturn(0);
2662 }
2663 
2664 #undef __FUNCT__
2665 #define __FUNCT__ "DMGetDefaultSection"
2666 /*@
2667   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2668 
2669   Input Parameter:
2670 . dm - The DM
2671 
2672   Output Parameter:
2673 . section - The PetscSection
2674 
2675   Level: intermediate
2676 
2677   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2678 
2679 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2680 @*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Borrowed reference; NULL if no local layout has been set on this DM */
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}
2689 
2690 #undef __FUNCT__
2691 #define __FUNCT__ "DMSetDefaultSection"
2692 /*@
2693   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2694 
2695   Input Parameters:
2696 + dm - The DM
2697 - section - The PetscSection
2698 
2699   Level: intermediate
2700 
2701   Note: Any existing Section will be destroyed
2702 
.seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
2704 @*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroying the old one so that passing the currently-held
     section is safe (would otherwise be freed before being re-installed) */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Propagate the section's field count and field names to the DM's field objects */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2731 
2732 #undef __FUNCT__
2733 #define __FUNCT__ "DMGetDefaultGlobalSection"
2734 /*@
2735   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2736 
2737   Collective on DM
2738 
2739   Input Parameter:
2740 . dm - The DM
2741 
2742   Output Parameter:
2743 . section - The PetscSection
2744 
2745   Level: intermediate
2746 
2747   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2748 
2749 .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2750 @*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Lazily derive the global section from the local section plus the point SF;
     both must already be present on the DM */
  if (!dm->defaultGlobalSection) {
    if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Also cache the matching PetscLayout of values in dm->map */
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  /* Borrowed reference */
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2766 
2767 #undef __FUNCT__
2768 #define __FUNCT__ "DMSetDefaultGlobalSection"
2769 /*@
2770   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2771 
2772   Input Parameters:
2773 + dm - The DM
2774 - section - The PetscSection, or NULL
2775 
2776   Level: intermediate
2777 
2778   Note: Any existing Section will be destroyed
2779 
2780 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2781 @*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* section may be NULL: that clears the cached global section so it will be
     rebuilt on the next DMGetDefaultGlobalSection() */
  if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroy so passing the currently-held section is safe */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}
2794 
2795 #undef __FUNCT__
2796 #define __FUNCT__ "DMGetDefaultSF"
2797 /*@
2798   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2799   it is created from the default PetscSection layouts in the DM.
2800 
2801   Input Parameter:
2802 . dm - The DM
2803 
2804   Output Parameter:
2805 . sf - The PetscSF
2806 
2807   Level: intermediate
2808 
2809   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2810 
2811 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2812 @*/
2813 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
2814 {
2815   PetscInt       nroots;
2816   PetscErrorCode ierr;
2817 
2818   PetscFunctionBegin;
2819   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2820   PetscValidPointer(sf, 2);
2821   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
2822   if (nroots < 0) {
2823     PetscSection section, gSection;
2824 
2825     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2826     if (section) {
2827       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2828       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2829     } else {
2830       *sf = NULL;
2831       PetscFunctionReturn(0);
2832     }
2833   }
2834   *sf = dm->defaultSF;
2835   PetscFunctionReturn(0);
2836 }
2837 
2838 #undef __FUNCT__
2839 #define __FUNCT__ "DMSetDefaultSF"
2840 /*@
2841   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
2842 
2843   Input Parameters:
2844 + dm - The DM
2845 - sf - The PetscSF
2846 
2847   Level: intermediate
2848 
2849   Note: Any previous SF is destroyed
2850 
2851 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
2852 @*/
2853 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
2854 {
2855   PetscErrorCode ierr;
2856 
2857   PetscFunctionBegin;
2858   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2859   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
2860   ierr          = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
2861   dm->defaultSF = sf;
2862   PetscFunctionReturn(0);
2863 }
2864 
2865 #undef __FUNCT__
2866 #define __FUNCT__ "DMCreateDefaultSF"
2867 /*@C
2868   DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
2869   describing the data layout.
2870 
2871   Input Parameters:
2872 + dm - The DM
2873 . localSection - PetscSection describing the local data layout
2874 - globalSection - PetscSection describing the global data layout
2875 
2876   Level: intermediate
2877 
2878 .seealso: DMGetDefaultSF(), DMSetDefaultSF()
2879 @*/
2880 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
2881 {
2882   MPI_Comm       comm;
2883   PetscLayout    layout;
2884   const PetscInt *ranges;
2885   PetscInt       *local;
2886   PetscSFNode    *remote;
2887   PetscInt       pStart, pEnd, p, nroots, nleaves, l;
2888   PetscMPIInt    size, rank;
2889   PetscErrorCode ierr;
2890 
2891   PetscFunctionBegin;
2892   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
2893   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2894   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2895   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2896   ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
2897   ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
2898   ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
2899   ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
2900   ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
2901   ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
2902   ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
2903   for (p = pStart, nleaves = 0; p < pEnd; ++p) {
2904     PetscInt gdof, gcdof;
2905 
2906     ierr     = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
2907     ierr     = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
2908     nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
2909   }
2910   ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
2911   ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
2912   for (p = pStart, l = 0; p < pEnd; ++p) {
2913     const PetscInt *cind;
2914     PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;
2915 
2916     ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
2917     ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
2918     ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
2919     ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
2920     ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
2921     ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
2922     ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
2923     if (!gdof) continue; /* Censored point */
2924     gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
2925     if (gsize != dof-cdof) {
2926       if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
2927       cdof = 0; /* Ignore constraints */
2928     }
2929     for (d = 0, c = 0; d < dof; ++d) {
2930       if ((c < cdof) && (cind[c] == d)) {++c; continue;}
2931       local[l+d-c] = off+d;
2932     }
2933     if (gdof < 0) {
2934       for (d = 0; d < gsize; ++d, ++l) {
2935         PetscInt offset = -(goff+1) + d, r;
2936 
2937         ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
2938         if (r < 0) r = -(r+2);
2939         remote[l].rank  = r;
2940         remote[l].index = offset - ranges[r];
2941       }
2942     } else {
2943       for (d = 0; d < gsize; ++d, ++l) {
2944         remote[l].rank  = rank;
2945         remote[l].index = goff+d - ranges[rank];
2946       }
2947     }
2948   }
2949   if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
2950   ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
2951   ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
2952   PetscFunctionReturn(0);
2953 }
2954 
2955 #undef __FUNCT__
2956 #define __FUNCT__ "DMGetPointSF"
2957 /*@
2958   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
2959 
2960   Input Parameter:
2961 . dm - The DM
2962 
2963   Output Parameter:
2964 . sf - The PetscSF
2965 
2966   Level: intermediate
2967 
2968   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2969 
2970 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2971 @*/
PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* Borrowed reference; may be NULL if no point SF has been attached */
  *sf = dm->sf;
  PetscFunctionReturn(0);
}
2980 
2981 #undef __FUNCT__
2982 #define __FUNCT__ "DMSetPointSF"
2983 /*@
2984   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
2985 
2986   Input Parameters:
2987 + dm - The DM
2988 - sf - The PetscSF
2989 
2990   Level: intermediate
2991 
2992 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2993 @*/
2994 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
2995 {
2996   PetscErrorCode ierr;
2997 
2998   PetscFunctionBegin;
2999   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3000   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3001   ierr   = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3002   ierr   = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3003   dm->sf = sf;
3004   PetscFunctionReturn(0);
3005 }
3006 
3007 #undef __FUNCT__
3008 #define __FUNCT__ "DMGetNumFields"
/* Return the number of fields attached to the DM (see DMSetNumFields()). */
PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(numFields, 2);
  *numFields = dm->numFields;
  PetscFunctionReturn(0);
}
3017 
3018 #undef __FUNCT__
3019 #define __FUNCT__ "DMSetNumFields"
3020 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3021 {
3022   PetscInt       f;
3023   PetscErrorCode ierr;
3024 
3025   PetscFunctionBegin;
3026   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3027   for (f = 0; f < dm->numFields; ++f) {
3028     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3029   }
3030   ierr          = PetscFree(dm->fields);CHKERRQ(ierr);
3031   dm->numFields = numFields;
3032   ierr          = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3033   for (f = 0; f < dm->numFields; ++f) {
3034     ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr);
3035   }
3036   PetscFunctionReturn(0);
3037 }
3038 
3039 #undef __FUNCT__
3040 #define __FUNCT__ "DMGetField"
3041 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3042 {
3043   PetscFunctionBegin;
3044   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3045   PetscValidPointer(field, 2);
3046   if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3047   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3048   *field = dm->fields[f];
3049   PetscFunctionReturn(0);
3050 }
3051 
3052 #undef __FUNCT__
3053 #define __FUNCT__ "DMSetCoordinates"
3054 /*@
3055   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3056 
3057   Collective on DM
3058 
3059   Input Parameters:
3060 + dm - the DM
3061 - c - coordinate vector
3062 
3063   Note:
3064   The coordinates do include those for ghost points, which are in the local vector
3065 
3066   Level: intermediate
3067 
3068 .keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3070 @*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy: safe even if c == dm->coordinates */
  ierr            = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr            = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the cached local coordinates; they will be rebuilt from the
     new global vector on the next DMGetCoordinatesLocal() */
  ierr            = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3084 
3085 #undef __FUNCT__
3086 #define __FUNCT__ "DMSetCoordinatesLocal"
3087 /*@
3088   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3089 
3090   Collective on DM
3091 
3092    Input Parameters:
3093 +  dm - the DM
3094 -  c - coordinate vector
3095 
3096   Note:
3097   The coordinates of ghost points can be set using DMSetCoordinates()
3098   followed by DMGetCoordinatesLocal(). This is intended to enable the
3099   setting of ghost coordinates outside of the domain.
3100 
3101   Level: intermediate
3102 
3103 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3104 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3105 @*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy: safe even if c == dm->coordinatesLocal */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);

  dm->coordinatesLocal = c;

  /* Invalidate the cached global coordinates; they will be rebuilt from the
     new local vector on the next DMGetCoordinates() */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3121 
3122 #undef __FUNCT__
3123 #define __FUNCT__ "DMGetCoordinates"
3124 /*@
3125   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3126 
3127   Not Collective
3128 
3129   Input Parameter:
3130 . dm - the DM
3131 
3132   Output Parameter:
3133 . c - global coordinate vector
3134 
3135   Note:
3136   This is a borrowed reference, so the user should NOT destroy this vector
3137 
3138   Each process has only the local coordinates (does NOT have the ghost coordinates).
3139 
3140   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3141   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3142 
3143   Level: intermediate
3144 
3145 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3146 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3147 @*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the global vector from the local one when only the local
     coordinates are stored.
     NOTE(review): this path calls DMCreateGlobalVector/DMLocalToGlobal, which
     are collective, although the function is documented "Not Collective" —
     verify callers invoke it on all ranks together. */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  /* Borrowed reference */
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3167 
3168 #undef __FUNCT__
3169 #define __FUNCT__ "DMGetCoordinatesLocal"
3170 /*@
3171   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3172 
3173   Collective on DM
3174 
3175   Input Parameter:
3176 . dm - the DM
3177 
3178   Output Parameter:
3179 . c - coordinate vector
3180 
3181   Note:
3182   This is a borrowed reference, so the user should NOT destroy this vector
3183 
3184   Each process has the local and ghost coordinates
3185 
3186   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3187   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3188 
3189   Level: intermediate
3190 
3191 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3192 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3193 @*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the local (ghosted) vector by scattering the global
     coordinates through the coordinate DM */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  /* Borrowed reference */
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}
3213 
3214 #undef __FUNCT__
3215 #define __FUNCT__ "DMGetCoordinateDM"
3216 /*@
3217   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3218 
3219   Collective on DM
3220 
3221   Input Parameter:
3222 . dm - the DM
3223 
3224   Output Parameter:
3225 . cdm - coordinate DM
3226 
3227   Level: intermediate
3228 
3229 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3230 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3231 @*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  /* Lazily create and cache the coordinate DM via the type-specific hook */
  if (!dm->coordinateDM) {
    if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  /* Borrowed reference */
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}
3246 
3247 #undef __FUNCT__
3248 #define __FUNCT__ "DMLocatePoints"
3249 /*@
3250   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3251 
3252   Not collective
3253 
3254   Input Parameters:
3255 + dm - The DM
3256 - v - The Vec of points
3257 
3258   Output Parameter:
3259 . cells - The local cell numbers for cells which contain the points
3260 
3261   Level: developer
3262 
3263 .keywords: point location, mesh
3264 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3265 @*/
3266 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3267 {
3268   PetscErrorCode ierr;
3269 
3270   PetscFunctionBegin;
3271   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3272   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3273   PetscValidPointer(cells,3);
3274   if (dm->ops->locatepoints) {
3275     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3276   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
3277   PetscFunctionReturn(0);
3278 }
3279