xref: /petsc/src/dm/interface/dm.c (revision 25a7538848551b30ff59322f4567ff5665b3b03c)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 #include <petscsf.h>
3 
4 PetscClassId  DM_CLASSID;
5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal;
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "DMViewFromOptions"
9 /*
10   DMViewFromOptions - Processes command line options to determine if/how a DM is to be viewed.
11 
12   Collective on Vec
13 
14   Input Parameters:
15 + dm   - the DM
16 . prefix - prefix to use for viewing, or NULL to use prefix of 'dm'
17 - optionname - option to activate viewing
18 
19   Level: intermediate
20 
21 .keywords: DM, view, options, database
22 .seealso: VecViewFromOptions(), MatViewFromOptions()
23 */
24 PetscErrorCode  DMViewFromOptions(DM dm,const char prefix[],const char optionname[])
25 {
26   PetscErrorCode    ierr;
27   PetscBool         flg;
28   PetscViewer       viewer;
29   PetscViewerFormat format;
30 
31   PetscFunctionBegin;
32   if (prefix) {
33     ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
34   } else {
35     ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
36   }
37   if (flg) {
38     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
39     ierr = DMView(dm,viewer);CHKERRQ(ierr);
40     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
41     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
42   }
43   PetscFunctionReturn(0);
44 }
45 
46 
47 #undef __FUNCT__
48 #define __FUNCT__ "DMCreate"
49 /*@
50   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
51 
52    If you never  call DMSetType()  it will generate an
53    error when you try to use the vector.
54 
55   Collective on MPI_Comm
56 
57   Input Parameter:
58 . comm - The communicator for the DM object
59 
60   Output Parameter:
61 . dm - The DM object
62 
63   Level: beginner
64 
65 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
66 @*/
67 PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
68 {
69   DM             v;
70   PetscErrorCode ierr;
71 
72   PetscFunctionBegin;
73   PetscValidPointer(dm,2);
74   *dm = NULL;
75   ierr = VecInitializePackage();CHKERRQ(ierr);
76   ierr = MatInitializePackage();CHKERRQ(ierr);
77   ierr = DMInitializePackage();CHKERRQ(ierr);
78 
79   ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
80   ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);
81 
82 
83   v->ltogmap              = NULL;
84   v->ltogmapb             = NULL;
85   v->bs                   = 1;
86   v->coloringtype         = IS_COLORING_GLOBAL;
87   ierr                    = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
88   ierr                    = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
89   v->defaultSection       = NULL;
90   v->defaultGlobalSection = NULL;
91   {
92     PetscInt i;
93     for (i = 0; i < 10; ++i) {
94       v->nullspaceConstructors[i] = NULL;
95     }
96   }
97   v->numFields = 0;
98   v->fields    = NULL;
99 
100   *dm = v;
101   PetscFunctionReturn(0);
102 }
103 
104 #undef __FUNCT__
105 #define __FUNCT__ "DMClone"
106 /*@
107   DMClone - Creates a DM object with the same topology as the original.
108 
109   Collective on MPI_Comm
110 
111   Input Parameter:
112 . dm - The original DM object
113 
114   Output Parameter:
115 . newdm  - The new DM object
116 
117   Level: beginner
118 
119 .keywords: DM, topology, create
120 @*/
121 PetscErrorCode DMClone(DM dm, DM *newdm)
122 {
123   PetscSF        sf;
124   Vec            coords;
125   void          *ctx;
126   PetscErrorCode ierr;
127 
128   PetscFunctionBegin;
129   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
130   PetscValidPointer(newdm,2);
131   ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr);
132   if (dm->ops->clone) {
133     ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr);
134   }
135   (*newdm)->setupcalled = PETSC_TRUE;
136   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
137   ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr);
138   ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr);
139   ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr);
140   ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr);
141   if (coords) {
142     ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr);
143   } else {
144     ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr);
145     if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);}
146   }
147   PetscFunctionReturn(0);
148 }
149 
150 #undef __FUNCT__
151 #define __FUNCT__ "DMSetVecType"
152 /*@C
153        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
154 
155    Logically Collective on DMDA
156 
157    Input Parameter:
158 +  da - initial distributed array
159 .  ctype - the vector type, currently either VECSTANDARD or VECCUSP
160 
161    Options Database:
162 .   -dm_vec_type ctype
163 
164    Level: intermediate
165 
166 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
167 @*/
168 PetscErrorCode  DMSetVecType(DM da,VecType ctype)
169 {
170   PetscErrorCode ierr;
171 
172   PetscFunctionBegin;
173   PetscValidHeaderSpecific(da,DM_CLASSID,1);
174   ierr = PetscFree(da->vectype);CHKERRQ(ierr);
175   ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
176   PetscFunctionReturn(0);
177 }
178 
179 #undef __FUNCT__
180 #define __FUNCT__ "VecGetDM"
181 /*@
182   VecGetDM - Gets the DM defining the data layout of the vector
183 
184   Not collective
185 
186   Input Parameter:
187 . v - The Vec
188 
189   Output Parameter:
190 . dm - The DM
191 
192   Level: intermediate
193 
194 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
195 @*/
196 PetscErrorCode VecGetDM(Vec v, DM *dm)
197 {
198   PetscErrorCode ierr;
199 
200   PetscFunctionBegin;
201   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
202   PetscValidPointer(dm,2);
203   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
204   PetscFunctionReturn(0);
205 }
206 
207 #undef __FUNCT__
208 #define __FUNCT__ "VecSetDM"
209 /*@
210   VecSetDM - Sets the DM defining the data layout of the vector
211 
212   Not collective
213 
214   Input Parameters:
215 + v - The Vec
216 - dm - The DM
217 
218   Level: intermediate
219 
220 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
221 @*/
222 PetscErrorCode VecSetDM(Vec v, DM dm)
223 {
224   PetscErrorCode ierr;
225 
226   PetscFunctionBegin;
227   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
228   if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
229   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
230   PetscFunctionReturn(0);
231 }
232 
233 #undef __FUNCT__
234 #define __FUNCT__ "DMSetMatType"
235 /*@C
236        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
237 
238    Logically Collective on DM
239 
240    Input Parameter:
241 +  dm - the DM context
242 .  ctype - the matrix type
243 
244    Options Database:
245 .   -dm_mat_type ctype
246 
247    Level: intermediate
248 
249 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
250 @*/
251 PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
252 {
253   PetscErrorCode ierr;
254 
255   PetscFunctionBegin;
256   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
257   ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
258   ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
259   PetscFunctionReturn(0);
260 }
261 
262 #undef __FUNCT__
263 #define __FUNCT__ "MatGetDM"
264 /*@
265   MatGetDM - Gets the DM defining the data layout of the matrix
266 
267   Not collective
268 
269   Input Parameter:
270 . A - The Mat
271 
272   Output Parameter:
273 . dm - The DM
274 
275   Level: intermediate
276 
277 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
278 @*/
279 PetscErrorCode MatGetDM(Mat A, DM *dm)
280 {
281   PetscErrorCode ierr;
282 
283   PetscFunctionBegin;
284   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
285   PetscValidPointer(dm,2);
286   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
287   PetscFunctionReturn(0);
288 }
289 
290 #undef __FUNCT__
291 #define __FUNCT__ "MatSetDM"
292 /*@
293   MatSetDM - Sets the DM defining the data layout of the matrix
294 
295   Not collective
296 
297   Input Parameters:
298 + A - The Mat
299 - dm - The DM
300 
301   Level: intermediate
302 
303 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
304 @*/
305 PetscErrorCode MatSetDM(Mat A, DM dm)
306 {
307   PetscErrorCode ierr;
308 
309   PetscFunctionBegin;
310   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
311   if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
312   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
313   PetscFunctionReturn(0);
314 }
315 
316 #undef __FUNCT__
317 #define __FUNCT__ "DMSetOptionsPrefix"
318 /*@C
319    DMSetOptionsPrefix - Sets the prefix used for searching for all
320    DMDA options in the database.
321 
322    Logically Collective on DMDA
323 
324    Input Parameter:
325 +  da - the DMDA context
326 -  prefix - the prefix to prepend to all option names
327 
328    Notes:
329    A hyphen (-) must NOT be given at the beginning of the prefix name.
330    The first character of all runtime options is AUTOMATICALLY the hyphen.
331 
332    Level: advanced
333 
334 .keywords: DMDA, set, options, prefix, database
335 
336 .seealso: DMSetFromOptions()
337 @*/
338 PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
339 {
340   PetscErrorCode ierr;
341 
342   PetscFunctionBegin;
343   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
344   ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
345   PetscFunctionReturn(0);
346 }
347 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */
  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectCompose((*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr);
  }
  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  cnt++;
    if ((*dm)->globalin[i]) cnt++;
  }
  /* each named vector (global and local) also holds a reference back to this DM */
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
  /* the solution vector x may reference the DM through VecSetDM() */
  if ((*dm)->x) {
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* only proceed with destruction when the remaining references are exactly the circular ones counted above */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    /* a non-NULL localout slot means a caller still holds a vector from DMGetLocalVector() */
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = NULL;

  for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedlocal = NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = NULL;
  }
  {
    DMSubDomainHookLink link,next;
    for (link=(*dm)->subdomainhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->subdomainhook = NULL;
  }
  {
    DMGlobalToLocalHookLink link,next;
    for (link=(*dm)->gtolhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->gtolhook = NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = NULL;
  }

  /* drop the solver contexts (KSP/SNES/TS) attached to this DM */
  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  /* user-supplied destructor for the application context, if one was registered */
  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectAMSViewOff((PetscObject)*dm);CHKERRQ(ierr);

  /* type-specific teardown runs last, before the header itself is freed */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
504 
505 #undef __FUNCT__
506 #define __FUNCT__ "DMSetUp"
507 /*@
508     DMSetUp - sets up the data structures inside a DM object
509 
510     Collective on DM
511 
512     Input Parameter:
513 .   dm - the DM object to setup
514 
515     Level: developer
516 
517 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
518 
519 @*/
520 PetscErrorCode  DMSetUp(DM dm)
521 {
522   PetscErrorCode ierr;
523 
524   PetscFunctionBegin;
525   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
526   if (dm->setupcalled) PetscFunctionReturn(0);
527   if (dm->ops->setup) {
528     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
529   }
530   dm->setupcalled = PETSC_TRUE;
531   PetscFunctionReturn(0);
532 }
533 
534 #undef __FUNCT__
535 #define __FUNCT__ "DMSetFromOptions"
536 /*@
537     DMSetFromOptions - sets parameters in a DM from the options database
538 
539     Collective on DM
540 
541     Input Parameter:
542 .   dm - the DM object to set options for
543 
544     Options Database:
545 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
546 .   -dm_vec_type <type>  type of vector to create inside DM
547 .   -dm_mat_type <type>  type of matrix to create inside DM
548 -   -dm_coloring_type <global or ghosted>
549 
550     Level: developer
551 
552 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
553 
554 @*/
555 PetscErrorCode  DMSetFromOptions(DM dm)
556 {
557   char           typeName[256] = MATAIJ;
558   PetscBool      flg;
559   PetscErrorCode ierr;
560 
561   PetscFunctionBegin;
562   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
563   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
564   ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr);
565   ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
566   if (flg) {
567     ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
568   }
569   ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
570   if (flg) {
571     ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
572   }
573   ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr);
574   if (dm->ops->setfromoptions) {
575     ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
576   }
577   /* process any options handlers added with PetscObjectAddOptionsHandler() */
578   ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
579   ierr = PetscOptionsEnd();CHKERRQ(ierr);
580   ierr = DMViewFromOptions(dm,NULL,"-dm_view");CHKERRQ(ierr);
581   PetscFunctionReturn(0);
582 }
583 
584 #undef __FUNCT__
585 #define __FUNCT__ "DMView"
586 /*@C
587     DMView - Views a vector packer or DMDA.
588 
589     Collective on DM
590 
591     Input Parameter:
592 +   dm - the DM object to view
593 -   v - the viewer
594 
595     Level: developer
596 
597 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
598 
599 @*/
600 PetscErrorCode  DMView(DM dm,PetscViewer v)
601 {
602   PetscErrorCode ierr;
603   PetscBool      isbinary;
604 
605   PetscFunctionBegin;
606   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
607   if (!v) {
608     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr);
609   }
610   ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
611   if (isbinary) {
612     PetscInt classid = DM_FILE_CLASSID;
613     char     type[256];
614 
615     ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
616     ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
617     ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
618   }
619   if (dm->ops->view) {
620     ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
621   }
622   PetscFunctionReturn(0);
623 }
624 
625 #undef __FUNCT__
626 #define __FUNCT__ "DMCreateGlobalVector"
627 /*@
628     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
629 
630     Collective on DM
631 
632     Input Parameter:
633 .   dm - the DM object
634 
635     Output Parameter:
636 .   vec - the global vector
637 
638     Level: beginner
639 
640 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
641 
642 @*/
643 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
644 {
645   PetscErrorCode ierr;
646 
647   PetscFunctionBegin;
648   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
649   ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
650   PetscFunctionReturn(0);
651 }
652 
653 #undef __FUNCT__
654 #define __FUNCT__ "DMCreateLocalVector"
655 /*@
656     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
657 
658     Not Collective
659 
660     Input Parameter:
661 .   dm - the DM object
662 
663     Output Parameter:
664 .   vec - the local vector
665 
666     Level: beginner
667 
668 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
669 
670 @*/
671 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
672 {
673   PetscErrorCode ierr;
674 
675   PetscFunctionBegin;
676   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
677   ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
678   PetscFunctionReturn(0);
679 }
680 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

   The mapping is built lazily on first access and cached in dm->ltogmap;
   subsequent calls return the cached object.

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    /* Preferred path: derive the mapping from the default PetscSection, when one exists */
    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      PetscInt *ltog;
      PetscInt pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      /* For each point in the chart, map its local dofs (counted from the local
         section) to consecutive global indices starting at the point's offset in
         the global section */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the mapping takes ownership of the ltog array */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr);
    } else {
      /* Fallback: ask the DM implementation to build the mapping itself */
      if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
741 
742 #undef __FUNCT__
743 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
744 /*@
745    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
746 
747    Collective on DM
748 
749    Input Parameter:
750 .  da - the distributed array that provides the mapping
751 
752    Output Parameter:
753 .  ltog - the block mapping
754 
755    Level: intermediate
756 
757    Notes:
758    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
759    MatSetLocalToGlobalMappingBlock().
760 
761 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
762 @*/
763 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
764 {
765   PetscErrorCode ierr;
766 
767   PetscFunctionBegin;
768   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
769   PetscValidPointer(ltog,2);
770   if (!dm->ltogmapb) {
771     PetscInt bs;
772     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
773     if (bs > 1) {
774       if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
775       ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
776     } else {
777       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
778       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
779     }
780   }
781   *ltog = dm->ltogmapb;
782   PetscFunctionReturn(0);
783 }
784 
785 #undef __FUNCT__
786 #define __FUNCT__ "DMGetBlockSize"
787 /*@
788    DMGetBlockSize - Gets the inherent block size associated with a DM
789 
790    Not Collective
791 
792    Input Parameter:
793 .  dm - the DM with block structure
794 
795    Output Parameter:
796 .  bs - the block size, 1 implies no exploitable block structure
797 
798    Level: intermediate
799 
800 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
801 @*/
802 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
803 {
804   PetscFunctionBegin;
805   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
806   PetscValidPointer(bs,2);
807   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
808   *bs = dm->bs;
809   PetscFunctionReturn(0);
810 }
811 
812 #undef __FUNCT__
813 #define __FUNCT__ "DMCreateInterpolation"
814 /*@
815     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
816 
817     Collective on DM
818 
819     Input Parameter:
820 +   dm1 - the DM object
821 -   dm2 - the second, finer DM object
822 
823     Output Parameter:
824 +  mat - the interpolation
825 -  vec - the scaling (optional)
826 
827     Level: developer
828 
829     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
830         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
831 
832         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
833         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
834 
835 
836 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
837 
838 @*/
839 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
840 {
841   PetscErrorCode ierr;
842 
843   PetscFunctionBegin;
844   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
845   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
846   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
847   PetscFunctionReturn(0);
848 }
849 
850 #undef __FUNCT__
851 #define __FUNCT__ "DMCreateInjection"
852 /*@
853     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
854 
855     Collective on DM
856 
857     Input Parameter:
858 +   dm1 - the DM object
859 -   dm2 - the second, finer DM object
860 
861     Output Parameter:
862 .   ctx - the injection
863 
864     Level: developer
865 
866    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
867         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
868 
869 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
870 
871 @*/
872 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
873 {
874   PetscErrorCode ierr;
875 
876   PetscFunctionBegin;
877   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
878   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
879   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
880   PetscFunctionReturn(0);
881 }
882 
883 #undef __FUNCT__
884 #define __FUNCT__ "DMCreateColoring"
885 /*@C
886     DMCreateColoring - Gets coloring for a DMDA or DMComposite
887 
888     Collective on DM
889 
890     Input Parameter:
891 +   dm - the DM object
892 .   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
893 -   matype - either MATAIJ or MATBAIJ
894 
895     Output Parameter:
896 .   coloring - the coloring
897 
898     Level: developer
899 
900 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()
901 
902 @*/
903 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
904 {
905   PetscErrorCode ierr;
906 
907   PetscFunctionBegin;
908   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
909   if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
910   ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
911   PetscFunctionReturn(0);
912 }
913 
914 #undef __FUNCT__
915 #define __FUNCT__ "DMCreateMatrix"
916 /*@C
917     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
918 
919     Collective on DM
920 
921     Input Parameter:
922 +   dm - the DM object
923 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
924             any type which inherits from one of these (such as MATAIJ)
925 
926     Output Parameter:
927 .   mat - the empty Jacobian
928 
929     Level: beginner
930 
931     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
932        do not need to do it yourself.
933 
934        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
935        the nonzero pattern call DMDASetMatPreallocateOnly()
936 
937        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
938        internally by PETSc.
939 
940        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
941        the indices for the global numbering for DMDAs which is complicated.
942 
943 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
944 
945 @*/
946 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
947 {
948   PetscErrorCode ierr;
949 
950   PetscFunctionBegin;
951   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
952   ierr = MatInitializePackage();CHKERRQ(ierr);
953   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
954   PetscValidPointer(mat,3);
955   if (dm->mattype) {
956     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
957   } else {
958     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
959   }
960   PetscFunctionReturn(0);
961 }
962 
963 #undef __FUNCT__
964 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
965 /*@
966   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
967     preallocated but the nonzero structure and zero values will not be set.
968 
969   Logically Collective on DMDA
970 
971   Input Parameter:
972 + dm - the DM
973 - only - PETSC_TRUE if only want preallocation
974 
975   Level: developer
976 .seealso DMCreateMatrix()
977 @*/
978 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
979 {
980   PetscFunctionBegin;
981   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
982   dm->prealloc_only = only;
983   PetscFunctionReturn(0);
984 }
985 
986 #undef __FUNCT__
987 #define __FUNCT__ "DMGetWorkArray"
988 /*@C
989   DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
990 
991   Not Collective
992 
993   Input Parameters:
994 + dm - the DM object
995 . count - The minium size
996 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
997 
998   Output Parameter:
999 . array - the work array
1000 
1001   Level: developer
1002 
1003 .seealso DMDestroy(), DMCreate()
1004 @*/
1005 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1006 {
1007   PetscErrorCode ierr;
1008   DMWorkLink     link;
1009   size_t         size;
1010 
1011   PetscFunctionBegin;
1012   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1013   PetscValidPointer(mem,4);
1014   if (dm->workin) {
1015     link       = dm->workin;
1016     dm->workin = dm->workin->next;
1017   } else {
1018     ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
1019   }
1020   ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
1021   if (size*count > link->bytes) {
1022     ierr        = PetscFree(link->mem);CHKERRQ(ierr);
1023     ierr        = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
1024     link->bytes = size*count;
1025   }
1026   link->next   = dm->workout;
1027   dm->workout  = link;
1028   *(void**)mem = link->mem;
1029   PetscFunctionReturn(0);
1030 }
1031 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size passed to DMGetWorkArray()
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Input/Output Parameter:
. array - the work array; set to NULL on return

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Walk the checked-out list looking for the link that owns this buffer. */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      /* Unlink from the checked-out list, push onto the checked-in (free) list,
         and clear the caller's pointer to guard against reuse after restore. */
      *p           = link->next;
      link->next   = dm->workin;
      dm->workin   = link;
      *(void**)mem = NULL;
      PetscFunctionReturn(0);
    }
  }
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1070 
1071 #undef __FUNCT__
1072 #define __FUNCT__ "DMSetNullSpaceConstructor"
1073 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1074 {
1075   PetscFunctionBegin;
1076   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1077   if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1078   dm->nullspaceConstructors[field] = nullsp;
1079   PetscFunctionReturn(0);
1080 }
1081 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or NULL if not requested)
. fieldNames - The name for each field (or NULL if not requested)
- fields     - The global indices for each field (or NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Default every requested output so all exit paths leave them well defined. */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = NULL;
  }
  /* Prefer the PetscSection description of the layout; fall back to the
     type-specific implementation only when no section is attached. */
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* First pass: count the locally owned, unconstrained dofs of each field.
       A positive global dof count marks points owned by this process. */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr           = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr           = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    /* Allocate the per-field index arrays, then reset the counters so they can
       serve as insertion cursors in the second pass. */
    for (f = 0; f < nF; ++f) {
      ierr          = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0;
    }
    /* Second pass: fill in the global indices. Fields are laid out consecutively
       at each point starting from the point's global offset. */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) *numFields = nF;
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        /* PETSC_OWN_POINTER: the IS takes ownership of fieldIndices[f]. */
        ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else if (dm->ops->createfieldis) {
    ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1194 
1195 
1196 #undef __FUNCT__
1197 #define __FUNCT__ "DMCreateFieldDecomposition"
1198 /*@C
1199   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1200                           corresponding to different fields: each IS contains the global indices of the dofs of the
1201                           corresponding field. The optional list of DMs define the DM for each subproblem.
1202                           Generalizes DMCreateFieldIS().
1203 
1204   Not collective
1205 
1206   Input Parameter:
1207 . dm - the DM object
1208 
1209   Output Parameters:
1210 + len       - The number of subproblems in the field decomposition (or NULL if not requested)
1211 . namelist  - The name for each field (or NULL if not requested)
1212 . islist    - The global indices for each field (or NULL if not requested)
1213 - dmlist    - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1214 
1215   Level: intermediate
1216 
1217   Notes:
1218   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1219   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1220   and all of the arrays should be freed with PetscFree().
1221 
1222 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1223 @*/
1224 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1225 {
1226   PetscErrorCode ierr;
1227 
1228   PetscFunctionBegin;
1229   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1230   if (len) {
1231     PetscValidPointer(len,2);
1232     *len = 0;
1233   }
1234   if (namelist) {
1235     PetscValidPointer(namelist,3);
1236     *namelist = 0;
1237   }
1238   if (islist) {
1239     PetscValidPointer(islist,4);
1240     *islist = 0;
1241   }
1242   if (dmlist) {
1243     PetscValidPointer(dmlist,5);
1244     *dmlist = 0;
1245   }
1246   /*
1247    Is it a good idea to apply the following check across all impls?
1248    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1249    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1250    */
1251   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1252   if (!dm->ops->createfielddecomposition) {
1253     PetscSection section;
1254     PetscInt     numFields, f;
1255 
1256     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1257     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1258     if (section && numFields && dm->ops->createsubdm) {
1259       *len = numFields;
1260       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1261       for (f = 0; f < numFields; ++f) {
1262         const char *fieldName;
1263 
1264         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1265         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1266         ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr);
1267       }
1268     } else {
1269       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1270       /* By default there are no DMs associated with subproblems. */
1271       if (dmlist) *dmlist = NULL;
1272     }
1273   } else {
1274     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1275   }
1276   PetscFunctionReturn(0);
1277 }
1278 
#undef __FUNCT__
#define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm - the DM object
. numFields - number of fields in this subproblem
- fields    - the field numbers of the selected fields

  Output Parameters:
+ is    - The global indices for the subproblem (or NULL if not requested)
- subdm - The DM for the subproblem (or NULL if not requested)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  if (is) PetscValidPointer(is,4);
  if (subdm) PetscValidPointer(subdm,5);
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1314 
1315 
1316 #undef __FUNCT__
1317 #define __FUNCT__ "DMCreateDomainDecomposition"
1318 /*@C
1319   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1320                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1321                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1322                           define a nonoverlapping covering, while outer subdomains can overlap.
1323                           The optional list of DMs define the DM for each subproblem.
1324 
1325   Not collective
1326 
1327   Input Parameter:
1328 . dm - the DM object
1329 
1330   Output Parameters:
1331 + len         - The number of subproblems in the domain decomposition (or NULL if not requested)
1332 . namelist    - The name for each subdomain (or NULL if not requested)
1333 . innerislist - The global indices for each inner subdomain (or NULL, if not requested)
1334 . outerislist - The global indices for each outer subdomain (or NULL, if not requested)
1335 - dmlist      - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1336 
1337   Level: intermediate
1338 
1339   Notes:
1340   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1341   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1342   and all of the arrays should be freed with PetscFree().
1343 
1344 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1345 @*/
1346 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1347 {
1348   PetscErrorCode      ierr;
1349   DMSubDomainHookLink link;
1350   PetscInt            i,l;
1351 
1352   PetscFunctionBegin;
1353   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1354   if (len)           {PetscValidPointer(len,2);            *len         = 0;}
1355   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = NULL;}
1356   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = NULL;}
1357   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = NULL;}
1358   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = NULL;}
1359   /*
1360    Is it a good idea to apply the following check across all impls?
1361    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1362    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1363    */
1364   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1365   if (dm->ops->createdomaindecomposition) {
1366     ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
1367     /* copy subdomain hooks and context over to the subdomain DMs */
1368     if (dmlist) {
1369       for (i = 0; i < l; i++) {
1370         for (link=dm->subdomainhook; link; link=link->next) {
1371           if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
1372         }
1373         (*dmlist)[i]->ctx = dm->ctx;
1374       }
1375     }
1376     if (len) *len = l;
1377   }
1378   PetscFunctionReturn(0);
1379 }
1380 
1381 
1382 #undef __FUNCT__
1383 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1384 /*@C
1385   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1386 
1387   Not collective
1388 
1389   Input Parameters:
1390 + dm - the DM object
1391 . n  - the number of subdomain scatters
1392 - subdms - the local subdomains
1393 
1394   Output Parameters:
1395 + n     - the number of scatters returned
1396 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1397 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1398 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1399 
1400   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1401   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1402   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1403   solution and residual data.
1404 
1405   Level: developer
1406 
1407 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1408 @*/
1409 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1410 {
1411   PetscErrorCode ierr;
1412 
1413   PetscFunctionBegin;
1414   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1415   PetscValidPointer(subdms,3);
1416   if (dm->ops->createddscatters) {
1417     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1418   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1419   PetscFunctionReturn(0);
1420 }
1421 
1422 #undef __FUNCT__
1423 #define __FUNCT__ "DMRefine"
1424 /*@
1425   DMRefine - Refines a DM object
1426 
1427   Collective on DM
1428 
1429   Input Parameter:
1430 + dm   - the DM object
1431 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1432 
1433   Output Parameter:
1434 . dmf - the refined DM, or NULL
1435 
1436   Note: If no refinement was done, the return value is NULL
1437 
1438   Level: developer
1439 
1440 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1441 @*/
1442 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1443 {
1444   PetscErrorCode   ierr;
1445   DMRefineHookLink link;
1446 
1447   PetscFunctionBegin;
1448   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1449   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1450   if (*dmf) {
1451     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1452 
1453     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1454 
1455     (*dmf)->ctx       = dm->ctx;
1456     (*dmf)->leveldown = dm->leveldown;
1457     (*dmf)->levelup   = dm->levelup + 1;
1458 
1459     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1460     for (link=dm->refinehook; link; link=link->next) {
1461       if (link->refinehook) {
1462         ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);
1463       }
1464     }
1465   }
1466   PetscFunctionReturn(0);
1467 }
1468 
1469 #undef __FUNCT__
1470 #define __FUNCT__ "DMRefineHookAdd"
1471 /*@C
1472    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1473 
1474    Logically Collective
1475 
1476    Input Arguments:
1477 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1478 .  refinehook - function to run when setting up a coarser level
1479 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1480 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1481 
1482    Calling sequence of refinehook:
1483 $    refinehook(DM coarse,DM fine,void *ctx);
1484 
1485 +  coarse - coarse level DM
1486 .  fine - fine level DM to interpolate problem to
1487 -  ctx - optional user-defined function context
1488 
1489    Calling sequence for interphook:
1490 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1491 
1492 +  coarse - coarse level DM
1493 .  interp - matrix interpolating a coarse-level solution to the finer grid
1494 .  fine - fine level DM to update
1495 -  ctx - optional user-defined function context
1496 
1497    Level: advanced
1498 
1499    Notes:
1500    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1501 
1502    If this function is called multiple times, the hooks will be run in the order they are added.
1503 
1504    This function is currently not available from Fortran.
1505 
1506 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1507 @*/
1508 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1509 {
1510   PetscErrorCode   ierr;
1511   DMRefineHookLink link,*p;
1512 
1513   PetscFunctionBegin;
1514   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1515   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1516   ierr             = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1517   link->refinehook = refinehook;
1518   link->interphook = interphook;
1519   link->ctx        = ctx;
1520   link->next       = NULL;
1521   *p               = link;
1522   PetscFunctionReturn(0);
1523 }
1524 
1525 #undef __FUNCT__
1526 #define __FUNCT__ "DMInterpolate"
1527 /*@
1528    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1529 
1530    Collective if any hooks are
1531 
1532    Input Arguments:
1533 +  coarse - coarser DM to use as a base
1534 .  restrct - interpolation matrix, apply using MatInterpolate()
1535 -  fine - finer DM to update
1536 
1537    Level: developer
1538 
1539 .seealso: DMRefineHookAdd(), MatInterpolate()
1540 @*/
1541 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1542 {
1543   PetscErrorCode   ierr;
1544   DMRefineHookLink link;
1545 
1546   PetscFunctionBegin;
1547   for (link=fine->refinehook; link; link=link->next) {
1548     if (link->interphook) {
1549       ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
1550     }
1551   }
1552   PetscFunctionReturn(0);
1553 }
1554 
1555 #undef __FUNCT__
1556 #define __FUNCT__ "DMGetRefineLevel"
1557 /*@
1558     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1559 
1560     Not Collective
1561 
1562     Input Parameter:
1563 .   dm - the DM object
1564 
1565     Output Parameter:
1566 .   level - number of refinements
1567 
1568     Level: developer
1569 
1570 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1571 
1572 @*/
1573 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1574 {
1575   PetscFunctionBegin;
1576   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1577   *level = dm->levelup;
1578   PetscFunctionReturn(0);
1579 }
1580 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@C
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr            = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook   = endhook;
  link->ctx       = ctx;
  link->next      = NULL;
  *p              = link;
  PetscFunctionReturn(0);
}
1630 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user hooks registered with DMGlobalToLocalHookAdd() before any communication starts */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  /* If a default PetscSF is attached, use it; the SF broadcast only supports insertion */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    /* NOTE(review): the arrays are restored before PetscSFBcastEnd() (called in
       DMGlobalToLocalEnd) completes; this relies on the raw pointers staying valid
       between Get/Restore pairs -- confirm against the Vec implementations in use. */
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific path; the *_ALL_VALUES modes are mapped to their plain counterparts */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1678 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
    DMGlobalToLocalEnd - Ends updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  PetscScalar             *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Must take the same branch as DMGlobalToLocalBegin() so the SF begin/end pair matches */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    /* Complete the broadcast started in DMGlobalToLocalBegin() */
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* Run user end-hooks after the data has arrived */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1724 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
    DMLocalToGlobalBegin - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
           base point.
-   g - the global vector

    Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
           array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
           global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Map the insertion mode onto the MPI reduction used by the SF reduce */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    /* NOTE(review): arrays are restored before PetscSFReduceEnd() completes; this
       relies on the raw pointers staying valid between Get/Restore pairs -- confirm. */
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific path; the *_ALL_VALUES modes are mapped to their plain counterparts */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1780 
1781 #undef __FUNCT__
1782 #define __FUNCT__ "DMLocalToGlobalEnd"
1783 /*@
1784     DMLocalToGlobalEnd - updates global vectors from local vectors
1785 
1786     Neighbor-wise Collective on DM
1787 
1788     Input Parameters:
1789 +   dm - the DM object
1790 .   l - the local vector
1791 .   mode - INSERT_VALUES or ADD_VALUES
1792 -   g - the global vector
1793 
1794 
1795     Level: beginner
1796 
1797 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()
1798 
1799 @*/
1800 PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
1801 {
1802   PetscSF        sf;
1803   PetscErrorCode ierr;
1804 
1805   PetscFunctionBegin;
1806   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1807   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1808   if (sf) {
1809     MPI_Op      op;
1810     PetscScalar *lArray, *gArray;
1811 
1812     switch (mode) {
1813     case INSERT_VALUES:
1814     case INSERT_ALL_VALUES:
1815       op = MPIU_REPLACE; break;
1816     case ADD_VALUES:
1817     case ADD_ALL_VALUES:
1818       op = MPI_SUM; break;
1819     default:
1820       SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1821     }
1822     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1823     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1824     ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1825     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1826     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1827   } else {
1828     ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1829   }
1830   PetscFunctionReturn(0);
1831 }
1832 
1833 #undef __FUNCT__
1834 #define __FUNCT__ "DMLocalToLocalBegin"
1835 /*@
1836    DMLocalToLocalBegin - Maps from a local vector (including ghost points
1837    that contain irrelevant values) to another local vector where the ghost
1838    points in the second are set correctly. Must be followed by DMLocalToLocalEnd().
1839 
1840    Neighbor-wise Collective on DM and Vec
1841 
1842    Input Parameters:
1843 +  dm - the DM object
1844 .  g - the original local vector
1845 -  mode - one of INSERT_VALUES or ADD_VALUES
1846 
1847    Output Parameter:
1848 .  l  - the local vector with correct ghost values
1849 
1850    Level: intermediate
1851 
1852    Notes:
1853    The local vectors used here need not be the same as those
1854    obtained from DMCreateLocalVector(), BUT they
1855    must have the same parallel data layout; they could, for example, be
1856    obtained with VecDuplicate() from the DM originating vectors.
1857 
1858 .keywords: DM, local-to-local, begin
1859 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1860 
1861 @*/
1862 PetscErrorCode  DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1863 {
1864   PetscErrorCode          ierr;
1865 
1866   PetscFunctionBegin;
1867   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1868   ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1869   PetscFunctionReturn(0);
1870 }
1871 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToLocalEnd"
/*@
   DMLocalToLocalEnd - Maps from a local vector (including ghost points
   that contain irrelevant values) to another local vector where the ghost
   points in the second are set correctly. Must be preceded by DMLocalToLocalBegin().

   Neighbor-wise Collective on DM and Vec

   Input Parameters:
+  dm - the DM object
.  g - the original local vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l  - the local vector with correct ghost values

   Level: intermediate

   Notes:
   The local vectors used here need not be the same as those
   obtained from DMCreateLocalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DM originating vectors.

.keywords: DM, local-to-local, end
.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Dispatch to the type-specific implementation; the *_ALL_VALUES modes are folded into their plain counterparts */
  ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1910 
1911 
#undef __FUNCT__
#define __FUNCT__ "DMCoarsen"
/*@
    DMCoarsen - Coarsens a DM object

    Collective on DM

    Input Parameter:
+   dm - the DM object
-   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

    Output Parameter:
.   dmc - the coarsened DM

    Level: developer

.seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr                      = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
  /* The coarse DM inherits the matrix-creation routine, user context, matrix type and level counters of the fine DM */
  (*dmc)->ops->creatematrix = dm->ops->creatematrix;
  ierr                      = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
  (*dmc)->ctx               = dm->ctx;
  (*dmc)->levelup           = dm->levelup;
  (*dmc)->leveldown         = dm->leveldown + 1;
  ierr                      = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
  /* Run any coarsen hooks registered with DMCoarsenHookAdd(), in registration order */
  for (link=dm->coarsenhook; link; link=link->next) {
    if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1950 
1951 #undef __FUNCT__
1952 #define __FUNCT__ "DMCoarsenHookAdd"
1953 /*@C
1954    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1955 
1956    Logically Collective
1957 
1958    Input Arguments:
1959 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1960 .  coarsenhook - function to run when setting up a coarser level
1961 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1962 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1963 
1964    Calling sequence of coarsenhook:
1965 $    coarsenhook(DM fine,DM coarse,void *ctx);
1966 
1967 +  fine - fine level DM
1968 .  coarse - coarse level DM to restrict problem to
1969 -  ctx - optional user-defined function context
1970 
1971    Calling sequence for restricthook:
1972 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1973 
1974 +  fine - fine level DM
1975 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1976 .  rscale - scaling vector for restriction
1977 .  inject - matrix restricting by injection
1978 .  coarse - coarse level DM to update
1979 -  ctx - optional user-defined function context
1980 
1981    Level: advanced
1982 
1983    Notes:
1984    This function is only needed if auxiliary data needs to be set up on coarse grids.
1985 
1986    If this function is called multiple times, the hooks will be run in the order they are added.
1987 
1988    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1989    extract the finest level information from its context (instead of from the SNES).
1990 
1991    This function is currently not available from Fortran.
1992 
1993 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1994 @*/
1995 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1996 {
1997   PetscErrorCode    ierr;
1998   DMCoarsenHookLink link,*p;
1999 
2000   PetscFunctionBegin;
2001   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
2002   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2003   ierr               = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
2004   link->coarsenhook  = coarsenhook;
2005   link->restricthook = restricthook;
2006   link->ctx          = ctx;
2007   link->next         = NULL;
2008   *p                 = link;
2009   PetscFunctionReturn(0);
2010 }
2011 
2012 #undef __FUNCT__
2013 #define __FUNCT__ "DMRestrict"
2014 /*@
2015    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
2016 
2017    Collective if any hooks are
2018 
2019    Input Arguments:
2020 +  fine - finer DM to use as a base
2021 .  restrct - restriction matrix, apply using MatRestrict()
2022 .  inject - injection matrix, also use MatRestrict()
2023 -  coarse - coarer DM to update
2024 
2025    Level: developer
2026 
2027 .seealso: DMCoarsenHookAdd(), MatRestrict()
2028 @*/
2029 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
2030 {
2031   PetscErrorCode    ierr;
2032   DMCoarsenHookLink link;
2033 
2034   PetscFunctionBegin;
2035   for (link=fine->coarsenhook; link; link=link->next) {
2036     if (link->restricthook) {
2037       ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
2038     }
2039   }
2040   PetscFunctionReturn(0);
2041 }
2042 
#undef __FUNCT__
#define __FUNCT__ "DMSubDomainHookAdd"
/*@C
   DMSubDomainHookAdd - adds a callback to be run when restricting a problem to subdomain DMs

   Logically Collective

   Input Arguments:
+  global - global DM
.  ddhook - function to run to pass data to the decomposition DM upon its creation
.  restricthook - function to run to update data on block solve (at the beginning of the block solve)
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)


   Calling sequence for ddhook:
$    ddhook(DM global,DM block,void *ctx)

+  global - global DM
.  block  - block DM
-  ctx - optional user-defined function context

   Calling sequence for restricthook:
$    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)

+  global - global DM
.  out    - scatter to the outer (with ghost and overlap points) block vector
.  in     - scatter to block vector values only owned locally
.  block  - block DM
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on subdomain DMs.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the global information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr               = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ddhook       = ddhook;
  link->ctx          = ctx;
  link->next         = NULL;
  *p                 = link;
  PetscFunctionReturn(0);
}
2103 
2104 #undef __FUNCT__
2105 #define __FUNCT__ "DMSubDomainRestrict"
2106 /*@
2107    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2108 
2109    Collective if any hooks are
2110 
2111    Input Arguments:
2112 +  fine - finer DM to use as a base
2113 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
2114 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
2115 -  coarse - coarer DM to update
2116 
2117    Level: developer
2118 
2119 .seealso: DMCoarsenHookAdd(), MatRestrict()
2120 @*/
2121 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2122 {
2123   PetscErrorCode      ierr;
2124   DMSubDomainHookLink link;
2125 
2126   PetscFunctionBegin;
2127   for (link=global->subdomainhook; link; link=link->next) {
2128     if (link->restricthook) {
2129       ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);
2130     }
2131   }
2132   PetscFunctionReturn(0);
2133 }
2134 
2135 #undef __FUNCT__
2136 #define __FUNCT__ "DMGetCoarsenLevel"
2137 /*@
2138     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2139 
2140     Not Collective
2141 
2142     Input Parameter:
2143 .   dm - the DM object
2144 
2145     Output Parameter:
2146 .   level - number of coarsenings
2147 
2148     Level: developer
2149 
2150 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2151 
2152 @*/
2153 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2154 {
2155   PetscFunctionBegin;
2156   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2157   *level = dm->leveldown;
2158   PetscFunctionReturn(0);
2159 }
2160 
2161 
2162 
2163 #undef __FUNCT__
2164 #define __FUNCT__ "DMRefineHierarchy"
2165 /*@C
2166     DMRefineHierarchy - Refines a DM object, all levels at once
2167 
2168     Collective on DM
2169 
2170     Input Parameter:
2171 +   dm - the DM object
2172 -   nlevels - the number of levels of refinement
2173 
2174     Output Parameter:
2175 .   dmf - the refined DM hierarchy
2176 
2177     Level: developer
2178 
2179 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2180 
2181 @*/
2182 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2183 {
2184   PetscErrorCode ierr;
2185 
2186   PetscFunctionBegin;
2187   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2188   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2189   if (nlevels == 0) PetscFunctionReturn(0);
2190   if (dm->ops->refinehierarchy) {
2191     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2192   } else if (dm->ops->refine) {
2193     PetscInt i;
2194 
2195     ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr);
2196     for (i=1; i<nlevels; i++) {
2197       ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr);
2198     }
2199   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2200   PetscFunctionReturn(0);
2201 }
2202 
#undef __FUNCT__
#define __FUNCT__ "DMCoarsenHierarchy"
/*@C
    DMCoarsenHierarchy - Coarsens a DM object, all levels at once

    Collective on DM

    Input Parameter:
+   dm - the DM object
-   nlevels - the number of levels of coarsening

    Output Parameter:
.   dmc - the coarsened DM hierarchy

    Level: developer

.seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  PetscValidPointer(dmc,3);
  if (dm->ops->coarsenhierarchy) {
    /* The implementation provides an all-at-once coarsening */
    ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
  } else if (dm->ops->coarsen) {
    /* Otherwise build the hierarchy one level at a time */
    PetscInt i;

    ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
    for (i=1; i<nlevels; i++) {
      ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
    }
  } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
  PetscFunctionReturn(0);
}
2243 
2244 #undef __FUNCT__
2245 #define __FUNCT__ "DMCreateAggregates"
2246 /*@
2247    DMCreateAggregates - Gets the aggregates that map between
2248    grids associated with two DMs.
2249 
2250    Collective on DM
2251 
2252    Input Parameters:
2253 +  dmc - the coarse grid DM
2254 -  dmf - the fine grid DM
2255 
2256    Output Parameters:
2257 .  rest - the restriction matrix (transpose of the projection matrix)
2258 
2259    Level: intermediate
2260 
2261 .keywords: interpolation, restriction, multigrid
2262 
2263 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2264 @*/
2265 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2266 {
2267   PetscErrorCode ierr;
2268 
2269   PetscFunctionBegin;
2270   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2271   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2272   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2273   PetscFunctionReturn(0);
2274 }
2275 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
    DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

    Not Collective

    Input Parameters:
+   dm - the DM object
-   destroy - the destroy function

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Record the callback; per the contract above it is applied to the stored user context at destroy time */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2299 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
    DMSetApplicationContext - Set a user context into a DM object

    Not Collective

    Input Parameters:
+   dm - the DM object
-   ctx - the user context

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The context is stored by reference; the DM does not copy or (by default) free it */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2323 
2324 #undef __FUNCT__
2325 #define __FUNCT__ "DMGetApplicationContext"
2326 /*@
2327     DMGetApplicationContext - Gets a user context from a DM object
2328 
2329     Not Collective
2330 
2331     Input Parameter:
2332 .   dm - the DM object
2333 
2334     Output Parameter:
2335 .   ctx - the user context
2336 
2337     Level: intermediate
2338 
2339 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2340 
2341 @*/
2342 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2343 {
2344   PetscFunctionBegin;
2345   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2346   *(void**)ctx = dm->ctx;
2347   PetscFunctionReturn(0);
2348 }
2349 
2350 #undef __FUNCT__
2351 #define __FUNCT__ "DMSetVariableBounds"
2352 /*@C
2353     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2354 
2355     Logically Collective on DM
2356 
2357     Input Parameter:
2358 +   dm - the DM object
2359 -   f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)
2360 
2361     Level: intermediate
2362 
2363 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2364          DMSetJacobian()
2365 
2366 @*/
2367 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2368 {
2369   PetscFunctionBegin;
2370   dm->ops->computevariablebounds = f;
2371   PetscFunctionReturn(0);
2372 }
2373 
2374 #undef __FUNCT__
2375 #define __FUNCT__ "DMHasVariableBounds"
2376 /*@
2377     DMHasVariableBounds - does the DM object have a variable bounds function?
2378 
2379     Not Collective
2380 
2381     Input Parameter:
2382 .   dm - the DM object to destroy
2383 
2384     Output Parameter:
2385 .   flg - PETSC_TRUE if the variable bounds function exists
2386 
2387     Level: developer
2388 
2389 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2390 
2391 @*/
2392 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2393 {
2394   PetscFunctionBegin;
2395   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2396   PetscFunctionReturn(0);
2397 }
2398 
2399 #undef __FUNCT__
2400 #define __FUNCT__ "DMComputeVariableBounds"
2401 /*@C
2402     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2403 
2404     Logically Collective on DM
2405 
2406     Input Parameters:
2407 +   dm - the DM object to destroy
2408 -   x  - current solution at which the bounds are computed
2409 
2410     Output parameters:
2411 +   xl - lower bound
2412 -   xu - upper bound
2413 
2414     Level: intermediate
2415 
2416 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2417 
2418 @*/
2419 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2420 {
2421   PetscErrorCode ierr;
2422 
2423   PetscFunctionBegin;
2424   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2425   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2426   if (dm->ops->computevariablebounds) {
2427     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2428   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2429   PetscFunctionReturn(0);
2430 }
2431 
2432 #undef __FUNCT__
2433 #define __FUNCT__ "DMHasColoring"
2434 /*@
2435     DMHasColoring - does the DM object have a method of providing a coloring?
2436 
2437     Not Collective
2438 
2439     Input Parameter:
2440 .   dm - the DM object
2441 
2442     Output Parameter:
2443 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2444 
2445     Level: developer
2446 
2447 .seealso DMHasFunction(), DMCreateColoring()
2448 
2449 @*/
2450 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2451 {
2452   PetscFunctionBegin;
2453   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2454   PetscFunctionReturn(0);
2455 }
2456 
2457 #undef  __FUNCT__
2458 #define __FUNCT__ "DMSetVec"
2459 /*@C
2460     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2461 
2462     Collective on DM
2463 
2464     Input Parameter:
2465 +   dm - the DM object
2466 -   x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.
2467 
2468     Level: developer
2469 
2470 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2471 
2472 @*/
2473 PetscErrorCode  DMSetVec(DM dm,Vec x)
2474 {
2475   PetscErrorCode ierr;
2476 
2477   PetscFunctionBegin;
2478   if (x) {
2479     if (!dm->x) {
2480       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2481     }
2482     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2483   } else if (dm->x) {
2484     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2485   }
2486   PetscFunctionReturn(0);
2487 }
2488 
/* Registry of available DM implementations, populated by DMRegister() and queried by DMSetType() */
PetscFunctionList DMList              = NULL;
PetscBool         DMRegisterAllCalled = PETSC_FALSE;
2491 
#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types.

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* Nothing to do if the DM is already of the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down any existing implementation before installing the new one */
  if (dm->ops->destroy) {
    ierr             = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2537 
#undef __FUNCT__
#define __FUNCT__ "DMGetType"
/*@C
  DMGetType - Gets the DM type name (as a string) from the DM.

  Not Collective

  Input Parameter:
. dm  - The DM

  Output Parameter:
. type - The DM type name

  Level: intermediate

.keywords: DM, get, type, name
.seealso: DMSetType(), DMCreate()
@*/
PetscErrorCode  DMGetType(DM dm, DMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  PetscValidCharPointer(type,2);
  if (!DMRegisterAllCalled) {
    ierr = DMRegisterAll();CHKERRQ(ierr);
  }
  /* The returned string is owned by the DM; callers must not free it */
  *type = ((PetscObject)dm)->type_name;
  PetscFunctionReturn(0);
}
2569 
#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype and issame are computed but not consulted below — confirm whether a fast path was intended */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
       (Steps 3 and 4 are currently compiled out; step 5 is an error, no basic converter exists.)
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class;
          the composed function is looked up by the name "DMConvert_<fromtype>_<totype>_C" */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class.
           A throwaway DM of the target type is created just to query its composed functions. */
    ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) No generic fallback converter exists: report failure. */
    SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2665 
2666 /*--------------------------------------------------------------------------------------------------------------------*/
2667 
2668 #undef __FUNCT__
2669 #define __FUNCT__ "DMRegister"
2670 /*@C
2671   DMRegister -  Adds a new DM component implementation
2672 
2673   Not Collective
2674 
2675   Input Parameters:
2676 + name        - The name of a new user-defined creation routine
2677 - create_func - The creation routine itself
2678 
2679   Notes:
2680   DMRegister() may be called multiple times to add several user-defined DMs
2681 
2682 
2683   Sample usage:
2684 .vb
2685     DMRegister("my_da", MyDMCreate);
2686 .ve
2687 
2688   Then, your DM type can be chosen with the procedural interface via
2689 .vb
2690     DMCreate(MPI_Comm, DM *);
2691     DMSetType(DM,"my_da");
2692 .ve
2693    or at runtime via the option
2694 .vb
2695     -da_type my_da
2696 .ve
2697 
2698   Level: advanced
2699 
2700 .keywords: DM, register
2701 .seealso: DMRegisterAll(), DMRegisterDestroy()
2702 
2703 @*/
2704 PetscErrorCode  DMRegister(const char sname[],PetscErrorCode (*function)(DM))
2705 {
2706   PetscErrorCode ierr;
2707 
2708   PetscFunctionBegin;
2709   ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr);
2710   PetscFunctionReturn(0);
2711 }
2712 
#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary  with DMView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()

   Level: intermediate

  Notes:
   The type is determined by the data in the file, any type set into the DM before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];   /* DM type name read from the file header */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* Only the binary viewer is handled here, despite HDF5 being mentioned above */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* File header: the DM class id, then the type name; the rest of the stream
     is consumed by the type-specific load routine installed by DMSetType() */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2764 
2765 /******************************** FEM Support **********************************/
2766 
2767 #undef __FUNCT__
2768 #define __FUNCT__ "DMPrintCellVector"
2769 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2770 {
2771   PetscInt       f;
2772   PetscErrorCode ierr;
2773 
2774   PetscFunctionBegin;
2775   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2776   for (f = 0; f < len; ++f) {
2777     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2778   }
2779   PetscFunctionReturn(0);
2780 }
2781 
2782 #undef __FUNCT__
2783 #define __FUNCT__ "DMPrintCellMatrix"
2784 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2785 {
2786   PetscInt       f, g;
2787   PetscErrorCode ierr;
2788 
2789   PetscFunctionBegin;
2790   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2791   for (f = 0; f < rows; ++f) {
2792     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2793     for (g = 0; g < cols; ++g) {
2794       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2795     }
2796     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2797   }
2798   PetscFunctionReturn(0);
2799 }
2800 
#undef __FUNCT__
#define __FUNCT__ "DMPrintLocalVec"
/* Debugging aid: each rank in the DM's communicator views a copy of X (with
   entries below tol chopped) on its own stdout, in rank order; a barrier
   between ranks keeps the output from interleaving. */
PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X)
{
  PetscMPIInt    rank, numProcs;
  PetscInt       p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
  ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);CHKERRQ(ierr);
  for (p = 0; p < numProcs; ++p) {
    if (p == rank) {
      Vec x;

      /* Chop a private copy so the caller's vector is left untouched */
      ierr = VecDuplicate(X, &x);CHKERRQ(ierr);
      ierr = VecCopy(X, x);CHKERRQ(ierr);
      ierr = VecChop(x, tol);CHKERRQ(ierr);
      ierr = VecView(x, PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
      ierr = VecDestroy(&x);CHKERRQ(ierr);
      ierr = PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
    }
    ierr = PetscBarrier((PetscObject) dm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2828 
2829 #undef __FUNCT__
2830 #define __FUNCT__ "DMGetDefaultSection"
2831 /*@
2832   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2833 
2834   Input Parameter:
2835 . dm - The DM
2836 
2837   Output Parameter:
2838 . section - The PetscSection
2839 
2840   Level: intermediate
2841 
2842   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2843 
2844 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2845 @*/
2846 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
2847 {
2848   PetscFunctionBegin;
2849   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2850   PetscValidPointer(section, 2);
2851   *section = dm->defaultSection;
2852   PetscFunctionReturn(0);
2853 }
2854 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSection"
/*@
  DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroying so the call is safe when section == dm->defaultSection */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Mirror the section's fields into the DM's field objects and carry over their names */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2896 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    /* Lazily derive the global section from the local section and the point SF */
    if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    /* PETSC_FALSE presumably excludes constrained dofs -- confirm against PetscSectionCreateGlobalSection() */
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Rebuild the dof value layout to match the new global section */
    ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2932 
2933 #undef __FUNCT__
2934 #define __FUNCT__ "DMSetDefaultGlobalSection"
2935 /*@
2936   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2937 
2938   Input Parameters:
2939 + dm - The DM
2940 - section - The PetscSection, or NULL
2941 
2942   Level: intermediate
2943 
2944   Note: Any existing Section will be destroyed
2945 
2946 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2947 @*/
2948 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
2949 {
2950   PetscErrorCode ierr;
2951 
2952   PetscFunctionBegin;
2953   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2954   if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
2955   ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
2956   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2957   dm->defaultGlobalSection = section;
2958   PetscFunctionReturn(0);
2959 }
2960 
2961 #undef __FUNCT__
2962 #define __FUNCT__ "DMGetDefaultSF"
2963 /*@
2964   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2965   it is created from the default PetscSection layouts in the DM.
2966 
2967   Input Parameter:
2968 . dm - The DM
2969 
2970   Output Parameter:
2971 . sf - The PetscSF
2972 
2973   Level: intermediate
2974 
2975   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2976 
2977 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2978 @*/
2979 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
2980 {
2981   PetscInt       nroots;
2982   PetscErrorCode ierr;
2983 
2984   PetscFunctionBegin;
2985   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2986   PetscValidPointer(sf, 2);
2987   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
2988   if (nroots < 0) {
2989     PetscSection section, gSection;
2990 
2991     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2992     if (section) {
2993       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2994       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2995     } else {
2996       *sf = NULL;
2997       PetscFunctionReturn(0);
2998     }
2999   }
3000   *sf = dm->defaultSF;
3001   PetscFunctionReturn(0);
3002 }
3003 
3004 #undef __FUNCT__
3005 #define __FUNCT__ "DMSetDefaultSF"
3006 /*@
3007   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
3008 
3009   Input Parameters:
3010 + dm - The DM
3011 - sf - The PetscSF
3012 
3013   Level: intermediate
3014 
3015   Note: Any previous SF is destroyed
3016 
3017 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
3018 @*/
3019 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
3020 {
3021   PetscErrorCode ierr;
3022 
3023   PetscFunctionBegin;
3024   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3025   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
3026   ierr          = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
3027   dm->defaultSF = sf;
3028   PetscFunctionReturn(0);
3029 }
3030 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

  Note: The created SF is stored in the DM (dm->defaultSF). Each leaf is a local dof
  (indexed by its offset in the local section); its root is the (rank, offset) that
  owns that dof globally. Negative dof counts / offsets in the global section encode
  remote ownership as -(value+1), as decoded below.

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm       comm;
  PetscLayout    layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt       pStart, pEnd, p, nroots, nleaves = 0, l;
  PetscMPIInt    size, rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  /* Roots: the dofs owned by this process (constrained dofs excluded) */
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout of the owned dof counts so that global offsets can be
     mapped back to owning ranks via its ranges array */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves (unconstrained dofs of every point in the chart);
     gdof < 0 encodes a remotely owned point as -(gdof+1) */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr     = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr     = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill in the leaf (local offset) and root (owner rank, owner offset)
     for every unconstrained dof */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local offsets, skipping constrained dofs (cind is sorted) */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned point: decode the true global offset, then find the
         owning rank by bisection on the layout ranges */
      for (d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2);  /* PetscFindInt returns -(insertion point+1) when not found */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: root is on this rank */
      for (d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of local and remote */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3120 
3121 #undef __FUNCT__
3122 #define __FUNCT__ "DMGetPointSF"
3123 /*@
3124   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3125 
3126   Input Parameter:
3127 . dm - The DM
3128 
3129   Output Parameter:
3130 . sf - The PetscSF
3131 
3132   Level: intermediate
3133 
3134   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3135 
3136 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3137 @*/
3138 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
3139 {
3140   PetscFunctionBegin;
3141   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3142   PetscValidPointer(sf, 2);
3143   *sf = dm->sf;
3144   PetscFunctionReturn(0);
3145 }
3146 
3147 #undef __FUNCT__
3148 #define __FUNCT__ "DMSetPointSF"
3149 /*@
3150   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3151 
3152   Input Parameters:
3153 + dm - The DM
3154 - sf - The PetscSF
3155 
3156   Level: intermediate
3157 
3158 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3159 @*/
3160 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3161 {
3162   PetscErrorCode ierr;
3163 
3164   PetscFunctionBegin;
3165   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3166   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3167   ierr   = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3168   ierr   = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3169   dm->sf = sf;
3170   PetscFunctionReturn(0);
3171 }
3172 
3173 #undef __FUNCT__
3174 #define __FUNCT__ "DMGetNumFields"
3175 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3176 {
3177   PetscFunctionBegin;
3178   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3179   PetscValidPointer(numFields, 2);
3180   *numFields = dm->numFields;
3181   PetscFunctionReturn(0);
3182 }
3183 
3184 #undef __FUNCT__
3185 #define __FUNCT__ "DMSetNumFields"
3186 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3187 {
3188   PetscInt       f;
3189   PetscErrorCode ierr;
3190 
3191   PetscFunctionBegin;
3192   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3193   for (f = 0; f < dm->numFields; ++f) {
3194     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3195   }
3196   ierr          = PetscFree(dm->fields);CHKERRQ(ierr);
3197   dm->numFields = numFields;
3198   ierr          = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3199   for (f = 0; f < dm->numFields; ++f) {
3200     ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr);
3201   }
3202   PetscFunctionReturn(0);
3203 }
3204 
3205 #undef __FUNCT__
3206 #define __FUNCT__ "DMGetField"
3207 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3208 {
3209   PetscFunctionBegin;
3210   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3211   PetscValidPointer(field, 2);
3212   if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3213   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3214   *field = dm->fields[f];
3215   PetscFunctionReturn(0);
3216 }
3217 
#undef __FUNCT__
#define __FUNCT__ "DMRestrictHook_Coordinates"
/* Coarsen hook (registered by DMSetCoordinates()): when a DM carrying coordinates
   is coarsened, build coordinates on the coarse DM by injecting the fine values.
   Runs only if the fine DM has coordinates and the coarse DM does not. */
PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx)
{
  DM dm_coord,dmc_coord;
  PetscErrorCode ierr;
  Vec coords,ccoords;
  VecScatter scat;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr);
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr);
    /* Injection pulls the fine-grid values living at coarse points */
    ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* DMSetCoordinates() takes its own reference, so drop ours below */
    ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3242 
#undef __FUNCT__
#define __FUNCT__ "DMSubDomainHook_Coordinates"
/* Subdomain hook (registered by DMSetCoordinates()): when a subdomain DM is split
   off, populate both its global and local coordinate vectors from the parent DM,
   using the domain-decomposition scatters of the coordinate DMs. Runs only if the
   parent has coordinates and the subdomain does not. */
static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx)
{
  DM dm_coord,subdm_coord;
  PetscErrorCode ierr;
  Vec coords,ccoords,clcoords;
  VecScatter *scat_i,*scat_g;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr);
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr);
    /* As used here: scat_i[0] fills the subdomain global vector, scat_g[0] the
       local one -- confirm against DMCreateDomainDecompositionScatters() */
    ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* Both setters take their own references; drop ours below */
    ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
    ierr = VecDestroy(&clcoords);CHKERRQ(ierr);
    ierr = PetscFree(scat_i);CHKERRQ(ierr);
    ierr = PetscFree(scat_g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3275 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do include those for ghost points, which are in the local vector

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Take our own reference; the caller keeps ownership of its reference */
  ierr            = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr            = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* The cached local coordinate vector is now stale; it is rebuilt on demand
     by DMGetCoordinatesLocal() */
  ierr            = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  /* Propagate coordinates automatically on coarsening and domain decomposition */
  ierr            = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  ierr            = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3310 
3311 #undef __FUNCT__
3312 #define __FUNCT__ "DMSetCoordinatesLocal"
3313 /*@
3314   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3315 
3316   Collective on DM
3317 
3318    Input Parameters:
3319 +  dm - the DM
3320 -  c - coordinate vector
3321 
3322   Note:
3323   The coordinates of ghost points can be set using DMSetCoordinates()
3324   followed by DMGetCoordinatesLocal(). This is intended to enable the
3325   setting of ghost coordinates outside of the domain.
3326 
3327   Level: intermediate
3328 
3329 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3330 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3331 @*/
3332 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3333 {
3334   PetscErrorCode ierr;
3335 
3336   PetscFunctionBegin;
3337   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3338   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3339   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3340   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3341 
3342   dm->coordinatesLocal = c;
3343 
3344   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3345   PetscFunctionReturn(0);
3346 }
3347 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinates"
/*@
  DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.

  Not Collective

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - global coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has only the local coordinates (does NOT have the ghost coordinates).

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily build the global vector from the local one when only the local exists
     (e.g. after DMSetCoordinatesLocal()); result is cached in dm->coordinates */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3393 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinatesLocal"
/*@
  DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has the local and ghost coordinates

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily scatter the global coordinates into a local (ghosted) vector when
     only the global exists; result is cached in dm->coordinatesLocal */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}
3439 
3440 #undef __FUNCT__
3441 #define __FUNCT__ "DMGetCoordinateDM"
3442 /*@
3443   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3444 
3445   Collective on DM
3446 
3447   Input Parameter:
3448 . dm - the DM
3449 
3450   Output Parameter:
3451 . cdm - coordinate DM
3452 
3453   Level: intermediate
3454 
3455 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3456 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3457 @*/
3458 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3459 {
3460   PetscErrorCode ierr;
3461 
3462   PetscFunctionBegin;
3463   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3464   PetscValidPointer(cdm,2);
3465   if (!dm->coordinateDM) {
3466     if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3467     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3468   }
3469   *cdm = dm->coordinateDM;
3470   PetscFunctionReturn(0);
3471 }
3472 
3473 #undef __FUNCT__
3474 #define __FUNCT__ "DMLocatePoints"
3475 /*@
3476   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3477 
3478   Not collective
3479 
3480   Input Parameters:
3481 + dm - The DM
3482 - v - The Vec of points
3483 
3484   Output Parameter:
3485 . cells - The local cell numbers for cells which contain the points
3486 
3487   Level: developer
3488 
3489 .keywords: point location, mesh
3490 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3491 @*/
3492 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3493 {
3494   PetscErrorCode ierr;
3495 
3496   PetscFunctionBegin;
3497   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3498   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3499   PetscValidPointer(cells,3);
3500   if (dm->ops->locatepoints) {
3501     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3502   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
3503   PetscFunctionReturn(0);
3504 }
3505