xref: /petsc/src/ksp/pc/impls/bddc/bddc.c (revision a401a8b6f7bc823ba5fc6d107d6739773b0b4d41)
1 /* TODOLIST
2    DofSplitting and DM attached to pc?
3    Change SetNeumannBoundaries to SetNeumannBoundariesLocal and provide new SetNeumannBoundaries (same Dirichlet)
4    change how to deal with the coarse problem (PCBDDCSetCoarseEnvironment):
     - simplify coarse problem structure -> PCBDDC or PCREDUNDANT, nothing else -> same comm for all levels?
6      - remove coarse enums and allow use of PCBDDCGetCoarseKSP
7      - remove metis dependency -> use MatPartitioning for multilevel -> Assemble serial adjacency in PCBDDCAnalyzeInterface?
8    code refactoring:
9      - pick up better names for static functions
10    change options structure:
11      - insert BDDC into MG framework?
12    provide other ops? Ask to developers
13    remove all unused printf
14    man pages
15 */
16 
17 /* ----------------------------------------------------------------------------------------------------------------------------------------------
18    Implementation of BDDC preconditioner based on:
19    C. Dohrmann "An approximate BDDC preconditioner", Numerical Linear Algebra with Applications Volume 14, Issue 2, pages 149-168, March 2007
20    ---------------------------------------------------------------------------------------------------------------------------------------------- */
21 
22 #include "bddc.h" /*I "petscpc.h" I*/  /* includes for fortran wrappers */
23 #include "bddcprivate.h"
24 #include <petscblaslapack.h>
25 
26 /* prototypes for static functions contained in bddc.c */
27 static PetscErrorCode PCBDDCCoarseSetUp(PC);
28 
29 /* -------------------------------------------------------------------------- */
30 #undef __FUNCT__
31 #define __FUNCT__ "PCSetFromOptions_BDDC"
32 PetscErrorCode PCSetFromOptions_BDDC(PC pc)
33 {
34   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
35   PetscErrorCode ierr;
36 
37   PetscFunctionBegin;
38   ierr = PetscOptionsHead("BDDC options");CHKERRQ(ierr);
39   /* Verbose debugging of main data structures */
40   ierr = PetscOptionsInt("-pc_bddc_check_level"       ,"Verbose (debugging) output for PCBDDC"                       ,"none",pcbddc->dbg_flag      ,&pcbddc->dbg_flag      ,NULL);CHKERRQ(ierr);
41   /* Some customization for default primal space */
42   ierr = PetscOptionsBool("-pc_bddc_vertices_only"   ,"Use only vertices in coarse space (i.e. discard constraints)","none",pcbddc->vertices_flag   ,&pcbddc->vertices_flag   ,NULL);CHKERRQ(ierr);
43   ierr = PetscOptionsBool("-pc_bddc_constraints_only","Use only constraints in coarse space (i.e. discard vertices)","none",pcbddc->constraints_flag,&pcbddc->constraints_flag,NULL);CHKERRQ(ierr);
44   ierr = PetscOptionsBool("-pc_bddc_faces_only"      ,"Use only faces among constraints of coarse space (i.e. discard edges)"         ,"none",pcbddc->faces_flag      ,&pcbddc->faces_flag      ,NULL);CHKERRQ(ierr);
45   ierr = PetscOptionsBool("-pc_bddc_edges_only"      ,"Use only edges among constraints of coarse space (i.e. discard faces)"         ,"none",pcbddc->edges_flag      ,&pcbddc->edges_flag      ,NULL);CHKERRQ(ierr);
46   /* Coarse solver context */
47   static const char * const avail_coarse_problems[] = {"sequential","replicated","parallel","multilevel","CoarseProblemType","PC_BDDC_",0}; /*order of choiches depends on ENUM defined in bddc.h */
48   ierr = PetscOptionsEnum("-pc_bddc_coarse_problem_type","Set coarse problem type","none",avail_coarse_problems,(PetscEnum)pcbddc->coarse_problem_type,(PetscEnum*)&pcbddc->coarse_problem_type,NULL);CHKERRQ(ierr);
49   /* Two different application of BDDC to the whole set of dofs, internal and interface */
50   ierr = PetscOptionsBool("-pc_bddc_switch_preconditioning_type","Switch between M_2 (default) and M_3 preconditioners (as defined by Dohrmann)","none",pcbddc->inexact_prec_type,&pcbddc->inexact_prec_type,NULL);CHKERRQ(ierr);
51   ierr = PetscOptionsBool("-pc_bddc_use_change_of_basis","Use change of basis approach for primal space","none",pcbddc->use_change_of_basis,&pcbddc->use_change_of_basis,NULL);CHKERRQ(ierr);
52   ierr = PetscOptionsBool("-pc_bddc_use_change_on_faces","Use change of basis approach for face constraints","none",pcbddc->use_change_on_faces,&pcbddc->use_change_on_faces,NULL);CHKERRQ(ierr);
53   if (!pcbddc->use_change_of_basis) {
54     pcbddc->use_change_on_faces = PETSC_FALSE;
55   }
56   ierr = PetscOptionsInt("-pc_bddc_coarsening_ratio","Set coarsening ratio used in multilevel coarsening","none",pcbddc->coarsening_ratio,&pcbddc->coarsening_ratio,NULL);CHKERRQ(ierr);
57   ierr = PetscOptionsInt("-pc_bddc_max_levels","Set maximum number of levels for multilevel","none",pcbddc->max_levels,&pcbddc->max_levels,NULL);CHKERRQ(ierr);
58   ierr = PetscOptionsBool("-pc_bddc_use_deluxe_scaling","Use deluxe scaling for BDDC","none",pcbddc->use_deluxe_scaling,&pcbddc->use_deluxe_scaling,NULL);CHKERRQ(ierr);
59   ierr = PetscOptionsTail();CHKERRQ(ierr);
60   PetscFunctionReturn(0);
61 }
62 /* -------------------------------------------------------------------------- */
63 #undef __FUNCT__
64 #define __FUNCT__ "PCBDDCSetPrimalVerticesLocalIS_BDDC"
65 static PetscErrorCode PCBDDCSetPrimalVerticesLocalIS_BDDC(PC pc, IS PrimalVertices)
66 {
67   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
68   PetscErrorCode ierr;
69 
70   PetscFunctionBegin;
71   ierr = ISDestroy(&pcbddc->user_primal_vertices);CHKERRQ(ierr);
72   ierr = PetscObjectReference((PetscObject)PrimalVertices);CHKERRQ(ierr);
73   pcbddc->user_primal_vertices = PrimalVertices;
74   PetscFunctionReturn(0);
75 }
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetPrimalVerticesLocalIS"
/*@
 PCBDDCSetPrimalVerticesLocalIS - Set user defined primal vertices in PCBDDC.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  PrimalVertices - index set of primal vertices in local numbering

   Level: intermediate

   Notes:
   The call is forwarded with PetscTryMethod, so it is a no-op when the PC is
   not of type PCBDDC.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCSetPrimalVerticesLocalIS(PC pc, IS PrimalVertices)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(PrimalVertices,IS_CLASSID,2);
  ierr = PetscTryMethod(pc,"PCBDDCSetPrimalVerticesLocalIS_C",(PC,IS),(pc,PrimalVertices));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
103 /* -------------------------------------------------------------------------- */
104 #undef __FUNCT__
105 #define __FUNCT__ "PCBDDCSetCoarseProblemType_BDDC"
106 static PetscErrorCode PCBDDCSetCoarseProblemType_BDDC(PC pc, CoarseProblemType CPT)
107 {
108   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
109 
110   PetscFunctionBegin;
111   pcbddc->coarse_problem_type = CPT;
112   PetscFunctionReturn(0);
113 }
114 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetCoarseProblemType"
/*@
 PCBDDCSetCoarseProblemType - Set coarse problem type in PCBDDC.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  CPT - the coarse problem type; the admissible values (see the
         CoarseProblemType enum in bddc.h) correspond to the option strings
         "sequential", "replicated", "parallel" and "multilevel"

   Level: intermediate

   Notes:
   Not collective but all procs must call with same arguments.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCSetCoarseProblemType(PC pc, CoarseProblemType CPT)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCBDDCSetCoarseProblemType_C",(PC,CoarseProblemType),(pc,CPT));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
142 /* -------------------------------------------------------------------------- */
143 #undef __FUNCT__
144 #define __FUNCT__ "PCBDDCSetCoarseningRatio_BDDC"
145 static PetscErrorCode PCBDDCSetCoarseningRatio_BDDC(PC pc,PetscInt k)
146 {
147   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
148 
149   PetscFunctionBegin;
150   pcbddc->coarsening_ratio=k;
151   PetscFunctionReturn(0);
152 }
153 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetCoarseningRatio"
/*@
 PCBDDCSetCoarseningRatio - Set coarsening ratio used in multilevel coarsening

   Logically collective on PC

   Input Parameters:
+  pc - the preconditioning context
-  k - coarsening ratio

   Level: intermediate

   Notes:
   Approximately k subdomains at the finer level will be aggregated into a
   single subdomain at the coarser level.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCSetCoarseningRatio(PC pc,PetscInt k)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCBDDCSetCoarseningRatio_C",(PC,PetscInt),(pc,k));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
182 /* -------------------------------------------------------------------------- */
183 
184 #undef __FUNCT__
185 #define __FUNCT__ "PCBDDCSetMaxLevels_BDDC"
186 static PetscErrorCode PCBDDCSetMaxLevels_BDDC(PC pc,PetscInt max_levels)
187 {
188   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
189 
190   PetscFunctionBegin;
191   pcbddc->max_levels=max_levels;
192   PetscFunctionReturn(0);
193 }
194 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetMaxLevels"
/*@
 PCBDDCSetMaxLevels - Sets the maximum number of levels within the multilevel approach.

   Logically collective on PC

   Input Parameters:
+  pc - the preconditioning context
-  max_levels - the maximum number of levels

   Level: intermediate

   Notes:
   Default value is 1, i.e. coarse problem will be solved inexactly with one
   application of PCBDDC preconditioner if the multilevel approach is requested.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCSetMaxLevels(PC pc,PetscInt max_levels)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCBDDCSetMaxLevels_C",(PC,PetscInt),(pc,max_levels));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
224 /* -------------------------------------------------------------------------- */
225 
226 #undef __FUNCT__
227 #define __FUNCT__ "PCBDDCSetNullSpace_BDDC"
228 static PetscErrorCode PCBDDCSetNullSpace_BDDC(PC pc,MatNullSpace NullSpace)
229 {
230   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
231   PetscErrorCode ierr;
232 
233   PetscFunctionBegin;
234   ierr = PetscObjectReference((PetscObject)NullSpace);CHKERRQ(ierr);
235   ierr = MatNullSpaceDestroy(&pcbddc->NullSpace);CHKERRQ(ierr);
236   pcbddc->NullSpace=NullSpace;
237   PetscFunctionReturn(0);
238 }
239 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetNullSpace"
/*@
 PCBDDCSetNullSpace - Set NullSpace of global operator of BDDC preconditioned mat.

   Logically collective on PC and MatNullSpace

   Input Parameters:
+  pc - the preconditioning context
-  NullSpace - Null space of the linear operator to be preconditioned.

   Level: intermediate

   Notes:
   The null space object is referenced, not copied; the caller keeps ownership
   of its own reference.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCSetNullSpace(PC pc,MatNullSpace NullSpace)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(NullSpace,MAT_NULLSPACE_CLASSID,2);
  ierr = PetscTryMethod(pc,"PCBDDCSetNullSpace_C",(PC,MatNullSpace),(pc,NullSpace));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
267 /* -------------------------------------------------------------------------- */
268 
269 #undef __FUNCT__
270 #define __FUNCT__ "PCBDDCSetDirichletBoundaries_BDDC"
271 static PetscErrorCode PCBDDCSetDirichletBoundaries_BDDC(PC pc,IS DirichletBoundaries)
272 {
273   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
274   PetscErrorCode ierr;
275 
276   PetscFunctionBegin;
277   ierr = ISDestroy(&pcbddc->DirichletBoundaries);CHKERRQ(ierr);
278   ierr = PetscObjectReference((PetscObject)DirichletBoundaries);CHKERRQ(ierr);
279   pcbddc->DirichletBoundaries=DirichletBoundaries;
280   PetscFunctionReturn(0);
281 }
282 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetDirichletBoundaries"
/*@
 PCBDDCSetDirichletBoundaries - Set index set defining subdomain part (in local ordering)
                              of Dirichlet boundaries for the global problem.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  DirichletBoundaries - sequential index set defining the subdomain part of Dirichlet boundaries

   Level: intermediate

   Notes:
   NOTE(review): older docs claimed NULL was accepted here, but the header
   validation below rejects a NULL argument - confirm the intended contract.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCSetDirichletBoundaries(PC pc,IS DirichletBoundaries)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(DirichletBoundaries,IS_CLASSID,2);
  ierr = PetscTryMethod(pc,"PCBDDCSetDirichletBoundaries_C",(PC,IS),(pc,DirichletBoundaries));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
311 /* -------------------------------------------------------------------------- */
312 
313 #undef __FUNCT__
314 #define __FUNCT__ "PCBDDCSetNeumannBoundaries_BDDC"
315 static PetscErrorCode PCBDDCSetNeumannBoundaries_BDDC(PC pc,IS NeumannBoundaries)
316 {
317   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
318   PetscErrorCode ierr;
319 
320   PetscFunctionBegin;
321   ierr = ISDestroy(&pcbddc->NeumannBoundaries);CHKERRQ(ierr);
322   ierr = PetscObjectReference((PetscObject)NeumannBoundaries);CHKERRQ(ierr);
323   pcbddc->NeumannBoundaries=NeumannBoundaries;
324   PetscFunctionReturn(0);
325 }
326 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetNeumannBoundaries"
/*@
 PCBDDCSetNeumannBoundaries - Set index set defining subdomain part (in local ordering)
                              of Neumann boundaries for the global problem.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  NeumannBoundaries - sequential index set defining the subdomain part of Neumann boundaries

   Level: intermediate

   Notes:
   NOTE(review): older docs claimed NULL was accepted here, but the header
   validation below rejects a NULL argument - confirm the intended contract.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCSetNeumannBoundaries(PC pc,IS NeumannBoundaries)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(NeumannBoundaries,IS_CLASSID,2);
  ierr = PetscTryMethod(pc,"PCBDDCSetNeumannBoundaries_C",(PC,IS),(pc,NeumannBoundaries));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
355 /* -------------------------------------------------------------------------- */
356 
357 #undef __FUNCT__
358 #define __FUNCT__ "PCBDDCGetDirichletBoundaries_BDDC"
359 static PetscErrorCode PCBDDCGetDirichletBoundaries_BDDC(PC pc,IS *DirichletBoundaries)
360 {
361   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
362 
363   PetscFunctionBegin;
364   *DirichletBoundaries = pcbddc->DirichletBoundaries;
365   PetscFunctionReturn(0);
366 }
367 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCGetDirichletBoundaries"
/*@
 PCBDDCGetDirichletBoundaries - Get index set defining subdomain part (in local ordering)
                                of Dirichlet boundaries for the global problem.

   Not collective

   Input Parameters:
.  pc - the preconditioning context

   Output Parameters:
.  DirichletBoundaries - index set defining the subdomain part of Dirichlet boundaries

   Level: intermediate

   Notes:
   The returned IS is not referenced; do not destroy it.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCGetDirichletBoundaries(PC pc,IS *DirichletBoundaries)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCBDDCGetDirichletBoundaries_C",(PC,IS*),(pc,DirichletBoundaries));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
397 /* -------------------------------------------------------------------------- */
398 
399 #undef __FUNCT__
400 #define __FUNCT__ "PCBDDCGetNeumannBoundaries_BDDC"
401 static PetscErrorCode PCBDDCGetNeumannBoundaries_BDDC(PC pc,IS *NeumannBoundaries)
402 {
403   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
404 
405   PetscFunctionBegin;
406   *NeumannBoundaries = pcbddc->NeumannBoundaries;
407   PetscFunctionReturn(0);
408 }
409 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCGetNeumannBoundaries"
/*@
 PCBDDCGetNeumannBoundaries - Get index set defining subdomain part (in local ordering)
                              of Neumann boundaries for the global problem.

   Not collective

   Input Parameters:
.  pc - the preconditioning context

   Output Parameters:
.  NeumannBoundaries - index set defining the subdomain part of Neumann boundaries

   Level: intermediate

   Notes:
   The returned IS is not referenced; do not destroy it.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCGetNeumannBoundaries(PC pc,IS *NeumannBoundaries)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCBDDCGetNeumannBoundaries_C",(PC,IS*),(pc,NeumannBoundaries));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
439 /* -------------------------------------------------------------------------- */
440 
441 #undef __FUNCT__
442 #define __FUNCT__ "PCBDDCSetLocalAdjacencyGraph_BDDC"
443 static PetscErrorCode PCBDDCSetLocalAdjacencyGraph_BDDC(PC pc, PetscInt nvtxs,const PetscInt xadj[],const PetscInt adjncy[], PetscCopyMode copymode)
444 {
445   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
446   PCBDDCGraph    mat_graph = pcbddc->mat_graph;
447   PetscErrorCode ierr;
448 
449   PetscFunctionBegin;
450   /* free old CSR */
451   ierr = PCBDDCGraphResetCSR(mat_graph);CHKERRQ(ierr);
452   /* TODO: PCBDDCGraphSetAdjacency */
453   /* get CSR into graph structure */
454   if (copymode == PETSC_COPY_VALUES) {
455     ierr = PetscMalloc((nvtxs+1)*sizeof(PetscInt),&mat_graph->xadj);CHKERRQ(ierr);
456     ierr = PetscMalloc(xadj[nvtxs]*sizeof(PetscInt),&mat_graph->adjncy);CHKERRQ(ierr);
457     ierr = PetscMemcpy(mat_graph->xadj,xadj,(nvtxs+1)*sizeof(PetscInt));CHKERRQ(ierr);
458     ierr = PetscMemcpy(mat_graph->adjncy,adjncy,xadj[nvtxs]*sizeof(PetscInt));CHKERRQ(ierr);
459   } else if (copymode == PETSC_OWN_POINTER) {
460     mat_graph->xadj = (PetscInt*)xadj;
461     mat_graph->adjncy = (PetscInt*)adjncy;
462   }
463   mat_graph->nvtxs_csr = nvtxs;
464   PetscFunctionReturn(0);
465 }
466 
467 #undef __FUNCT__
468 #define __FUNCT__ "PCBDDCSetLocalAdjacencyGraph"
469 /*@
470  PCBDDCSetLocalAdjacencyGraph - Set CSR graph of local matrix for use of PCBDDC.
471 
472    Not collective
473 
474    Input Parameters:
475 +  pc - the preconditioning context
476 -  nvtxs - number of local vertices of the graph
477 -  xadj, adjncy - the CSR graph
478 -  copymode - either PETSC_COPY_VALUES or PETSC_OWN_POINTER. In the former case the user must free the array passed in;
479                                                              in the latter case, memory must be obtained with PetscMalloc.
480 
481    Level: intermediate
482 
483    Notes:
484 
485 .seealso: PCBDDC
486 @*/
487 PetscErrorCode PCBDDCSetLocalAdjacencyGraph(PC pc,PetscInt nvtxs,const PetscInt xadj[],const PetscInt adjncy[], PetscCopyMode copymode)
488 {
489   void (*f)(void) = 0;
490   PetscErrorCode ierr;
491 
492   PetscFunctionBegin;
493   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
494   PetscValidIntPointer(xadj,3);
495   PetscValidIntPointer(xadj,4);
496   if (copymode != PETSC_COPY_VALUES && copymode != PETSC_OWN_POINTER) {
497     SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unsupported copy mode %d in %s\n",copymode,__FUNCT__);
498   }
499   ierr = PetscTryMethod(pc,"PCBDDCSetLocalAdjacencyGraph_C",(PC,PetscInt,const PetscInt[],const PetscInt[],PetscCopyMode),(pc,nvtxs,xadj,adjncy,copymode));CHKERRQ(ierr);
500   /* free arrays if PCBDDC is not the PC type */
501   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",&f);CHKERRQ(ierr);
502   if (!f && copymode == PETSC_OWN_POINTER) {
503     ierr = PetscFree(xadj);CHKERRQ(ierr);
504     ierr = PetscFree(adjncy);CHKERRQ(ierr);
505   }
506   PetscFunctionReturn(0);
507 }
508 /* -------------------------------------------------------------------------- */
509 
510 #undef __FUNCT__
511 #define __FUNCT__ "PCBDDCSetDofsSplitting_BDDC"
512 static PetscErrorCode PCBDDCSetDofsSplitting_BDDC(PC pc,PetscInt n_is, IS ISForDofs[])
513 {
514   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
515   PetscInt i;
516   PetscErrorCode ierr;
517 
518   PetscFunctionBegin;
519   /* Destroy ISes if they were already set */
520   for (i=0;i<pcbddc->n_ISForDofs;i++) {
521     ierr = ISDestroy(&pcbddc->ISForDofs[i]);CHKERRQ(ierr);
522   }
523   ierr = PetscFree(pcbddc->ISForDofs);CHKERRQ(ierr);
524   /* allocate space then set */
525   ierr = PetscMalloc(n_is*sizeof(IS),&pcbddc->ISForDofs);CHKERRQ(ierr);
526   for (i=0;i<n_is;i++) {
527     ierr = PetscObjectReference((PetscObject)ISForDofs[i]);CHKERRQ(ierr);
528     pcbddc->ISForDofs[i]=ISForDofs[i];
529   }
530   pcbddc->n_ISForDofs=n_is;
531   PetscFunctionReturn(0);
532 }
533 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetDofsSplitting"
/*@
 PCBDDCSetDofsSplitting - Set index sets defining fields of local mat.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  n_is - number of index sets defining the fields
-  ISForDofs - array of IS describing the fields

   Level: intermediate

   Notes:
   The index sets are referenced, not copied; the caller keeps ownership of
   its own references.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCSetDofsSplitting(PC pc,PetscInt n_is, IS ISForDofs[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCBDDCSetDofsSplitting_C",(PC,PetscInt,IS[]),(pc,n_is,ISForDofs));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
561 /* -------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCPreSolve_BDDC"
/* -------------------------------------------------------------------------- */
/*
   PCPreSolve_BDDC - Changes the right hand side and (if necessary) the initial
                     guess if a transformation of basis approach has been selected.

   Input Parameter:
+  pc - the preconditioner context

   Application Interface Routine: PCPreSolve()

   Notes:
   The interface routine PCPreSolve() is not usually called directly by
   the user, but instead is called by KSPSolve().
*/
static PetscErrorCode PCPreSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
{
  PetscErrorCode ierr;
  PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
  PC_IS          *pcis = (PC_IS*)(pc->data);
  Mat_IS         *matis = (Mat_IS*)pc->pmat->data;
  Mat            temp_mat;
  IS             dirIS;
  PetscInt       dirsize,i,*is_indices;
  PetscScalar    *array_x,*array_diagonal;
  Vec            used_vec;
  PetscBool      guess_nonzero;

  PetscFunctionBegin;
  /* Creates parallel work vectors used in presolve (lazily, on first call). */
  if (!pcbddc->original_rhs) {
    ierr = VecDuplicate(pcis->vec1_global,&pcbddc->original_rhs);CHKERRQ(ierr);
  }
  if (!pcbddc->temp_solution) {
    ierr = VecDuplicate(pcis->vec1_global,&pcbddc->temp_solution);CHKERRQ(ierr);
  }
  /* work on the caller's solution vector when provided, otherwise on the
     zeroed temporary; a reference is taken either way and dropped at the end */
  if (x) {
    ierr = PetscObjectReference((PetscObject)x);CHKERRQ(ierr);
    used_vec = x;
  } else {
    ierr = PetscObjectReference((PetscObject)pcbddc->temp_solution);CHKERRQ(ierr);
    used_vec = pcbddc->temp_solution;
    ierr = VecSet(used_vec,0.0);CHKERRQ(ierr);
  }
  /* hack into ksp data structure PCPreSolve comes earlier in src/ksp/ksp/interface/itfunc.c */
  if (ksp) {
    ierr = KSPGetInitialGuessNonzero(ksp,&guess_nonzero);CHKERRQ(ierr);
    if ( !guess_nonzero ) {
      ierr = VecSet(used_vec,0.0);CHKERRQ(ierr);
    }
  }

  if (rhs) { /* TODO: wiser handling of rhs removal, which is only needed in case of zeroed rows */
    /* store the original rhs (restored later by PCPostSolve_BDDC) */
    ierr = VecCopy(rhs,pcbddc->original_rhs);CHKERRQ(ierr);

    /* Take into account zeroed rows -> change rhs and store solution removed */
    /* vec1_global = rhs ./ diag(pmat); on Dirichlet rows zeroed to diag this
       yields the prescribed boundary value */
    ierr = MatGetDiagonal(pc->pmat,pcis->vec1_global);CHKERRQ(ierr);
    ierr = VecPointwiseDivide(pcis->vec1_global,rhs,pcis->vec1_global);CHKERRQ(ierr);
    /* bring both that quotient and the current guess into local (N) numbering */
    ierr = VecScatterBegin(matis->ctx,pcis->vec1_global,pcis->vec2_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(matis->ctx,pcis->vec1_global,pcis->vec2_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterBegin(matis->ctx,used_vec,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(matis->ctx,used_vec,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = PCBDDCGetDirichletBoundaries(pc,&dirIS);CHKERRQ(ierr);
    if (dirIS) {
      /* overwrite the local guess on Dirichlet dofs with the boundary values */
      ierr = ISGetSize(dirIS,&dirsize);CHKERRQ(ierr);
      ierr = VecGetArray(pcis->vec1_N,&array_x);CHKERRQ(ierr);
      ierr = VecGetArray(pcis->vec2_N,&array_diagonal);CHKERRQ(ierr);
      ierr = ISGetIndices(dirIS,(const PetscInt**)&is_indices);CHKERRQ(ierr);
      for (i=0; i<dirsize; i++) array_x[is_indices[i]] = array_diagonal[is_indices[i]];
      ierr = ISRestoreIndices(dirIS,(const PetscInt**)&is_indices);CHKERRQ(ierr);
      ierr = VecRestoreArray(pcis->vec2_N,&array_diagonal);CHKERRQ(ierr);
      ierr = VecRestoreArray(pcis->vec1_N,&array_x);CHKERRQ(ierr);
    }
    /* push the (possibly modified) local guess back to global numbering */
    ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(matis->ctx,pcis->vec1_N,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);

    /* remove the computed solution from the rhs: rhs -= pmat * used_vec */
    ierr = VecScale(used_vec,-1.0);CHKERRQ(ierr);
    ierr = MatMultAdd(pc->pmat,used_vec,rhs,rhs);CHKERRQ(ierr);
    ierr = VecScale(used_vec,-1.0);CHKERRQ(ierr);
  }

  /* store partially computed solution and set initial guess */
  if (x) {
    ierr = VecCopy(used_vec,pcbddc->temp_solution);CHKERRQ(ierr);
    ierr = VecSet(used_vec,0.0);CHKERRQ(ierr);
    if (pcbddc->use_exact_dirichlet && !pcbddc->coarse_psi_B) {
      /* seed the initial guess with an exact solve on the interior (D) dofs */
      ierr = VecScatterBegin(pcis->global_to_D,rhs,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd  (pcis->global_to_D,rhs,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = KSPSolve(pcbddc->ksp_D,pcis->vec1_D,pcis->vec2_D);CHKERRQ(ierr);
      ierr = VecScatterBegin(pcis->global_to_D,pcis->vec2_D,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
      ierr = VecScatterEnd  (pcis->global_to_D,pcis->vec2_D,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
      if (ksp) {
        ierr = KSPSetInitialGuessNonzero(ksp,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }

  if (pcbddc->use_change_of_basis) {
    /* swap pointers for local matrices (undone in PCPostSolve_BDDC) */
    temp_mat = matis->A;
    matis->A = pcbddc->local_mat;
    pcbddc->local_mat = temp_mat;
  }
  if (pcbddc->use_change_of_basis && rhs) {
    /* Get local rhs and apply transformation of basis */
    ierr = VecScatterBegin(pcis->global_to_B,rhs,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd  (pcis->global_to_B,rhs,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* from original basis to modified basis */
    ierr = MatMultTranspose(pcbddc->ChangeOfBasisMatrix,pcis->vec1_B,pcis->vec2_B);CHKERRQ(ierr);
    /* put back modified values into the global vec using INSERT_VALUES copy mode */
    ierr = VecScatterBegin(pcis->global_to_B,pcis->vec2_B,rhs,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec2_B,rhs,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  }
  if (ksp && pcbddc->NullSpace) {
    /* project the null space out of both guess and rhs */
    ierr = MatNullSpaceRemove(pcbddc->NullSpace,used_vec);CHKERRQ(ierr);
    ierr = MatNullSpaceRemove(pcbddc->NullSpace,rhs);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&used_vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
685 /* -------------------------------------------------------------------------- */
686 #undef __FUNCT__
687 #define __FUNCT__ "PCPostSolve_BDDC"
688 /* -------------------------------------------------------------------------- */
689 /*
690    PCPostSolve_BDDC - Changes the computed solution if a transformation of basis
691                      approach has been selected. Also, restores rhs to its original state.
692 
693    Input Parameter:
694 +  pc - the preconditioner contex
695 
696    Application Interface Routine: PCPostSolve()
697 
698    Notes:
699    The interface routine PCPostSolve() is not usually called directly by
700    the user, but instead is called by KSPSolve().
701 */
702 static PetscErrorCode PCPostSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
703 {
704   PetscErrorCode ierr;
705   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
706   PC_IS          *pcis   = (PC_IS*)(pc->data);
707   Mat_IS         *matis = (Mat_IS*)pc->pmat->data;
708   Mat            temp_mat;
709 
710   PetscFunctionBegin;
711   if (pcbddc->use_change_of_basis) {
712     /* swap pointers for local matrices */
713     temp_mat = matis->A;
714     matis->A = pcbddc->local_mat;
715     pcbddc->local_mat = temp_mat;
716   }
717   if (pcbddc->use_change_of_basis && x) {
718     /* Get Local boundary and apply transformation of basis to solution vector */
719     ierr = VecScatterBegin(pcis->global_to_B,x,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
720     ierr = VecScatterEnd  (pcis->global_to_B,x,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
721     /* from modified basis to original basis */
722     ierr = MatMult(pcbddc->ChangeOfBasisMatrix,pcis->vec1_B,pcis->vec2_B);CHKERRQ(ierr);
723     /* put back modified values into the global vec using INSERT_VALUES copy mode */
724     ierr = VecScatterBegin(pcis->global_to_B,pcis->vec2_B,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
725     ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec2_B,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
726   }
727   /* add solution removed in presolve */
728   if (x) {
729     ierr = VecAXPY(x,1.0,pcbddc->temp_solution);CHKERRQ(ierr);
730   }
731   /* restore rhs to its original state */
732   if (rhs) {
733     ierr = VecCopy(pcbddc->original_rhs,rhs);CHKERRQ(ierr);
734   }
735   PetscFunctionReturn(0);
736 }
737 /* -------------------------------------------------------------------------- */
738 #undef __FUNCT__
739 #define __FUNCT__ "PCSetUp_BDDC"
740 /* -------------------------------------------------------------------------- */
741 /*
742    PCSetUp_BDDC - Prepares for the use of the BDDC preconditioner
743                   by setting data structures and options.
744 
745    Input Parameter:
746 +  pc - the preconditioner context
747 
748    Application Interface Routine: PCSetUp()
749 
750    Notes:
751    The interface routine PCSetUp() is not usually called directly by
752    the user, but instead is called by PCApply() if necessary.
753 */
754 PetscErrorCode PCSetUp_BDDC(PC pc)
755 {
756   PetscErrorCode ierr;
757   PC_BDDC*       pcbddc = (PC_BDDC*)pc->data;
758   MatStructure   flag;
759   PetscBool      computeis,computetopography,computesolvers;
760 
761   PetscFunctionBegin;
762   /* the following lines of code should be replaced by a better logic between PCIS, PCNN, PCBDDC and other nonoverlapping preconditioners */
763   /* For BDDC we need to define a local "Neumann" problem different to that defined in PCISSetup
764      So, we set to pcnone the Neumann problem of pcis in order to avoid unneeded computation
765      Also, we decide to directly build the (same) Dirichlet problem */
766   ierr = PetscOptionsSetValue("-is_localN_pc_type","none");CHKERRQ(ierr);
767   ierr = PetscOptionsSetValue("-is_localD_pc_type","none");CHKERRQ(ierr);
768   /* Get stdout for dbg */
769   if (pcbddc->dbg_flag && !pcbddc->dbg_viewer) {
770     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&pcbddc->dbg_viewer);CHKERRQ(ierr);
771     ierr = PetscViewerASCIISynchronizedAllow(pcbddc->dbg_viewer,PETSC_TRUE);CHKERRQ(ierr);
772   }
773   /* first attempt to split work */
774   if (pc->setupcalled) {
775     computeis = PETSC_FALSE;
776     ierr = PCGetOperators(pc,NULL,NULL,&flag);CHKERRQ(ierr);
777     if (flag == SAME_PRECONDITIONER) {
778       computetopography = PETSC_FALSE;
779       computesolvers = PETSC_FALSE;
780     } else if (flag == SAME_NONZERO_PATTERN) {
781       computetopography = PETSC_FALSE;
782       computesolvers = PETSC_TRUE;
783     } else { /* DIFFERENT_NONZERO_PATTERN */
784       computetopography = PETSC_TRUE;
785       computesolvers = PETSC_TRUE;
786     }
787   } else {
788     computeis = PETSC_TRUE;
789     computetopography = PETSC_TRUE;
790     computesolvers = PETSC_TRUE;
791   }
792   /* Set up all the "iterative substructuring" common block */
793   if (computeis) {
794     ierr = PCISSetUp(pc);CHKERRQ(ierr);
795   }
796   /* Analyze interface and set up local constraint and change of basis matrices */
797   if (computetopography) {
798     /* reset data */
799     ierr = PCBDDCResetTopography(pc);CHKERRQ(ierr);
800     ierr = PCBDDCAnalyzeInterface(pc);CHKERRQ(ierr);
801     ierr = PCBDDCConstraintsSetUp(pc);CHKERRQ(ierr);
802   }
803   if (computesolvers) {
804     /* reset data */
805     ierr = PCBDDCResetSolvers(pc);CHKERRQ(ierr);
806     ierr = PCBDDCScalingDestroy(pc);CHKERRQ(ierr);
807     /* Create coarse and local stuffs used for evaluating action of preconditioner */
808     ierr = PCBDDCCoarseSetUp(pc);CHKERRQ(ierr);
809     ierr = PCBDDCScalingSetUp(pc);CHKERRQ(ierr);
810   }
811   PetscFunctionReturn(0);
812 }
813 
814 /* -------------------------------------------------------------------------- */
815 /*
816    PCApply_BDDC - Applies the BDDC preconditioner to a vector.
817 
818    Input Parameters:
819 .  pc - the preconditioner context
820 .  r - input vector (global)
821 
822    Output Parameter:
823 .  z - output vector (global)
824 
825    Application Interface Routine: PCApply()
826  */
827 #undef __FUNCT__
828 #define __FUNCT__ "PCApply_BDDC"
829 PetscErrorCode PCApply_BDDC(PC pc,Vec r,Vec z)
830 {
831   PC_IS             *pcis = (PC_IS*)(pc->data);
832   PC_BDDC           *pcbddc = (PC_BDDC*)(pc->data);
833   PetscErrorCode    ierr;
834   const PetscScalar one = 1.0;
835   const PetscScalar m_one = -1.0;
836   const PetscScalar zero = 0.0;
837 
838 /* This code is similar to that provided in nn.c for PCNN
839    NN interface preconditioner changed to BDDC
840    Added support for M_3 preconditioner in the reference article (code is active if pcbddc->inexact_prec_type = PETSC_TRUE) */
841 
842   PetscFunctionBegin;
843   if (!pcbddc->use_exact_dirichlet || pcbddc->coarse_psi_B) {
844     /* First Dirichlet solve */
845     ierr = VecScatterBegin(pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
846     ierr = VecScatterEnd  (pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
847     ierr = KSPSolve(pcbddc->ksp_D,pcis->vec1_D,pcis->vec2_D);CHKERRQ(ierr);
848     /*
849       Assembling right hand side for BDDC operator
850       - pcis->vec1_D for the Dirichlet part (if needed, i.e. prec_flag=PETSC_TRUE)
851       - pcis->vec1_B the interface part of the global vector z
852     */
853     ierr = VecScale(pcis->vec2_D,m_one);CHKERRQ(ierr);
854     ierr = MatMult(pcis->A_BI,pcis->vec2_D,pcis->vec1_B);CHKERRQ(ierr);
855     if (pcbddc->inexact_prec_type) { ierr = MatMultAdd(pcis->A_II,pcis->vec2_D,pcis->vec1_D,pcis->vec1_D);CHKERRQ(ierr); }
856     ierr = VecScale(pcis->vec2_D,m_one);CHKERRQ(ierr);
857     ierr = VecCopy(r,z);CHKERRQ(ierr);
858     ierr = VecScatterBegin(pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
859     ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
860     ierr = PCBDDCScalingRestriction(pc,z,pcis->vec1_B);CHKERRQ(ierr);
861   } else {
862     ierr = VecSet(pcis->vec1_D,zero);CHKERRQ(ierr);
863     ierr = VecSet(pcis->vec2_D,zero);CHKERRQ(ierr);
864     ierr = PCBDDCScalingRestriction(pc,r,pcis->vec1_B);CHKERRQ(ierr);
865   }
866 
867   /* Apply interface preconditioner
868      input/output vecs: pcis->vec1_B and pcis->vec1_D */
869   ierr = PCBDDCApplyInterfacePreconditioner(pc);CHKERRQ(ierr);
870 
871   /* Apply transpose of partition of unity operator */
872   ierr = PCBDDCScalingExtension(pc,pcis->vec1_B,z);CHKERRQ(ierr);
873 
874   /* Second Dirichlet solve and assembling of output */
875   ierr = VecScatterBegin(pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
876   ierr = VecScatterEnd  (pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
877   ierr = MatMult(pcis->A_IB,pcis->vec1_B,pcis->vec3_D);CHKERRQ(ierr);
878   if (pcbddc->inexact_prec_type) { ierr = MatMultAdd(pcis->A_II,pcis->vec1_D,pcis->vec3_D,pcis->vec3_D);CHKERRQ(ierr); }
879   ierr = KSPSolve(pcbddc->ksp_D,pcis->vec3_D,pcbddc->vec4_D);CHKERRQ(ierr);
880   ierr = VecScale(pcbddc->vec4_D,m_one);CHKERRQ(ierr);
881   if (pcbddc->inexact_prec_type) { ierr = VecAXPY (pcbddc->vec4_D,one,pcis->vec1_D);CHKERRQ(ierr); }
882   ierr = VecAXPY (pcis->vec2_D,one,pcbddc->vec4_D);CHKERRQ(ierr);
883   ierr = VecScatterBegin(pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
884   ierr = VecScatterEnd  (pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
885   PetscFunctionReturn(0);
886 }
887 /* -------------------------------------------------------------------------- */
888 
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BDDC"
/*
   PCDestroy_BDDC - Frees all data owned by the BDDC preconditioner.

   Releases, in order: the PCIS common block, user customization data,
   topology data, the local graph structure, the scaling operator, the
   solver data (including the Dirichlet and constrained KSPs and the
   local matrix), the global vectors kept for presolve/postsolve, the
   composed query functions, and finally the PC_BDDC structure itself.
*/
PetscErrorCode PCDestroy_BDDC(PC pc)
{
  PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* free data created by PCIS */
  ierr = PCISDestroy(pc);CHKERRQ(ierr);
  /* free BDDC custom data  */
  ierr = PCBDDCResetCustomization(pc);CHKERRQ(ierr);
  /* destroy objects related to topography */
  ierr = PCBDDCResetTopography(pc);CHKERRQ(ierr);
  /* free allocated graph structure */
  ierr = PetscFree(pcbddc->mat_graph);CHKERRQ(ierr);
  /* free data for scaling operator */
  ierr = PCBDDCScalingDestroy(pc);CHKERRQ(ierr);
  /* free solvers stuff */
  ierr = PCBDDCResetSolvers(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&pcbddc->ksp_D);CHKERRQ(ierr);
  ierr = KSPDestroy(&pcbddc->ksp_R);CHKERRQ(ierr);
  ierr = MatDestroy(&pcbddc->local_mat);CHKERRQ(ierr);
  /* free global vectors needed in presolve */
  ierr = VecDestroy(&pcbddc->temp_solution);CHKERRQ(ierr);
  ierr = VecDestroy(&pcbddc->original_rhs);CHKERRQ(ierr);
  /* remove functions composed in PCCreate_BDDC */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetPrimalVerticesLocalIS_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseningRatio_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetMaxLevels_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNullSpace_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundaries_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundaries_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundaries_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundaries_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseProblemType_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplitting_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCCreateFETIDPOperators_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetRHS_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetSolution_C",NULL);CHKERRQ(ierr);
  /* Free the private data structure */
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
934 /* -------------------------------------------------------------------------- */
935 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCMatFETIDPGetRHS_BDDC"
/*
   PCBDDCMatFETIDPGetRHS_BDDC - Computes the rhs of the FETIDP flux system
   (fetidp_flux_rhs) from the rhs of the original system (standard_rhs).

   standard_rhs is used as a work vector during the computation; its values
   are restored from pcbddc->original_rhs before returning. The scattered
   pieces of the (possibly change-of-basis transformed) rhs are stored in
   mat_ctx->temp_solution_B/D for later use by PCBDDCMatFETIDPGetSolution_BDDC.
*/
static PetscErrorCode PCBDDCMatFETIDPGetRHS_BDDC(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
{
  FETIDPMat_ctx  mat_ctx;
  PC_IS*         pcis;
  PC_BDDC*       pcbddc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr);
  pcis = (PC_IS*)mat_ctx->pc->data;
  pcbddc = (PC_BDDC*)mat_ctx->pc->data;

  /* change of basis for physical rhs if needed
     It also changes the rhs in case of dirichlet boundaries */
  ierr = PCPreSolve_BDDC(mat_ctx->pc,NULL,standard_rhs,NULL);CHKERRQ(ierr);
  /* store vectors for computation of fetidp final solution */
  ierr = VecScatterBegin(pcis->global_to_D,standard_rhs,mat_ctx->temp_solution_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(pcis->global_to_D,standard_rhs,mat_ctx->temp_solution_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* scale rhs since it should be unassembled */
  /* TODO use counter scaling? (also below) */
  ierr = VecScatterBegin(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* Apply partition of unity */
  ierr = VecPointwiseMult(mat_ctx->temp_solution_B,pcis->D,mat_ctx->temp_solution_B);CHKERRQ(ierr);
  /* ierr = PCBDDCScalingRestriction(mat_ctx->pc,standard_rhs,mat_ctx->temp_solution_B);CHKERRQ(ierr); */
  if (!pcbddc->inexact_prec_type) {
    /* compute partially subassembled Schur complement right-hand side */
    ierr = KSPSolve(pcbddc->ksp_D,mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr);
    ierr = MatMult(pcis->A_BI,pcis->vec1_D,pcis->vec1_B);CHKERRQ(ierr);
    ierr = VecAXPY(mat_ctx->temp_solution_B,-1.0,pcis->vec1_B);CHKERRQ(ierr);
    ierr = VecSet(standard_rhs,0.0);CHKERRQ(ierr);
    ierr = VecScatterBegin(pcis->global_to_B,mat_ctx->temp_solution_B,standard_rhs,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(pcis->global_to_B,mat_ctx->temp_solution_B,standard_rhs,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    /* ierr = PCBDDCScalingRestriction(mat_ctx->pc,standard_rhs,mat_ctx->temp_solution_B);CHKERRQ(ierr); */
    ierr = VecScatterBegin(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecPointwiseMult(mat_ctx->temp_solution_B,pcis->D,mat_ctx->temp_solution_B);CHKERRQ(ierr);
  }
  /* BDDC rhs */
  ierr = VecCopy(mat_ctx->temp_solution_B,pcis->vec1_B);CHKERRQ(ierr);
  if (pcbddc->inexact_prec_type) {
    ierr = VecCopy(mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr);
  }
  /* apply BDDC */
  ierr = PCBDDCApplyInterfacePreconditioner(mat_ctx->pc);CHKERRQ(ierr);
  /* Application of B_delta and assembling of rhs for fetidp fluxes */
  ierr = VecSet(fetidp_flux_rhs,0.0);CHKERRQ(ierr);
  ierr = MatMult(mat_ctx->B_delta,pcis->vec1_B,mat_ctx->lambda_local);CHKERRQ(ierr);
  ierr = VecScatterBegin(mat_ctx->l2g_lambda,mat_ctx->lambda_local,fetidp_flux_rhs,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd  (mat_ctx->l2g_lambda,mat_ctx->lambda_local,fetidp_flux_rhs,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* restore original rhs */
  ierr = VecCopy(pcbddc->original_rhs,standard_rhs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
992 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCMatFETIDPGetRHS"
/*@
 PCBDDCMatFETIDPGetRHS - Compute the rhs of the FETIDP linear system from the rhs of the original linear system.

   Collective

   Input Parameters:
+  fetidp_mat   - the FETIDP mat obtained by a call to PCBDDCCreateFETIDPOperators
-  standard_rhs - the rhs of your linear system

   Output Parameters:
.  fetidp_flux_rhs   - the rhs of the FETIDP linear system

   Level: developer

   Notes:
   standard_rhs is used as a work vector during the computation, but its original values are restored before returning.

.seealso: PCBDDC, PCBDDCCreateFETIDPOperators(), PCBDDCMatFETIDPGetSolution()
@*/
PetscErrorCode PCBDDCMatFETIDPGetRHS(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
{
  FETIDPMat_ctx  mat_ctx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr);
  ierr = PetscTryMethod(mat_ctx->pc,"PCBDDCMatFETIDPGetRHS_C",(Mat,Vec,Vec),(fetidp_mat,standard_rhs,fetidp_flux_rhs));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1023 /* -------------------------------------------------------------------------- */
1024 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCMatFETIDPGetSolution_BDDC"
/*
   PCBDDCMatFETIDPGetSolution_BDDC - Recovers the solution of the original
   linear system (standard_sol) from the solution of the FETIDP flux system
   (fetidp_flux_sol), using the vectors mat_ctx->temp_solution_B/D stored by
   PCBDDCMatFETIDPGetRHS_BDDC.
*/
static PetscErrorCode PCBDDCMatFETIDPGetSolution_BDDC(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
{
  FETIDPMat_ctx  mat_ctx;
  PC_IS*         pcis;
  PC_BDDC*       pcbddc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr);
  pcis = (PC_IS*)mat_ctx->pc->data;
  pcbddc = (PC_BDDC*)mat_ctx->pc->data;

  /* apply B_delta^T */
  ierr = VecScatterBegin(mat_ctx->l2g_lambda,fetidp_flux_sol,mat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (mat_ctx->l2g_lambda,fetidp_flux_sol,mat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = MatMultTranspose(mat_ctx->B_delta,mat_ctx->lambda_local,pcis->vec1_B);CHKERRQ(ierr);
  /* compute rhs for BDDC application */
  ierr = VecAYPX(pcis->vec1_B,-1.0,mat_ctx->temp_solution_B);CHKERRQ(ierr);
  if (pcbddc->inexact_prec_type) {
    ierr = VecCopy(mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr);
  }
  /* apply BDDC */
  ierr = PCBDDCApplyInterfacePreconditioner(mat_ctx->pc);CHKERRQ(ierr);
  /* put values into standard global vector */
  ierr = VecScatterBegin(pcis->global_to_B,pcis->vec1_B,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec1_B,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  if (!pcbddc->inexact_prec_type) {
    /* compute values into the interior if solved for the partially subassembled Schur complement */
    ierr = MatMult(pcis->A_IB,pcis->vec1_B,pcis->vec1_D);CHKERRQ(ierr);
    ierr = VecAXPY(mat_ctx->temp_solution_D,-1.0,pcis->vec1_D);CHKERRQ(ierr);
    ierr = KSPSolve(pcbddc->ksp_D,mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr);
  }
  ierr = VecScatterBegin(pcis->global_to_D,pcis->vec1_D,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->global_to_D,pcis->vec1_D,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  /* final change of basis if needed
     It also sums the dirichlet part removed during RHS assembling */
  ierr = PCPostSolve_BDDC(mat_ctx->pc,NULL,NULL,standard_sol);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1066 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCMatFETIDPGetSolution"
/*@
 PCBDDCMatFETIDPGetSolution - Compute the solution of the original linear system from the solution of the FETIDP linear system.

   Collective

   Input Parameters:
+  fetidp_mat      - the FETIDP mat obtained by a call to PCBDDCCreateFETIDPOperators
-  fetidp_flux_sol - the solution of the FETIDP linear system

   Output Parameters:
.  standard_sol      - the solution on the global domain

   Level: developer

.seealso: PCBDDC, PCBDDCCreateFETIDPOperators(), PCBDDCMatFETIDPGetRHS()
@*/
PetscErrorCode PCBDDCMatFETIDPGetSolution(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
{
  FETIDPMat_ctx  mat_ctx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr);
  ierr = PetscTryMethod(mat_ctx->pc,"PCBDDCMatFETIDPGetSolution_C",(Mat,Vec,Vec),(fetidp_mat,fetidp_flux_sol,standard_sol));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1097 /* -------------------------------------------------------------------------- */
1098 
1099 extern PetscErrorCode FETIDPMatMult(Mat,Vec,Vec);
1100 extern PetscErrorCode PCBDDCDestroyFETIDPMat(Mat);
1101 extern PetscErrorCode FETIDPPCApply(PC,Vec,Vec);
1102 extern PetscErrorCode PCBDDCDestroyFETIDPPC(PC);
1103 
1104 #undef __FUNCT__
1105 #define __FUNCT__ "PCBDDCCreateFETIDPOperators_BDDC"
1106 static PetscErrorCode PCBDDCCreateFETIDPOperators_BDDC(PC pc, Mat *fetidp_mat, PC *fetidp_pc)
1107 {
1108 
1109   FETIDPMat_ctx  fetidpmat_ctx;
1110   Mat            newmat;
1111   FETIDPPC_ctx   fetidppc_ctx;
1112   PC             newpc;
1113   MPI_Comm       comm;
1114   PetscErrorCode ierr;
1115 
1116   PetscFunctionBegin;
1117   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
1118   /* FETIDP linear matrix */
1119   ierr = PCBDDCCreateFETIDPMatContext(pc,&fetidpmat_ctx);CHKERRQ(ierr);
1120   ierr = PCBDDCSetupFETIDPMatContext(fetidpmat_ctx);CHKERRQ(ierr);
1121   ierr = MatCreateShell(comm,PETSC_DECIDE,PETSC_DECIDE,fetidpmat_ctx->n_lambda,fetidpmat_ctx->n_lambda,fetidpmat_ctx,&newmat);CHKERRQ(ierr);
1122   ierr = MatShellSetOperation(newmat,MATOP_MULT,(void (*)(void))FETIDPMatMult);CHKERRQ(ierr);
1123   ierr = MatShellSetOperation(newmat,MATOP_DESTROY,(void (*)(void))PCBDDCDestroyFETIDPMat);CHKERRQ(ierr);
1124   ierr = MatSetUp(newmat);CHKERRQ(ierr);
1125   /* FETIDP preconditioner */
1126   ierr = PCBDDCCreateFETIDPPCContext(pc,&fetidppc_ctx);CHKERRQ(ierr);
1127   ierr = PCBDDCSetupFETIDPPCContext(newmat,fetidppc_ctx);CHKERRQ(ierr);
1128   ierr = PCCreate(comm,&newpc);CHKERRQ(ierr);
1129   ierr = PCSetType(newpc,PCSHELL);CHKERRQ(ierr);
1130   ierr = PCShellSetContext(newpc,fetidppc_ctx);CHKERRQ(ierr);
1131   ierr = PCShellSetApply(newpc,FETIDPPCApply);CHKERRQ(ierr);
1132   ierr = PCShellSetDestroy(newpc,PCBDDCDestroyFETIDPPC);CHKERRQ(ierr);
1133   ierr = PCSetOperators(newpc,newmat,newmat,SAME_PRECONDITIONER);CHKERRQ(ierr);
1134   ierr = PCSetUp(newpc);CHKERRQ(ierr);
1135   /* return pointers for objects created */
1136   *fetidp_mat=newmat;
1137   *fetidp_pc=newpc;
1138   PetscFunctionReturn(0);
1139 }
1140 
1141 #undef __FUNCT__
1142 #define __FUNCT__ "PCBDDCCreateFETIDPOperators"
1143 /*@
1144  PCBDDCCreateFETIDPOperators - Create operators for FETIDP.
1145 
1146    Collective
1147 
1148    Input Parameters:
1149 +  pc - the BDDC preconditioning context (setup must be already called)
1150 
1151    Level: developer
1152 
1153    Notes:
1154 
1155 .seealso: PCBDDC
1156 @*/
1157 PetscErrorCode PCBDDCCreateFETIDPOperators(PC pc, Mat *fetidp_mat, PC *fetidp_pc)
1158 {
1159   PetscErrorCode ierr;
1160 
1161   PetscFunctionBegin;
1162   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1163   if (pc->setupcalled) {
1164     ierr = PetscTryMethod(pc,"PCBDDCCreateFETIDPOperators_C",(PC,Mat*,PC*),(pc,fetidp_mat,fetidp_pc));CHKERRQ(ierr);
1165   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"You must call PCSetup_BDDC() first \n");
1166   PetscFunctionReturn(0);
1167 }
1168 /* -------------------------------------------------------------------------- */
1169 /*MC
1170    PCBDDC - Balancing Domain Decomposition by Constraints.
1171 
1172    Options Database Keys:
1173 .    -pcbddc ??? -
1174 
1175    Level: intermediate
1176 
1177    Notes: The matrix used with this preconditioner must be of type MATIS
1178 
1179           Unlike more 'conventional' interface preconditioners, this iterates over ALL the
1180           degrees of freedom, NOT just those on the interface (this allows the use of approximate solvers
1181           on the subdomains).
1182 
1183           Options for the coarse grid preconditioner can be set with -
1184           Options for the Dirichlet subproblem can be set with -
1185           Options for the Neumann subproblem can be set with -
1186 
1187    Contributed by Stefano Zampini
1188 
1189 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,  MATIS
1190 M*/
1191 
#undef __FUNCT__
#define __FUNCT__ "PCCreate_BDDC"
/*
   PCCreate_BDDC - Allocates the PC_BDDC context, initializes all fields to
   their defaults, creates the embedded PCIS data and the local graph, and
   registers the BDDC function table and composed query functions on the PC.
*/
PETSC_EXTERN PetscErrorCode PCCreate_BDDC(PC pc)
{
  PetscErrorCode      ierr;
  PC_BDDC             *pcbddc;

  PetscFunctionBegin;
  /* Creates the private data structure for this preconditioner and attach it to the PC object. */
  ierr      = PetscNewLog(pc,PC_BDDC,&pcbddc);CHKERRQ(ierr);
  pc->data  = (void*)pcbddc;

  /* create PCIS data structure */
  ierr = PCISCreate(pc);CHKERRQ(ierr);

  /* BDDC specific defaults: pointers/objects start out null, flags and
     numeric parameters take their documented default values */
  pcbddc->user_primal_vertices       = 0;
  pcbddc->NullSpace                  = 0;
  pcbddc->temp_solution              = 0;
  pcbddc->original_rhs               = 0;
  pcbddc->local_mat                  = 0;
  pcbddc->ChangeOfBasisMatrix        = 0;
  pcbddc->use_change_of_basis        = PETSC_TRUE;
  pcbddc->use_change_on_faces        = PETSC_FALSE;
  pcbddc->coarse_vec                 = 0;
  pcbddc->coarse_rhs                 = 0;
  pcbddc->coarse_ksp                 = 0;
  pcbddc->coarse_phi_B               = 0;
  pcbddc->coarse_phi_D               = 0;
  pcbddc->coarse_psi_B               = 0;
  pcbddc->coarse_psi_D               = 0;
  pcbddc->vec1_P                     = 0;
  pcbddc->vec1_R                     = 0;
  pcbddc->vec2_R                     = 0;
  pcbddc->local_auxmat1              = 0;
  pcbddc->local_auxmat2              = 0;
  pcbddc->R_to_B                     = 0;
  pcbddc->R_to_D                     = 0;
  pcbddc->ksp_D                      = 0;
  pcbddc->ksp_R                      = 0;
  pcbddc->local_primal_indices       = 0;
  pcbddc->inexact_prec_type          = PETSC_FALSE;
  pcbddc->NeumannBoundaries          = 0;
  pcbddc->ISForDofs                  = 0;
  pcbddc->ConstraintMatrix           = 0;
  pcbddc->use_nnsp_true              = PETSC_FALSE;
  pcbddc->local_primal_sizes         = 0;
  pcbddc->local_primal_displacements = 0;
  pcbddc->coarse_loc_to_glob         = 0;
  pcbddc->dbg_flag                   = 0;
  pcbddc->coarsening_ratio           = 8;
  pcbddc->use_exact_dirichlet        = PETSC_TRUE;
  pcbddc->current_level              = 0;
  pcbddc->max_levels                 = 1;
  pcbddc->replicated_local_primal_indices = 0;
  pcbddc->replicated_local_primal_values  = 0;

  /* create local graph structure */
  ierr = PCBDDCGraphCreate(&pcbddc->mat_graph);CHKERRQ(ierr);

  /* scaling */
  pcbddc->use_deluxe_scaling         = PETSC_FALSE;
  pcbddc->work_scaling               = 0;

  /* function pointers: unimplemented operations are left at 0 */
  pc->ops->apply               = PCApply_BDDC;
  pc->ops->applytranspose      = 0;
  pc->ops->setup               = PCSetUp_BDDC;
  pc->ops->destroy             = PCDestroy_BDDC;
  pc->ops->setfromoptions      = PCSetFromOptions_BDDC;
  pc->ops->view                = 0;
  pc->ops->applyrichardson     = 0;
  pc->ops->applysymmetricleft  = 0;
  pc->ops->applysymmetricright = 0;
  pc->ops->presolve            = PCPreSolve_BDDC;
  pc->ops->postsolve           = PCPostSolve_BDDC;

  /* composing functions; these are removed again in PCDestroy_BDDC */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetPrimalVerticesLocalIS_C",PCBDDCSetPrimalVerticesLocalIS_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseningRatio_C",PCBDDCSetCoarseningRatio_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetMaxLevels_C",PCBDDCSetMaxLevels_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNullSpace_C",PCBDDCSetNullSpace_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundaries_C",PCBDDCSetDirichletBoundaries_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundaries_C",PCBDDCSetNeumannBoundaries_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundaries_C",PCBDDCGetDirichletBoundaries_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundaries_C",PCBDDCGetNeumannBoundaries_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseProblemType_C",PCBDDCSetCoarseProblemType_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplitting_C",PCBDDCSetDofsSplitting_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",PCBDDCSetLocalAdjacencyGraph_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCCreateFETIDPOperators_C",PCBDDCCreateFETIDPOperators_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetRHS_C",PCBDDCMatFETIDPGetRHS_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetSolution_C",PCBDDCMatFETIDPGetSolution_BDDC);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1286 
1287 /* -------------------------------------------------------------------------- */
1288 /* All static functions from now on                                           */
1289 /* -------------------------------------------------------------------------- */
1290 
1291 /* -------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCBDDCCoarseSetUp"
/*
   PCBDDCCoarseSetUp - Builds the local and coarse data needed to apply the
   BDDC preconditioner; called from PCSetUp_BDDC whenever the solver data
   must be (re)built. The order of the calls below is significant.
*/
static PetscErrorCode PCBDDCCoarseSetUp(PC pc)
{
  PC_BDDC*          pcbddc = (PC_BDDC*)pc->data;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* compute matrix after change of basis and extract local submatrices */
  ierr = PCBDDCSetUpLocalMatrices(pc);CHKERRQ(ierr);

  /* Change global null space passed in by the user if change of basis has been requested */
  if (pcbddc->NullSpace && pcbddc->use_change_of_basis) {
    ierr = PCBDDCNullSpaceAdaptGlobal(pc);CHKERRQ(ierr);
  }

  /* Allocate needed vectors */
  ierr = PCBDDCCreateWorkVectors(pc);CHKERRQ(ierr);

  /* setup local scatters R_to_B and (optionally) R_to_D : PCBDDCCreateWorkVectors should be called first! */
  ierr = PCBDDCSetUpLocalScatters(pc);CHKERRQ(ierr);

  /* setup local solvers ksp_D and ksp_R */
  ierr = PCBDDCSetUpLocalSolvers(pc);CHKERRQ(ierr);

  /* setup local correction and local part of coarse basis */
  ierr = PCBDDCSetUpCoarseLocal(pc);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
1322 
1323 /* -------------------------------------------------------------------------- */
1324 
1325 /* BDDC requires metis 5.0.1 for multilevel */
1326 #if defined(PETSC_HAVE_METIS)
1327 #include "metis.h"
1328 #define MetisInt    idx_t
1329 #define MetisScalar real_t
1330 #endif
1331 
1332 #undef __FUNCT__
1333 #define __FUNCT__ "PCBDDCSetUpCoarseEnvironment"
1334 PetscErrorCode PCBDDCSetUpCoarseEnvironment(PC pc,PetscScalar* coarse_submat_vals)
1335 {
1336 
1337 
1338   Mat_IS    *matis    = (Mat_IS*)pc->pmat->data;
1339   PC_BDDC   *pcbddc   = (PC_BDDC*)pc->data;
1340   PC_IS     *pcis     = (PC_IS*)pc->data;
1341   MPI_Comm  prec_comm;
1342   MPI_Comm  coarse_comm;
1343 
1344   MatNullSpace CoarseNullSpace;
1345 
1346   /* common to all choiches */
1347   PetscScalar *temp_coarse_mat_vals;
1348   PetscScalar *ins_coarse_mat_vals;
1349   PetscInt    *ins_local_primal_indices;
1350   PetscMPIInt *localsizes2,*localdispl2;
1351   PetscMPIInt size_prec_comm;
1352   PetscMPIInt rank_prec_comm;
1353   PetscMPIInt active_rank=MPI_PROC_NULL;
1354   PetscMPIInt master_proc=0;
1355   PetscInt    ins_local_primal_size;
1356   /* specific to MULTILEVEL_BDDC */
1357   PetscMPIInt *ranks_recv=0;
1358   PetscMPIInt count_recv=0;
1359   PetscMPIInt rank_coarse_proc_send_to=-1;
1360   PetscMPIInt coarse_color = MPI_UNDEFINED;
1361   ISLocalToGlobalMapping coarse_ISLG;
1362   /* some other variables */
1363   PetscErrorCode ierr;
1364   MatType coarse_mat_type;
1365   PCType  coarse_pc_type;
1366   KSPType coarse_ksp_type;
1367   PC pc_temp;
1368   PetscInt i,j,k;
1369   PetscInt max_it_coarse_ksp=1;  /* don't increase this value */
1370   /* verbose output viewer */
1371   PetscViewer viewer=pcbddc->dbg_viewer;
1372   PetscInt    dbg_flag=pcbddc->dbg_flag;
1373 
1374   PetscInt      offset,offset2;
1375   PetscMPIInt   im_active,active_procs;
1376   PetscInt      *dnz,*onz;
1377 
1378   PetscBool     setsym,issym=PETSC_FALSE;
1379 
1380   PetscFunctionBegin;
1381   ierr = PetscObjectGetComm((PetscObject)pc,&prec_comm);CHKERRQ(ierr);
1382   ins_local_primal_indices = 0;
1383   ins_coarse_mat_vals      = 0;
1384   localsizes2              = 0;
1385   localdispl2              = 0;
1386   temp_coarse_mat_vals     = 0;
1387   coarse_ISLG              = 0;
1388 
1389   ierr = MPI_Comm_size(prec_comm,&size_prec_comm);CHKERRQ(ierr);
1390   ierr = MPI_Comm_rank(prec_comm,&rank_prec_comm);CHKERRQ(ierr);
1391   ierr = MatIsSymmetricKnown(pc->pmat,&setsym,&issym);CHKERRQ(ierr);
1392 
1393   /* Assign global numbering to coarse dofs */
1394   {
1395     PetscInt     *auxlocal_primal,*aux_idx;
1396     PetscMPIInt  mpi_local_primal_size;
1397     PetscScalar  coarsesum,*array;
1398 
1399     mpi_local_primal_size = (PetscMPIInt)pcbddc->local_primal_size;
1400 
1401     /* Construct needed data structures for message passing */
1402     j = 0;
1403     if (rank_prec_comm == 0 || pcbddc->coarse_problem_type == REPLICATED_BDDC || pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
1404       j = size_prec_comm;
1405     }
1406     ierr = PetscMalloc(j*sizeof(PetscMPIInt),&pcbddc->local_primal_sizes);CHKERRQ(ierr);
1407     ierr = PetscMalloc(j*sizeof(PetscMPIInt),&pcbddc->local_primal_displacements);CHKERRQ(ierr);
1408     /* Gather local_primal_size information for all processes  */
1409     if (pcbddc->coarse_problem_type == REPLICATED_BDDC || pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
1410       ierr = MPI_Allgather(&mpi_local_primal_size,1,MPIU_INT,&pcbddc->local_primal_sizes[0],1,MPIU_INT,prec_comm);CHKERRQ(ierr);
1411     } else {
1412       ierr = MPI_Gather(&mpi_local_primal_size,1,MPIU_INT,&pcbddc->local_primal_sizes[0],1,MPIU_INT,0,prec_comm);CHKERRQ(ierr);
1413     }
1414     pcbddc->replicated_primal_size = 0;
1415     for (i=0; i<j; i++) {
1416       pcbddc->local_primal_displacements[i] = pcbddc->replicated_primal_size ;
1417       pcbddc->replicated_primal_size += pcbddc->local_primal_sizes[i];
1418     }
1419 
1420     /* First let's count coarse dofs.
1421        This code fragment assumes that the number of local constraints per connected component
1422        is not greater than the number of nodes defined for the connected component
1423        (otherwise we will surely have linear dependence between constraints and thus a singular coarse problem) */
1424     ierr = PetscMalloc(pcbddc->local_primal_size*sizeof(PetscInt),&auxlocal_primal);CHKERRQ(ierr);
1425     ierr = PCBDDCGetPrimalVerticesLocalIdx(pc,&i,&aux_idx);CHKERRQ(ierr);
1426     ierr = PetscMemcpy(auxlocal_primal,aux_idx,i*sizeof(PetscInt));CHKERRQ(ierr);
1427     ierr = PetscFree(aux_idx);CHKERRQ(ierr);
1428     ierr = PCBDDCGetPrimalConstraintsLocalIdx(pc,&j,&aux_idx);CHKERRQ(ierr);
1429     ierr = PetscMemcpy(&auxlocal_primal[i],aux_idx,j*sizeof(PetscInt));CHKERRQ(ierr);
1430     ierr = PetscFree(aux_idx);CHKERRQ(ierr);
1431     /* Compute number of coarse dofs */
1432     ierr = PCBDDCSubsetNumbering(prec_comm,matis->mapping,pcbddc->local_primal_size,auxlocal_primal,NULL,&pcbddc->coarse_size,&pcbddc->local_primal_indices);CHKERRQ(ierr);
1433 
1434     if (dbg_flag) {
1435       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1436       ierr = PetscViewerASCIIPrintf(viewer,"--------------------------------------------------\n");CHKERRQ(ierr);
1437       ierr = PetscViewerASCIIPrintf(viewer,"Check coarse indices\n");CHKERRQ(ierr);
1438       ierr = VecSet(pcis->vec1_N,0.0);CHKERRQ(ierr);
1439       ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1440       for (i=0;i<pcbddc->local_primal_size;i++) array[auxlocal_primal[i]]=1.0;
1441       ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1442       ierr = VecSet(pcis->vec1_global,0.0);CHKERRQ(ierr);
1443       ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1444       ierr = VecScatterEnd  (matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1445       ierr = VecScatterBegin(matis->ctx,pcis->vec1_global,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1446       ierr = VecScatterEnd  (matis->ctx,pcis->vec1_global,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1447       ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1448       for (i=0;i<pcis->n;i++) {
1449         if (array[i] == 1.0) {
1450           ierr = ISLocalToGlobalMappingApply(matis->mapping,1,&i,&j);CHKERRQ(ierr);
1451           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d: WRONG COARSE INDEX %d (local %d)\n",PetscGlobalRank,j,i);CHKERRQ(ierr);
1452         }
1453       }
1454       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1455       for (i=0;i<pcis->n;i++) {
1456         if (PetscRealPart(array[i]) > 0.0) array[i] = 1.0/PetscRealPart(array[i]);
1457       }
1458       ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1459       ierr = VecSet(pcis->vec1_global,0.0);CHKERRQ(ierr);
1460       ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1461       ierr = VecScatterEnd  (matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1462       ierr = VecSum(pcis->vec1_global,&coarsesum);CHKERRQ(ierr);
1463       ierr = PetscViewerASCIIPrintf(viewer,"Size of coarse problem SHOULD be %lf\n",coarsesum);CHKERRQ(ierr);
1464       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1465     }
1466     ierr = PetscFree(auxlocal_primal);CHKERRQ(ierr);
1467   }
1468 
1469   if (dbg_flag) {
1470     ierr = PetscViewerASCIIPrintf(viewer,"Size of coarse problem is %d\n",pcbddc->coarse_size);CHKERRQ(ierr);
1471     if (dbg_flag > 1) {
1472       ierr = PetscViewerASCIIPrintf(viewer,"Distribution of local primal indices\n");CHKERRQ(ierr);
1473       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1474       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d\n",PetscGlobalRank);CHKERRQ(ierr);
1475       for (i=0;i<pcbddc->local_primal_size;i++) {
1476         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"local_primal_indices[%d]=%d \n",i,pcbddc->local_primal_indices[i]);
1477       }
1478       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1479     }
1480   }
1481 
1482   im_active = 0;
1483   if (pcis->n) im_active = 1;
1484   ierr = MPI_Allreduce(&im_active,&active_procs,1,MPIU_INT,MPI_SUM,prec_comm);CHKERRQ(ierr);
1485 
1486   /* adapt coarse problem type */
1487 #if defined(PETSC_HAVE_METIS)
1488   if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
1489     if (pcbddc->current_level < pcbddc->max_levels) {
1490       if ( (active_procs/pcbddc->coarsening_ratio) < 2 ) {
1491         if (dbg_flag) {
1492           ierr = PetscViewerASCIIPrintf(viewer,"Not enough active processes on level %d (active %d,ratio %d). Parallel direct solve for coarse problem\n",pcbddc->current_level,active_procs,pcbddc->coarsening_ratio);CHKERRQ(ierr);
1493          ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1494         }
1495         pcbddc->coarse_problem_type = PARALLEL_BDDC;
1496       }
1497     } else {
1498       if (dbg_flag) {
1499         ierr = PetscViewerASCIIPrintf(viewer,"Max number of levels reached. Using parallel direct solve for coarse problem\n",pcbddc->max_levels,active_procs,pcbddc->coarsening_ratio);CHKERRQ(ierr);
1500         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1501       }
1502       pcbddc->coarse_problem_type = PARALLEL_BDDC;
1503     }
1504   }
1505 #else
1506   pcbddc->coarse_problem_type = PARALLEL_BDDC;
1507 #endif
1508 
1509   switch(pcbddc->coarse_problem_type){
1510 
1511     case(MULTILEVEL_BDDC):   /* we define a coarse mesh where subdomains are elements */
1512     {
1513 #if defined(PETSC_HAVE_METIS)
1514       /* we need additional variables */
1515       MetisInt    n_subdomains,n_parts,objval,ncon,faces_nvtxs;
1516       MetisInt    *metis_coarse_subdivision;
1517       MetisInt    options[METIS_NOPTIONS];
1518       PetscMPIInt size_coarse_comm,rank_coarse_comm;
1519       PetscMPIInt procs_jumps_coarse_comm;
1520       PetscMPIInt *coarse_subdivision;
1521       PetscMPIInt *total_count_recv;
1522       PetscMPIInt *total_ranks_recv;
1523       PetscMPIInt *displacements_recv;
1524       PetscMPIInt *my_faces_connectivity;
1525       PetscMPIInt *petsc_faces_adjncy;
1526       MetisInt    *faces_adjncy;
1527       MetisInt    *faces_xadj;
1528       PetscMPIInt *number_of_faces;
1529       PetscMPIInt *faces_displacements;
1530       PetscInt    *array_int;
1531       PetscMPIInt my_faces=0;
1532       PetscMPIInt total_faces=0;
1533       PetscInt    ranks_stretching_ratio;
1534 
1535       /* define some quantities */
1536       pcbddc->coarse_communications_type = SCATTERS_BDDC;
1537       coarse_mat_type = MATIS;
1538       coarse_pc_type  = PCBDDC;
1539       coarse_ksp_type = KSPRICHARDSON;
1540 
1541       /* details of coarse decomposition */
1542       n_subdomains = active_procs;
1543       n_parts      = n_subdomains/pcbddc->coarsening_ratio;
1544       ranks_stretching_ratio = size_prec_comm/active_procs;
1545       procs_jumps_coarse_comm = pcbddc->coarsening_ratio*ranks_stretching_ratio;
1546 
1547 #if 0
1548       PetscMPIInt *old_ranks;
1549       PetscInt    *new_ranks,*jj,*ii;
1550       MatPartitioning mat_part;
1551       IS coarse_new_decomposition,is_numbering;
1552       PetscViewer viewer_test;
1553       MPI_Comm    test_coarse_comm;
1554       PetscMPIInt test_coarse_color;
1555       Mat         mat_adj;
1556       /* Create new communicator for coarse problem splitting the old one */
1557       /* procs with coarse_color = MPI_UNDEFINED will have coarse_comm = MPI_COMM_NULL (from mpi standards)
1558          key = rank_prec_comm -> keep same ordering of ranks from the old to the new communicator */
1559       test_coarse_color = ( im_active ? 0 : MPI_UNDEFINED );
1560       test_coarse_comm = MPI_COMM_NULL;
1561       ierr = MPI_Comm_split(prec_comm,test_coarse_color,rank_prec_comm,&test_coarse_comm);CHKERRQ(ierr);
1562       if (im_active) {
1563         ierr = PetscMalloc(n_subdomains*sizeof(PetscMPIInt),&old_ranks);
1564         ierr = PetscMalloc(size_prec_comm*sizeof(PetscInt),&new_ranks);
1565         ierr = MPI_Comm_rank(test_coarse_comm,&rank_coarse_comm);CHKERRQ(ierr);
1566         ierr = MPI_Comm_size(test_coarse_comm,&j);CHKERRQ(ierr);
1567         ierr = MPI_Allgather(&rank_prec_comm,1,MPIU_INT,old_ranks,1,MPIU_INT,test_coarse_comm);CHKERRQ(ierr);
1568         for (i=0; i<size_prec_comm; i++) new_ranks[i] = -1;
1569         for (i=0; i<n_subdomains; i++) new_ranks[old_ranks[i]] = i;
1570         ierr = PetscViewerASCIIOpen(test_coarse_comm,"test_mat_part.out",&viewer_test);CHKERRQ(ierr);
1571         k = pcis->n_neigh-1;
1572         ierr = PetscMalloc(2*sizeof(PetscInt),&ii);
1573         ii[0]=0;
1574         ii[1]=k;
1575         ierr = PetscMalloc(k*sizeof(PetscInt),&jj);
1576         for (i=0; i<k; i++) jj[i]=new_ranks[pcis->neigh[i+1]];
1577         ierr = PetscSortInt(k,jj);CHKERRQ(ierr);
1578         ierr = MatCreateMPIAdj(test_coarse_comm,1,n_subdomains,ii,jj,NULL,&mat_adj);CHKERRQ(ierr);
1579         ierr = MatView(mat_adj,viewer_test);CHKERRQ(ierr);
1580         ierr = MatPartitioningCreate(test_coarse_comm,&mat_part);CHKERRQ(ierr);
1581         ierr = MatPartitioningSetAdjacency(mat_part,mat_adj);CHKERRQ(ierr);
1582         ierr = MatPartitioningSetFromOptions(mat_part);CHKERRQ(ierr);
1583         printf("Setting Nparts %d\n",n_parts);
1584         ierr = MatPartitioningSetNParts(mat_part,n_parts);CHKERRQ(ierr);
1585         ierr = MatPartitioningView(mat_part,viewer_test);CHKERRQ(ierr);
1586         ierr = MatPartitioningApply(mat_part,&coarse_new_decomposition);CHKERRQ(ierr);
1587         ierr = ISView(coarse_new_decomposition,viewer_test);CHKERRQ(ierr);
1588         ierr = ISPartitioningToNumbering(coarse_new_decomposition,&is_numbering);CHKERRQ(ierr);
1589         ierr = ISView(is_numbering,viewer_test);CHKERRQ(ierr);
1590         ierr = PetscViewerDestroy(&viewer_test);CHKERRQ(ierr);
1591         ierr = ISDestroy(&coarse_new_decomposition);CHKERRQ(ierr);
1592         ierr = ISDestroy(&is_numbering);CHKERRQ(ierr);
1593         ierr = MatPartitioningDestroy(&mat_part);CHKERRQ(ierr);
1594         ierr = PetscFree(old_ranks);CHKERRQ(ierr);
1595         ierr = PetscFree(new_ranks);CHKERRQ(ierr);
1596         ierr = MPI_Comm_free(&test_coarse_comm);CHKERRQ(ierr);
1597       }
1598 #endif
1599 
1600       /* build CSR graph of subdomains' connectivity */
1601       ierr = PetscMalloc (pcis->n*sizeof(PetscInt),&array_int);CHKERRQ(ierr);
1602       ierr = PetscMemzero(array_int,pcis->n*sizeof(PetscInt));CHKERRQ(ierr);
1603       for (i=1;i<pcis->n_neigh;i++){/* i=1 so I don't count myself -> faces nodes counts to 1 */
1604         for (j=0;j<pcis->n_shared[i];j++){
1605           array_int[ pcis->shared[i][j] ]+=1;
1606         }
1607       }
1608       for (i=1;i<pcis->n_neigh;i++){
1609         for (j=0;j<pcis->n_shared[i];j++){
1610           if (array_int[ pcis->shared[i][j] ] > 0 ){
1611             my_faces++;
1612             break;
1613           }
1614         }
1615       }
1616 
1617       ierr = MPI_Reduce(&my_faces,&total_faces,1,MPIU_INT,MPI_SUM,master_proc,prec_comm);CHKERRQ(ierr);
1618       ierr = PetscMalloc (my_faces*sizeof(PetscInt),&my_faces_connectivity);CHKERRQ(ierr);
1619       my_faces=0;
1620       for (i=1;i<pcis->n_neigh;i++){
1621         for (j=0;j<pcis->n_shared[i];j++){
1622           if (array_int[ pcis->shared[i][j] ] > 0 ){
1623             my_faces_connectivity[my_faces]=pcis->neigh[i];
1624             my_faces++;
1625             break;
1626           }
1627         }
1628       }
1629       if (rank_prec_comm == master_proc) {
1630         ierr = PetscMalloc (total_faces*sizeof(PetscMPIInt),&petsc_faces_adjncy);CHKERRQ(ierr);
1631         ierr = PetscMalloc (size_prec_comm*sizeof(PetscMPIInt),&number_of_faces);CHKERRQ(ierr);
1632         ierr = PetscMalloc (total_faces*sizeof(MetisInt),&faces_adjncy);CHKERRQ(ierr);
1633         ierr = PetscMalloc ((n_subdomains+1)*sizeof(MetisInt),&faces_xadj);CHKERRQ(ierr);
1634         ierr = PetscMalloc ((size_prec_comm+1)*sizeof(PetscMPIInt),&faces_displacements);CHKERRQ(ierr);
1635       }
1636       ierr = MPI_Gather(&my_faces,1,MPIU_INT,&number_of_faces[0],1,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr);
1637       if (rank_prec_comm == master_proc) {
1638         faces_xadj[0]=0;
1639         faces_displacements[0]=0;
1640         j=0;
1641         for (i=1;i<size_prec_comm+1;i++) {
1642           faces_displacements[i]=faces_displacements[i-1]+number_of_faces[i-1];
1643           if (number_of_faces[i-1]) {
1644             j++;
1645             faces_xadj[j]=faces_xadj[j-1]+number_of_faces[i-1];
1646           }
1647         }
1648       }
1649       ierr = MPI_Gatherv(&my_faces_connectivity[0],my_faces,MPIU_INT,&petsc_faces_adjncy[0],number_of_faces,faces_displacements,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr);
1650       ierr = PetscFree(my_faces_connectivity);CHKERRQ(ierr);
1651       ierr = PetscFree(array_int);CHKERRQ(ierr);
1652       if (rank_prec_comm == master_proc) {
1653         for (i=0;i<total_faces;i++) faces_adjncy[i]=(MetisInt)(petsc_faces_adjncy[i]/ranks_stretching_ratio); /* cast to MetisInt */
1654         ierr = PetscFree(faces_displacements);CHKERRQ(ierr);
1655         ierr = PetscFree(number_of_faces);CHKERRQ(ierr);
1656         ierr = PetscFree(petsc_faces_adjncy);CHKERRQ(ierr);
1657       }
1658 
1659       if ( rank_prec_comm == master_proc ) {
1660 
1661         PetscInt heuristic_for_metis=3;
1662 
1663         ncon=1;
1664         faces_nvtxs=n_subdomains;
1665         /* partition graph induced by face connectivity */
1666         ierr = PetscMalloc (n_subdomains*sizeof(MetisInt),&metis_coarse_subdivision);CHKERRQ(ierr);
1667         ierr = METIS_SetDefaultOptions(options);
1668         /* we need a contiguous partition of the coarse mesh */
1669         options[METIS_OPTION_CONTIG]=1;
1670         options[METIS_OPTION_NITER]=30;
1671         if (pcbddc->coarsening_ratio > 1) {
1672           if (n_subdomains>n_parts*heuristic_for_metis) {
1673             options[METIS_OPTION_IPTYPE]=METIS_IPTYPE_EDGE;
1674             options[METIS_OPTION_OBJTYPE]=METIS_OBJTYPE_CUT;
1675             ierr = METIS_PartGraphKway(&faces_nvtxs,&ncon,faces_xadj,faces_adjncy,NULL,NULL,NULL,&n_parts,NULL,NULL,options,&objval,metis_coarse_subdivision);
1676             if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in METIS_PartGraphKway (metis error code %D) called from PCBDDCSetUpCoarseEnvironment\n",ierr);
1677           } else {
1678             ierr = METIS_PartGraphRecursive(&faces_nvtxs,&ncon,faces_xadj,faces_adjncy,NULL,NULL,NULL,&n_parts,NULL,NULL,options,&objval,metis_coarse_subdivision);
1679             if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in METIS_PartGraphRecursive (metis error code %D) called from PCBDDCSetUpCoarseEnvironment\n",ierr);
1680           }
1681         } else {
1682           for (i=0;i<n_subdomains;i++) metis_coarse_subdivision[i]=i;
1683         }
1684         ierr = PetscFree(faces_xadj);CHKERRQ(ierr);
1685         ierr = PetscFree(faces_adjncy);CHKERRQ(ierr);
1686         ierr = PetscMalloc(size_prec_comm*sizeof(PetscMPIInt),&coarse_subdivision);CHKERRQ(ierr);
1687 
1688         /* copy/cast values avoiding possible type conflicts between PETSc, MPI and METIS */
1689         for (i=0;i<size_prec_comm;i++) coarse_subdivision[i]=MPI_PROC_NULL;
1690         for (i=0;i<n_subdomains;i++) coarse_subdivision[ranks_stretching_ratio*i]=(PetscInt)(metis_coarse_subdivision[i]);
1691         ierr = PetscFree(metis_coarse_subdivision);CHKERRQ(ierr);
1692       }
1693 
1694       /* Create new communicator for coarse problem splitting the old one */
1695       if ( !(rank_prec_comm%procs_jumps_coarse_comm) && rank_prec_comm < procs_jumps_coarse_comm*n_parts ){
1696         coarse_color=0;              /* for communicator splitting */
1697         active_rank=rank_prec_comm;  /* for insertion of matrix values */
1698       }
1699       /* procs with coarse_color = MPI_UNDEFINED will have coarse_comm = MPI_COMM_NULL (from mpi standards)
1700          key = rank_prec_comm -> keep same ordering of ranks from the old to the new communicator */
1701       ierr = MPI_Comm_split(prec_comm,coarse_color,rank_prec_comm,&coarse_comm);CHKERRQ(ierr);
1702 
1703       if ( coarse_color == 0 ) {
1704         ierr = MPI_Comm_size(coarse_comm,&size_coarse_comm);CHKERRQ(ierr);
1705         ierr = MPI_Comm_rank(coarse_comm,&rank_coarse_comm);CHKERRQ(ierr);
1706       } else {
1707         rank_coarse_comm = MPI_PROC_NULL;
1708       }
1709 
1710       /* master proc take care of arranging and distributing coarse information */
1711       if (rank_coarse_comm == master_proc) {
1712         ierr = PetscMalloc (size_coarse_comm*sizeof(PetscMPIInt),&displacements_recv);CHKERRQ(ierr);
1713         ierr = PetscMalloc (size_coarse_comm*sizeof(PetscMPIInt),&total_count_recv);CHKERRQ(ierr);
1714         ierr = PetscMalloc (n_subdomains*sizeof(PetscMPIInt),&total_ranks_recv);CHKERRQ(ierr);
1715         /* some initializations */
1716         displacements_recv[0]=0;
1717         ierr = PetscMemzero(total_count_recv,size_coarse_comm*sizeof(PetscMPIInt));CHKERRQ(ierr);
1718         /* count from how many processes the j-th process of the coarse decomposition will receive data */
1719         for (j=0;j<size_coarse_comm;j++) {
1720           for (i=0;i<size_prec_comm;i++) {
1721           if (coarse_subdivision[i]==j) total_count_recv[j]++;
1722           }
1723         }
1724         /* displacements needed for scatterv of total_ranks_recv */
1725       for (i=1; i<size_coarse_comm; i++) displacements_recv[i]=displacements_recv[i-1]+total_count_recv[i-1];
1726 
1727         /* Now fill properly total_ranks_recv -> each coarse process will receive the ranks (in prec_comm communicator) of its friend (sending) processes */
1728         ierr = PetscMemzero(total_count_recv,size_coarse_comm*sizeof(PetscMPIInt));CHKERRQ(ierr);
1729         for (j=0;j<size_coarse_comm;j++) {
1730           for (i=0;i<size_prec_comm;i++) {
1731             if (coarse_subdivision[i]==j) {
1732               total_ranks_recv[displacements_recv[j]+total_count_recv[j]]=i;
1733               total_count_recv[j]+=1;
1734             }
1735           }
1736         }
1737         /*for (j=0;j<size_coarse_comm;j++) {
1738           printf("process %d in new rank will receive from %d processes (original ranks follows)\n",j,total_count_recv[j]);
1739           for (i=0;i<total_count_recv[j];i++) {
1740             printf("%d ",total_ranks_recv[displacements_recv[j]+i]);
1741           }
1742           printf("\n");
1743         }*/
1744 
1745         /* identify new decomposition in terms of ranks in the old communicator */
1746         for (i=0;i<n_subdomains;i++) {
1747           coarse_subdivision[ranks_stretching_ratio*i]=coarse_subdivision[ranks_stretching_ratio*i]*procs_jumps_coarse_comm;
1748         }
1749         /*printf("coarse_subdivision in old and new ranks\n");
1750         for (i=0;i<size_prec_comm;i++)
1751           if (coarse_subdivision[i]!=MPI_PROC_NULL) {
1752             printf("%d=(%d %d), ",i,coarse_subdivision[i],coarse_subdivision[i]/procs_jumps_coarse_comm);
1753           } else {
1754             printf("%d=(%d %d), ",i,coarse_subdivision[i],coarse_subdivision[i]);
1755           }
1756         printf("\n");*/
1757       }
1758 
1759       /* Scatter new decomposition for send details */
1760       ierr = MPI_Scatter(&coarse_subdivision[0],1,MPIU_INT,&rank_coarse_proc_send_to,1,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr);
1761       /* Scatter receiving details to members of coarse decomposition */
1762       if ( coarse_color == 0) {
1763         ierr = MPI_Scatter(&total_count_recv[0],1,MPIU_INT,&count_recv,1,MPIU_INT,master_proc,coarse_comm);CHKERRQ(ierr);
1764         ierr = PetscMalloc (count_recv*sizeof(PetscMPIInt),&ranks_recv);CHKERRQ(ierr);
1765         ierr = MPI_Scatterv(&total_ranks_recv[0],total_count_recv,displacements_recv,MPIU_INT,&ranks_recv[0],count_recv,MPIU_INT,master_proc,coarse_comm);CHKERRQ(ierr);
1766       }
1767 
1768       /*printf("I will send my matrix data to proc  %d\n",rank_coarse_proc_send_to);
1769       if (coarse_color == 0) {
1770         printf("I will receive some matrix data from %d processes (ranks follows)\n",count_recv);
1771         for (i=0;i<count_recv;i++)
1772           printf("%d ",ranks_recv[i]);
1773         printf("\n");
1774       }*/
1775 
1776       if (rank_prec_comm == master_proc) {
1777         ierr = PetscFree(coarse_subdivision);CHKERRQ(ierr);
1778         ierr = PetscFree(total_count_recv);CHKERRQ(ierr);
1779         ierr = PetscFree(total_ranks_recv);CHKERRQ(ierr);
1780         ierr = PetscFree(displacements_recv);CHKERRQ(ierr);
1781       }
1782 #endif
1783       break;
1784     }
1785 
1786     case(REPLICATED_BDDC):
1787 
1788       pcbddc->coarse_communications_type = GATHERS_BDDC;
1789       coarse_mat_type = MATSEQAIJ;
1790       coarse_pc_type  = PCLU;
1791       coarse_ksp_type  = KSPPREONLY;
1792       coarse_comm = PETSC_COMM_SELF;
1793       active_rank = rank_prec_comm;
1794       break;
1795 
1796     case(PARALLEL_BDDC):
1797 
1798       pcbddc->coarse_communications_type = SCATTERS_BDDC;
1799       coarse_mat_type = MATAIJ;
1800       coarse_pc_type  = PCREDUNDANT;
1801       coarse_ksp_type  = KSPPREONLY;
1802       coarse_comm = prec_comm;
1803       active_rank = rank_prec_comm;
1804       break;
1805 
1806     case(SEQUENTIAL_BDDC):
1807       pcbddc->coarse_communications_type = GATHERS_BDDC;
1808       coarse_mat_type = MATAIJ;
1809       coarse_pc_type = PCLU;
1810       coarse_ksp_type  = KSPPREONLY;
1811       coarse_comm = PETSC_COMM_SELF;
1812       active_rank = master_proc;
1813       break;
1814   }
1815 
1816   switch(pcbddc->coarse_communications_type){
1817 
1818     case(SCATTERS_BDDC):
1819       {
1820         if (pcbddc->coarse_problem_type==MULTILEVEL_BDDC) {
1821 
1822           IS coarse_IS;
1823 
1824           if(pcbddc->coarsening_ratio == 1) {
1825             ins_local_primal_size = pcbddc->local_primal_size;
1826             ins_local_primal_indices = pcbddc->local_primal_indices;
1827             if (coarse_color == 0) { ierr = PetscFree(ranks_recv);CHKERRQ(ierr); }
1828             /* nonzeros */
1829             ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&dnz);CHKERRQ(ierr);
1830             ierr = PetscMemzero(dnz,ins_local_primal_size*sizeof(PetscInt));CHKERRQ(ierr);
1831             for (i=0;i<ins_local_primal_size;i++) {
1832               dnz[i] = ins_local_primal_size;
1833             }
1834           } else {
1835             PetscMPIInt send_size;
1836             PetscMPIInt *send_buffer;
1837             PetscInt    *aux_ins_indices;
1838             PetscInt    ii,jj;
1839             MPI_Request *requests;
1840 
1841             ierr = PetscMalloc(count_recv*sizeof(PetscMPIInt),&localdispl2);CHKERRQ(ierr);
1842             /* reusing pcbddc->local_primal_displacements and pcbddc->replicated_primal_size */
1843             ierr = PetscFree(pcbddc->local_primal_displacements);CHKERRQ(ierr);
1844             ierr = PetscMalloc((count_recv+1)*sizeof(PetscMPIInt),&pcbddc->local_primal_displacements);CHKERRQ(ierr);
1845             pcbddc->replicated_primal_size = count_recv;
1846             j = 0;
1847             for (i=0;i<count_recv;i++) {
1848               pcbddc->local_primal_displacements[i] = j;
1849               j += pcbddc->local_primal_sizes[ranks_recv[i]];
1850             }
1851             pcbddc->local_primal_displacements[count_recv] = j;
1852             ierr = PetscMalloc(j*sizeof(PetscMPIInt),&pcbddc->replicated_local_primal_indices);CHKERRQ(ierr);
1853             /* allocate auxiliary space */
1854             ierr = PetscMalloc(count_recv*sizeof(PetscMPIInt),&localsizes2);CHKERRQ(ierr);
1855             ierr = PetscMalloc(pcbddc->coarse_size*sizeof(PetscInt),&aux_ins_indices);CHKERRQ(ierr);
1856             ierr = PetscMemzero(aux_ins_indices,pcbddc->coarse_size*sizeof(PetscInt));CHKERRQ(ierr);
1857             /* allocate buffers for message passing */
1858             ierr = PetscMalloc((count_recv+1)*sizeof(MPI_Request),&requests);CHKERRQ(ierr);
1859             for (i=0;i<count_recv+1;i++) { requests[i]=MPI_REQUEST_NULL; }
1860             /* send indices to be inserted */
1861             for (i=0;i<count_recv;i++) {
1862               send_size = pcbddc->local_primal_sizes[ranks_recv[i]];
1863               ierr = MPI_Irecv(&pcbddc->replicated_local_primal_indices[pcbddc->local_primal_displacements[i]],send_size,MPIU_INT,ranks_recv[i],999,prec_comm,&requests[i]);CHKERRQ(ierr);
1864             }
1865             if (rank_coarse_proc_send_to != MPI_PROC_NULL ) {
1866               send_size = pcbddc->local_primal_size;
1867               ierr = PetscMalloc(send_size*sizeof(PetscMPIInt),&send_buffer);CHKERRQ(ierr);
1868               for (i=0;i<send_size;i++) {
1869                 send_buffer[i]=(PetscMPIInt)pcbddc->local_primal_indices[i];
1870               }
1871               ierr = MPI_Isend(send_buffer,send_size,MPIU_INT,rank_coarse_proc_send_to,999,prec_comm,&requests[count_recv]);CHKERRQ(ierr);
1872             }
1873             ierr = MPI_Waitall(count_recv+1,requests,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
1874             if (rank_coarse_proc_send_to != MPI_PROC_NULL ) {
1875               ierr = PetscFree(send_buffer);CHKERRQ(ierr);
1876             }
1877             j = 0;
1878             for (i=0;i<count_recv;i++) {
1879               ii = pcbddc->local_primal_displacements[i+1]-pcbddc->local_primal_displacements[i];
1880               localsizes2[i] = ii*ii;
1881               localdispl2[i] = j;
1882               j += localsizes2[i];
1883               jj = pcbddc->local_primal_displacements[i];
1884               /* it counts the coarse subdomains sharing the coarse node */
1885               for (k=0;k<ii;k++) {
1886                 aux_ins_indices[pcbddc->replicated_local_primal_indices[jj+k]] += 1;
1887               }
1888             }
1889             /* temp_coarse_mat_vals used to store matrix values to be received */
1890             ierr = PetscMalloc(j*sizeof(PetscScalar),&temp_coarse_mat_vals);CHKERRQ(ierr);
1891             /* evaluate how many values I will insert in coarse mat */
1892             ins_local_primal_size = 0;
1893             for (i=0;i<pcbddc->coarse_size;i++) {
1894               if (aux_ins_indices[i]) {
1895                 ins_local_primal_size++;
1896               }
1897             }
1898             /* evaluate indices I will insert in coarse mat */
1899             ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
1900             j = 0;
1901             for(i=0;i<pcbddc->coarse_size;i++) {
1902               if(aux_ins_indices[i]) {
1903                 ins_local_primal_indices[j] = i;
1904                 j++;
1905               }
1906             }
1907             /* processes participating in coarse problem receive matrix data from their friends */
1908             for (i=0;i<count_recv;i++) {
1909               ierr = MPI_Irecv(&temp_coarse_mat_vals[localdispl2[i]],localsizes2[i],MPIU_SCALAR,ranks_recv[i],666,prec_comm,&requests[i]);CHKERRQ(ierr);
1910             }
1911             if (rank_coarse_proc_send_to != MPI_PROC_NULL ) {
1912               send_size = pcbddc->local_primal_size*pcbddc->local_primal_size;
1913               ierr = MPI_Isend(&coarse_submat_vals[0],send_size,MPIU_SCALAR,rank_coarse_proc_send_to,666,prec_comm,&requests[count_recv]);CHKERRQ(ierr);
1914             }
1915             ierr = MPI_Waitall(count_recv+1,requests,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
1916             /* nonzeros */
1917             ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&dnz);CHKERRQ(ierr);
1918             ierr = PetscMemzero(dnz,ins_local_primal_size*sizeof(PetscInt));CHKERRQ(ierr);
1919             /* use aux_ins_indices to realize a global to local mapping */
1920             j=0;
1921             for(i=0;i<pcbddc->coarse_size;i++){
1922               if(aux_ins_indices[i]==0){
1923                 aux_ins_indices[i]=-1;
1924               } else {
1925                 aux_ins_indices[i]=j;
1926                 j++;
1927               }
1928             }
1929             for (i=0;i<count_recv;i++) {
1930               j = pcbddc->local_primal_sizes[ranks_recv[i]];
1931               for (k=0;k<j;k++) {
1932                 dnz[aux_ins_indices[pcbddc->replicated_local_primal_indices[pcbddc->local_primal_displacements[i]+k]]] += j;
1933               }
1934             }
1935             /* check */
1936             for (i=0;i<ins_local_primal_size;i++) {
1937               if (dnz[i] > ins_local_primal_size) {
1938                 dnz[i] = ins_local_primal_size;
1939               }
1940             }
1941             ierr = PetscFree(requests);CHKERRQ(ierr);
1942             ierr = PetscFree(aux_ins_indices);CHKERRQ(ierr);
1943             if (coarse_color == 0) { ierr = PetscFree(ranks_recv);CHKERRQ(ierr); }
1944           }
1945           /* create local to global mapping needed by coarse MATIS */
1946           if (coarse_comm != MPI_COMM_NULL ) {ierr = MPI_Comm_free(&coarse_comm);CHKERRQ(ierr);}
1947           coarse_comm = prec_comm;
1948           active_rank = rank_prec_comm;
1949           ierr = ISCreateGeneral(coarse_comm,ins_local_primal_size,ins_local_primal_indices,PETSC_COPY_VALUES,&coarse_IS);CHKERRQ(ierr);
1950           ierr = ISLocalToGlobalMappingCreateIS(coarse_IS,&coarse_ISLG);CHKERRQ(ierr);
1951           ierr = ISDestroy(&coarse_IS);CHKERRQ(ierr);
1952         } else if (pcbddc->coarse_problem_type==PARALLEL_BDDC) {
1953           /* arrays for values insertion */
1954           ins_local_primal_size = pcbddc->local_primal_size;
          ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
          ierr = PetscMalloc(ins_local_primal_size*ins_local_primal_size*sizeof(PetscScalar),&ins_coarse_mat_vals);CHKERRQ(ierr);
          /* copy this rank's primal indices and its dense local coarse matrix
             (column major) into the buffers used later for MatSetValues */
          for (j=0;j<ins_local_primal_size;j++){
            ins_local_primal_indices[j]=pcbddc->local_primal_indices[j];
            for (i=0;i<ins_local_primal_size;i++) {
              ins_coarse_mat_vals[j*ins_local_primal_size+i]=coarse_submat_vals[j*ins_local_primal_size+i];
            }
          }
        }
        break;

    }

    /* gather (or allgather) every subdomain's primal indices and dense local
       coarse matrix on the rank(s) that assemble the replicated coarse problem */
    case(GATHERS_BDDC):
      {

        PetscMPIInt mysize,mysize2;
        PetscMPIInt *send_buffer;

        if (rank_prec_comm==active_rank) {
          /* receive buffers; replicated_primal_size is the sum of local_primal_sizes over all ranks */
          ierr = PetscMalloc ( pcbddc->replicated_primal_size*sizeof(PetscMPIInt),&pcbddc->replicated_local_primal_indices);CHKERRQ(ierr);
          ierr = PetscMalloc ( pcbddc->replicated_primal_size*sizeof(PetscScalar),&pcbddc->replicated_local_primal_values);CHKERRQ(ierr);
          ierr = PetscMalloc ( size_prec_comm*sizeof(PetscMPIInt),&localsizes2);CHKERRQ(ierr);
          ierr = PetscMalloc ( size_prec_comm*sizeof(PetscMPIInt),&localdispl2);CHKERRQ(ierr);
          /* arrays for values insertion: rank i contributes a dense block of
             local_primal_sizes[i]^2 scalars; localdispl2 are the prefix sums */
      for (i=0;i<size_prec_comm;i++) localsizes2[i]=pcbddc->local_primal_sizes[i]*pcbddc->local_primal_sizes[i];
          localdispl2[0]=0;
      for (i=1;i<size_prec_comm;i++) localdispl2[i]=localsizes2[i-1]+localdispl2[i-1];
          j=0;
      for (i=0;i<size_prec_comm;i++) j+=localsizes2[i];
          ierr = PetscMalloc ( j*sizeof(PetscScalar),&temp_coarse_mat_vals);CHKERRQ(ierr);
        }

        mysize=pcbddc->local_primal_size;
        mysize2=pcbddc->local_primal_size*pcbddc->local_primal_size;
        /* NOTE(review): send_buffer holds PetscMPIInt but the transfers below use
           MPIU_INT (PetscInt); these types differ when PETSc is configured with
           64-bit indices -- verify the datatype/buffer pairing */
        ierr = PetscMalloc(mysize*sizeof(PetscMPIInt),&send_buffer);CHKERRQ(ierr);
    for (i=0; i<mysize; i++) send_buffer[i]=(PetscMPIInt)pcbddc->local_primal_indices[i];

        if (pcbddc->coarse_problem_type == SEQUENTIAL_BDDC){
          /* sequential coarse solve: only master_proc needs the replicated data */
          ierr = MPI_Gatherv(send_buffer,mysize,MPIU_INT,&pcbddc->replicated_local_primal_indices[0],pcbddc->local_primal_sizes,pcbddc->local_primal_displacements,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr);
          ierr = MPI_Gatherv(&coarse_submat_vals[0],mysize2,MPIU_SCALAR,&temp_coarse_mat_vals[0],localsizes2,localdispl2,MPIU_SCALAR,master_proc,prec_comm);CHKERRQ(ierr);
        } else {
          /* replicated coarse solve: every rank receives all contributions */
          ierr = MPI_Allgatherv(send_buffer,mysize,MPIU_INT,&pcbddc->replicated_local_primal_indices[0],pcbddc->local_primal_sizes,pcbddc->local_primal_displacements,MPIU_INT,prec_comm);CHKERRQ(ierr);
          ierr = MPI_Allgatherv(&coarse_submat_vals[0],mysize2,MPIU_SCALAR,&temp_coarse_mat_vals[0],localsizes2,localdispl2,MPIU_SCALAR,prec_comm);CHKERRQ(ierr);
        }
        ierr = PetscFree(send_buffer);CHKERRQ(ierr);
        break;
      }/* switch on coarse problem and communications associated with finished */
  }

  /* Now create and fill up coarse matrix */
  if ( rank_prec_comm == active_rank ) {

    Mat matis_coarse_local_mat;

    if (pcbddc->coarse_problem_type != MULTILEVEL_BDDC) {
      /* assembled coarse matrix of type coarse_mat_type; user can override via the "coarse_" options prefix */
      ierr = MatCreate(coarse_comm,&pcbddc->coarse_mat);CHKERRQ(ierr);
      ierr = MatSetSizes(pcbddc->coarse_mat,PETSC_DECIDE,PETSC_DECIDE,pcbddc->coarse_size,pcbddc->coarse_size);CHKERRQ(ierr);
      ierr = MatSetType(pcbddc->coarse_mat,coarse_mat_type);CHKERRQ(ierr);
      ierr = MatSetOptionsPrefix(pcbddc->coarse_mat,"coarse_");CHKERRQ(ierr);
      ierr = MatSetFromOptions(pcbddc->coarse_mat);CHKERRQ(ierr);
      ierr = MatSetUp(pcbddc->coarse_mat);CHKERRQ(ierr);
      ierr = MatSetOption(pcbddc->coarse_mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr); /* local values stored in column major */
      ierr = MatSetOption(pcbddc->coarse_mat,MAT_IGNORE_ZERO_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
    } else {
      /* multilevel: unassembled MATIS coarse matrix so BDDC can recurse; options
         are applied to the inner (sequential) local matrix as well */
      ierr = MatCreateIS(coarse_comm,1,PETSC_DECIDE,PETSC_DECIDE,pcbddc->coarse_size,pcbddc->coarse_size,coarse_ISLG,&pcbddc->coarse_mat);CHKERRQ(ierr);
      ierr = MatSetUp(pcbddc->coarse_mat);CHKERRQ(ierr);
      ierr = MatISGetLocalMat(pcbddc->coarse_mat,&matis_coarse_local_mat);CHKERRQ(ierr);
      ierr = MatSetOptionsPrefix(pcbddc->coarse_mat,"coarse_");CHKERRQ(ierr);
      ierr = MatSetFromOptions(pcbddc->coarse_mat);CHKERRQ(ierr);
      ierr = MatSetUp(matis_coarse_local_mat);CHKERRQ(ierr);
      ierr = MatSetOption(matis_coarse_local_mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr); /* local values stored in column major */
      ierr = MatSetOption(matis_coarse_local_mat,MAT_IGNORE_ZERO_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
    }
    /* preallocation */
    if (pcbddc->coarse_problem_type != MULTILEVEL_BDDC) {

      PetscInt lrows,lcols,bs;

      ierr = MatGetLocalSize(pcbddc->coarse_mat,&lrows,&lcols);CHKERRQ(ierr);
      ierr = MatPreallocateInitialize(coarse_comm,lrows,lcols,dnz,onz);CHKERRQ(ierr);
      ierr = MatGetBlockSize(pcbddc->coarse_mat,&bs);CHKERRQ(ierr);

      if (pcbddc->coarse_problem_type == PARALLEL_BDDC) {

        /* estimate diagonal (dnz) and off-diagonal (onz) nonzeros per local row:
           each rank counts its contributions in my_dnz/my_onz, then the counts are
           summed across ranks through two MPI vectors with ADD_VALUES */
        Vec         vec_dnz,vec_onz;
        PetscScalar *my_dnz,*my_onz,*array;
        PetscInt    *mat_ranges,*row_ownership;
        PetscInt    coarse_index_row,coarse_index_col,owner;

        ierr = VecCreate(prec_comm,&vec_dnz);CHKERRQ(ierr);
        ierr = VecSetBlockSize(vec_dnz,bs);CHKERRQ(ierr);
        ierr = VecSetSizes(vec_dnz,PETSC_DECIDE,pcbddc->coarse_size);CHKERRQ(ierr);
        ierr = VecSetType(vec_dnz,VECMPI);CHKERRQ(ierr);
        ierr = VecDuplicate(vec_dnz,&vec_onz);CHKERRQ(ierr);

        ierr = PetscMalloc(pcbddc->local_primal_size*sizeof(PetscScalar),&my_dnz);CHKERRQ(ierr);
        ierr = PetscMalloc(pcbddc->local_primal_size*sizeof(PetscScalar),&my_onz);CHKERRQ(ierr);
        ierr = PetscMemzero(my_dnz,pcbddc->local_primal_size*sizeof(PetscScalar));CHKERRQ(ierr);
        ierr = PetscMemzero(my_onz,pcbddc->local_primal_size*sizeof(PetscScalar));CHKERRQ(ierr);

        /* map every global coarse row to the rank owning it, using the coarse matrix row ranges */
        ierr = PetscMalloc(pcbddc->coarse_size*sizeof(PetscInt),&row_ownership);CHKERRQ(ierr);
        ierr = MatGetOwnershipRanges(pcbddc->coarse_mat,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
        for (i=0;i<size_prec_comm;i++) {
          for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
            row_ownership[j]=i;
          }
        }

        /* the local coarse block is dense: each pair (i,j), j>=i, contributes to
           row i (and by symmetry to row j); a column inside the row owner's local
           range counts as diagonal-block, otherwise off-diagonal */
        for (i=0;i<pcbddc->local_primal_size;i++) {
          coarse_index_row = pcbddc->local_primal_indices[i];
          owner = row_ownership[coarse_index_row];
          for (j=i;j<pcbddc->local_primal_size;j++) {
            /* NOTE(review): this reassignment is loop-invariant (owner was already
               set from coarse_index_row just above) -- candidate for removal */
            owner = row_ownership[coarse_index_row];
            coarse_index_col = pcbddc->local_primal_indices[j];
            if (coarse_index_col > mat_ranges[owner]-1 && coarse_index_col < mat_ranges[owner+1] ) {
              my_dnz[i] += 1.0;
            } else {
              my_onz[i] += 1.0;
            }
            if (i != j) {
              /* symmetric contribution for row j with respect to its own owner */
              owner = row_ownership[coarse_index_col];
              if (coarse_index_row > mat_ranges[owner]-1 && coarse_index_row < mat_ranges[owner+1] ) {
                my_dnz[j] += 1.0;
              } else {
                my_onz[j] += 1.0;
              }
            }
          }
        }
        ierr = VecSet(vec_dnz,0.0);CHKERRQ(ierr);
        ierr = VecSet(vec_onz,0.0);CHKERRQ(ierr);
        if (pcbddc->local_primal_size) {
          ierr = VecSetValues(vec_dnz,pcbddc->local_primal_size,pcbddc->local_primal_indices,my_dnz,ADD_VALUES);CHKERRQ(ierr);
          ierr = VecSetValues(vec_onz,pcbddc->local_primal_size,pcbddc->local_primal_indices,my_onz,ADD_VALUES);CHKERRQ(ierr);
        }
        ierr = VecAssemblyBegin(vec_dnz);CHKERRQ(ierr);
        ierr = VecAssemblyBegin(vec_onz);CHKERRQ(ierr);
        ierr = VecAssemblyEnd(vec_dnz);CHKERRQ(ierr);
        ierr = VecAssemblyEnd(vec_onz);CHKERRQ(ierr);
        /* j = number of locally owned coarse rows; copy the summed counts back into dnz/onz */
        j = mat_ranges[rank_prec_comm+1]-mat_ranges[rank_prec_comm];
        ierr = VecGetArray(vec_dnz,&array);CHKERRQ(ierr);
        for (i=0; i<j; i++) dnz[i] = (PetscInt)PetscRealPart(array[i]);

        ierr = VecRestoreArray(vec_dnz,&array);CHKERRQ(ierr);
        ierr = VecGetArray(vec_onz,&array);CHKERRQ(ierr);
        for (i=0;i<j;i++) onz[i] = (PetscInt)PetscRealPart(array[i]);

        ierr = VecRestoreArray(vec_onz,&array);CHKERRQ(ierr);
        ierr = PetscFree(my_dnz);CHKERRQ(ierr);
        ierr = PetscFree(my_onz);CHKERRQ(ierr);
        ierr = PetscFree(row_ownership);CHKERRQ(ierr);
        ierr = VecDestroy(&vec_dnz);CHKERRQ(ierr);
        ierr = VecDestroy(&vec_onz);CHKERRQ(ierr);
      } else {
        /* replicated/sequential case: data is already gathered, so preallocate
           directly from each subdomain's replicated primal indices */
        for (k=0;k<size_prec_comm;k++){
          offset=pcbddc->local_primal_displacements[k];
          offset2=localdispl2[k];
          ins_local_primal_size = pcbddc->local_primal_sizes[k];
          ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
          for (j=0;j<ins_local_primal_size;j++){
            ins_local_primal_indices[j]=(PetscInt)pcbddc->replicated_local_primal_indices[offset+j];
          }
          for (j=0;j<ins_local_primal_size;j++) {
            ierr = MatPreallocateSet(ins_local_primal_indices[j],ins_local_primal_size,ins_local_primal_indices,dnz,onz);CHKERRQ(ierr);
          }
          ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr);
        }
      }

      /* check: counts were accumulated per contributing subdomain and can
         overcount shared entries, so clamp them to the valid ranges */
      for (i=0;i<lrows;i++) {
        if (dnz[i]>lcols) dnz[i]=lcols;
        if (onz[i]>pcbddc->coarse_size-lcols) onz[i]=pcbddc->coarse_size-lcols;
      }
      /* only the call matching the actual matrix type takes effect; the other is a no-op */
      ierr = MatSeqAIJSetPreallocation(pcbddc->coarse_mat,0,dnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(pcbddc->coarse_mat,0,dnz,0,onz);CHKERRQ(ierr);
      ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
    } else {
      ierr = MatSeqAIJSetPreallocation(matis_coarse_local_mat,0,dnz);CHKERRQ(ierr);
      ierr = PetscFree(dnz);CHKERRQ(ierr);
    }
    /* insert values */
    if (pcbddc->coarse_problem_type == PARALLEL_BDDC) {
      /* each rank adds its own dense contribution; MPI assembly sums overlaps */
      ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,ADD_VALUES);CHKERRQ(ierr);
    } else if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
      if (pcbddc->coarsening_ratio == 1) {
        /* one coarse subdomain per fine subdomain: the local MATIS block is exactly
           this rank's coarse submatrix, inserted (not added) in place */
        ins_coarse_mat_vals = coarse_submat_vals;
        ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,INSERT_VALUES);CHKERRQ(ierr);
      } else {
        ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr);
        /* NOTE(review): k runs up to replicated_primal_size but indexes
           localdispl2[k] (allocated with size_prec_comm entries) and
           local_primal_displacements[k+1]; confirm these arrays are sized/filled
           for this bound in the multilevel gather path */
        for (k=0;k<pcbddc->replicated_primal_size;k++) {
          offset = pcbddc->local_primal_displacements[k];
          offset2 = localdispl2[k];
          ins_local_primal_size = pcbddc->local_primal_displacements[k+1]-pcbddc->local_primal_displacements[k];
          ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
          for (j=0;j<ins_local_primal_size;j++){
            ins_local_primal_indices[j]=(PetscInt)pcbddc->replicated_local_primal_indices[offset+j];
          }
          ins_coarse_mat_vals = &temp_coarse_mat_vals[offset2];
          ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,ADD_VALUES);CHKERRQ(ierr);
          ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr);
        }
      }
      /* reset: these now point to freed or borrowed storage, so the common
         cleanup below must not free them again */
      ins_local_primal_indices = 0;
      ins_coarse_mat_vals = 0;
    } else {
      /* sequential/replicated case: insert every subdomain's gathered block */
      for (k=0;k<size_prec_comm;k++){
        offset=pcbddc->local_primal_displacements[k];
        offset2=localdispl2[k];
        ins_local_primal_size = pcbddc->local_primal_sizes[k];
        ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
        for (j=0;j<ins_local_primal_size;j++){
          ins_local_primal_indices[j]=(PetscInt)pcbddc->replicated_local_primal_indices[offset+j];
        }
        ins_coarse_mat_vals = &temp_coarse_mat_vals[offset2];
        ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,ADD_VALUES);CHKERRQ(ierr);
        ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr);
      }
      /* reset so the common cleanup below does not double-free / free borrowed storage */
      ins_local_primal_indices = 0;
      ins_coarse_mat_vals = 0;
    }
    ierr = MatAssemblyBegin(pcbddc->coarse_mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(pcbddc->coarse_mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    /* symmetry of coarse matrix */
    if (issym) {
      ierr = MatSetOption(pcbddc->coarse_mat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = MatGetVecs(pcbddc->coarse_mat,&pcbddc->coarse_vec,&pcbddc->coarse_rhs);CHKERRQ(ierr);
  }

  /* create loc to glob scatters if needed */
  if (pcbddc->coarse_communications_type == SCATTERS_BDDC) {
     IS local_IS,global_IS;
     /* scatter from the local primal vector (0..local_primal_size-1) to the
        global coarse vector positions given by local_primal_indices */
     ierr = ISCreateStride(PETSC_COMM_SELF,pcbddc->local_primal_size,0,1,&local_IS);CHKERRQ(ierr);
     ierr = ISCreateGeneral(PETSC_COMM_SELF,pcbddc->local_primal_size,pcbddc->local_primal_indices,PETSC_COPY_VALUES,&global_IS);CHKERRQ(ierr);
     ierr = VecScatterCreate(pcbddc->vec1_P,local_IS,pcbddc->coarse_vec,global_IS,&pcbddc->coarse_loc_to_glob);CHKERRQ(ierr);
     ierr = ISDestroy(&local_IS);CHKERRQ(ierr);
     ierr = ISDestroy(&global_IS);CHKERRQ(ierr);
  }

  /* free memory no longer needed */
  if (coarse_ISLG)              { ierr = ISLocalToGlobalMappingDestroy(&coarse_ISLG);CHKERRQ(ierr); }
  if (ins_local_primal_indices) { ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr); }
  if (ins_coarse_mat_vals)      { ierr = PetscFree(ins_coarse_mat_vals);CHKERRQ(ierr); }
  if (localsizes2)              { ierr = PetscFree(localsizes2);CHKERRQ(ierr); }
  if (localdispl2)              { ierr = PetscFree(localdispl2);CHKERRQ(ierr); }
  if (temp_coarse_mat_vals)     { ierr = PetscFree(temp_coarse_mat_vals);CHKERRQ(ierr); }

  /* Compute coarse null space */
  CoarseNullSpace = 0;
  if (pcbddc->NullSpace) {
    ierr = PCBDDCNullSpaceAssembleCoarse(pc,&CoarseNullSpace);CHKERRQ(ierr);
  }

  /* KSP for coarse problem */
  if (rank_prec_comm == active_rank) {
    PetscBool isbddc=PETSC_FALSE;

    ierr = KSPCreate(coarse_comm,&pcbddc->coarse_ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)pcbddc->coarse_ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = KSPSetOperators(pcbddc->coarse_ksp,pcbddc->coarse_mat,pcbddc->coarse_mat,SAME_PRECONDITIONER);CHKERRQ(ierr);
    ierr = KSPSetTolerances(pcbddc->coarse_ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,max_it_coarse_ksp);CHKERRQ(ierr);
    ierr = KSPSetType(pcbddc->coarse_ksp,coarse_ksp_type);CHKERRQ(ierr);
    ierr = KSPGetPC(pcbddc->coarse_ksp,&pc_temp);CHKERRQ(ierr);
    ierr = PCSetType(pc_temp,coarse_pc_type);CHKERRQ(ierr);
    /* Allow user's customization */
    ierr = KSPSetOptionsPrefix(pcbddc->coarse_ksp,"coarse_");CHKERRQ(ierr);
    /* Set Up PC for coarse problem BDDC */
    if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
      /* recurse: configure the inner BDDC PC for the next level, propagating
         coarsening ratio, level limits and (optionally) the coarse null space */
      i = pcbddc->current_level+1;
      ierr = PCBDDCSetLevel(pc_temp,i);CHKERRQ(ierr);
      ierr = PCBDDCSetCoarseningRatio(pc_temp,pcbddc->coarsening_ratio);CHKERRQ(ierr);
      ierr = PCBDDCSetMaxLevels(pc_temp,pcbddc->max_levels);CHKERRQ(ierr);
      ierr = PCBDDCSetCoarseProblemType(pc_temp,MULTILEVEL_BDDC);CHKERRQ(ierr);
      if (CoarseNullSpace) {
        ierr = PCBDDCSetNullSpace(pc_temp,CoarseNullSpace);CHKERRQ(ierr);
      }
      if (dbg_flag) {
        ierr = PetscViewerASCIIPrintf(viewer,"----------------Level %d: Setting up level %d---------------\n",pcbddc->current_level,i);CHKERRQ(ierr);
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      }
    } else {
      if (CoarseNullSpace) {
        ierr = KSPSetNullSpace(pcbddc->coarse_ksp,CoarseNullSpace);CHKERRQ(ierr);
      }
    }
    ierr = KSPSetFromOptions(pcbddc->coarse_ksp);CHKERRQ(ierr);
    ierr = KSPSetUp(pcbddc->coarse_ksp);CHKERRQ(ierr);

    /* j = max iterations after options processing; a single-iteration coarse
       solve behaves like preonly, so skip the (collective) norm computation */
    ierr = KSPGetTolerances(pcbddc->coarse_ksp,NULL,NULL,NULL,&j);CHKERRQ(ierr);
    ierr = KSPGetPC(pcbddc->coarse_ksp,&pc_temp);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject)pc_temp,PCBDDC,&isbddc);CHKERRQ(ierr);
    if (j == 1) {
      ierr = KSPSetNormType(pcbddc->coarse_ksp,KSP_NORM_NONE);CHKERRQ(ierr);
      if (isbddc) {
        ierr = PCBDDCSetUseExactDirichlet(pc_temp,PETSC_FALSE);CHKERRQ(ierr);
      }
    }
  }
  /* Check coarse problem if requested */
  if ( dbg_flag && rank_prec_comm == active_rank ) {
    KSP check_ksp;
    PC  check_pc;
    Vec check_vec;
    PetscReal   abs_infty_error,infty_error,lambda_min,lambda_max;
    KSPType check_ksp_type;

    /* Create ksp object suitable for extreme eigenvalues' estimation */
    ierr = KSPCreate(coarse_comm,&check_ksp);CHKERRQ(ierr);
    ierr = KSPSetOperators(check_ksp,pcbddc->coarse_mat,pcbddc->coarse_mat,SAME_PRECONDITIONER);CHKERRQ(ierr);
    ierr = KSPSetTolerances(check_ksp,1.e-12,1.e-12,PETSC_DEFAULT,pcbddc->coarse_size);CHKERRQ(ierr);
    if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
      /* inexact coarse solve: use an iterative method so singular values can be estimated */
      if (issym) check_ksp_type = KSPCG;
      else check_ksp_type = KSPGMRES;
      ierr = KSPSetComputeSingularValues(check_ksp,PETSC_TRUE);CHKERRQ(ierr);
    } else {
      check_ksp_type = KSPPREONLY;
    }
    ierr = KSPSetType(check_ksp,check_ksp_type);CHKERRQ(ierr);
    /* reuse (borrow) the preconditioner already configured on the coarse KSP */
    ierr = KSPGetPC(pcbddc->coarse_ksp,&check_pc);CHKERRQ(ierr);
    ierr = KSPSetPC(check_ksp,check_pc);CHKERRQ(ierr);
    ierr = KSPSetUp(check_ksp);CHKERRQ(ierr);
    /* create random vec */
    ierr = VecDuplicate(pcbddc->coarse_vec,&check_vec);CHKERRQ(ierr);
    ierr = VecSetRandom(check_vec,NULL);CHKERRQ(ierr);
    if (CoarseNullSpace) {
      ierr = MatNullSpaceRemove(CoarseNullSpace,check_vec);CHKERRQ(ierr);
    }
    /* manufactured solution: rhs = A * check_vec, then solve and compare */
    ierr = MatMult(pcbddc->coarse_mat,check_vec,pcbddc->coarse_rhs);CHKERRQ(ierr);
    /* solve coarse problem */
    ierr = KSPSolve(check_ksp,pcbddc->coarse_rhs,pcbddc->coarse_vec);CHKERRQ(ierr);
    if (CoarseNullSpace) {
      ierr = MatNullSpaceRemove(CoarseNullSpace,pcbddc->coarse_vec);CHKERRQ(ierr);
    }
    /* check coarse problem residual error */
    ierr = VecAXPY(check_vec,-1.0,pcbddc->coarse_vec);CHKERRQ(ierr);
    ierr = VecNorm(check_vec,NORM_INFINITY,&infty_error);CHKERRQ(ierr);
    ierr = MatMult(pcbddc->coarse_mat,check_vec,pcbddc->coarse_rhs);CHKERRQ(ierr);
    ierr = VecNorm(pcbddc->coarse_rhs,NORM_INFINITY,&abs_infty_error);CHKERRQ(ierr);
    ierr = VecDestroy(&check_vec);CHKERRQ(ierr);
    /* get eigenvalue estimation if inexact */
    if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
      ierr = KSPComputeExtremeSingularValues(check_ksp,&lambda_max,&lambda_min);CHKERRQ(ierr);
      ierr = KSPGetIterationNumber(check_ksp,&k);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem eigenvalues estimated with %d iterations of %s.\n",k,check_ksp_type);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem eigenvalues: % 1.14e %1.14e\n",lambda_min,lambda_max);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem exact infty_error   : %1.14e\n",infty_error);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem residual infty_error: %1.14e\n",abs_infty_error);CHKERRQ(ierr);
    ierr = KSPDestroy(&check_ksp);CHKERRQ(ierr);
  }
  if (dbg_flag) {
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  }
  /* safe on a NULL/zero null space */
  ierr = MatNullSpaceDestroy(&CoarseNullSpace);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
2314 
2315