xref: /petsc/src/ksp/pc/impls/bddc/bddc.c (revision 8ce42a968a7c9c62a1a503b5a381eaf1d8dbcaa1)
1 /* TODOLIST
2    DofSplitting and DM attached to pc?
3    Change SetNeumannBoundaries to SetNeumannBoundariesLocal and provide new SetNeumannBoundaries (same Dirichlet)
4    change how to deal with the coarse problem (PCBDDCSetCoarseEnvironment):
     - simplify coarse problem structure -> PCBDDC or PCREDUNDANT, nothing else -> same comm for all levels?
6      - remove coarse enums and allow use of PCBDDCGetCoarseKSP
7      - remove metis dependency -> use MatPartitioning for multilevel -> Assemble serial adjacency in PCBDDCAnalyzeInterface?
8    code refactoring:
9      - pick up better names for static functions
10    change options structure:
11      - insert BDDC into MG framework?
12    provide other ops? Ask to developers
13    remove all unused printf
14    man pages
15 */
16 
17 /* ----------------------------------------------------------------------------------------------------------------------------------------------
18    Implementation of BDDC preconditioner based on:
19    C. Dohrmann "An approximate BDDC preconditioner", Numerical Linear Algebra with Applications Volume 14, Issue 2, pages 149-168, March 2007
20    ---------------------------------------------------------------------------------------------------------------------------------------------- */
21 
22 #include "bddc.h" /*I "petscpc.h" I*/  /* includes for fortran wrappers */
23 #include "bddcprivate.h"
24 #include <petscblaslapack.h>
25 
26 /* prototypes for static functions contained in bddc.c */
27 static PetscErrorCode PCBDDCSetLevel(PC,PetscInt);
28 static PetscErrorCode PCBDDCCoarseSetUp(PC);
29 
30 /* -------------------------------------------------------------------------- */
31 #undef __FUNCT__
32 #define __FUNCT__ "PCSetFromOptions_BDDC"
33 PetscErrorCode PCSetFromOptions_BDDC(PC pc)
34 {
35   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
36   PetscErrorCode ierr;
37 
38   PetscFunctionBegin;
39   ierr = PetscOptionsHead("BDDC options");CHKERRQ(ierr);
40   /* Verbose debugging of main data structures */
41   ierr = PetscOptionsInt("-pc_bddc_check_level"       ,"Verbose (debugging) output for PCBDDC"                       ,"none",pcbddc->dbg_flag      ,&pcbddc->dbg_flag      ,NULL);CHKERRQ(ierr);
42   /* Some customization for default primal space */
43   ierr = PetscOptionsBool("-pc_bddc_vertices_only"   ,"Use only vertices in coarse space (i.e. discard constraints)","none",pcbddc->vertices_flag   ,&pcbddc->vertices_flag   ,NULL);CHKERRQ(ierr);
44   ierr = PetscOptionsBool("-pc_bddc_constraints_only","Use only constraints in coarse space (i.e. discard vertices)","none",pcbddc->constraints_flag,&pcbddc->constraints_flag,NULL);CHKERRQ(ierr);
45   ierr = PetscOptionsBool("-pc_bddc_faces_only"      ,"Use only faces among constraints of coarse space (i.e. discard edges)"         ,"none",pcbddc->faces_flag      ,&pcbddc->faces_flag      ,NULL);CHKERRQ(ierr);
46   ierr = PetscOptionsBool("-pc_bddc_edges_only"      ,"Use only edges among constraints of coarse space (i.e. discard faces)"         ,"none",pcbddc->edges_flag      ,&pcbddc->edges_flag      ,NULL);CHKERRQ(ierr);
47   /* Coarse solver context */
48   static const char * const avail_coarse_problems[] = {"sequential","replicated","parallel","multilevel","CoarseProblemType","PC_BDDC_",0}; /*order of choiches depends on ENUM defined in bddc.h */
49   ierr = PetscOptionsEnum("-pc_bddc_coarse_problem_type","Set coarse problem type","none",avail_coarse_problems,(PetscEnum)pcbddc->coarse_problem_type,(PetscEnum*)&pcbddc->coarse_problem_type,NULL);CHKERRQ(ierr);
50   /* Two different application of BDDC to the whole set of dofs, internal and interface */
51   ierr = PetscOptionsBool("-pc_bddc_switch_preconditioning_type","Switch between M_2 (default) and M_3 preconditioners (as defined by Dohrmann)","none",pcbddc->inexact_prec_type,&pcbddc->inexact_prec_type,NULL);CHKERRQ(ierr);
52   ierr = PetscOptionsBool("-pc_bddc_use_change_of_basis","Use change of basis approach for primal space","none",pcbddc->use_change_of_basis,&pcbddc->use_change_of_basis,NULL);CHKERRQ(ierr);
53   ierr = PetscOptionsBool("-pc_bddc_use_change_on_faces","Use change of basis approach for face constraints","none",pcbddc->use_change_on_faces,&pcbddc->use_change_on_faces,NULL);CHKERRQ(ierr);
54   if (!pcbddc->use_change_of_basis) {
55     pcbddc->use_change_on_faces = PETSC_FALSE;
56   }
57   ierr = PetscOptionsInt("-pc_bddc_coarsening_ratio","Set coarsening ratio used in multilevel coarsening","none",pcbddc->coarsening_ratio,&pcbddc->coarsening_ratio,NULL);CHKERRQ(ierr);
58   ierr = PetscOptionsInt("-pc_bddc_max_levels","Set maximum number of levels for multilevel","none",pcbddc->max_levels,&pcbddc->max_levels,NULL);CHKERRQ(ierr);
59   ierr = PetscOptionsBool("-pc_bddc_use_deluxe_scaling","Use deluxe scaling for BDDC","none",pcbddc->use_deluxe_scaling,&pcbddc->use_deluxe_scaling,NULL);CHKERRQ(ierr);
60   ierr = PetscOptionsTail();CHKERRQ(ierr);
61   PetscFunctionReturn(0);
62 }
63 /* -------------------------------------------------------------------------- */
64 #undef __FUNCT__
65 #define __FUNCT__ "PCBDDCSetPrimalVerticesLocalIS_BDDC"
66 static PetscErrorCode PCBDDCSetPrimalVerticesLocalIS_BDDC(PC pc, IS PrimalVertices)
67 {
68   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
69   PetscErrorCode ierr;
70 
71   PetscFunctionBegin;
72   ierr = ISDestroy(&pcbddc->user_primal_vertices);CHKERRQ(ierr);
73   ierr = PetscObjectReference((PetscObject)PrimalVertices);CHKERRQ(ierr);
74   pcbddc->user_primal_vertices = PrimalVertices;
75   PetscFunctionReturn(0);
76 }
77 #undef __FUNCT__
78 #define __FUNCT__ "PCBDDCSetPrimalVerticesLocalIS"
79 /*@
80  PCBDDCSetPrimalVerticesLocalIS - Set user defined primal vertices in PCBDDC.
81 
82    Not collective
83 
84    Input Parameters:
85 +  pc - the preconditioning context
86 -  PrimalVertices - index sets of primal vertices in local numbering
87 
88    Level: intermediate
89 
90    Notes:
91 
92 .seealso: PCBDDC
93 @*/
PetscErrorCode PCBDDCSetPrimalVerticesLocalIS(PC pc, IS PrimalVertices)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(PrimalVertices,IS_CLASSID,2);
  /* Dispatch to the type-specific implementation; silently a no-op if pc is not PCBDDC */
  ierr = PetscTryMethod(pc,"PCBDDCSetPrimalVerticesLocalIS_C",(PC,IS),(pc,PrimalVertices));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
104 /* -------------------------------------------------------------------------- */
105 #undef __FUNCT__
106 #define __FUNCT__ "PCBDDCSetCoarseProblemType_BDDC"
/* Implementation of PCBDDCSetCoarseProblemType: records the requested coarse
   problem type in the PC_BDDC context (used later during setup). */
static PetscErrorCode PCBDDCSetCoarseProblemType_BDDC(PC pc, CoarseProblemType CPT)
{
  PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  pcbddc->coarse_problem_type = CPT;
  PetscFunctionReturn(0);
}
115 
116 #undef __FUNCT__
117 #define __FUNCT__ "PCBDDCSetCoarseProblemType"
118 /*@
119  PCBDDCSetCoarseProblemType - Set coarse problem type in PCBDDC.
120 
121    Not collective
122 
123    Input Parameters:
124 +  pc - the preconditioning context
125 -  CoarseProblemType - pick a better name and explain what this is
126 
127    Level: intermediate
128 
129    Notes:
130    Not collective but all procs must call with same arguments.
131 
132 .seealso: PCBDDC
133 @*/
PetscErrorCode PCBDDCSetCoarseProblemType(PC pc, CoarseProblemType CPT)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatch to the type-specific implementation; silently a no-op if pc is not PCBDDC */
  ierr = PetscTryMethod(pc,"PCBDDCSetCoarseProblemType_C",(PC,CoarseProblemType),(pc,CPT));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
143 /* -------------------------------------------------------------------------- */
144 #undef __FUNCT__
145 #define __FUNCT__ "PCBDDCSetCoarseningRatio_BDDC"
/* Implementation of PCBDDCSetCoarseningRatio: stores the multilevel coarsening
   ratio k (approximately k fine subdomains aggregated per coarse subdomain). */
static PetscErrorCode PCBDDCSetCoarseningRatio_BDDC(PC pc,PetscInt k)
{
  PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  pcbddc->coarsening_ratio=k;
  PetscFunctionReturn(0);
}
154 
155 #undef __FUNCT__
156 #define __FUNCT__ "PCBDDCSetCoarseningRatio"
157 /*@
158  PCBDDCSetCoarseningRatio - Set coarsening ratio used in multilevel coarsening
159 
160    Logically collective on PC
161 
162    Input Parameters:
163 +  pc - the preconditioning context
164 -  k - coarsening ratio
165 
   Approximately k subdomains at the finer level will be aggregated into a single subdomain at the coarser level.
167 
168    Level: intermediate
169 
170    Notes:
171 
172 .seealso: PCBDDC
173 @*/
PetscErrorCode PCBDDCSetCoarseningRatio(PC pc,PetscInt k)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatch to the type-specific implementation; silently a no-op if pc is not PCBDDC */
  ierr = PetscTryMethod(pc,"PCBDDCSetCoarseningRatio_C",(PC,PetscInt),(pc,k));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
183 /* -------------------------------------------------------------------------- */
184 
185 #undef __FUNCT__
186 #define __FUNCT__ "PCBDDCSetMaxLevels_BDDC"
/* Implementation of PCBDDCSetMaxLevels: stores the maximum number of levels
   allowed in the multilevel approach. */
static PetscErrorCode PCBDDCSetMaxLevels_BDDC(PC pc,PetscInt max_levels)
{
  PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  pcbddc->max_levels=max_levels;
  PetscFunctionReturn(0);
}
195 
196 #undef __FUNCT__
197 #define __FUNCT__ "PCBDDCSetMaxLevels"
198 /*@
199  PCBDDCSetMaxLevels - Sets the maximum number of levels within the multilevel approach.
200 
201    Logically collective on PC
202 
203    Input Parameters:
204 +  pc - the preconditioning context
205 -  max_levels - the maximum number of levels
206 
207    Default value is 1, i.e. coarse problem will be solved inexactly with one application
208    of PCBDDC preconditioner if the multilevel approach is requested.
209 
210    Level: intermediate
211 
212    Notes:
213 
214 .seealso: PCBDDC
215 @*/
PetscErrorCode PCBDDCSetMaxLevels(PC pc,PetscInt max_levels)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatch to the type-specific implementation; silently a no-op if pc is not PCBDDC */
  ierr = PetscTryMethod(pc,"PCBDDCSetMaxLevels_C",(PC,PetscInt),(pc,max_levels));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
225 /* -------------------------------------------------------------------------- */
226 
227 #undef __FUNCT__
228 #define __FUNCT__ "PCBDDCSetNullSpace_BDDC"
/* Implementation of PCBDDCSetNullSpace: stores a reference to the null space of
   the global operator in the PC_BDDC context. */
static PetscErrorCode PCBDDCSetNullSpace_BDDC(PC pc,MatNullSpace NullSpace)
{
  PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* reference before destroy: safe even when the incoming object is the one already stored */
  ierr = PetscObjectReference((PetscObject)NullSpace);CHKERRQ(ierr);
  ierr = MatNullSpaceDestroy(&pcbddc->NullSpace);CHKERRQ(ierr);
  pcbddc->NullSpace=NullSpace;
  PetscFunctionReturn(0);
}
240 
241 #undef __FUNCT__
242 #define __FUNCT__ "PCBDDCSetNullSpace"
243 /*@
244  PCBDDCSetNullSpace - Set NullSpace of global operator of BDDC preconditioned mat.
245 
246    Logically collective on PC and MatNullSpace
247 
248    Input Parameters:
249 +  pc - the preconditioning context
250 -  NullSpace - Null space of the linear operator to be preconditioned.
251 
252    Level: intermediate
253 
254    Notes:
255 
256 .seealso: PCBDDC
257 @*/
PetscErrorCode PCBDDCSetNullSpace(PC pc,MatNullSpace NullSpace)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(NullSpace,MAT_NULLSPACE_CLASSID,2);
  /* Dispatch to the type-specific implementation; silently a no-op if pc is not PCBDDC */
  ierr = PetscTryMethod(pc,"PCBDDCSetNullSpace_C",(PC,MatNullSpace),(pc,NullSpace));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
268 /* -------------------------------------------------------------------------- */
269 
270 #undef __FUNCT__
271 #define __FUNCT__ "PCBDDCSetDirichletBoundaries_BDDC"
272 static PetscErrorCode PCBDDCSetDirichletBoundaries_BDDC(PC pc,IS DirichletBoundaries)
273 {
274   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
275   PetscErrorCode ierr;
276 
277   PetscFunctionBegin;
278   ierr = ISDestroy(&pcbddc->DirichletBoundaries);CHKERRQ(ierr);
279   ierr = PetscObjectReference((PetscObject)DirichletBoundaries);CHKERRQ(ierr);
280   pcbddc->DirichletBoundaries=DirichletBoundaries;
281   PetscFunctionReturn(0);
282 }
283 
284 #undef __FUNCT__
285 #define __FUNCT__ "PCBDDCSetDirichletBoundaries"
286 /*@
287  PCBDDCSetDirichletBoundaries - Set index set defining subdomain part (in local ordering)
288                               of Dirichlet boundaries for the global problem.
289 
290    Not collective
291 
292    Input Parameters:
293 +  pc - the preconditioning context
294 -  DirichletBoundaries - sequential index set defining the subdomain part of Dirichlet boundaries (can be NULL)
295 
296    Level: intermediate
297 
298    Notes:
299 
300 .seealso: PCBDDC
301 @*/
PetscErrorCode PCBDDCSetDirichletBoundaries(PC pc,IS DirichletBoundaries)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* NOTE(review): the manpage says DirichletBoundaries can be NULL, but this
     validation macro errors on NULL — confirm intended contract */
  PetscValidHeaderSpecific(DirichletBoundaries,IS_CLASSID,2);
  /* Dispatch to the type-specific implementation; silently a no-op if pc is not PCBDDC */
  ierr = PetscTryMethod(pc,"PCBDDCSetDirichletBoundaries_C",(PC,IS),(pc,DirichletBoundaries));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
312 /* -------------------------------------------------------------------------- */
313 
314 #undef __FUNCT__
315 #define __FUNCT__ "PCBDDCSetNeumannBoundaries_BDDC"
316 static PetscErrorCode PCBDDCSetNeumannBoundaries_BDDC(PC pc,IS NeumannBoundaries)
317 {
318   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
319   PetscErrorCode ierr;
320 
321   PetscFunctionBegin;
322   ierr = ISDestroy(&pcbddc->NeumannBoundaries);CHKERRQ(ierr);
323   ierr = PetscObjectReference((PetscObject)NeumannBoundaries);CHKERRQ(ierr);
324   pcbddc->NeumannBoundaries=NeumannBoundaries;
325   PetscFunctionReturn(0);
326 }
327 
328 #undef __FUNCT__
329 #define __FUNCT__ "PCBDDCSetNeumannBoundaries"
330 /*@
331  PCBDDCSetNeumannBoundaries - Set index set defining subdomain part (in local ordering)
332                               of Neumann boundaries for the global problem.
333 
334    Not collective
335 
336    Input Parameters:
337 +  pc - the preconditioning context
338 -  NeumannBoundaries - sequential index set defining the subdomain part of Neumann boundaries (can be NULL)
339 
340    Level: intermediate
341 
342    Notes:
343 
344 .seealso: PCBDDC
345 @*/
PetscErrorCode PCBDDCSetNeumannBoundaries(PC pc,IS NeumannBoundaries)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* NOTE(review): the manpage says NeumannBoundaries can be NULL, but this
     validation macro errors on NULL — confirm intended contract */
  PetscValidHeaderSpecific(NeumannBoundaries,IS_CLASSID,2);
  /* Dispatch to the type-specific implementation; silently a no-op if pc is not PCBDDC */
  ierr = PetscTryMethod(pc,"PCBDDCSetNeumannBoundaries_C",(PC,IS),(pc,NeumannBoundaries));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
356 /* -------------------------------------------------------------------------- */
357 
358 #undef __FUNCT__
359 #define __FUNCT__ "PCBDDCGetDirichletBoundaries_BDDC"
/* Implementation of PCBDDCGetDirichletBoundaries: returns a borrowed reference
   to the stored Dirichlet boundary IS (may be NULL if never set). */
static PetscErrorCode PCBDDCGetDirichletBoundaries_BDDC(PC pc,IS *DirichletBoundaries)
{
  PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  *DirichletBoundaries = pcbddc->DirichletBoundaries;
  PetscFunctionReturn(0);
}
368 
369 #undef __FUNCT__
370 #define __FUNCT__ "PCBDDCGetDirichletBoundaries"
371 /*@
372  PCBDDCGetDirichletBoundaries - Get index set defining subdomain part (in local ordering)
373                                 of Dirichlet boundaries for the global problem.
374 
375    Not collective
376 
377    Input Parameters:
378 +  pc - the preconditioning context
379 
380    Output Parameters:
381 +  DirichletBoundaries - index set defining the subdomain part of Dirichlet boundaries
382 
383    Level: intermediate
384 
385    Notes:
386 
387 .seealso: PCBDDC
388 @*/
PetscErrorCode PCBDDCGetDirichletBoundaries(PC pc,IS *DirichletBoundaries)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod (not Try): it is an error to query a non-PCBDDC preconditioner */
  ierr = PetscUseMethod(pc,"PCBDDCGetDirichletBoundaries_C",(PC,IS*),(pc,DirichletBoundaries));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
398 /* -------------------------------------------------------------------------- */
399 
400 #undef __FUNCT__
401 #define __FUNCT__ "PCBDDCGetNeumannBoundaries_BDDC"
/* Implementation of PCBDDCGetNeumannBoundaries: returns a borrowed reference
   to the stored Neumann boundary IS (may be NULL if never set). */
static PetscErrorCode PCBDDCGetNeumannBoundaries_BDDC(PC pc,IS *NeumannBoundaries)
{
  PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  *NeumannBoundaries = pcbddc->NeumannBoundaries;
  PetscFunctionReturn(0);
}
410 
411 #undef __FUNCT__
412 #define __FUNCT__ "PCBDDCGetNeumannBoundaries"
413 /*@
414  PCBDDCGetNeumannBoundaries - Get index set defining subdomain part (in local ordering)
415                               of Neumann boundaries for the global problem.
416 
417    Not collective
418 
419    Input Parameters:
420 +  pc - the preconditioning context
421 
422    Output Parameters:
423 +  NeumannBoundaries - index set defining the subdomain part of Neumann boundaries
424 
425    Level: intermediate
426 
427    Notes:
428 
429 .seealso: PCBDDC
430 @*/
PetscErrorCode PCBDDCGetNeumannBoundaries(PC pc,IS *NeumannBoundaries)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod (not Try): it is an error to query a non-PCBDDC preconditioner */
  ierr = PetscUseMethod(pc,"PCBDDCGetNeumannBoundaries_C",(PC,IS*),(pc,NeumannBoundaries));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
440 /* -------------------------------------------------------------------------- */
441 
442 #undef __FUNCT__
443 #define __FUNCT__ "PCBDDCSetLocalAdjacencyGraph_BDDC"
/* Implementation of PCBDDCSetLocalAdjacencyGraph: stores (copying or stealing,
   per copymode) the CSR adjacency of the local matrix in the graph structure. */
static PetscErrorCode PCBDDCSetLocalAdjacencyGraph_BDDC(PC pc, PetscInt nvtxs,const PetscInt xadj[],const PetscInt adjncy[], PetscCopyMode copymode)
{
  PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
  PCBDDCGraph    mat_graph = pcbddc->mat_graph;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* free old CSR */
  ierr = PCBDDCGraphResetCSR(mat_graph);CHKERRQ(ierr);
  /* TODO: PCBDDCGraphSetAdjacency */
  /* get CSR into graph structure */
  if (copymode == PETSC_COPY_VALUES) {
    /* deep-copy: xadj has nvtxs+1 entries, adjncy has xadj[nvtxs] entries */
    ierr = PetscMalloc((nvtxs+1)*sizeof(PetscInt),&mat_graph->xadj);CHKERRQ(ierr);
    ierr = PetscMalloc(xadj[nvtxs]*sizeof(PetscInt),&mat_graph->adjncy);CHKERRQ(ierr);
    ierr = PetscMemcpy(mat_graph->xadj,xadj,(nvtxs+1)*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(mat_graph->adjncy,adjncy,xadj[nvtxs]*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (copymode == PETSC_OWN_POINTER) {
    /* take ownership of the caller's arrays (must have been PetscMalloc'd);
       casting away const is intentional here */
    mat_graph->xadj = (PetscInt*)xadj;
    mat_graph->adjncy = (PetscInt*)adjncy;
  }
  /* other copy modes are rejected by the public wrapper before reaching here */
  mat_graph->nvtxs_csr = nvtxs;
  PetscFunctionReturn(0);
}
467 
468 #undef __FUNCT__
469 #define __FUNCT__ "PCBDDCSetLocalAdjacencyGraph"
470 /*@
471  PCBDDCSetLocalAdjacencyGraph - Set CSR graph of local matrix for use of PCBDDC.
472 
473    Not collective
474 
475    Input Parameters:
476 +  pc - the preconditioning context
477 -  nvtxs - number of local vertices of the graph
478 -  xadj, adjncy - the CSR graph
479 -  copymode - either PETSC_COPY_VALUES or PETSC_OWN_POINTER. In the former case the user must free the array passed in;
480                                                              in the latter case, memory must be obtained with PetscMalloc.
481 
482    Level: intermediate
483 
484    Notes:
485 
486 .seealso: PCBDDC
487 @*/
488 PetscErrorCode PCBDDCSetLocalAdjacencyGraph(PC pc,PetscInt nvtxs,const PetscInt xadj[],const PetscInt adjncy[], PetscCopyMode copymode)
489 {
490   void (*f)(void) = 0;
491   PetscErrorCode ierr;
492 
493   PetscFunctionBegin;
494   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
495   PetscValidIntPointer(xadj,3);
496   PetscValidIntPointer(xadj,4);
497   if (copymode != PETSC_COPY_VALUES && copymode != PETSC_OWN_POINTER) {
498     SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unsupported copy mode %d in %s\n",copymode,__FUNCT__);
499   }
500   ierr = PetscTryMethod(pc,"PCBDDCSetLocalAdjacencyGraph_C",(PC,PetscInt,const PetscInt[],const PetscInt[],PetscCopyMode),(pc,nvtxs,xadj,adjncy,copymode));CHKERRQ(ierr);
501   /* free arrays if PCBDDC is not the PC type */
502   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",&f);CHKERRQ(ierr);
503   if (!f && copymode == PETSC_OWN_POINTER) {
504     ierr = PetscFree(xadj);CHKERRQ(ierr);
505     ierr = PetscFree(adjncy);CHKERRQ(ierr);
506   }
507   PetscFunctionReturn(0);
508 }
509 /* -------------------------------------------------------------------------- */
510 
511 #undef __FUNCT__
512 #define __FUNCT__ "PCBDDCSetDofsSplitting_BDDC"
/* Implementation of PCBDDCSetDofsSplitting: replaces the stored array of field
   index sets with references to the n_is index sets provided by the caller. */
static PetscErrorCode PCBDDCSetDofsSplitting_BDDC(PC pc,PetscInt n_is, IS ISForDofs[])
{
  PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
  PetscInt i;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Destroy ISes if they were already set */
  for (i=0;i<pcbddc->n_ISForDofs;i++) {
    ierr = ISDestroy(&pcbddc->ISForDofs[i]);CHKERRQ(ierr);
  }
  ierr = PetscFree(pcbddc->ISForDofs);CHKERRQ(ierr);
  /* allocate space then set; each IS is referenced, not copied */
  ierr = PetscMalloc(n_is*sizeof(IS),&pcbddc->ISForDofs);CHKERRQ(ierr);
  for (i=0;i<n_is;i++) {
    ierr = PetscObjectReference((PetscObject)ISForDofs[i]);CHKERRQ(ierr);
    pcbddc->ISForDofs[i]=ISForDofs[i];
  }
  pcbddc->n_ISForDofs=n_is;
  PetscFunctionReturn(0);
}
534 
535 #undef __FUNCT__
536 #define __FUNCT__ "PCBDDCSetDofsSplitting"
537 /*@
538  PCBDDCSetDofsSplitting - Set index sets defining fields of local mat.
539 
540    Not collective
541 
542    Input Parameters:
543 +  pc - the preconditioning context
544 -  n - number of index sets defining the fields
545 -  IS[] - array of IS describing the fields
546 
547    Level: intermediate
548 
549    Notes:
550 
551 .seealso: PCBDDC
552 @*/
PetscErrorCode PCBDDCSetDofsSplitting(PC pc,PetscInt n_is, IS ISForDofs[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* Dispatch to the type-specific implementation; silently a no-op if pc is not PCBDDC */
  ierr = PetscTryMethod(pc,"PCBDDCSetDofsSplitting_C",(PC,PetscInt,IS[]),(pc,n_is,ISForDofs));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
562 /* -------------------------------------------------------------------------- */
563 #undef __FUNCT__
564 #define __FUNCT__ "PCPreSolve_BDDC"
565 /* -------------------------------------------------------------------------- */
566 /*
567    PCPreSolve_BDDC - Changes the right hand side and (if necessary) the initial
568                      guess if a transformation of basis approach has been selected.
569 
570    Input Parameter:
+  pc - the preconditioner context
572 
573    Application Interface Routine: PCPreSolve()
574 
575    Notes:
576    The interface routine PCPreSolve() is not usually called directly by
577    the user, but instead is called by KSPSolve().
578 */
static PetscErrorCode PCPreSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
{
  PetscErrorCode ierr;
  PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
  PC_IS          *pcis = (PC_IS*)(pc->data);
  Mat_IS         *matis = (Mat_IS*)pc->pmat->data;
  Mat            temp_mat;
  IS             dirIS;
  PetscInt       dirsize,i,*is_indices;
  PetscScalar    *array_x,*array_diagonal;
  Vec            used_vec;
  PetscBool      guess_nonzero;

  PetscFunctionBegin;
  /* Creates parallel work vectors used in presolve. */
  if (!pcbddc->original_rhs) {
    ierr = VecDuplicate(pcis->vec1_global,&pcbddc->original_rhs);CHKERRQ(ierr);
  }
  if (!pcbddc->temp_solution) {
    ierr = VecDuplicate(pcis->vec1_global,&pcbddc->temp_solution);CHKERRQ(ierr);
  }
  /* used_vec holds a reference either to the caller's solution vector or,
     when no solution vector is supplied, to the zeroed temp_solution */
  if (x) {
    ierr = PetscObjectReference((PetscObject)x);CHKERRQ(ierr);
    used_vec = x;
  } else {
    ierr = PetscObjectReference((PetscObject)pcbddc->temp_solution);CHKERRQ(ierr);
    used_vec = pcbddc->temp_solution;
    ierr = VecSet(used_vec,0.0);CHKERRQ(ierr);
  }
  /* hack into ksp data structure PCPreSolve comes earlier in src/ksp/ksp/interface/itfunc.c */
  if (ksp) {
    /* a zero initial guess means the incoming x content must be discarded */
    ierr = KSPGetInitialGuessNonzero(ksp,&guess_nonzero);CHKERRQ(ierr);
    if ( !guess_nonzero ) {
      ierr = VecSet(used_vec,0.0);CHKERRQ(ierr);
    }
  }

  if (rhs) { /* TODO: wiser handling of rhs removal, which is only needed in case of zeroed rows */
    /* store the original rhs */
    ierr = VecCopy(rhs,pcbddc->original_rhs);CHKERRQ(ierr);

    /* Take into account zeroed rows -> change rhs and store solution removed */
    /* vec1_global = rhs ./ diag(pmat); on Dirichlet rows zeroed with a placed
       diagonal this recovers the prescribed boundary value */
    ierr = MatGetDiagonal(pc->pmat,pcis->vec1_global);CHKERRQ(ierr);
    ierr = VecPointwiseDivide(pcis->vec1_global,rhs,pcis->vec1_global);CHKERRQ(ierr);
    /* bring both the scaled rhs and the current solution into local (N) numbering */
    ierr = VecScatterBegin(matis->ctx,pcis->vec1_global,pcis->vec2_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(matis->ctx,pcis->vec1_global,pcis->vec2_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterBegin(matis->ctx,used_vec,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(matis->ctx,used_vec,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = PCBDDCGetDirichletBoundaries(pc,&dirIS);CHKERRQ(ierr);
    if (dirIS) {
      /* overwrite the solution on Dirichlet dofs with the recovered boundary values */
      ierr = ISGetSize(dirIS,&dirsize);CHKERRQ(ierr);
      ierr = VecGetArray(pcis->vec1_N,&array_x);CHKERRQ(ierr);
      ierr = VecGetArray(pcis->vec2_N,&array_diagonal);CHKERRQ(ierr);
      ierr = ISGetIndices(dirIS,(const PetscInt**)&is_indices);CHKERRQ(ierr);
      for (i=0; i<dirsize; i++) array_x[is_indices[i]] = array_diagonal[is_indices[i]];
      ierr = ISRestoreIndices(dirIS,(const PetscInt**)&is_indices);CHKERRQ(ierr);
      ierr = VecRestoreArray(pcis->vec2_N,&array_diagonal);CHKERRQ(ierr);
      ierr = VecRestoreArray(pcis->vec1_N,&array_x);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(matis->ctx,pcis->vec1_N,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);

    /* remove the computed solution from the rhs */
    /* rhs -= pmat * used_vec, done via two sign flips to avoid a work vector */
    ierr = VecScale(used_vec,-1.0);CHKERRQ(ierr);
    ierr = MatMultAdd(pc->pmat,used_vec,rhs,rhs);CHKERRQ(ierr);
    ierr = VecScale(used_vec,-1.0);CHKERRQ(ierr);
  }

  /* store partially computed solution and set initial guess */
  if (x) {
    ierr = VecCopy(used_vec,pcbddc->temp_solution);CHKERRQ(ierr);
    ierr = VecSet(used_vec,0.0);CHKERRQ(ierr);
    if (pcbddc->use_exact_dirichlet && !pcbddc->coarse_psi_B) {
      /* pre-solve the interior (Dirichlet) problem exactly and use it as initial guess */
      ierr = VecScatterBegin(pcis->global_to_D,rhs,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd  (pcis->global_to_D,rhs,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = KSPSolve(pcbddc->ksp_D,pcis->vec1_D,pcis->vec2_D);CHKERRQ(ierr);
      ierr = VecScatterBegin(pcis->global_to_D,pcis->vec2_D,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
      ierr = VecScatterEnd  (pcis->global_to_D,pcis->vec2_D,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
      if (ksp) {
        ierr = KSPSetInitialGuessNonzero(ksp,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }

  if (pcbddc->use_change_of_basis) {
    /* swap pointers for local matrices; PCPostSolve_BDDC swaps them back */
    temp_mat = matis->A;
    matis->A = pcbddc->local_mat;
    pcbddc->local_mat = temp_mat;
  }
  if (pcbddc->use_change_of_basis && rhs) {
    /* Get local rhs and apply transformation of basis */
    ierr = VecScatterBegin(pcis->global_to_B,rhs,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd  (pcis->global_to_B,rhs,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* from original basis to modified basis */
    ierr = MatMultTranspose(pcbddc->ChangeOfBasisMatrix,pcis->vec1_B,pcis->vec2_B);CHKERRQ(ierr);
    /* put back modified values into the global vec using INSERT_VALUES copy mode */
    ierr = VecScatterBegin(pcis->global_to_B,pcis->vec2_B,rhs,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec2_B,rhs,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  }
  if (ksp && pcbddc->NullSpace) {
    /* project initial guess and rhs out of the null space of the operator */
    ierr = MatNullSpaceRemove(pcbddc->NullSpace,used_vec);CHKERRQ(ierr);
    ierr = MatNullSpaceRemove(pcbddc->NullSpace,rhs);CHKERRQ(ierr);
  }
  /* drop the extra reference taken at the top of the function */
  ierr = VecDestroy(&used_vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
686 /* -------------------------------------------------------------------------- */
687 #undef __FUNCT__
688 #define __FUNCT__ "PCPostSolve_BDDC"
689 /* -------------------------------------------------------------------------- */
690 /*
691    PCPostSolve_BDDC - Changes the computed solution if a transformation of basis
692                      approach has been selected. Also, restores rhs to its original state.
693 
694    Input Parameter:
+  pc - the preconditioner context
696 
697    Application Interface Routine: PCPostSolve()
698 
699    Notes:
700    The interface routine PCPostSolve() is not usually called directly by
701    the user, but instead is called by KSPSolve().
702 */
static PetscErrorCode PCPostSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
{
  PetscErrorCode ierr;
  PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
  PC_IS          *pcis   = (PC_IS*)(pc->data);
  Mat_IS         *matis = (Mat_IS*)pc->pmat->data;
  Mat            temp_mat;

  PetscFunctionBegin;
  if (pcbddc->use_change_of_basis) {
    /* swap pointers for local matrices: undoes the swap done in PCPreSolve_BDDC */
    temp_mat = matis->A;
    matis->A = pcbddc->local_mat;
    pcbddc->local_mat = temp_mat;
  }
  if (pcbddc->use_change_of_basis && x) {
    /* Get Local boundary and apply transformation of basis to solution vector */
    ierr = VecScatterBegin(pcis->global_to_B,x,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd  (pcis->global_to_B,x,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* from modified basis to original basis (MatMult here, MatMultTranspose in presolve) */
    ierr = MatMult(pcbddc->ChangeOfBasisMatrix,pcis->vec1_B,pcis->vec2_B);CHKERRQ(ierr);
    /* put back modified values into the global vec using INSERT_VALUES copy mode */
    ierr = VecScatterBegin(pcis->global_to_B,pcis->vec2_B,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec2_B,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  }
  /* add solution removed in presolve (temp_solution was filled by PCPreSolve_BDDC) */
  if (x) {
    ierr = VecAXPY(x,1.0,pcbddc->temp_solution);CHKERRQ(ierr);
  }
  /* restore rhs to its original state */
  if (rhs) {
    ierr = VecCopy(pcbddc->original_rhs,rhs);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
738 /* -------------------------------------------------------------------------- */
739 #undef __FUNCT__
740 #define __FUNCT__ "PCSetUp_BDDC"
741 /* -------------------------------------------------------------------------- */
742 /*
743    PCSetUp_BDDC - Prepares for the use of the BDDC preconditioner
744                   by setting data structures and options.
745 
746    Input Parameter:
747 +  pc - the preconditioner context
748 
749    Application Interface Routine: PCSetUp()
750 
751    Notes:
752    The interface routine PCSetUp() is not usually called directly by
753    the user, but instead is called by PCApply() if necessary.
754 */
755 PetscErrorCode PCSetUp_BDDC(PC pc)
756 {
757   PetscErrorCode ierr;
758   PC_BDDC*       pcbddc = (PC_BDDC*)pc->data;
759   MatStructure   flag;
760   PetscBool      computeis,computetopography,computesolvers;
761 
762   PetscFunctionBegin;
763   /* the following lines of code should be replaced by a better logic between PCIS, PCNN, PCBDDC and other nonoverlapping preconditioners */
764   /* For BDDC we need to define a local "Neumann" problem different to that defined in PCISSetup
765      So, we set to pcnone the Neumann problem of pcis in order to avoid unneeded computation
766      Also, we decide to directly build the (same) Dirichlet problem */
767   ierr = PetscOptionsSetValue("-is_localN_pc_type","none");CHKERRQ(ierr);
768   ierr = PetscOptionsSetValue("-is_localD_pc_type","none");CHKERRQ(ierr);
769   /* Get stdout for dbg */
770   if (pcbddc->dbg_flag && !pcbddc->dbg_viewer) {
771     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&pcbddc->dbg_viewer);CHKERRQ(ierr);
772     ierr = PetscViewerASCIISynchronizedAllow(pcbddc->dbg_viewer,PETSC_TRUE);CHKERRQ(ierr);
773   }
774   /* first attempt to split work */
775   if (pc->setupcalled) {
776     computeis = PETSC_FALSE;
777     ierr = PCGetOperators(pc,NULL,NULL,&flag);CHKERRQ(ierr);
778     if (flag == SAME_PRECONDITIONER) {
779       computetopography = PETSC_FALSE;
780       computesolvers = PETSC_FALSE;
781     } else if (flag == SAME_NONZERO_PATTERN) {
782       computetopography = PETSC_FALSE;
783       computesolvers = PETSC_TRUE;
784     } else { /* DIFFERENT_NONZERO_PATTERN */
785       computetopography = PETSC_TRUE;
786       computesolvers = PETSC_TRUE;
787     }
788   } else {
789     computeis = PETSC_TRUE;
790     computetopography = PETSC_TRUE;
791     computesolvers = PETSC_TRUE;
792   }
793   /* Set up all the "iterative substructuring" common block */
794   if (computeis) {
795     ierr = PCISSetUp(pc);CHKERRQ(ierr);
796   }
797   /* Analyze interface and set up local constraint and change of basis matrices */
798   if (computetopography) {
799     /* reset data */
800     ierr = PCBDDCResetTopography(pc);CHKERRQ(ierr);
801     ierr = PCBDDCAnalyzeInterface(pc);CHKERRQ(ierr);
802     ierr = PCBDDCConstraintsSetUp(pc);CHKERRQ(ierr);
803   }
804   if (computesolvers) {
805     /* reset data */
806     ierr = PCBDDCResetSolvers(pc);CHKERRQ(ierr);
807     ierr = PCBDDCScalingDestroy(pc);CHKERRQ(ierr);
808     /* Create coarse and local stuffs used for evaluating action of preconditioner */
809     ierr = PCBDDCCoarseSetUp(pc);CHKERRQ(ierr);
810     ierr = PCBDDCScalingSetUp(pc);CHKERRQ(ierr);
811   }
812   PetscFunctionReturn(0);
813 }
814 
815 /* -------------------------------------------------------------------------- */
816 /*
817    PCApply_BDDC - Applies the BDDC preconditioner to a vector.
818 
819    Input Parameters:
820 .  pc - the preconditioner context
821 .  r - input vector (global)
822 
823    Output Parameter:
824 .  z - output vector (global)
825 
826    Application Interface Routine: PCApply()
827  */
828 #undef __FUNCT__
829 #define __FUNCT__ "PCApply_BDDC"
830 PetscErrorCode PCApply_BDDC(PC pc,Vec r,Vec z)
831 {
832   PC_IS             *pcis = (PC_IS*)(pc->data);
833   PC_BDDC           *pcbddc = (PC_BDDC*)(pc->data);
834   PetscErrorCode    ierr;
835   const PetscScalar one = 1.0;
836   const PetscScalar m_one = -1.0;
837   const PetscScalar zero = 0.0;
838 
839 /* This code is similar to that provided in nn.c for PCNN
840    NN interface preconditioner changed to BDDC
841    Added support for M_3 preconditioner in the reference article (code is active if pcbddc->inexact_prec_type = PETSC_TRUE) */
842 
843   PetscFunctionBegin;
844   if (!pcbddc->use_exact_dirichlet || pcbddc->coarse_psi_B) {
845     /* First Dirichlet solve */
846     ierr = VecScatterBegin(pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
847     ierr = VecScatterEnd  (pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
848     ierr = KSPSolve(pcbddc->ksp_D,pcis->vec1_D,pcis->vec2_D);CHKERRQ(ierr);
849     /*
850       Assembling right hand side for BDDC operator
851       - pcis->vec1_D for the Dirichlet part (if needed, i.e. prec_flag=PETSC_TRUE)
852       - pcis->vec1_B the interface part of the global vector z
853     */
854     ierr = VecScale(pcis->vec2_D,m_one);CHKERRQ(ierr);
855     ierr = MatMult(pcis->A_BI,pcis->vec2_D,pcis->vec1_B);CHKERRQ(ierr);
856     if (pcbddc->inexact_prec_type) { ierr = MatMultAdd(pcis->A_II,pcis->vec2_D,pcis->vec1_D,pcis->vec1_D);CHKERRQ(ierr); }
857     ierr = VecScale(pcis->vec2_D,m_one);CHKERRQ(ierr);
858     ierr = VecCopy(r,z);CHKERRQ(ierr);
859     ierr = VecScatterBegin(pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
860     ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
861     ierr = PCBDDCScalingRestriction(pc,z,pcis->vec1_B);CHKERRQ(ierr);
862   } else {
863     ierr = VecSet(pcis->vec1_D,zero);CHKERRQ(ierr);
864     ierr = VecSet(pcis->vec2_D,zero);CHKERRQ(ierr);
865     ierr = PCBDDCScalingRestriction(pc,r,pcis->vec1_B);CHKERRQ(ierr);
866   }
867 
868   /* Apply interface preconditioner
869      input/output vecs: pcis->vec1_B and pcis->vec1_D */
870   ierr = PCBDDCApplyInterfacePreconditioner(pc);CHKERRQ(ierr);
871 
872   /* Apply transpose of partition of unity operator */
873   ierr = PCBDDCScalingExtension(pc,pcis->vec1_B,z);CHKERRQ(ierr);
874 
875   /* Second Dirichlet solve and assembling of output */
876   ierr = VecScatterBegin(pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
877   ierr = VecScatterEnd  (pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
878   ierr = MatMult(pcis->A_IB,pcis->vec1_B,pcis->vec3_D);CHKERRQ(ierr);
879   if (pcbddc->inexact_prec_type) { ierr = MatMultAdd(pcis->A_II,pcis->vec1_D,pcis->vec3_D,pcis->vec3_D);CHKERRQ(ierr); }
880   ierr = KSPSolve(pcbddc->ksp_D,pcis->vec3_D,pcbddc->vec4_D);CHKERRQ(ierr);
881   ierr = VecScale(pcbddc->vec4_D,m_one);CHKERRQ(ierr);
882   if (pcbddc->inexact_prec_type) { ierr = VecAXPY (pcbddc->vec4_D,one,pcis->vec1_D);CHKERRQ(ierr); }
883   ierr = VecAXPY (pcis->vec2_D,one,pcbddc->vec4_D);CHKERRQ(ierr);
884   ierr = VecScatterBegin(pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
885   ierr = VecScatterEnd  (pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
886   PetscFunctionReturn(0);
887 }
888 /* -------------------------------------------------------------------------- */
889 
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BDDC"
/*
   PCDestroy_BDDC - Destroys the private PC_BDDC context and everything it owns.
*/
PetscErrorCode PCDestroy_BDDC(PC pc)
{
  PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* free data created by PCIS */
  ierr = PCISDestroy(pc);CHKERRQ(ierr);
  /* free BDDC custom data  */
  ierr = PCBDDCResetCustomization(pc);CHKERRQ(ierr);
  /* destroy objects related to topography */
  ierr = PCBDDCResetTopography(pc);CHKERRQ(ierr);
  /* free allocated graph structure */
  ierr = PetscFree(pcbddc->mat_graph);CHKERRQ(ierr);
  /* free data for scaling operator */
  ierr = PCBDDCScalingDestroy(pc);CHKERRQ(ierr);
  /* free solvers stuff */
  ierr = PCBDDCResetSolvers(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&pcbddc->ksp_D);CHKERRQ(ierr);
  ierr = KSPDestroy(&pcbddc->ksp_R);CHKERRQ(ierr);
  ierr = MatDestroy(&pcbddc->local_mat);CHKERRQ(ierr);
  /* free global vectors needed in presolve */
  ierr = VecDestroy(&pcbddc->temp_solution);CHKERRQ(ierr);
  ierr = VecDestroy(&pcbddc->original_rhs);CHKERRQ(ierr);
  /* remove functions: each composition made in PCCreate_BDDC is undone by composing NULL */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetPrimalVerticesLocalIS_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseningRatio_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetMaxLevels_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNullSpace_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundaries_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundaries_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundaries_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundaries_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseProblemType_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplitting_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCCreateFETIDPOperators_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetRHS_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetSolution_C",NULL);CHKERRQ(ierr);
  /* Free the private data structure */
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
935 /* -------------------------------------------------------------------------- */
936 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCMatFETIDPGetRHS_BDDC"
/*
   PCBDDCMatFETIDPGetRHS_BDDC - BDDC-specific implementation of PCBDDCMatFETIDPGetRHS.
   Computes the rhs of the FETIDP flux system from the rhs of the original system.

   Note: standard_rhs is used as global work space during the computation and is
   restored from pcbddc->original_rhs (saved during PCPreSolve_BDDC) before returning.
*/
static PetscErrorCode PCBDDCMatFETIDPGetRHS_BDDC(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
{
  FETIDPMat_ctx  mat_ctx;
  PC_IS*         pcis;
  PC_BDDC*       pcbddc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr);
  /* mat_ctx->pc is the BDDC preconditioner the FETIDP operators were built from */
  pcis = (PC_IS*)mat_ctx->pc->data;
  pcbddc = (PC_BDDC*)mat_ctx->pc->data;

  /* change of basis for physical rhs if needed
     It also changes the rhs in case of dirichlet boundaries */
  ierr = PCPreSolve_BDDC(mat_ctx->pc,NULL,standard_rhs,NULL);CHKERRQ(ierr);
  /* store vectors for computation of fetidp final solution */
  ierr = VecScatterBegin(pcis->global_to_D,standard_rhs,mat_ctx->temp_solution_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(pcis->global_to_D,standard_rhs,mat_ctx->temp_solution_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* scale rhs since it should be unassembled */
  /* TODO use counter scaling? (also below) */
  ierr = VecScatterBegin(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* Apply partition of unity */
  ierr = VecPointwiseMult(mat_ctx->temp_solution_B,pcis->D,mat_ctx->temp_solution_B);CHKERRQ(ierr);
  /* ierr = PCBDDCScalingRestriction(mat_ctx->pc,standard_rhs,mat_ctx->temp_solution_B);CHKERRQ(ierr); */
  if (!pcbddc->inexact_prec_type) {
    /* compute partially subassembled Schur complement right-hand side */
    ierr = KSPSolve(pcbddc->ksp_D,mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr);
    ierr = MatMult(pcis->A_BI,pcis->vec1_D,pcis->vec1_B);CHKERRQ(ierr);
    ierr = VecAXPY(mat_ctx->temp_solution_B,-1.0,pcis->vec1_B);CHKERRQ(ierr);
    /* assemble the Schur rhs globally, then scatter it back and rescale */
    ierr = VecSet(standard_rhs,0.0);CHKERRQ(ierr);
    ierr = VecScatterBegin(pcis->global_to_B,mat_ctx->temp_solution_B,standard_rhs,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(pcis->global_to_B,mat_ctx->temp_solution_B,standard_rhs,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    /* ierr = PCBDDCScalingRestriction(mat_ctx->pc,standard_rhs,mat_ctx->temp_solution_B);CHKERRQ(ierr); */
    ierr = VecScatterBegin(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecPointwiseMult(mat_ctx->temp_solution_B,pcis->D,mat_ctx->temp_solution_B);CHKERRQ(ierr);
  }
  /* BDDC rhs */
  ierr = VecCopy(mat_ctx->temp_solution_B,pcis->vec1_B);CHKERRQ(ierr);
  if (pcbddc->inexact_prec_type) {
    ierr = VecCopy(mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr);
  }
  /* apply BDDC */
  ierr = PCBDDCApplyInterfacePreconditioner(mat_ctx->pc);CHKERRQ(ierr);
  /* Application of B_delta and assembling of rhs for fetidp fluxes */
  ierr = VecSet(fetidp_flux_rhs,0.0);CHKERRQ(ierr);
  ierr = MatMult(mat_ctx->B_delta,pcis->vec1_B,mat_ctx->lambda_local);CHKERRQ(ierr);
  ierr = VecScatterBegin(mat_ctx->l2g_lambda,mat_ctx->lambda_local,fetidp_flux_rhs,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd  (mat_ctx->l2g_lambda,mat_ctx->lambda_local,fetidp_flux_rhs,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* restore original rhs */
  ierr = VecCopy(pcbddc->original_rhs,standard_rhs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
993 
994 #undef __FUNCT__
995 #define __FUNCT__ "PCBDDCMatFETIDPGetRHS"
996 /*@
997  PCBDDCMatFETIDPGetRHS - Get rhs for FETIDP linear system.
998 
999    Collective
1000 
1001    Input Parameters:
1002 +  fetidp_mat   - the FETIDP mat obtained by a call to PCBDDCCreateFETIDPOperators
1003 +  standard_rhs - the rhs of your linear system
1004 
1005    Output Parameters:
1006 +  fetidp_flux_rhs   - the rhs of the FETIDP linear system
1007 
1008    Level: developer
1009 
1010    Notes:
1011 
1012 .seealso: PCBDDC
1013 @*/
1014 PetscErrorCode PCBDDCMatFETIDPGetRHS(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
1015 {
1016   FETIDPMat_ctx  mat_ctx;
1017   PetscErrorCode ierr;
1018 
1019   PetscFunctionBegin;
1020   ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr);
1021   ierr = PetscTryMethod(mat_ctx->pc,"PCBDDCMatFETIDPGetRHS_C",(Mat,Vec,Vec),(fetidp_mat,standard_rhs,fetidp_flux_rhs));CHKERRQ(ierr);
1022   PetscFunctionReturn(0);
1023 }
1024 /* -------------------------------------------------------------------------- */
1025 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCMatFETIDPGetSolution_BDDC"
/*
   PCBDDCMatFETIDPGetSolution_BDDC - BDDC-specific implementation of PCBDDCMatFETIDPGetSolution.
   Recovers the solution of the original system from the computed FETIDP fluxes,
   using the work vectors (temp_solution_B/temp_solution_D) saved by PCBDDCMatFETIDPGetRHS_BDDC.
*/
static PetscErrorCode PCBDDCMatFETIDPGetSolution_BDDC(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
{
  FETIDPMat_ctx  mat_ctx;
  PC_IS*         pcis;
  PC_BDDC*       pcbddc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr);
  /* mat_ctx->pc is the BDDC preconditioner the FETIDP operators were built from */
  pcis = (PC_IS*)mat_ctx->pc->data;
  pcbddc = (PC_BDDC*)mat_ctx->pc->data;

  /* apply B_delta^T */
  ierr = VecScatterBegin(mat_ctx->l2g_lambda,fetidp_flux_sol,mat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (mat_ctx->l2g_lambda,fetidp_flux_sol,mat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = MatMultTranspose(mat_ctx->B_delta,mat_ctx->lambda_local,pcis->vec1_B);CHKERRQ(ierr);
  /* compute rhs for BDDC application: temp_solution_B - B_delta^T lambda */
  ierr = VecAYPX(pcis->vec1_B,-1.0,mat_ctx->temp_solution_B);CHKERRQ(ierr);
  if (pcbddc->inexact_prec_type) {
    ierr = VecCopy(mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr);
  }
  /* apply BDDC */
  ierr = PCBDDCApplyInterfacePreconditioner(mat_ctx->pc);CHKERRQ(ierr);
  /* put values into standard global vector */
  ierr = VecScatterBegin(pcis->global_to_B,pcis->vec1_B,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec1_B,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  if (!pcbddc->inexact_prec_type) {
    /* compute values into the interior if solved for the partially subassembled Schur complement */
    ierr = MatMult(pcis->A_IB,pcis->vec1_B,pcis->vec1_D);CHKERRQ(ierr);
    ierr = VecAXPY(mat_ctx->temp_solution_D,-1.0,pcis->vec1_D);CHKERRQ(ierr);
    ierr = KSPSolve(pcbddc->ksp_D,mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr);
  }
  ierr = VecScatterBegin(pcis->global_to_D,pcis->vec1_D,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->global_to_D,pcis->vec1_D,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  /* final change of basis if needed
     It also sums the dirichlet part removed during RHS assembling */
  ierr = PCPostSolve_BDDC(mat_ctx->pc,NULL,NULL,standard_sol);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1067 
1068 #undef __FUNCT__
1069 #define __FUNCT__ "PCBDDCMatFETIDPGetSolution"
1070 /*@
1071  PCBDDCMatFETIDPGetSolution - Get Solution for FETIDP linear system.
1072 
1073    Collective
1074 
1075    Input Parameters:
1076 +  fetidp_mat        - the FETIDP mat obtained by a call to PCBDDCCreateFETIDPOperators
1077 +  fetidp_flux_sol - the solution of the FETIDP linear system
1078 
1079    Output Parameters:
1080 +  standard_sol      - the solution on the global domain
1081 
1082    Level: developer
1083 
1084    Notes:
1085 
1086 .seealso: PCBDDC
1087 @*/
1088 PetscErrorCode PCBDDCMatFETIDPGetSolution(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
1089 {
1090   FETIDPMat_ctx  mat_ctx;
1091   PetscErrorCode ierr;
1092 
1093   PetscFunctionBegin;
1094   ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr);
1095   ierr = PetscTryMethod(mat_ctx->pc,"PCBDDCMatFETIDPGetSolution_C",(Mat,Vec,Vec),(fetidp_mat,fetidp_flux_sol,standard_sol));CHKERRQ(ierr);
1096   PetscFunctionReturn(0);
1097 }
1098 /* -------------------------------------------------------------------------- */
1099 
1100 extern PetscErrorCode FETIDPMatMult(Mat,Vec,Vec);
1101 extern PetscErrorCode PCBDDCDestroyFETIDPMat(Mat);
1102 extern PetscErrorCode FETIDPPCApply(PC,Vec,Vec);
1103 extern PetscErrorCode PCBDDCDestroyFETIDPPC(PC);
1104 
1105 #undef __FUNCT__
1106 #define __FUNCT__ "PCBDDCCreateFETIDPOperators_BDDC"
1107 static PetscErrorCode PCBDDCCreateFETIDPOperators_BDDC(PC pc, Mat *fetidp_mat, PC *fetidp_pc)
1108 {
1109 
1110   FETIDPMat_ctx  fetidpmat_ctx;
1111   Mat            newmat;
1112   FETIDPPC_ctx   fetidppc_ctx;
1113   PC             newpc;
1114   MPI_Comm       comm;
1115   PetscErrorCode ierr;
1116 
1117   PetscFunctionBegin;
1118   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
1119   /* FETIDP linear matrix */
1120   ierr = PCBDDCCreateFETIDPMatContext(pc,&fetidpmat_ctx);CHKERRQ(ierr);
1121   ierr = PCBDDCSetupFETIDPMatContext(fetidpmat_ctx);CHKERRQ(ierr);
1122   ierr = MatCreateShell(comm,PETSC_DECIDE,PETSC_DECIDE,fetidpmat_ctx->n_lambda,fetidpmat_ctx->n_lambda,fetidpmat_ctx,&newmat);CHKERRQ(ierr);
1123   ierr = MatShellSetOperation(newmat,MATOP_MULT,(void (*)(void))FETIDPMatMult);CHKERRQ(ierr);
1124   ierr = MatShellSetOperation(newmat,MATOP_DESTROY,(void (*)(void))PCBDDCDestroyFETIDPMat);CHKERRQ(ierr);
1125   ierr = MatSetUp(newmat);CHKERRQ(ierr);
1126   /* FETIDP preconditioner */
1127   ierr = PCBDDCCreateFETIDPPCContext(pc,&fetidppc_ctx);CHKERRQ(ierr);
1128   ierr = PCBDDCSetupFETIDPPCContext(newmat,fetidppc_ctx);CHKERRQ(ierr);
1129   ierr = PCCreate(comm,&newpc);CHKERRQ(ierr);
1130   ierr = PCSetType(newpc,PCSHELL);CHKERRQ(ierr);
1131   ierr = PCShellSetContext(newpc,fetidppc_ctx);CHKERRQ(ierr);
1132   ierr = PCShellSetApply(newpc,FETIDPPCApply);CHKERRQ(ierr);
1133   ierr = PCShellSetDestroy(newpc,PCBDDCDestroyFETIDPPC);CHKERRQ(ierr);
1134   ierr = PCSetOperators(newpc,newmat,newmat,SAME_PRECONDITIONER);CHKERRQ(ierr);
1135   ierr = PCSetUp(newpc);CHKERRQ(ierr);
1136   /* return pointers for objects created */
1137   *fetidp_mat=newmat;
1138   *fetidp_pc=newpc;
1139   PetscFunctionReturn(0);
1140 }
1141 
1142 #undef __FUNCT__
1143 #define __FUNCT__ "PCBDDCCreateFETIDPOperators"
1144 /*@
1145  PCBDDCCreateFETIDPOperators - Create operators for FETIDP.
1146 
1147    Collective
1148 
1149    Input Parameters:
1150 +  pc - the BDDC preconditioning context (setup must be already called)
1151 
1152    Level: developer
1153 
1154    Notes:
1155 
1156 .seealso: PCBDDC
1157 @*/
1158 PetscErrorCode PCBDDCCreateFETIDPOperators(PC pc, Mat *fetidp_mat, PC *fetidp_pc)
1159 {
1160   PetscErrorCode ierr;
1161 
1162   PetscFunctionBegin;
1163   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1164   if (pc->setupcalled) {
1165     ierr = PetscTryMethod(pc,"PCBDDCCreateFETIDPOperators_C",(PC,Mat*,PC*),(pc,fetidp_mat,fetidp_pc));CHKERRQ(ierr);
1166   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"You must call PCSetup_BDDC() first \n");
1167   PetscFunctionReturn(0);
1168 }
1169 /* -------------------------------------------------------------------------- */
1170 /*MC
1171    PCBDDC - Balancing Domain Decomposition by Constraints.
1172 
   Options Database Keys:
.    see PCSetFromOptions_BDDC, or run with -help, for the currently supported BDDC options
1175 
1176    Level: intermediate
1177 
1178    Notes: The matrix used with this preconditioner must be of type MATIS
1179 
1180           Unlike more 'conventional' interface preconditioners, this iterates over ALL the
1181           degrees of freedom, NOT just those on the interface (this allows the use of approximate solvers
1182           on the subdomains).
1183 
          Options for the coarse problem solver can be set at the command line (run with -help to see the relevant options prefix)
          Options for the local Dirichlet subproblem solver can be set using the -is_localD_ prefix
          Options for the local Neumann subproblem solver can be set using the -is_localN_ prefix
1187 
1188    Contributed by Stefano Zampini
1189 
1190 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,  MATIS
1191 M*/
1192 
#undef __FUNCT__
#define __FUNCT__ "PCCreate_BDDC"
/*
   PCCreate_BDDC - Creates the PC_BDDC context, sets default values for all fields,
   fills the PC operations table and composes the BDDC-specific query functions.
*/
PETSC_EXTERN PetscErrorCode PCCreate_BDDC(PC pc)
{
  PetscErrorCode      ierr;
  PC_BDDC             *pcbddc;

  PetscFunctionBegin;
  /* Creates the private data structure for this preconditioner and attach it to the PC object. */
  ierr      = PetscNewLog(pc,PC_BDDC,&pcbddc);CHKERRQ(ierr);
  pc->data  = (void*)pcbddc;

  /* create PCIS data structure */
  ierr = PCISCreate(pc);CHKERRQ(ierr);

  /* BDDC specific: default values for all fields of the private context */
  pcbddc->user_primal_vertices       = 0;
  pcbddc->NullSpace                  = 0;
  pcbddc->temp_solution              = 0;
  pcbddc->original_rhs               = 0;
  pcbddc->local_mat                  = 0;
  pcbddc->ChangeOfBasisMatrix        = 0;
  pcbddc->use_change_of_basis        = PETSC_TRUE;
  pcbddc->use_change_on_faces        = PETSC_FALSE;
  pcbddc->coarse_vec                 = 0;
  pcbddc->coarse_rhs                 = 0;
  pcbddc->coarse_ksp                 = 0;
  pcbddc->coarse_phi_B               = 0;
  pcbddc->coarse_phi_D               = 0;
  pcbddc->coarse_psi_B               = 0;
  pcbddc->coarse_psi_D               = 0;
  pcbddc->vec1_P                     = 0;
  pcbddc->vec1_R                     = 0;
  pcbddc->vec2_R                     = 0;
  pcbddc->local_auxmat1              = 0;
  pcbddc->local_auxmat2              = 0;
  pcbddc->R_to_B                     = 0;
  pcbddc->R_to_D                     = 0;
  pcbddc->ksp_D                      = 0;
  pcbddc->ksp_R                      = 0;
  pcbddc->local_primal_indices       = 0;
  pcbddc->inexact_prec_type          = PETSC_FALSE;
  pcbddc->NeumannBoundaries          = 0;
  pcbddc->ISForDofs                  = 0;
  pcbddc->ConstraintMatrix           = 0;
  pcbddc->use_nnsp_true              = PETSC_FALSE;
  pcbddc->local_primal_sizes         = 0;
  pcbddc->local_primal_displacements = 0;
  pcbddc->coarse_loc_to_glob         = 0;
  pcbddc->dbg_flag                   = 0;
  pcbddc->coarsening_ratio           = 8;
  pcbddc->use_exact_dirichlet        = PETSC_TRUE;
  pcbddc->current_level              = 0;
  pcbddc->max_levels                 = 1;
  pcbddc->replicated_local_primal_indices = 0;
  pcbddc->replicated_local_primal_values  = 0;

  /* create local graph structure */
  ierr = PCBDDCGraphCreate(&pcbddc->mat_graph);CHKERRQ(ierr);

  /* scaling */
  pcbddc->use_deluxe_scaling         = PETSC_FALSE;
  pcbddc->work_scaling               = 0;

  /* function pointers: operations not supported by BDDC are left as 0 */
  pc->ops->apply               = PCApply_BDDC;
  pc->ops->applytranspose      = 0;
  pc->ops->setup               = PCSetUp_BDDC;
  pc->ops->destroy             = PCDestroy_BDDC;
  pc->ops->setfromoptions      = PCSetFromOptions_BDDC;
  pc->ops->view                = 0;
  pc->ops->applyrichardson     = 0;
  pc->ops->applysymmetricleft  = 0;
  pc->ops->applysymmetricright = 0;
  pc->ops->presolve            = PCPreSolve_BDDC;
  pc->ops->postsolve           = PCPostSolve_BDDC;

  /* composing function: these are removed (composed with NULL) in PCDestroy_BDDC */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetPrimalVerticesLocalIS_C",PCBDDCSetPrimalVerticesLocalIS_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseningRatio_C",PCBDDCSetCoarseningRatio_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetMaxLevels_C",PCBDDCSetMaxLevels_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNullSpace_C",PCBDDCSetNullSpace_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundaries_C",PCBDDCSetDirichletBoundaries_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundaries_C",PCBDDCSetNeumannBoundaries_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundaries_C",PCBDDCGetDirichletBoundaries_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundaries_C",PCBDDCGetNeumannBoundaries_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseProblemType_C",PCBDDCSetCoarseProblemType_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplitting_C",PCBDDCSetDofsSplitting_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",PCBDDCSetLocalAdjacencyGraph_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCCreateFETIDPOperators_C",PCBDDCCreateFETIDPOperators_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetRHS_C",PCBDDCMatFETIDPGetRHS_BDDC);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetSolution_C",PCBDDCMatFETIDPGetSolution_BDDC);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1287 
1288 /* -------------------------------------------------------------------------- */
1289 /* All static functions from now on                                           */
1290 /* -------------------------------------------------------------------------- */
1291 
1292 #undef __FUNCT__
1293 #define __FUNCT__ "PCBDDCSetLevel"
1294 static PetscErrorCode PCBDDCSetLevel(PC pc,PetscInt level)
1295 {
1296   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
1297 
1298   PetscFunctionBegin;
1299   pcbddc->current_level=level;
1300   PetscFunctionReturn(0);
1301 }
1302 
1303 /* -------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCBDDCCoarseSetUp"
/*
   PCBDDCCoarseSetUp - Builds the local and coarse objects needed to apply the
   BDDC preconditioner. The call order below is significant (see the inline notes).
*/
static PetscErrorCode PCBDDCCoarseSetUp(PC pc)
{
  PC_BDDC*          pcbddc = (PC_BDDC*)pc->data;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* compute matrix after change of basis and extract local submatrices */
  ierr = PCBDDCSetUpLocalMatrices(pc);CHKERRQ(ierr);

  /* Change global null space passed in by the user if change of basis has been requested */
  if (pcbddc->NullSpace && pcbddc->use_change_of_basis) {
    ierr = PCBDDCNullSpaceAdaptGlobal(pc);CHKERRQ(ierr);
  }

  /* Allocate needed vectors */
  ierr = PCBDDCCreateWorkVectors(pc);CHKERRQ(ierr);

  /* setup local scatters R_to_B and (optionally) R_to_D : PCBDDCCreateWorkVectors should be called first! */
  ierr = PCBDDCSetUpLocalScatters(pc);CHKERRQ(ierr);

  /* setup local solvers ksp_D and ksp_R */
  ierr = PCBDDCSetUpLocalSolvers(pc);CHKERRQ(ierr);

  /* setup local correction and local part of coarse basis */
  ierr = PCBDDCSetUpCoarseLocal(pc);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
1334 
1335 /* -------------------------------------------------------------------------- */
1336 
1337 /* BDDC requires metis 5.0.1 for multilevel */
1338 #if defined(PETSC_HAVE_METIS)
1339 #include "metis.h"
1340 #define MetisInt    idx_t
1341 #define MetisScalar real_t
1342 #endif
1343 
1344 #undef __FUNCT__
1345 #define __FUNCT__ "PCBDDCSetUpCoarseEnvironment"
1346 PetscErrorCode PCBDDCSetUpCoarseEnvironment(PC pc,PetscScalar* coarse_submat_vals)
1347 {
1348 
1349 
1350   Mat_IS    *matis    = (Mat_IS*)pc->pmat->data;
1351   PC_BDDC   *pcbddc   = (PC_BDDC*)pc->data;
1352   PC_IS     *pcis     = (PC_IS*)pc->data;
1353   MPI_Comm  prec_comm;
1354   MPI_Comm  coarse_comm;
1355 
1356   MatNullSpace CoarseNullSpace;
1357 
1358   /* common to all choiches */
1359   PetscScalar *temp_coarse_mat_vals;
1360   PetscScalar *ins_coarse_mat_vals;
1361   PetscInt    *ins_local_primal_indices;
1362   PetscMPIInt *localsizes2,*localdispl2;
1363   PetscMPIInt size_prec_comm;
1364   PetscMPIInt rank_prec_comm;
1365   PetscMPIInt active_rank=MPI_PROC_NULL;
1366   PetscMPIInt master_proc=0;
1367   PetscInt    ins_local_primal_size;
1368   /* specific to MULTILEVEL_BDDC */
1369   PetscMPIInt *ranks_recv=0;
1370   PetscMPIInt count_recv=0;
1371   PetscMPIInt rank_coarse_proc_send_to=-1;
1372   PetscMPIInt coarse_color = MPI_UNDEFINED;
1373   ISLocalToGlobalMapping coarse_ISLG;
1374   /* some other variables */
1375   PetscErrorCode ierr;
1376   MatType coarse_mat_type;
1377   PCType  coarse_pc_type;
1378   KSPType coarse_ksp_type;
1379   PC pc_temp;
1380   PetscInt i,j,k;
1381   PetscInt max_it_coarse_ksp=1;  /* don't increase this value */
1382   /* verbose output viewer */
1383   PetscViewer viewer=pcbddc->dbg_viewer;
1384   PetscInt    dbg_flag=pcbddc->dbg_flag;
1385 
1386   PetscInt      offset,offset2;
1387   PetscMPIInt   im_active,active_procs;
1388   PetscInt      *dnz,*onz;
1389 
1390   PetscBool     setsym,issym=PETSC_FALSE;
1391 
1392   PetscFunctionBegin;
1393   ierr = PetscObjectGetComm((PetscObject)pc,&prec_comm);CHKERRQ(ierr);
1394   ins_local_primal_indices = 0;
1395   ins_coarse_mat_vals      = 0;
1396   localsizes2              = 0;
1397   localdispl2              = 0;
1398   temp_coarse_mat_vals     = 0;
1399   coarse_ISLG              = 0;
1400 
1401   ierr = MPI_Comm_size(prec_comm,&size_prec_comm);CHKERRQ(ierr);
1402   ierr = MPI_Comm_rank(prec_comm,&rank_prec_comm);CHKERRQ(ierr);
1403   ierr = MatIsSymmetricKnown(pc->pmat,&setsym,&issym);CHKERRQ(ierr);
1404 
1405   /* Assign global numbering to coarse dofs */
1406   {
1407     PetscInt     *auxlocal_primal,*aux_idx;
1408     PetscMPIInt  mpi_local_primal_size;
1409     PetscScalar  coarsesum,*array;
1410 
1411     mpi_local_primal_size = (PetscMPIInt)pcbddc->local_primal_size;
1412 
1413     /* Construct needed data structures for message passing */
1414     j = 0;
1415     if (rank_prec_comm == 0 || pcbddc->coarse_problem_type == REPLICATED_BDDC || pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
1416       j = size_prec_comm;
1417     }
1418     ierr = PetscMalloc(j*sizeof(PetscMPIInt),&pcbddc->local_primal_sizes);CHKERRQ(ierr);
1419     ierr = PetscMalloc(j*sizeof(PetscMPIInt),&pcbddc->local_primal_displacements);CHKERRQ(ierr);
1420     /* Gather local_primal_size information for all processes  */
1421     if (pcbddc->coarse_problem_type == REPLICATED_BDDC || pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
1422       ierr = MPI_Allgather(&mpi_local_primal_size,1,MPIU_INT,&pcbddc->local_primal_sizes[0],1,MPIU_INT,prec_comm);CHKERRQ(ierr);
1423     } else {
1424       ierr = MPI_Gather(&mpi_local_primal_size,1,MPIU_INT,&pcbddc->local_primal_sizes[0],1,MPIU_INT,0,prec_comm);CHKERRQ(ierr);
1425     }
1426     pcbddc->replicated_primal_size = 0;
1427     for (i=0; i<j; i++) {
1428       pcbddc->local_primal_displacements[i] = pcbddc->replicated_primal_size ;
1429       pcbddc->replicated_primal_size += pcbddc->local_primal_sizes[i];
1430     }
1431 
1432     /* First let's count coarse dofs.
1433        This code fragment assumes that the number of local constraints per connected component
1434        is not greater than the number of nodes defined for the connected component
1435        (otherwise we will surely have linear dependence between constraints and thus a singular coarse problem) */
1436     ierr = PetscMalloc(pcbddc->local_primal_size*sizeof(PetscInt),&auxlocal_primal);CHKERRQ(ierr);
1437     ierr = PCBDDCGetPrimalVerticesLocalIdx(pc,&i,&aux_idx);CHKERRQ(ierr);
1438     ierr = PetscMemcpy(auxlocal_primal,aux_idx,i*sizeof(PetscInt));CHKERRQ(ierr);
1439     ierr = PetscFree(aux_idx);CHKERRQ(ierr);
1440     ierr = PCBDDCGetPrimalConstraintsLocalIdx(pc,&j,&aux_idx);CHKERRQ(ierr);
1441     ierr = PetscMemcpy(&auxlocal_primal[i],aux_idx,j*sizeof(PetscInt));CHKERRQ(ierr);
1442     ierr = PetscFree(aux_idx);CHKERRQ(ierr);
1443     /* Compute number of coarse dofs */
1444     ierr = PCBDDCSubsetNumbering(prec_comm,matis->mapping,pcbddc->local_primal_size,auxlocal_primal,NULL,&pcbddc->coarse_size,&pcbddc->local_primal_indices);CHKERRQ(ierr);
1445 
1446     if (dbg_flag) {
1447       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1448       ierr = PetscViewerASCIIPrintf(viewer,"--------------------------------------------------\n");CHKERRQ(ierr);
1449       ierr = PetscViewerASCIIPrintf(viewer,"Check coarse indices\n");CHKERRQ(ierr);
1450       ierr = VecSet(pcis->vec1_N,0.0);CHKERRQ(ierr);
1451       ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1452       for (i=0;i<pcbddc->local_primal_size;i++) array[auxlocal_primal[i]]=1.0;
1453       ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1454       ierr = VecSet(pcis->vec1_global,0.0);CHKERRQ(ierr);
1455       ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1456       ierr = VecScatterEnd  (matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1457       ierr = VecScatterBegin(matis->ctx,pcis->vec1_global,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1458       ierr = VecScatterEnd  (matis->ctx,pcis->vec1_global,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1459       ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1460       for (i=0;i<pcis->n;i++) {
1461         if (array[i] == 1.0) {
1462           ierr = ISLocalToGlobalMappingApply(matis->mapping,1,&i,&j);CHKERRQ(ierr);
1463           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d: WRONG COARSE INDEX %d (local %d)\n",PetscGlobalRank,j,i);CHKERRQ(ierr);
1464         }
1465       }
1466       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1467       for (i=0;i<pcis->n;i++) {
1468         if (PetscRealPart(array[i]) > 0.0) array[i] = 1.0/PetscRealPart(array[i]);
1469       }
1470       ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1471       ierr = VecSet(pcis->vec1_global,0.0);CHKERRQ(ierr);
1472       ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1473       ierr = VecScatterEnd  (matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1474       ierr = VecSum(pcis->vec1_global,&coarsesum);CHKERRQ(ierr);
1475       ierr = PetscViewerASCIIPrintf(viewer,"Size of coarse problem SHOULD be %lf\n",coarsesum);CHKERRQ(ierr);
1476       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1477     }
1478     ierr = PetscFree(auxlocal_primal);CHKERRQ(ierr);
1479   }
1480 
1481   if (dbg_flag) {
1482     ierr = PetscViewerASCIIPrintf(viewer,"Size of coarse problem is %d\n",pcbddc->coarse_size);CHKERRQ(ierr);
1483     if (dbg_flag > 1) {
1484       ierr = PetscViewerASCIIPrintf(viewer,"Distribution of local primal indices\n");CHKERRQ(ierr);
1485       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1486       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d\n",PetscGlobalRank);CHKERRQ(ierr);
1487       for (i=0;i<pcbddc->local_primal_size;i++) {
1488         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"local_primal_indices[%d]=%d \n",i,pcbddc->local_primal_indices[i]);
1489       }
1490       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1491     }
1492   }
1493 
1494   im_active = 0;
1495   if (pcis->n) im_active = 1;
1496   ierr = MPI_Allreduce(&im_active,&active_procs,1,MPIU_INT,MPI_SUM,prec_comm);CHKERRQ(ierr);
1497 
1498   /* adapt coarse problem type */
1499 #if defined(PETSC_HAVE_METIS)
1500   if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
1501     if (pcbddc->current_level < pcbddc->max_levels) {
1502       if ( (active_procs/pcbddc->coarsening_ratio) < 2 ) {
1503         if (dbg_flag) {
1504           ierr = PetscViewerASCIIPrintf(viewer,"Not enough active processes on level %d (active %d,ratio %d). Parallel direct solve for coarse problem\n",pcbddc->current_level,active_procs,pcbddc->coarsening_ratio);CHKERRQ(ierr);
1505          ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1506         }
1507         pcbddc->coarse_problem_type = PARALLEL_BDDC;
1508       }
1509     } else {
1510       if (dbg_flag) {
1511         ierr = PetscViewerASCIIPrintf(viewer,"Max number of levels reached. Using parallel direct solve for coarse problem\n",pcbddc->max_levels,active_procs,pcbddc->coarsening_ratio);CHKERRQ(ierr);
1512         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1513       }
1514       pcbddc->coarse_problem_type = PARALLEL_BDDC;
1515     }
1516   }
1517 #else
1518   pcbddc->coarse_problem_type = PARALLEL_BDDC;
1519 #endif
1520 
1521   switch(pcbddc->coarse_problem_type){
1522 
1523     case(MULTILEVEL_BDDC):   /* we define a coarse mesh where subdomains are elements */
1524     {
1525 #if defined(PETSC_HAVE_METIS)
1526       /* we need additional variables */
1527       MetisInt    n_subdomains,n_parts,objval,ncon,faces_nvtxs;
1528       MetisInt    *metis_coarse_subdivision;
1529       MetisInt    options[METIS_NOPTIONS];
1530       PetscMPIInt size_coarse_comm,rank_coarse_comm;
1531       PetscMPIInt procs_jumps_coarse_comm;
1532       PetscMPIInt *coarse_subdivision;
1533       PetscMPIInt *total_count_recv;
1534       PetscMPIInt *total_ranks_recv;
1535       PetscMPIInt *displacements_recv;
1536       PetscMPIInt *my_faces_connectivity;
1537       PetscMPIInt *petsc_faces_adjncy;
1538       MetisInt    *faces_adjncy;
1539       MetisInt    *faces_xadj;
1540       PetscMPIInt *number_of_faces;
1541       PetscMPIInt *faces_displacements;
1542       PetscInt    *array_int;
1543       PetscMPIInt my_faces=0;
1544       PetscMPIInt total_faces=0;
1545       PetscInt    ranks_stretching_ratio;
1546 
1547       /* define some quantities */
1548       pcbddc->coarse_communications_type = SCATTERS_BDDC;
1549       coarse_mat_type = MATIS;
1550       coarse_pc_type  = PCBDDC;
1551       coarse_ksp_type = KSPRICHARDSON;
1552 
1553       /* details of coarse decomposition */
1554       n_subdomains = active_procs;
1555       n_parts      = n_subdomains/pcbddc->coarsening_ratio;
1556       ranks_stretching_ratio = size_prec_comm/active_procs;
1557       procs_jumps_coarse_comm = pcbddc->coarsening_ratio*ranks_stretching_ratio;
1558 
1559 #if 0
1560       PetscMPIInt *old_ranks;
1561       PetscInt    *new_ranks,*jj,*ii;
1562       MatPartitioning mat_part;
1563       IS coarse_new_decomposition,is_numbering;
1564       PetscViewer viewer_test;
1565       MPI_Comm    test_coarse_comm;
1566       PetscMPIInt test_coarse_color;
1567       Mat         mat_adj;
1568       /* Create new communicator for coarse problem splitting the old one */
1569       /* procs with coarse_color = MPI_UNDEFINED will have coarse_comm = MPI_COMM_NULL (from mpi standards)
1570          key = rank_prec_comm -> keep same ordering of ranks from the old to the new communicator */
1571       test_coarse_color = ( im_active ? 0 : MPI_UNDEFINED );
1572       test_coarse_comm = MPI_COMM_NULL;
1573       ierr = MPI_Comm_split(prec_comm,test_coarse_color,rank_prec_comm,&test_coarse_comm);CHKERRQ(ierr);
1574       if (im_active) {
1575         ierr = PetscMalloc(n_subdomains*sizeof(PetscMPIInt),&old_ranks);
1576         ierr = PetscMalloc(size_prec_comm*sizeof(PetscInt),&new_ranks);
1577         ierr = MPI_Comm_rank(test_coarse_comm,&rank_coarse_comm);CHKERRQ(ierr);
1578         ierr = MPI_Comm_size(test_coarse_comm,&j);CHKERRQ(ierr);
1579         ierr = MPI_Allgather(&rank_prec_comm,1,MPIU_INT,old_ranks,1,MPIU_INT,test_coarse_comm);CHKERRQ(ierr);
1580         for (i=0; i<size_prec_comm; i++) new_ranks[i] = -1;
1581         for (i=0; i<n_subdomains; i++) new_ranks[old_ranks[i]] = i;
1582         ierr = PetscViewerASCIIOpen(test_coarse_comm,"test_mat_part.out",&viewer_test);CHKERRQ(ierr);
1583         k = pcis->n_neigh-1;
1584         ierr = PetscMalloc(2*sizeof(PetscInt),&ii);
1585         ii[0]=0;
1586         ii[1]=k;
1587         ierr = PetscMalloc(k*sizeof(PetscInt),&jj);
1588         for (i=0; i<k; i++) jj[i]=new_ranks[pcis->neigh[i+1]];
1589         ierr = PetscSortInt(k,jj);CHKERRQ(ierr);
1590         ierr = MatCreateMPIAdj(test_coarse_comm,1,n_subdomains,ii,jj,NULL,&mat_adj);CHKERRQ(ierr);
1591         ierr = MatView(mat_adj,viewer_test);CHKERRQ(ierr);
1592         ierr = MatPartitioningCreate(test_coarse_comm,&mat_part);CHKERRQ(ierr);
1593         ierr = MatPartitioningSetAdjacency(mat_part,mat_adj);CHKERRQ(ierr);
1594         ierr = MatPartitioningSetFromOptions(mat_part);CHKERRQ(ierr);
1595         printf("Setting Nparts %d\n",n_parts);
1596         ierr = MatPartitioningSetNParts(mat_part,n_parts);CHKERRQ(ierr);
1597         ierr = MatPartitioningView(mat_part,viewer_test);CHKERRQ(ierr);
1598         ierr = MatPartitioningApply(mat_part,&coarse_new_decomposition);CHKERRQ(ierr);
1599         ierr = ISView(coarse_new_decomposition,viewer_test);CHKERRQ(ierr);
1600         ierr = ISPartitioningToNumbering(coarse_new_decomposition,&is_numbering);CHKERRQ(ierr);
1601         ierr = ISView(is_numbering,viewer_test);CHKERRQ(ierr);
1602         ierr = PetscViewerDestroy(&viewer_test);CHKERRQ(ierr);
1603         ierr = ISDestroy(&coarse_new_decomposition);CHKERRQ(ierr);
1604         ierr = ISDestroy(&is_numbering);CHKERRQ(ierr);
1605         ierr = MatPartitioningDestroy(&mat_part);CHKERRQ(ierr);
1606         ierr = PetscFree(old_ranks);CHKERRQ(ierr);
1607         ierr = PetscFree(new_ranks);CHKERRQ(ierr);
1608         ierr = MPI_Comm_free(&test_coarse_comm);CHKERRQ(ierr);
1609       }
1610 #endif
1611 
1612       /* build CSR graph of subdomains' connectivity */
1613       ierr = PetscMalloc (pcis->n*sizeof(PetscInt),&array_int);CHKERRQ(ierr);
1614       ierr = PetscMemzero(array_int,pcis->n*sizeof(PetscInt));CHKERRQ(ierr);
1615       for (i=1;i<pcis->n_neigh;i++){/* i=1 so I don't count myself -> faces nodes counts to 1 */
1616         for (j=0;j<pcis->n_shared[i];j++){
1617           array_int[ pcis->shared[i][j] ]+=1;
1618         }
1619       }
1620       for (i=1;i<pcis->n_neigh;i++){
1621         for (j=0;j<pcis->n_shared[i];j++){
1622           if (array_int[ pcis->shared[i][j] ] > 0 ){
1623             my_faces++;
1624             break;
1625           }
1626         }
1627       }
1628 
1629       ierr = MPI_Reduce(&my_faces,&total_faces,1,MPIU_INT,MPI_SUM,master_proc,prec_comm);CHKERRQ(ierr);
1630       ierr = PetscMalloc (my_faces*sizeof(PetscInt),&my_faces_connectivity);CHKERRQ(ierr);
1631       my_faces=0;
1632       for (i=1;i<pcis->n_neigh;i++){
1633         for (j=0;j<pcis->n_shared[i];j++){
1634           if (array_int[ pcis->shared[i][j] ] > 0 ){
1635             my_faces_connectivity[my_faces]=pcis->neigh[i];
1636             my_faces++;
1637             break;
1638           }
1639         }
1640       }
1641       if (rank_prec_comm == master_proc) {
1642         ierr = PetscMalloc (total_faces*sizeof(PetscMPIInt),&petsc_faces_adjncy);CHKERRQ(ierr);
1643         ierr = PetscMalloc (size_prec_comm*sizeof(PetscMPIInt),&number_of_faces);CHKERRQ(ierr);
1644         ierr = PetscMalloc (total_faces*sizeof(MetisInt),&faces_adjncy);CHKERRQ(ierr);
1645         ierr = PetscMalloc ((n_subdomains+1)*sizeof(MetisInt),&faces_xadj);CHKERRQ(ierr);
1646         ierr = PetscMalloc ((size_prec_comm+1)*sizeof(PetscMPIInt),&faces_displacements);CHKERRQ(ierr);
1647       }
1648       ierr = MPI_Gather(&my_faces,1,MPIU_INT,&number_of_faces[0],1,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr);
1649       if (rank_prec_comm == master_proc) {
1650         faces_xadj[0]=0;
1651         faces_displacements[0]=0;
1652         j=0;
1653         for (i=1;i<size_prec_comm+1;i++) {
1654           faces_displacements[i]=faces_displacements[i-1]+number_of_faces[i-1];
1655           if (number_of_faces[i-1]) {
1656             j++;
1657             faces_xadj[j]=faces_xadj[j-1]+number_of_faces[i-1];
1658           }
1659         }
1660       }
1661       ierr = MPI_Gatherv(&my_faces_connectivity[0],my_faces,MPIU_INT,&petsc_faces_adjncy[0],number_of_faces,faces_displacements,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr);
1662       ierr = PetscFree(my_faces_connectivity);CHKERRQ(ierr);
1663       ierr = PetscFree(array_int);CHKERRQ(ierr);
1664       if (rank_prec_comm == master_proc) {
1665         for (i=0;i<total_faces;i++) faces_adjncy[i]=(MetisInt)(petsc_faces_adjncy[i]/ranks_stretching_ratio); /* cast to MetisInt */
1666         ierr = PetscFree(faces_displacements);CHKERRQ(ierr);
1667         ierr = PetscFree(number_of_faces);CHKERRQ(ierr);
1668         ierr = PetscFree(petsc_faces_adjncy);CHKERRQ(ierr);
1669       }
1670 
1671       if ( rank_prec_comm == master_proc ) {
1672 
1673         PetscInt heuristic_for_metis=3;
1674 
1675         ncon=1;
1676         faces_nvtxs=n_subdomains;
1677         /* partition graph induced by face connectivity */
1678         ierr = PetscMalloc (n_subdomains*sizeof(MetisInt),&metis_coarse_subdivision);CHKERRQ(ierr);
1679         ierr = METIS_SetDefaultOptions(options);
1680         /* we need a contiguous partition of the coarse mesh */
1681         options[METIS_OPTION_CONTIG]=1;
1682         options[METIS_OPTION_NITER]=30;
1683         if (pcbddc->coarsening_ratio > 1) {
1684           if (n_subdomains>n_parts*heuristic_for_metis) {
1685             options[METIS_OPTION_IPTYPE]=METIS_IPTYPE_EDGE;
1686             options[METIS_OPTION_OBJTYPE]=METIS_OBJTYPE_CUT;
1687             ierr = METIS_PartGraphKway(&faces_nvtxs,&ncon,faces_xadj,faces_adjncy,NULL,NULL,NULL,&n_parts,NULL,NULL,options,&objval,metis_coarse_subdivision);
1688             if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in METIS_PartGraphKway (metis error code %D) called from PCBDDCSetUpCoarseEnvironment\n",ierr);
1689           } else {
1690             ierr = METIS_PartGraphRecursive(&faces_nvtxs,&ncon,faces_xadj,faces_adjncy,NULL,NULL,NULL,&n_parts,NULL,NULL,options,&objval,metis_coarse_subdivision);
1691             if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in METIS_PartGraphRecursive (metis error code %D) called from PCBDDCSetUpCoarseEnvironment\n",ierr);
1692           }
1693         } else {
1694           for (i=0;i<n_subdomains;i++) metis_coarse_subdivision[i]=i;
1695         }
1696         ierr = PetscFree(faces_xadj);CHKERRQ(ierr);
1697         ierr = PetscFree(faces_adjncy);CHKERRQ(ierr);
1698         ierr = PetscMalloc(size_prec_comm*sizeof(PetscMPIInt),&coarse_subdivision);CHKERRQ(ierr);
1699 
1700         /* copy/cast values avoiding possible type conflicts between PETSc, MPI and METIS */
1701         for (i=0;i<size_prec_comm;i++) coarse_subdivision[i]=MPI_PROC_NULL;
1702         for (i=0;i<n_subdomains;i++) coarse_subdivision[ranks_stretching_ratio*i]=(PetscInt)(metis_coarse_subdivision[i]);
1703         ierr = PetscFree(metis_coarse_subdivision);CHKERRQ(ierr);
1704       }
1705 
1706       /* Create new communicator for coarse problem splitting the old one */
1707       if ( !(rank_prec_comm%procs_jumps_coarse_comm) && rank_prec_comm < procs_jumps_coarse_comm*n_parts ){
1708         coarse_color=0;              /* for communicator splitting */
1709         active_rank=rank_prec_comm;  /* for insertion of matrix values */
1710       }
1711       /* procs with coarse_color = MPI_UNDEFINED will have coarse_comm = MPI_COMM_NULL (from mpi standards)
1712          key = rank_prec_comm -> keep same ordering of ranks from the old to the new communicator */
1713       ierr = MPI_Comm_split(prec_comm,coarse_color,rank_prec_comm,&coarse_comm);CHKERRQ(ierr);
1714 
1715       if ( coarse_color == 0 ) {
1716         ierr = MPI_Comm_size(coarse_comm,&size_coarse_comm);CHKERRQ(ierr);
1717         ierr = MPI_Comm_rank(coarse_comm,&rank_coarse_comm);CHKERRQ(ierr);
1718       } else {
1719         rank_coarse_comm = MPI_PROC_NULL;
1720       }
1721 
1722       /* master proc take care of arranging and distributing coarse information */
1723       if (rank_coarse_comm == master_proc) {
1724         ierr = PetscMalloc (size_coarse_comm*sizeof(PetscMPIInt),&displacements_recv);CHKERRQ(ierr);
1725         ierr = PetscMalloc (size_coarse_comm*sizeof(PetscMPIInt),&total_count_recv);CHKERRQ(ierr);
1726         ierr = PetscMalloc (n_subdomains*sizeof(PetscMPIInt),&total_ranks_recv);CHKERRQ(ierr);
1727         /* some initializations */
1728         displacements_recv[0]=0;
1729         ierr = PetscMemzero(total_count_recv,size_coarse_comm*sizeof(PetscMPIInt));CHKERRQ(ierr);
1730         /* count from how many processes the j-th process of the coarse decomposition will receive data */
1731         for (j=0;j<size_coarse_comm;j++) {
1732           for (i=0;i<size_prec_comm;i++) {
1733           if (coarse_subdivision[i]==j) total_count_recv[j]++;
1734           }
1735         }
1736         /* displacements needed for scatterv of total_ranks_recv */
1737       for (i=1; i<size_coarse_comm; i++) displacements_recv[i]=displacements_recv[i-1]+total_count_recv[i-1];
1738 
1739         /* Now fill properly total_ranks_recv -> each coarse process will receive the ranks (in prec_comm communicator) of its friend (sending) processes */
1740         ierr = PetscMemzero(total_count_recv,size_coarse_comm*sizeof(PetscMPIInt));CHKERRQ(ierr);
1741         for (j=0;j<size_coarse_comm;j++) {
1742           for (i=0;i<size_prec_comm;i++) {
1743             if (coarse_subdivision[i]==j) {
1744               total_ranks_recv[displacements_recv[j]+total_count_recv[j]]=i;
1745               total_count_recv[j]+=1;
1746             }
1747           }
1748         }
1749         /*for (j=0;j<size_coarse_comm;j++) {
1750           printf("process %d in new rank will receive from %d processes (original ranks follows)\n",j,total_count_recv[j]);
1751           for (i=0;i<total_count_recv[j];i++) {
1752             printf("%d ",total_ranks_recv[displacements_recv[j]+i]);
1753           }
1754           printf("\n");
1755         }*/
1756 
1757         /* identify new decomposition in terms of ranks in the old communicator */
1758         for (i=0;i<n_subdomains;i++) {
1759           coarse_subdivision[ranks_stretching_ratio*i]=coarse_subdivision[ranks_stretching_ratio*i]*procs_jumps_coarse_comm;
1760         }
1761         /*printf("coarse_subdivision in old end new ranks\n");
1762         for (i=0;i<size_prec_comm;i++)
1763           if (coarse_subdivision[i]!=MPI_PROC_NULL) {
1764             printf("%d=(%d %d), ",i,coarse_subdivision[i],coarse_subdivision[i]/procs_jumps_coarse_comm);
1765           } else {
1766             printf("%d=(%d %d), ",i,coarse_subdivision[i],coarse_subdivision[i]);
1767           }
1768         printf("\n");*/
1769       }
1770 
1771       /* Scatter new decomposition for send details */
1772       ierr = MPI_Scatter(&coarse_subdivision[0],1,MPIU_INT,&rank_coarse_proc_send_to,1,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr);
1773       /* Scatter receiving details to members of coarse decomposition */
1774       if ( coarse_color == 0) {
1775         ierr = MPI_Scatter(&total_count_recv[0],1,MPIU_INT,&count_recv,1,MPIU_INT,master_proc,coarse_comm);CHKERRQ(ierr);
1776         ierr = PetscMalloc (count_recv*sizeof(PetscMPIInt),&ranks_recv);CHKERRQ(ierr);
1777         ierr = MPI_Scatterv(&total_ranks_recv[0],total_count_recv,displacements_recv,MPIU_INT,&ranks_recv[0],count_recv,MPIU_INT,master_proc,coarse_comm);CHKERRQ(ierr);
1778       }
1779 
1780       /*printf("I will send my matrix data to proc  %d\n",rank_coarse_proc_send_to);
1781       if (coarse_color == 0) {
1782         printf("I will receive some matrix data from %d processes (ranks follows)\n",count_recv);
1783         for (i=0;i<count_recv;i++)
1784           printf("%d ",ranks_recv[i]);
1785         printf("\n");
1786       }*/
1787 
1788       if (rank_prec_comm == master_proc) {
1789         ierr = PetscFree(coarse_subdivision);CHKERRQ(ierr);
1790         ierr = PetscFree(total_count_recv);CHKERRQ(ierr);
1791         ierr = PetscFree(total_ranks_recv);CHKERRQ(ierr);
1792         ierr = PetscFree(displacements_recv);CHKERRQ(ierr);
1793       }
1794 #endif
1795       break;
1796     }
1797 
1798     case(REPLICATED_BDDC):
1799 
1800       pcbddc->coarse_communications_type = GATHERS_BDDC;
1801       coarse_mat_type = MATSEQAIJ;
1802       coarse_pc_type  = PCLU;
1803       coarse_ksp_type  = KSPPREONLY;
1804       coarse_comm = PETSC_COMM_SELF;
1805       active_rank = rank_prec_comm;
1806       break;
1807 
1808     case(PARALLEL_BDDC):
1809 
1810       pcbddc->coarse_communications_type = SCATTERS_BDDC;
1811       coarse_mat_type = MATAIJ;
1812       coarse_pc_type  = PCREDUNDANT;
1813       coarse_ksp_type  = KSPPREONLY;
1814       coarse_comm = prec_comm;
1815       active_rank = rank_prec_comm;
1816       break;
1817 
1818     case(SEQUENTIAL_BDDC):
1819       pcbddc->coarse_communications_type = GATHERS_BDDC;
1820       coarse_mat_type = MATAIJ;
1821       coarse_pc_type = PCLU;
1822       coarse_ksp_type  = KSPPREONLY;
1823       coarse_comm = PETSC_COMM_SELF;
1824       active_rank = master_proc;
1825       break;
1826   }
1827 
1828   switch(pcbddc->coarse_communications_type){
1829 
1830     case(SCATTERS_BDDC):
1831       {
1832         if (pcbddc->coarse_problem_type==MULTILEVEL_BDDC) {
1833 
1834           IS coarse_IS;
1835 
1836           if(pcbddc->coarsening_ratio == 1) {
1837             ins_local_primal_size = pcbddc->local_primal_size;
1838             ins_local_primal_indices = pcbddc->local_primal_indices;
1839             if (coarse_color == 0) { ierr = PetscFree(ranks_recv);CHKERRQ(ierr); }
1840             /* nonzeros */
1841             ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&dnz);CHKERRQ(ierr);
1842             ierr = PetscMemzero(dnz,ins_local_primal_size*sizeof(PetscInt));CHKERRQ(ierr);
1843             for (i=0;i<ins_local_primal_size;i++) {
1844               dnz[i] = ins_local_primal_size;
1845             }
1846           } else {
1847             PetscMPIInt send_size;
1848             PetscMPIInt *send_buffer;
1849             PetscInt    *aux_ins_indices;
1850             PetscInt    ii,jj;
1851             MPI_Request *requests;
1852 
1853             ierr = PetscMalloc(count_recv*sizeof(PetscMPIInt),&localdispl2);CHKERRQ(ierr);
1854             /* reusing pcbddc->local_primal_displacements and pcbddc->replicated_primal_size */
1855             ierr = PetscFree(pcbddc->local_primal_displacements);CHKERRQ(ierr);
1856             ierr = PetscMalloc((count_recv+1)*sizeof(PetscMPIInt),&pcbddc->local_primal_displacements);CHKERRQ(ierr);
1857             pcbddc->replicated_primal_size = count_recv;
1858             j = 0;
1859             for (i=0;i<count_recv;i++) {
1860               pcbddc->local_primal_displacements[i] = j;
1861               j += pcbddc->local_primal_sizes[ranks_recv[i]];
1862             }
1863             pcbddc->local_primal_displacements[count_recv] = j;
1864             ierr = PetscMalloc(j*sizeof(PetscMPIInt),&pcbddc->replicated_local_primal_indices);CHKERRQ(ierr);
1865             /* allocate auxiliary space */
1866             ierr = PetscMalloc(count_recv*sizeof(PetscMPIInt),&localsizes2);CHKERRQ(ierr);
1867             ierr = PetscMalloc(pcbddc->coarse_size*sizeof(PetscInt),&aux_ins_indices);CHKERRQ(ierr);
1868             ierr = PetscMemzero(aux_ins_indices,pcbddc->coarse_size*sizeof(PetscInt));CHKERRQ(ierr);
1869             /* allocate buffers for message passing */
1870             ierr = PetscMalloc((count_recv+1)*sizeof(MPI_Request),&requests);CHKERRQ(ierr);
1871             for (i=0;i<count_recv+1;i++) { requests[i]=MPI_REQUEST_NULL; }
1872             /* send indices to be inserted */
1873             for (i=0;i<count_recv;i++) {
1874               send_size = pcbddc->local_primal_sizes[ranks_recv[i]];
1875               ierr = MPI_Irecv(&pcbddc->replicated_local_primal_indices[pcbddc->local_primal_displacements[i]],send_size,MPIU_INT,ranks_recv[i],999,prec_comm,&requests[i]);CHKERRQ(ierr);
1876             }
1877             if (rank_coarse_proc_send_to != MPI_PROC_NULL ) {
1878               send_size = pcbddc->local_primal_size;
1879               ierr = PetscMalloc(send_size*sizeof(PetscMPIInt),&send_buffer);CHKERRQ(ierr);
1880               for (i=0;i<send_size;i++) {
1881                 send_buffer[i]=(PetscMPIInt)pcbddc->local_primal_indices[i];
1882               }
1883               ierr = MPI_Isend(send_buffer,send_size,MPIU_INT,rank_coarse_proc_send_to,999,prec_comm,&requests[count_recv]);CHKERRQ(ierr);
1884             }
1885             ierr = MPI_Waitall(count_recv+1,requests,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
1886             if (rank_coarse_proc_send_to != MPI_PROC_NULL ) {
1887               ierr = PetscFree(send_buffer);CHKERRQ(ierr);
1888             }
1889             j = 0;
1890             for (i=0;i<count_recv;i++) {
1891               ii = pcbddc->local_primal_displacements[i+1]-pcbddc->local_primal_displacements[i];
1892               localsizes2[i] = ii*ii;
1893               localdispl2[i] = j;
1894               j += localsizes2[i];
1895               jj = pcbddc->local_primal_displacements[i];
1896               /* it counts the coarse subdomains sharing the coarse node */
1897               for (k=0;k<ii;k++) {
1898                 aux_ins_indices[pcbddc->replicated_local_primal_indices[jj+k]] += 1;
1899               }
1900             }
1901             /* temp_coarse_mat_vals used to store matrix values to be received */
1902             ierr = PetscMalloc(j*sizeof(PetscScalar),&temp_coarse_mat_vals);CHKERRQ(ierr);
1903             /* evaluate how many values I will insert in coarse mat */
1904             ins_local_primal_size = 0;
1905             for (i=0;i<pcbddc->coarse_size;i++) {
1906               if (aux_ins_indices[i]) {
1907                 ins_local_primal_size++;
1908               }
1909             }
1910             /* evaluate indices I will insert in coarse mat */
1911             ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
1912             j = 0;
1913             for(i=0;i<pcbddc->coarse_size;i++) {
1914               if(aux_ins_indices[i]) {
1915                 ins_local_primal_indices[j] = i;
1916                 j++;
1917               }
1918             }
1919             /* processes participating in coarse problem receive matrix data from their friends */
1920             for (i=0;i<count_recv;i++) {
1921               ierr = MPI_Irecv(&temp_coarse_mat_vals[localdispl2[i]],localsizes2[i],MPIU_SCALAR,ranks_recv[i],666,prec_comm,&requests[i]);CHKERRQ(ierr);
1922             }
1923             if (rank_coarse_proc_send_to != MPI_PROC_NULL ) {
1924               send_size = pcbddc->local_primal_size*pcbddc->local_primal_size;
1925               ierr = MPI_Isend(&coarse_submat_vals[0],send_size,MPIU_SCALAR,rank_coarse_proc_send_to,666,prec_comm,&requests[count_recv]);CHKERRQ(ierr);
1926             }
1927             ierr = MPI_Waitall(count_recv+1,requests,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
1928             /* nonzeros */
1929             ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&dnz);CHKERRQ(ierr);
1930             ierr = PetscMemzero(dnz,ins_local_primal_size*sizeof(PetscInt));CHKERRQ(ierr);
1931             /* use aux_ins_indices to realize a global to local mapping */
1932             j=0;
1933             for(i=0;i<pcbddc->coarse_size;i++){
1934               if(aux_ins_indices[i]==0){
1935                 aux_ins_indices[i]=-1;
1936               } else {
1937                 aux_ins_indices[i]=j;
1938                 j++;
1939               }
1940             }
1941             for (i=0;i<count_recv;i++) {
1942               j = pcbddc->local_primal_sizes[ranks_recv[i]];
1943               for (k=0;k<j;k++) {
1944                 dnz[aux_ins_indices[pcbddc->replicated_local_primal_indices[pcbddc->local_primal_displacements[i]+k]]] += j;
1945               }
1946             }
1947             /* check */
1948             for (i=0;i<ins_local_primal_size;i++) {
1949               if (dnz[i] > ins_local_primal_size) {
1950                 dnz[i] = ins_local_primal_size;
1951               }
1952             }
1953             ierr = PetscFree(requests);CHKERRQ(ierr);
1954             ierr = PetscFree(aux_ins_indices);CHKERRQ(ierr);
1955             if (coarse_color == 0) { ierr = PetscFree(ranks_recv);CHKERRQ(ierr); }
1956           }
1957           /* create local to global mapping needed by coarse MATIS */
1958           if (coarse_comm != MPI_COMM_NULL ) {ierr = MPI_Comm_free(&coarse_comm);CHKERRQ(ierr);}
1959           coarse_comm = prec_comm;
1960           active_rank = rank_prec_comm;
1961           ierr = ISCreateGeneral(coarse_comm,ins_local_primal_size,ins_local_primal_indices,PETSC_COPY_VALUES,&coarse_IS);CHKERRQ(ierr);
1962           ierr = ISLocalToGlobalMappingCreateIS(coarse_IS,&coarse_ISLG);CHKERRQ(ierr);
1963           ierr = ISDestroy(&coarse_IS);CHKERRQ(ierr);
1964         } else if (pcbddc->coarse_problem_type==PARALLEL_BDDC) {
1965           /* arrays for values insertion */
1966           ins_local_primal_size = pcbddc->local_primal_size;
1967           ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
1968           ierr = PetscMalloc(ins_local_primal_size*ins_local_primal_size*sizeof(PetscScalar),&ins_coarse_mat_vals);CHKERRQ(ierr);
1969           for (j=0;j<ins_local_primal_size;j++){
1970             ins_local_primal_indices[j]=pcbddc->local_primal_indices[j];
1971             for (i=0;i<ins_local_primal_size;i++) {
1972               ins_coarse_mat_vals[j*ins_local_primal_size+i]=coarse_submat_vals[j*ins_local_primal_size+i];
1973             }
1974           }
1975         }
1976         break;
1977 
1978     }
1979 
1980     case(GATHERS_BDDC):
1981       {
1982 
1983         PetscMPIInt mysize,mysize2;
1984         PetscMPIInt *send_buffer;
1985 
1986         if (rank_prec_comm==active_rank) {
1987           ierr = PetscMalloc ( pcbddc->replicated_primal_size*sizeof(PetscMPIInt),&pcbddc->replicated_local_primal_indices);CHKERRQ(ierr);
1988           ierr = PetscMalloc ( pcbddc->replicated_primal_size*sizeof(PetscScalar),&pcbddc->replicated_local_primal_values);CHKERRQ(ierr);
1989           ierr = PetscMalloc ( size_prec_comm*sizeof(PetscMPIInt),&localsizes2);CHKERRQ(ierr);
1990           ierr = PetscMalloc ( size_prec_comm*sizeof(PetscMPIInt),&localdispl2);CHKERRQ(ierr);
1991           /* arrays for values insertion */
1992       for (i=0;i<size_prec_comm;i++) localsizes2[i]=pcbddc->local_primal_sizes[i]*pcbddc->local_primal_sizes[i];
1993           localdispl2[0]=0;
1994       for (i=1;i<size_prec_comm;i++) localdispl2[i]=localsizes2[i-1]+localdispl2[i-1];
1995           j=0;
1996       for (i=0;i<size_prec_comm;i++) j+=localsizes2[i];
1997           ierr = PetscMalloc ( j*sizeof(PetscScalar),&temp_coarse_mat_vals);CHKERRQ(ierr);
1998         }
1999 
2000         mysize=pcbddc->local_primal_size;
2001         mysize2=pcbddc->local_primal_size*pcbddc->local_primal_size;
2002         ierr = PetscMalloc(mysize*sizeof(PetscMPIInt),&send_buffer);CHKERRQ(ierr);
2003     for (i=0; i<mysize; i++) send_buffer[i]=(PetscMPIInt)pcbddc->local_primal_indices[i];
2004 
2005         if (pcbddc->coarse_problem_type == SEQUENTIAL_BDDC){
2006           ierr = MPI_Gatherv(send_buffer,mysize,MPIU_INT,&pcbddc->replicated_local_primal_indices[0],pcbddc->local_primal_sizes,pcbddc->local_primal_displacements,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr);
2007           ierr = MPI_Gatherv(&coarse_submat_vals[0],mysize2,MPIU_SCALAR,&temp_coarse_mat_vals[0],localsizes2,localdispl2,MPIU_SCALAR,master_proc,prec_comm);CHKERRQ(ierr);
2008         } else {
2009           ierr = MPI_Allgatherv(send_buffer,mysize,MPIU_INT,&pcbddc->replicated_local_primal_indices[0],pcbddc->local_primal_sizes,pcbddc->local_primal_displacements,MPIU_INT,prec_comm);CHKERRQ(ierr);
2010           ierr = MPI_Allgatherv(&coarse_submat_vals[0],mysize2,MPIU_SCALAR,&temp_coarse_mat_vals[0],localsizes2,localdispl2,MPIU_SCALAR,prec_comm);CHKERRQ(ierr);
2011         }
2012         ierr = PetscFree(send_buffer);CHKERRQ(ierr);
2013         break;
2014       }/* end of switch over coarse problem type and its associated communications */
2015   }
2016 
2017   /* Now create and fill up coarse matrix */
2018   if ( rank_prec_comm == active_rank ) {
2019 
2020     Mat matis_coarse_local_mat;
2021 
2022     if (pcbddc->coarse_problem_type != MULTILEVEL_BDDC) {
2023       ierr = MatCreate(coarse_comm,&pcbddc->coarse_mat);CHKERRQ(ierr);
2024       ierr = MatSetSizes(pcbddc->coarse_mat,PETSC_DECIDE,PETSC_DECIDE,pcbddc->coarse_size,pcbddc->coarse_size);CHKERRQ(ierr);
2025       ierr = MatSetType(pcbddc->coarse_mat,coarse_mat_type);CHKERRQ(ierr);
2026       ierr = MatSetOptionsPrefix(pcbddc->coarse_mat,"coarse_");CHKERRQ(ierr);
2027       ierr = MatSetFromOptions(pcbddc->coarse_mat);CHKERRQ(ierr);
2028       ierr = MatSetUp(pcbddc->coarse_mat);CHKERRQ(ierr);
2029       ierr = MatSetOption(pcbddc->coarse_mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr); /* local values stored in column major */
2030       ierr = MatSetOption(pcbddc->coarse_mat,MAT_IGNORE_ZERO_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
2031     } else {
2032       ierr = MatCreateIS(coarse_comm,1,PETSC_DECIDE,PETSC_DECIDE,pcbddc->coarse_size,pcbddc->coarse_size,coarse_ISLG,&pcbddc->coarse_mat);CHKERRQ(ierr);
2033       ierr = MatSetUp(pcbddc->coarse_mat);CHKERRQ(ierr);
2034       ierr = MatISGetLocalMat(pcbddc->coarse_mat,&matis_coarse_local_mat);CHKERRQ(ierr);
2035       ierr = MatSetOptionsPrefix(pcbddc->coarse_mat,"coarse_");CHKERRQ(ierr);
2036       ierr = MatSetFromOptions(pcbddc->coarse_mat);CHKERRQ(ierr);
2037       ierr = MatSetUp(matis_coarse_local_mat);CHKERRQ(ierr);
2038       ierr = MatSetOption(matis_coarse_local_mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr); /* local values stored in column major */
2039       ierr = MatSetOption(matis_coarse_local_mat,MAT_IGNORE_ZERO_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
2040     }
2041     /* preallocation */
2042     if (pcbddc->coarse_problem_type != MULTILEVEL_BDDC) {
2043 
2044       PetscInt lrows,lcols,bs;
2045 
2046       ierr = MatGetLocalSize(pcbddc->coarse_mat,&lrows,&lcols);CHKERRQ(ierr);
2047       ierr = MatPreallocateInitialize(coarse_comm,lrows,lcols,dnz,onz);CHKERRQ(ierr);
2048       ierr = MatGetBlockSize(pcbddc->coarse_mat,&bs);CHKERRQ(ierr);
2049 
2050       if (pcbddc->coarse_problem_type == PARALLEL_BDDC) {
2051 
2052         Vec         vec_dnz,vec_onz;
2053         PetscScalar *my_dnz,*my_onz,*array;
2054         PetscInt    *mat_ranges,*row_ownership;
2055         PetscInt    coarse_index_row,coarse_index_col,owner;
2056 
2057         ierr = VecCreate(prec_comm,&vec_dnz);CHKERRQ(ierr);
2058         ierr = VecSetBlockSize(vec_dnz,bs);CHKERRQ(ierr);
2059         ierr = VecSetSizes(vec_dnz,PETSC_DECIDE,pcbddc->coarse_size);CHKERRQ(ierr);
2060         ierr = VecSetType(vec_dnz,VECMPI);CHKERRQ(ierr);
2061         ierr = VecDuplicate(vec_dnz,&vec_onz);CHKERRQ(ierr);
2062 
2063         ierr = PetscMalloc(pcbddc->local_primal_size*sizeof(PetscScalar),&my_dnz);CHKERRQ(ierr);
2064         ierr = PetscMalloc(pcbddc->local_primal_size*sizeof(PetscScalar),&my_onz);CHKERRQ(ierr);
2065         ierr = PetscMemzero(my_dnz,pcbddc->local_primal_size*sizeof(PetscScalar));CHKERRQ(ierr);
2066         ierr = PetscMemzero(my_onz,pcbddc->local_primal_size*sizeof(PetscScalar));CHKERRQ(ierr);
2067 
2068         ierr = PetscMalloc(pcbddc->coarse_size*sizeof(PetscInt),&row_ownership);CHKERRQ(ierr);
2069         ierr = MatGetOwnershipRanges(pcbddc->coarse_mat,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
2070         for (i=0;i<size_prec_comm;i++) {
2071           for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
2072             row_ownership[j]=i;
2073           }
2074         }
2075 
2076         for (i=0;i<pcbddc->local_primal_size;i++) {
2077           coarse_index_row = pcbddc->local_primal_indices[i];
2078           owner = row_ownership[coarse_index_row];
2079           for (j=i;j<pcbddc->local_primal_size;j++) {
2080             owner = row_ownership[coarse_index_row];
2081             coarse_index_col = pcbddc->local_primal_indices[j];
2082             if (coarse_index_col > mat_ranges[owner]-1 && coarse_index_col < mat_ranges[owner+1] ) {
2083               my_dnz[i] += 1.0;
2084             } else {
2085               my_onz[i] += 1.0;
2086             }
2087             if (i != j) {
2088               owner = row_ownership[coarse_index_col];
2089               if (coarse_index_row > mat_ranges[owner]-1 && coarse_index_row < mat_ranges[owner+1] ) {
2090                 my_dnz[j] += 1.0;
2091               } else {
2092                 my_onz[j] += 1.0;
2093               }
2094             }
2095           }
2096         }
2097         ierr = VecSet(vec_dnz,0.0);CHKERRQ(ierr);
2098         ierr = VecSet(vec_onz,0.0);CHKERRQ(ierr);
2099         if (pcbddc->local_primal_size) {
2100           ierr = VecSetValues(vec_dnz,pcbddc->local_primal_size,pcbddc->local_primal_indices,my_dnz,ADD_VALUES);CHKERRQ(ierr);
2101           ierr = VecSetValues(vec_onz,pcbddc->local_primal_size,pcbddc->local_primal_indices,my_onz,ADD_VALUES);CHKERRQ(ierr);
2102         }
2103         ierr = VecAssemblyBegin(vec_dnz);CHKERRQ(ierr);
2104         ierr = VecAssemblyBegin(vec_onz);CHKERRQ(ierr);
2105         ierr = VecAssemblyEnd(vec_dnz);CHKERRQ(ierr);
2106         ierr = VecAssemblyEnd(vec_onz);CHKERRQ(ierr);
2107         j = mat_ranges[rank_prec_comm+1]-mat_ranges[rank_prec_comm];
2108         ierr = VecGetArray(vec_dnz,&array);CHKERRQ(ierr);
2109         for (i=0; i<j; i++) dnz[i] = (PetscInt)PetscRealPart(array[i]);
2110 
2111         ierr = VecRestoreArray(vec_dnz,&array);CHKERRQ(ierr);
2112         ierr = VecGetArray(vec_onz,&array);CHKERRQ(ierr);
2113         for (i=0;i<j;i++) onz[i] = (PetscInt)PetscRealPart(array[i]);
2114 
2115         ierr = VecRestoreArray(vec_onz,&array);CHKERRQ(ierr);
2116         ierr = PetscFree(my_dnz);CHKERRQ(ierr);
2117         ierr = PetscFree(my_onz);CHKERRQ(ierr);
2118         ierr = PetscFree(row_ownership);CHKERRQ(ierr);
2119         ierr = VecDestroy(&vec_dnz);CHKERRQ(ierr);
2120         ierr = VecDestroy(&vec_onz);CHKERRQ(ierr);
2121       } else {
2122         for (k=0;k<size_prec_comm;k++){
2123           offset=pcbddc->local_primal_displacements[k];
2124           offset2=localdispl2[k];
2125           ins_local_primal_size = pcbddc->local_primal_sizes[k];
2126           ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
2127           for (j=0;j<ins_local_primal_size;j++){
2128             ins_local_primal_indices[j]=(PetscInt)pcbddc->replicated_local_primal_indices[offset+j];
2129           }
2130           for (j=0;j<ins_local_primal_size;j++) {
2131             ierr = MatPreallocateSet(ins_local_primal_indices[j],ins_local_primal_size,ins_local_primal_indices,dnz,onz);CHKERRQ(ierr);
2132           }
2133           ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr);
2134         }
2135       }
2136 
2137       /* check */
2138       for (i=0;i<lrows;i++) {
2139         if (dnz[i]>lcols) dnz[i]=lcols;
2140         if (onz[i]>pcbddc->coarse_size-lcols) onz[i]=pcbddc->coarse_size-lcols;
2141       }
2142       ierr = MatSeqAIJSetPreallocation(pcbddc->coarse_mat,0,dnz);CHKERRQ(ierr);
2143       ierr = MatMPIAIJSetPreallocation(pcbddc->coarse_mat,0,dnz,0,onz);CHKERRQ(ierr);
2144       ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
2145     } else {
2146       ierr = MatSeqAIJSetPreallocation(matis_coarse_local_mat,0,dnz);CHKERRQ(ierr);
2147       ierr = PetscFree(dnz);CHKERRQ(ierr);
2148     }
2149     /* insert values */
2150     if (pcbddc->coarse_problem_type == PARALLEL_BDDC) {
2151       ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,ADD_VALUES);CHKERRQ(ierr);
2152     } else if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
2153       if (pcbddc->coarsening_ratio == 1) {
2154         ins_coarse_mat_vals = coarse_submat_vals;
2155         ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,INSERT_VALUES);CHKERRQ(ierr);
2156       } else {
2157         ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr);
2158         for (k=0;k<pcbddc->replicated_primal_size;k++) {
2159           offset = pcbddc->local_primal_displacements[k];
2160           offset2 = localdispl2[k];
2161           ins_local_primal_size = pcbddc->local_primal_displacements[k+1]-pcbddc->local_primal_displacements[k];
2162           ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
2163           for (j=0;j<ins_local_primal_size;j++){
2164             ins_local_primal_indices[j]=(PetscInt)pcbddc->replicated_local_primal_indices[offset+j];
2165           }
2166           ins_coarse_mat_vals = &temp_coarse_mat_vals[offset2];
2167           ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,ADD_VALUES);CHKERRQ(ierr);
2168           ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr);
2169         }
2170       }
2171       ins_local_primal_indices = 0;
2172       ins_coarse_mat_vals = 0;
2173     } else {
2174       for (k=0;k<size_prec_comm;k++){
2175         offset=pcbddc->local_primal_displacements[k];
2176         offset2=localdispl2[k];
2177         ins_local_primal_size = pcbddc->local_primal_sizes[k];
2178         ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
2179         for (j=0;j<ins_local_primal_size;j++){
2180           ins_local_primal_indices[j]=(PetscInt)pcbddc->replicated_local_primal_indices[offset+j];
2181         }
2182         ins_coarse_mat_vals = &temp_coarse_mat_vals[offset2];
2183         ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,ADD_VALUES);CHKERRQ(ierr);
2184         ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr);
2185       }
2186       ins_local_primal_indices = 0;
2187       ins_coarse_mat_vals = 0;
2188     }
2189     ierr = MatAssemblyBegin(pcbddc->coarse_mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2190     ierr = MatAssemblyEnd(pcbddc->coarse_mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2191     /* symmetry of coarse matrix */
2192     if (issym) {
2193       ierr = MatSetOption(pcbddc->coarse_mat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2194     }
2195     ierr = MatGetVecs(pcbddc->coarse_mat,&pcbddc->coarse_vec,&pcbddc->coarse_rhs);CHKERRQ(ierr);
2196   }
2197 
2198   /* create loc to glob scatters if needed */
2199   if (pcbddc->coarse_communications_type == SCATTERS_BDDC) {
2200      IS local_IS,global_IS;
2201      ierr = ISCreateStride(PETSC_COMM_SELF,pcbddc->local_primal_size,0,1,&local_IS);CHKERRQ(ierr);
2202      ierr = ISCreateGeneral(PETSC_COMM_SELF,pcbddc->local_primal_size,pcbddc->local_primal_indices,PETSC_COPY_VALUES,&global_IS);CHKERRQ(ierr);
2203      ierr = VecScatterCreate(pcbddc->vec1_P,local_IS,pcbddc->coarse_vec,global_IS,&pcbddc->coarse_loc_to_glob);CHKERRQ(ierr);
2204      ierr = ISDestroy(&local_IS);CHKERRQ(ierr);
2205      ierr = ISDestroy(&global_IS);CHKERRQ(ierr);
2206   }
2207 
2208   /* free memory no longer needed */
2209   if (coarse_ISLG)              { ierr = ISLocalToGlobalMappingDestroy(&coarse_ISLG);CHKERRQ(ierr); }
2210   if (ins_local_primal_indices) { ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr); }
2211   if (ins_coarse_mat_vals)      { ierr = PetscFree(ins_coarse_mat_vals);CHKERRQ(ierr); }
2212   if (localsizes2)              { ierr = PetscFree(localsizes2);CHKERRQ(ierr); }
2213   if (localdispl2)              { ierr = PetscFree(localdispl2);CHKERRQ(ierr); }
2214   if (temp_coarse_mat_vals)     { ierr = PetscFree(temp_coarse_mat_vals);CHKERRQ(ierr); }
2215 
2216   /* Compute coarse null space */
2217   CoarseNullSpace = 0;
2218   if (pcbddc->NullSpace) {
2219     ierr = PCBDDCNullSpaceAssembleCoarse(pc,&CoarseNullSpace);CHKERRQ(ierr);
2220   }
2221 
2222   /* KSP for coarse problem */
2223   if (rank_prec_comm == active_rank) {
2224     PetscBool isbddc=PETSC_FALSE;
2225 
2226     ierr = KSPCreate(coarse_comm,&pcbddc->coarse_ksp);CHKERRQ(ierr);
2227     ierr = PetscObjectIncrementTabLevel((PetscObject)pcbddc->coarse_ksp,(PetscObject)pc,1);CHKERRQ(ierr);
2228     ierr = KSPSetOperators(pcbddc->coarse_ksp,pcbddc->coarse_mat,pcbddc->coarse_mat,SAME_PRECONDITIONER);CHKERRQ(ierr);
2229     ierr = KSPSetTolerances(pcbddc->coarse_ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,max_it_coarse_ksp);CHKERRQ(ierr);
2230     ierr = KSPSetType(pcbddc->coarse_ksp,coarse_ksp_type);CHKERRQ(ierr);
2231     ierr = KSPGetPC(pcbddc->coarse_ksp,&pc_temp);CHKERRQ(ierr);
2232     ierr = PCSetType(pc_temp,coarse_pc_type);CHKERRQ(ierr);
2233     /* Allow user's customization */
2234     ierr = KSPSetOptionsPrefix(pcbddc->coarse_ksp,"coarse_");CHKERRQ(ierr);
2235     /* Set Up PC for coarse problem BDDC */
2236     if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
2237       i = pcbddc->current_level+1;
2238       ierr = PCBDDCSetLevel(pc_temp,i);CHKERRQ(ierr);
2239       ierr = PCBDDCSetCoarseningRatio(pc_temp,pcbddc->coarsening_ratio);CHKERRQ(ierr);
2240       ierr = PCBDDCSetMaxLevels(pc_temp,pcbddc->max_levels);CHKERRQ(ierr);
2241       ierr = PCBDDCSetCoarseProblemType(pc_temp,MULTILEVEL_BDDC);CHKERRQ(ierr);
2242       if (CoarseNullSpace) {
2243         ierr = PCBDDCSetNullSpace(pc_temp,CoarseNullSpace);CHKERRQ(ierr);
2244       }
2245       if (dbg_flag) {
2246         ierr = PetscViewerASCIIPrintf(viewer,"----------------Level %d: Setting up level %d---------------\n",pcbddc->current_level,i);CHKERRQ(ierr);
2247         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
2248       }
2249     } else {
2250       if (CoarseNullSpace) {
2251         ierr = KSPSetNullSpace(pcbddc->coarse_ksp,CoarseNullSpace);CHKERRQ(ierr);
2252       }
2253     }
2254     ierr = KSPSetFromOptions(pcbddc->coarse_ksp);CHKERRQ(ierr);
2255     ierr = KSPSetUp(pcbddc->coarse_ksp);CHKERRQ(ierr);
2256 
2257     ierr = KSPGetTolerances(pcbddc->coarse_ksp,NULL,NULL,NULL,&j);CHKERRQ(ierr);
2258     ierr = KSPGetPC(pcbddc->coarse_ksp,&pc_temp);CHKERRQ(ierr);
2259     ierr = PetscObjectTypeCompare((PetscObject)pc_temp,PCBDDC,&isbddc);CHKERRQ(ierr);
2260     if (j == 1) {
2261       ierr = KSPSetNormType(pcbddc->coarse_ksp,KSP_NORM_NONE);CHKERRQ(ierr);
2262       if (isbddc) {
2263         ierr = PCBDDCSetUseExactDirichlet(pc_temp,PETSC_FALSE);CHKERRQ(ierr);
2264       }
2265     }
2266   }
2267   /* Check coarse problem if requested */
2268   if ( dbg_flag && rank_prec_comm == active_rank ) {
2269     KSP check_ksp;
2270     PC  check_pc;
2271     Vec check_vec;
2272     PetscReal   abs_infty_error,infty_error,lambda_min,lambda_max;
2273     KSPType check_ksp_type;
2274 
2275     /* Create a KSP object suitable for estimating the extreme eigenvalues of the preconditioned coarse operator */
2276     ierr = KSPCreate(coarse_comm,&check_ksp);CHKERRQ(ierr);
2277     ierr = KSPSetOperators(check_ksp,pcbddc->coarse_mat,pcbddc->coarse_mat,SAME_PRECONDITIONER);CHKERRQ(ierr);
2278     ierr = KSPSetTolerances(check_ksp,1.e-12,1.e-12,PETSC_DEFAULT,pcbddc->coarse_size);CHKERRQ(ierr);
2279     if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
2280       if (issym) check_ksp_type = KSPCG;
2281       else check_ksp_type = KSPGMRES;
2282       ierr = KSPSetComputeSingularValues(check_ksp,PETSC_TRUE);CHKERRQ(ierr);
2283     } else {
2284       check_ksp_type = KSPPREONLY;
2285     }
2286     ierr = KSPSetType(check_ksp,check_ksp_type);CHKERRQ(ierr);
2287     ierr = KSPGetPC(pcbddc->coarse_ksp,&check_pc);CHKERRQ(ierr);
2288     ierr = KSPSetPC(check_ksp,check_pc);CHKERRQ(ierr);
2289     ierr = KSPSetUp(check_ksp);CHKERRQ(ierr);
2290     /* create random vec */
2291     ierr = VecDuplicate(pcbddc->coarse_vec,&check_vec);CHKERRQ(ierr);
2292     ierr = VecSetRandom(check_vec,NULL);CHKERRQ(ierr);
2293     if (CoarseNullSpace) {
2294       ierr = MatNullSpaceRemove(CoarseNullSpace,check_vec);CHKERRQ(ierr);
2295     }
2296     ierr = MatMult(pcbddc->coarse_mat,check_vec,pcbddc->coarse_rhs);CHKERRQ(ierr);
2297     /* solve coarse problem */
2298     ierr = KSPSolve(check_ksp,pcbddc->coarse_rhs,pcbddc->coarse_vec);CHKERRQ(ierr);
2299     if (CoarseNullSpace) {
2300       ierr = MatNullSpaceRemove(CoarseNullSpace,pcbddc->coarse_vec);CHKERRQ(ierr);
2301     }
2302     /* check coarse problem residual error */
2303     ierr = VecAXPY(check_vec,-1.0,pcbddc->coarse_vec);CHKERRQ(ierr);
2304     ierr = VecNorm(check_vec,NORM_INFINITY,&infty_error);CHKERRQ(ierr);
2305     ierr = MatMult(pcbddc->coarse_mat,check_vec,pcbddc->coarse_rhs);CHKERRQ(ierr);
2306     ierr = VecNorm(pcbddc->coarse_rhs,NORM_INFINITY,&abs_infty_error);CHKERRQ(ierr);
2307     ierr = VecDestroy(&check_vec);CHKERRQ(ierr);
2308     /* get eigenvalue estimation if inexact */
2309     if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
2310       ierr = KSPComputeExtremeSingularValues(check_ksp,&lambda_max,&lambda_min);CHKERRQ(ierr);
2311       ierr = KSPGetIterationNumber(check_ksp,&k);CHKERRQ(ierr);
2312       ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem eigenvalues estimated with %d iterations of %s.\n",k,check_ksp_type);CHKERRQ(ierr);
2313       ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem eigenvalues: % 1.14e %1.14e\n",lambda_min,lambda_max);CHKERRQ(ierr);
2314     }
2315     ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem exact infty_error   : %1.14e\n",infty_error);CHKERRQ(ierr);
2316     ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem residual infty_error: %1.14e\n",abs_infty_error);CHKERRQ(ierr);
2317     ierr = KSPDestroy(&check_ksp);CHKERRQ(ierr);
2318   }
2319   if (dbg_flag) {
2320     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
2321   }
2322   ierr = MatNullSpaceDestroy(&CoarseNullSpace);CHKERRQ(ierr);
2323 
2324   PetscFunctionReturn(0);
2325 }
2326 
2327