xref: /petsc/src/ksp/pc/impls/bddc/bddc.c (revision 88ebb7492cd0215be3d18dc7a81e485fff46f8ca)
1 /* TODOLIST
2    DofSplitting and DM attached to pc?
3    Change SetNeumannBoundaries to SetNeumannBoundariesLocal and provide new SetNeumannBoundaries (same Dirichlet)
4    change how to deal with the coarse problem (PCBDDCSetCoarseEnvironment):
5      - simplify coarse problem structure -> PCBDDC or PCREDUNDANT, nothing else -> same comm for all levels?
6      - remove coarse enums and allow use of PCBDDCGetCoarseKSP
7      - remove metis dependency -> use MatPartitioning for multilevel -> Assemble serial adjacency in PCBDDCAnalyzeInterface?
8    code refactoring:
9      - pick better names for static functions
10    change options structure:
11      - insert BDDC into MG framework?
12    provide other ops? Ask the developers
13    remove all unused printf
14    man pages
15 */
16 
17 /* ----------------------------------------------------------------------------------------------------------------------------------------------
18    Implementation of BDDC preconditioner based on:
19    C. Dohrmann "An approximate BDDC preconditioner", Numerical Linear Algebra with Applications Volume 14, Issue 2, pages 149-168, March 2007
20    ---------------------------------------------------------------------------------------------------------------------------------------------- */
21 
22 #include "bddc.h" /*I "petscpc.h" I*/  /* includes for fortran wrappers */
23 #include "bddcprivate.h"
24 #include <petscblaslapack.h>
25 
26 /* prototypes for static functions contained in bddc.c */
27 static PetscErrorCode PCBDDCSetLevel(PC,PetscInt);
28 static PetscErrorCode PCBDDCCoarseSetUp(PC);
29 
30 /* -------------------------------------------------------------------------- */
31 #undef __FUNCT__
32 #define __FUNCT__ "PCSetFromOptions_BDDC"
33 PetscErrorCode PCSetFromOptions_BDDC(PC pc)
34 {
35   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
36   PetscErrorCode ierr;
37 
38   PetscFunctionBegin;
39   ierr = PetscOptionsHead("BDDC options");CHKERRQ(ierr);
40   /* Verbose debugging of main data structures */
41   ierr = PetscOptionsInt("-pc_bddc_check_level"       ,"Verbose (debugging) output for PCBDDC"                       ,"none",pcbddc->dbg_flag      ,&pcbddc->dbg_flag      ,NULL);CHKERRQ(ierr);
42   /* Some customization for default primal space */
43   ierr = PetscOptionsBool("-pc_bddc_vertices_only"   ,"Use only vertices in coarse space (i.e. discard constraints)","none",pcbddc->vertices_flag   ,&pcbddc->vertices_flag   ,NULL);CHKERRQ(ierr);
44   ierr = PetscOptionsBool("-pc_bddc_constraints_only","Use only constraints in coarse space (i.e. discard vertices)","none",pcbddc->constraints_flag,&pcbddc->constraints_flag,NULL);CHKERRQ(ierr);
45   ierr = PetscOptionsBool("-pc_bddc_faces_only"      ,"Use only faces among constraints of coarse space (i.e. discard edges)"         ,"none",pcbddc->faces_flag      ,&pcbddc->faces_flag      ,NULL);CHKERRQ(ierr);
46   ierr = PetscOptionsBool("-pc_bddc_edges_only"      ,"Use only edges among constraints of coarse space (i.e. discard faces)"         ,"none",pcbddc->edges_flag      ,&pcbddc->edges_flag      ,NULL);CHKERRQ(ierr);
47   /* Coarse solver context */
48   static const char * const avail_coarse_problems[] = {"sequential","replicated","parallel","multilevel","CoarseProblemType","PC_BDDC_",0}; /* order of choices depends on the enum defined in bddc.h */
49   ierr = PetscOptionsEnum("-pc_bddc_coarse_problem_type","Set coarse problem type","none",avail_coarse_problems,(PetscEnum)pcbddc->coarse_problem_type,(PetscEnum*)&pcbddc->coarse_problem_type,NULL);CHKERRQ(ierr);
50   /* Two different applications of BDDC to the whole set of dofs: internal and interface */
51   ierr = PetscOptionsBool("-pc_bddc_switch_preconditioning_type","Switch between M_2 (default) and M_3 preconditioners (as defined by Dohrmann)","none",pcbddc->inexact_prec_type,&pcbddc->inexact_prec_type,NULL);CHKERRQ(ierr);
52   ierr = PetscOptionsBool("-pc_bddc_use_change_of_basis","Use change of basis approach for primal space","none",pcbddc->use_change_of_basis,&pcbddc->use_change_of_basis,NULL);CHKERRQ(ierr);
53   ierr = PetscOptionsBool("-pc_bddc_use_change_on_faces","Use change of basis approach for face constraints","none",pcbddc->use_change_on_faces,&pcbddc->use_change_on_faces,NULL);CHKERRQ(ierr);
54   if (!pcbddc->use_change_of_basis) {
55     pcbddc->use_change_on_faces = PETSC_FALSE;
56   }
57   ierr = PetscOptionsInt("-pc_bddc_coarsening_ratio","Set coarsening ratio used in multilevel coarsening","none",pcbddc->coarsening_ratio,&pcbddc->coarsening_ratio,NULL);CHKERRQ(ierr);
58   ierr = PetscOptionsInt("-pc_bddc_max_levels","Set maximum number of levels for multilevel","none",pcbddc->max_levels,&pcbddc->max_levels,NULL);CHKERRQ(ierr);
59   ierr = PetscOptionsBool("-pc_bddc_use_deluxe_scaling","Use deluxe scaling for BDDC","none",pcbddc->use_deluxe_scaling,&pcbddc->use_deluxe_scaling,NULL);CHKERRQ(ierr);
60   ierr = PetscOptionsTail();CHKERRQ(ierr);
61   PetscFunctionReturn(0);
62 }
63 /* -------------------------------------------------------------------------- */
64 #undef __FUNCT__
65 #define __FUNCT__ "PCBDDCSetPrimalVerticesLocalIS_BDDC"
66 static PetscErrorCode PCBDDCSetPrimalVerticesLocalIS_BDDC(PC pc, IS PrimalVertices)
67 {
68   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
69   PetscErrorCode ierr;
70 
71   PetscFunctionBegin;
72   ierr = ISDestroy(&pcbddc->user_primal_vertices);CHKERRQ(ierr);
73   ierr = PetscObjectReference((PetscObject)PrimalVertices);CHKERRQ(ierr);
74   pcbddc->user_primal_vertices = PrimalVertices;
75   PetscFunctionReturn(0);
76 }
77 #undef __FUNCT__
78 #define __FUNCT__ "PCBDDCSetPrimalVerticesLocalIS"
79 /*@
80  PCBDDCSetPrimalVerticesLocalIS - Set user-defined primal vertices in PCBDDC.
81 
82    Not collective
83 
84    Input Parameters:
85 +  pc - the preconditioning context
86 -  PrimalVertices - index set of primal vertices in local numbering
87 
88    Level: intermediate
89 
90    Notes:
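   The index set is referenced by the preconditioner, so the caller may destroy it afterwards.
   A minimal usage sketch (the local vertex indices below are made up for illustration):

.vb
     IS       primal;
     PetscInt vidx[] = {0,7,24};   /* local indices of the selected vertices */

     ierr = ISCreateGeneral(PETSC_COMM_SELF,3,vidx,PETSC_COPY_VALUES,&primal);CHKERRQ(ierr);
     ierr = PCBDDCSetPrimalVerticesLocalIS(pc,primal);CHKERRQ(ierr);
     ierr = ISDestroy(&primal);CHKERRQ(ierr);
.ve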
91 
92 .seealso: PCBDDC
93 @*/
94 PetscErrorCode PCBDDCSetPrimalVerticesLocalIS(PC pc, IS PrimalVertices)
95 {
96   PetscErrorCode ierr;
97 
98   PetscFunctionBegin;
99   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
100   PetscValidHeaderSpecific(PrimalVertices,IS_CLASSID,2);
101   ierr = PetscTryMethod(pc,"PCBDDCSetPrimalVerticesLocalIS_C",(PC,IS),(pc,PrimalVertices));CHKERRQ(ierr);
102   PetscFunctionReturn(0);
103 }
104 /* -------------------------------------------------------------------------- */
105 #undef __FUNCT__
106 #define __FUNCT__ "PCBDDCSetCoarseProblemType_BDDC"
107 static PetscErrorCode PCBDDCSetCoarseProblemType_BDDC(PC pc, CoarseProblemType CPT)
108 {
109   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
110 
111   PetscFunctionBegin;
112   pcbddc->coarse_problem_type = CPT;
113   PetscFunctionReturn(0);
114 }
115 
116 #undef __FUNCT__
117 #define __FUNCT__ "PCBDDCSetCoarseProblemType"
118 /*@
119  PCBDDCSetCoarseProblemType - Set coarse problem type in PCBDDC.
120 
121    Not collective
122 
123    Input Parameters:
124 +  pc - the preconditioning context
125 -  CPT - the coarse problem type; available types are sequential, replicated, parallel and multilevel
126 
127    Level: intermediate
128 
129    Notes:
130    Not collective, but all processes must call this routine with the same arguments.
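
   The same choice can be made at runtime through the options database; e.g., a multilevel coarse
   solve can be requested with

.vb
     -pc_bddc_coarse_problem_type multilevel
.ve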
131 
132 .seealso: PCBDDC
133 @*/
134 PetscErrorCode PCBDDCSetCoarseProblemType(PC pc, CoarseProblemType CPT)
135 {
136   PetscErrorCode ierr;
137 
138   PetscFunctionBegin;
139   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
140   ierr = PetscTryMethod(pc,"PCBDDCSetCoarseProblemType_C",(PC,CoarseProblemType),(pc,CPT));CHKERRQ(ierr);
141   PetscFunctionReturn(0);
142 }
143 /* -------------------------------------------------------------------------- */
144 #undef __FUNCT__
145 #define __FUNCT__ "PCBDDCSetCoarseningRatio_BDDC"
146 static PetscErrorCode PCBDDCSetCoarseningRatio_BDDC(PC pc,PetscInt k)
147 {
148   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
149 
150   PetscFunctionBegin;
151   pcbddc->coarsening_ratio=k;
152   PetscFunctionReturn(0);
153 }
154 
155 #undef __FUNCT__
156 #define __FUNCT__ "PCBDDCSetCoarseningRatio"
157 /*@
158  PCBDDCSetCoarseningRatio - Set coarsening ratio used in multilevel coarsening
159 
160    Logically collective on PC
161 
162    Input Parameters:
163 +  pc - the preconditioning context
164 -  k - coarsening ratio
165 
166    Approximately k subdomains at the finer level will be aggregated into a single subdomain at the coarser level.
167 
168    Level: intermediate
169 
170    Notes:
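   For example, with 512 subdomains at the finest level and a coarsening ratio of 8, the first coarse
   level will consist of roughly 64 subdomains. A sketch of the call (the value is illustrative):

.vb
     ierr = PCBDDCSetCoarseningRatio(pc,8);CHKERRQ(ierr);   /* equivalent to -pc_bddc_coarsening_ratio 8 */
.ve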
171 
172 .seealso: PCBDDC
173 @*/
174 PetscErrorCode PCBDDCSetCoarseningRatio(PC pc,PetscInt k)
175 {
176   PetscErrorCode ierr;
177 
178   PetscFunctionBegin;
179   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
180   ierr = PetscTryMethod(pc,"PCBDDCSetCoarseningRatio_C",(PC,PetscInt),(pc,k));CHKERRQ(ierr);
181   PetscFunctionReturn(0);
182 }
183 /* -------------------------------------------------------------------------- */
184 
185 #undef __FUNCT__
186 #define __FUNCT__ "PCBDDCSetMaxLevels_BDDC"
187 static PetscErrorCode PCBDDCSetMaxLevels_BDDC(PC pc,PetscInt max_levels)
188 {
189   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
190 
191   PetscFunctionBegin;
192   pcbddc->max_levels=max_levels;
193   PetscFunctionReturn(0);
194 }
195 
196 #undef __FUNCT__
197 #define __FUNCT__ "PCBDDCSetMaxLevels"
198 /*@
199  PCBDDCSetMaxLevels - Sets the maximum number of levels within the multilevel approach.
200 
201    Logically collective on PC
202 
203    Input Parameters:
204 +  pc - the preconditioning context
205 -  max_levels - the maximum number of levels
206 
207    The default value is 1, i.e. the coarse problem will be solved inexactly with one application
208    of the PCBDDC preconditioner if the multilevel approach is requested.
209 
210    Level: intermediate
211 
212    Notes:
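   A sketch of the call requesting (at most) three levels (the value is illustrative):

.vb
     ierr = PCBDDCSetMaxLevels(pc,3);CHKERRQ(ierr);   /* equivalent to -pc_bddc_max_levels 3 */
.ve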
213 
214 .seealso: PCBDDC
215 @*/
216 PetscErrorCode PCBDDCSetMaxLevels(PC pc,PetscInt max_levels)
217 {
218   PetscErrorCode ierr;
219 
220   PetscFunctionBegin;
221   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
222   ierr = PetscTryMethod(pc,"PCBDDCSetMaxLevels_C",(PC,PetscInt),(pc,max_levels));CHKERRQ(ierr);
223   PetscFunctionReturn(0);
224 }
225 /* -------------------------------------------------------------------------- */
226 
227 #undef __FUNCT__
228 #define __FUNCT__ "PCBDDCSetNullSpace_BDDC"
229 static PetscErrorCode PCBDDCSetNullSpace_BDDC(PC pc,MatNullSpace NullSpace)
230 {
231   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
232   PetscErrorCode ierr;
233 
234   PetscFunctionBegin;
235   ierr = PetscObjectReference((PetscObject)NullSpace);CHKERRQ(ierr);
236   ierr = MatNullSpaceDestroy(&pcbddc->NullSpace);CHKERRQ(ierr);
237   pcbddc->NullSpace=NullSpace;
238   PetscFunctionReturn(0);
239 }
240 
241 #undef __FUNCT__
242 #define __FUNCT__ "PCBDDCSetNullSpace"
243 /*@
244  PCBDDCSetNullSpace - Set the null space of the global operator to be preconditioned with BDDC.
245 
246    Logically collective on PC and MatNullSpace
247 
248    Input Parameters:
249 +  pc - the preconditioning context
250 -  NullSpace - Null space of the linear operator to be preconditioned.
251 
252    Level: intermediate
253 
254    Notes:
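   A minimal sketch for an operator with a constant null space (e.g. a pure Neumann problem):

.vb
     MatNullSpace nsp;

     ierr = MatNullSpaceCreate(PetscObjectComm((PetscObject)pc),PETSC_TRUE,0,NULL,&nsp);CHKERRQ(ierr);
     ierr = PCBDDCSetNullSpace(pc,nsp);CHKERRQ(ierr);
     ierr = MatNullSpaceDestroy(&nsp);CHKERRQ(ierr);   /* the preconditioner keeps its own reference */
.ve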
255 
256 .seealso: PCBDDC
257 @*/
258 PetscErrorCode PCBDDCSetNullSpace(PC pc,MatNullSpace NullSpace)
259 {
260   PetscErrorCode ierr;
261 
262   PetscFunctionBegin;
263   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
264   PetscValidHeaderSpecific(NullSpace,MAT_NULLSPACE_CLASSID,2);
265   ierr = PetscTryMethod(pc,"PCBDDCSetNullSpace_C",(PC,MatNullSpace),(pc,NullSpace));CHKERRQ(ierr);
266   PetscFunctionReturn(0);
267 }
268 /* -------------------------------------------------------------------------- */
269 
270 #undef __FUNCT__
271 #define __FUNCT__ "PCBDDCSetDirichletBoundaries_BDDC"
272 static PetscErrorCode PCBDDCSetDirichletBoundaries_BDDC(PC pc,IS DirichletBoundaries)
273 {
274   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
275   PetscErrorCode ierr;
276 
277   PetscFunctionBegin;
278   ierr = ISDestroy(&pcbddc->DirichletBoundaries);CHKERRQ(ierr);
279   ierr = PetscObjectReference((PetscObject)DirichletBoundaries);CHKERRQ(ierr);
280   pcbddc->DirichletBoundaries=DirichletBoundaries;
281   PetscFunctionReturn(0);
282 }
283 
284 #undef __FUNCT__
285 #define __FUNCT__ "PCBDDCSetDirichletBoundaries"
286 /*@
287  PCBDDCSetDirichletBoundaries - Set index set defining subdomain part (in local ordering)
288                               of Dirichlet boundaries for the global problem.
289 
290    Not collective
291 
292    Input Parameters:
293 +  pc - the preconditioning context
294 -  DirichletBoundaries - sequential index set defining the subdomain part of Dirichlet boundaries (can be NULL)
295 
296    Level: intermediate
297 
298    Notes:
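   The index set is referenced by the preconditioner. A minimal sketch (the local indices below are
   made up for illustration):

.vb
     IS       dirIS;
     PetscInt dir_dofs[] = {0,1,2,3};   /* local dofs lying on the Dirichlet boundary */

     ierr = ISCreateGeneral(PETSC_COMM_SELF,4,dir_dofs,PETSC_COPY_VALUES,&dirIS);CHKERRQ(ierr);
     ierr = PCBDDCSetDirichletBoundaries(pc,dirIS);CHKERRQ(ierr);
     ierr = ISDestroy(&dirIS);CHKERRQ(ierr);
.ve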
299 
300 .seealso: PCBDDC
301 @*/
302 PetscErrorCode PCBDDCSetDirichletBoundaries(PC pc,IS DirichletBoundaries)
303 {
304   PetscErrorCode ierr;
305 
306   PetscFunctionBegin;
307   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
308   PetscValidHeaderSpecific(DirichletBoundaries,IS_CLASSID,2);
309   ierr = PetscTryMethod(pc,"PCBDDCSetDirichletBoundaries_C",(PC,IS),(pc,DirichletBoundaries));CHKERRQ(ierr);
310   PetscFunctionReturn(0);
311 }
312 /* -------------------------------------------------------------------------- */
313 
314 #undef __FUNCT__
315 #define __FUNCT__ "PCBDDCSetNeumannBoundaries_BDDC"
316 static PetscErrorCode PCBDDCSetNeumannBoundaries_BDDC(PC pc,IS NeumannBoundaries)
317 {
318   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
319   PetscErrorCode ierr;
320 
321   PetscFunctionBegin;
322   ierr = ISDestroy(&pcbddc->NeumannBoundaries);CHKERRQ(ierr);
323   ierr = PetscObjectReference((PetscObject)NeumannBoundaries);CHKERRQ(ierr);
324   pcbddc->NeumannBoundaries=NeumannBoundaries;
325   PetscFunctionReturn(0);
326 }
327 
328 #undef __FUNCT__
329 #define __FUNCT__ "PCBDDCSetNeumannBoundaries"
330 /*@
331  PCBDDCSetNeumannBoundaries - Set index set defining subdomain part (in local ordering)
332                               of Neumann boundaries for the global problem.
333 
334    Not collective
335 
336    Input Parameters:
337 +  pc - the preconditioning context
338 -  NeumannBoundaries - sequential index set defining the subdomain part of Neumann boundaries (can be NULL)
339 
340    Level: intermediate
341 
342    Notes:
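   The usage mirrors PCBDDCSetDirichletBoundaries; for instance, a contiguous range of local dofs can
   be marked with a stride index set (n_neumann and first_dof are illustrative names):

.vb
     IS neuIS;

     ierr = ISCreateStride(PETSC_COMM_SELF,n_neumann,first_dof,1,&neuIS);CHKERRQ(ierr);
     ierr = PCBDDCSetNeumannBoundaries(pc,neuIS);CHKERRQ(ierr);
     ierr = ISDestroy(&neuIS);CHKERRQ(ierr);
.ve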
343 
344 .seealso: PCBDDC
345 @*/
346 PetscErrorCode PCBDDCSetNeumannBoundaries(PC pc,IS NeumannBoundaries)
347 {
348   PetscErrorCode ierr;
349 
350   PetscFunctionBegin;
351   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
352   PetscValidHeaderSpecific(NeumannBoundaries,IS_CLASSID,2);
353   ierr = PetscTryMethod(pc,"PCBDDCSetNeumannBoundaries_C",(PC,IS),(pc,NeumannBoundaries));CHKERRQ(ierr);
354   PetscFunctionReturn(0);
355 }
356 /* -------------------------------------------------------------------------- */
357 
358 #undef __FUNCT__
359 #define __FUNCT__ "PCBDDCGetDirichletBoundaries_BDDC"
360 static PetscErrorCode PCBDDCGetDirichletBoundaries_BDDC(PC pc,IS *DirichletBoundaries)
361 {
362   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
363 
364   PetscFunctionBegin;
365   *DirichletBoundaries = pcbddc->DirichletBoundaries;
366   PetscFunctionReturn(0);
367 }
368 
369 #undef __FUNCT__
370 #define __FUNCT__ "PCBDDCGetDirichletBoundaries"
371 /*@
372  PCBDDCGetDirichletBoundaries - Get index set defining subdomain part (in local ordering)
373                                 of Dirichlet boundaries for the global problem.
374 
375    Not collective
376 
377    Input Parameter:
378 .  pc - the preconditioning context
379 
380    Output Parameter:
381 .  DirichletBoundaries - index set defining the subdomain part of Dirichlet boundaries
382 
383    Level: intermediate
384 
385    Notes:
386 
387 .seealso: PCBDDC
388 @*/
389 PetscErrorCode PCBDDCGetDirichletBoundaries(PC pc,IS *DirichletBoundaries)
390 {
391   PetscErrorCode ierr;
392 
393   PetscFunctionBegin;
394   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
395   ierr = PetscUseMethod(pc,"PCBDDCGetDirichletBoundaries_C",(PC,IS*),(pc,DirichletBoundaries));CHKERRQ(ierr);
396   PetscFunctionReturn(0);
397 }
398 /* -------------------------------------------------------------------------- */
399 
400 #undef __FUNCT__
401 #define __FUNCT__ "PCBDDCGetNeumannBoundaries_BDDC"
402 static PetscErrorCode PCBDDCGetNeumannBoundaries_BDDC(PC pc,IS *NeumannBoundaries)
403 {
404   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
405 
406   PetscFunctionBegin;
407   *NeumannBoundaries = pcbddc->NeumannBoundaries;
408   PetscFunctionReturn(0);
409 }
410 
411 #undef __FUNCT__
412 #define __FUNCT__ "PCBDDCGetNeumannBoundaries"
413 /*@
414  PCBDDCGetNeumannBoundaries - Get index set defining subdomain part (in local ordering)
415                               of Neumann boundaries for the global problem.
416 
417    Not collective
418 
419    Input Parameter:
420 .  pc - the preconditioning context
421 
422    Output Parameter:
423 .  NeumannBoundaries - index set defining the subdomain part of Neumann boundaries
424 
425    Level: intermediate
426 
427    Notes:
428 
429 .seealso: PCBDDC
430 @*/
431 PetscErrorCode PCBDDCGetNeumannBoundaries(PC pc,IS *NeumannBoundaries)
432 {
433   PetscErrorCode ierr;
434 
435   PetscFunctionBegin;
436   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
437   ierr = PetscUseMethod(pc,"PCBDDCGetNeumannBoundaries_C",(PC,IS*),(pc,NeumannBoundaries));CHKERRQ(ierr);
438   PetscFunctionReturn(0);
439 }
440 /* -------------------------------------------------------------------------- */
441 
442 #undef __FUNCT__
443 #define __FUNCT__ "PCBDDCSetLocalAdjacencyGraph_BDDC"
444 static PetscErrorCode PCBDDCSetLocalAdjacencyGraph_BDDC(PC pc, PetscInt nvtxs,const PetscInt xadj[],const PetscInt adjncy[], PetscCopyMode copymode)
445 {
446   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
447   PCBDDCGraph    mat_graph = pcbddc->mat_graph;
448   PetscErrorCode ierr;
449 
450   PetscFunctionBegin;
451   /* free old CSR */
452   ierr = PCBDDCGraphResetCSR(mat_graph);CHKERRQ(ierr);
453   /* TODO: PCBDDCGraphSetAdjacency */
454   /* get CSR into graph structure */
455   if (copymode == PETSC_COPY_VALUES) {
456     ierr = PetscMalloc((nvtxs+1)*sizeof(PetscInt),&mat_graph->xadj);CHKERRQ(ierr);
457     ierr = PetscMalloc(xadj[nvtxs]*sizeof(PetscInt),&mat_graph->adjncy);CHKERRQ(ierr);
458     ierr = PetscMemcpy(mat_graph->xadj,xadj,(nvtxs+1)*sizeof(PetscInt));CHKERRQ(ierr);
459     ierr = PetscMemcpy(mat_graph->adjncy,adjncy,xadj[nvtxs]*sizeof(PetscInt));CHKERRQ(ierr);
460   } else if (copymode == PETSC_OWN_POINTER) {
461     mat_graph->xadj = (PetscInt*)xadj;
462     mat_graph->adjncy = (PetscInt*)adjncy;
463   }
464   mat_graph->nvtxs_csr = nvtxs;
465   PetscFunctionReturn(0);
466 }
467 
468 #undef __FUNCT__
469 #define __FUNCT__ "PCBDDCSetLocalAdjacencyGraph"
470 /*@
471  PCBDDCSetLocalAdjacencyGraph - Set CSR graph of local matrix for use of PCBDDC.
472 
473    Not collective
474 
475    Input Parameters:
476 +  pc - the preconditioning context
477 .  nvtxs - number of local vertices of the graph
478 .  xadj, adjncy - the CSR graph
479 -  copymode - either PETSC_COPY_VALUES or PETSC_OWN_POINTER. In the former case the user retains ownership of the arrays and must free them;
480               in the latter case, the arrays must have been obtained with PetscMalloc and ownership is transferred to the preconditioner.
481 
482    Level: intermediate
483 
484    Notes:
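   A minimal sketch for the three-vertex path graph 0-1-2 in CSR format (the graph is made up for
   illustration):

.vb
     PetscInt xadj[]   = {0,1,3,4};   /* vertex i has neighbors adjncy[xadj[i]], ..., adjncy[xadj[i+1]-1] */
     PetscInt adjncy[] = {1,0,2,1};   /* neighbor lists of vertices 0, 1 and 2 */

     ierr = PCBDDCSetLocalAdjacencyGraph(pc,3,xadj,adjncy,PETSC_COPY_VALUES);CHKERRQ(ierr);
.ve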
485 
486 .seealso: PCBDDC
487 @*/
488 PetscErrorCode PCBDDCSetLocalAdjacencyGraph(PC pc,PetscInt nvtxs,const PetscInt xadj[],const PetscInt adjncy[], PetscCopyMode copymode)
489 {
490   void (*f)(void) = 0;
491   PetscErrorCode ierr;
492 
493   PetscFunctionBegin;
494   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
495   PetscValidIntPointer(xadj,3);
496   PetscValidIntPointer(adjncy,4);
497   if (copymode != PETSC_COPY_VALUES && copymode != PETSC_OWN_POINTER) {
498     SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unsupported copy mode %d in %s",copymode,__FUNCT__);
499   }
500   ierr = PetscTryMethod(pc,"PCBDDCSetLocalAdjacencyGraph_C",(PC,PetscInt,const PetscInt[],const PetscInt[],PetscCopyMode),(pc,nvtxs,xadj,adjncy,copymode));CHKERRQ(ierr);
501   /* free arrays if PCBDDC is not the PC type */
502   ierr = PetscObjectQueryFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",&f);CHKERRQ(ierr);
503   if (!f && copymode == PETSC_OWN_POINTER) {
504     ierr = PetscFree(xadj);CHKERRQ(ierr);
505     ierr = PetscFree(adjncy);CHKERRQ(ierr);
506   }
507   PetscFunctionReturn(0);
508 }
509 /* -------------------------------------------------------------------------- */
510 
511 #undef __FUNCT__
512 #define __FUNCT__ "PCBDDCSetDofsSplitting_BDDC"
513 static PetscErrorCode PCBDDCSetDofsSplitting_BDDC(PC pc,PetscInt n_is, IS ISForDofs[])
514 {
515   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
516   PetscInt i;
517   PetscErrorCode ierr;
518 
519   PetscFunctionBegin;
520   /* Destroy ISes if they were already set */
521   for (i=0;i<pcbddc->n_ISForDofs;i++) {
522     ierr = ISDestroy(&pcbddc->ISForDofs[i]);CHKERRQ(ierr);
523   }
524   ierr = PetscFree(pcbddc->ISForDofs);CHKERRQ(ierr);
525   /* allocate space then set */
526   ierr = PetscMalloc(n_is*sizeof(IS),&pcbddc->ISForDofs);CHKERRQ(ierr);
527   for (i=0;i<n_is;i++) {
528     ierr = PetscObjectReference((PetscObject)ISForDofs[i]);CHKERRQ(ierr);
529     pcbddc->ISForDofs[i]=ISForDofs[i];
530   }
531   pcbddc->n_ISForDofs=n_is;
532   PetscFunctionReturn(0);
533 }
534 
535 #undef __FUNCT__
536 #define __FUNCT__ "PCBDDCSetDofsSplitting"
537 /*@
538  PCBDDCSetDofsSplitting - Set index sets defining the fields of the local matrix.
539 
540    Not collective
541 
542    Input Parameters:
543 +  pc - the preconditioning context
544 .  n_is - number of index sets defining the fields
545 -  ISForDofs - array of IS describing the fields
546 
547    Level: intermediate
548 
549    Notes:
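   A minimal sketch for a problem with three interleaved fields, e.g. dofs ordered as (u,v,p) at each
   of n_nodes local nodes (n_nodes is an illustrative name):

.vb
     IS       fields[3];
     PetscInt f;

     for (f=0;f<3;f++) {   /* field f owns local dofs f, f+3, f+6, ... */
       ierr = ISCreateStride(PETSC_COMM_SELF,n_nodes,f,3,&fields[f]);CHKERRQ(ierr);
     }
     ierr = PCBDDCSetDofsSplitting(pc,3,fields);CHKERRQ(ierr);
     for (f=0;f<3;f++) {
       ierr = ISDestroy(&fields[f]);CHKERRQ(ierr);   /* the preconditioner keeps its own references */
     }
.ve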
550 
551 .seealso: PCBDDC
552 @*/
553 PetscErrorCode PCBDDCSetDofsSplitting(PC pc,PetscInt n_is, IS ISForDofs[])
554 {
555   PetscErrorCode ierr;
556 
557   PetscFunctionBegin;
558   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
559   ierr = PetscTryMethod(pc,"PCBDDCSetDofsSplitting_C",(PC,PetscInt,IS[]),(pc,n_is,ISForDofs));CHKERRQ(ierr);
560   PetscFunctionReturn(0);
561 }
562 /* -------------------------------------------------------------------------- */
563 #undef __FUNCT__
564 #define __FUNCT__ "PCPreSolve_BDDC"
565 /* -------------------------------------------------------------------------- */
566 /*
567    PCPreSolve_BDDC - Changes the right hand side and (if necessary) the initial
568                      guess if a transformation of basis approach has been selected.
569 
570    Input Parameter:
571 +  pc - the preconditioner context
572 
573    Application Interface Routine: PCPreSolve()
574 
575    Notes:
576    The interface routine PCPreSolve() is not usually called directly by
577    the user, but instead is called by KSPSolve().
578 */
579 static PetscErrorCode PCPreSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
580 {
581   PetscErrorCode ierr;
582   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
583   PC_IS          *pcis = (PC_IS*)(pc->data);
584   Mat_IS         *matis = (Mat_IS*)pc->pmat->data;
585   Mat            temp_mat;
586   IS             dirIS;
587   PetscInt       dirsize,i,*is_indices;
588   PetscScalar    *array_x,*array_diagonal;
589   Vec            used_vec;
590   PetscBool      guess_nonzero;
591 
592   PetscFunctionBegin;
593   /* Creates parallel work vectors used in presolve. */
594   if (!pcbddc->original_rhs) {
595     ierr = VecDuplicate(pcis->vec1_global,&pcbddc->original_rhs);CHKERRQ(ierr);
596   }
597   if (!pcbddc->temp_solution) {
598     ierr = VecDuplicate(pcis->vec1_global,&pcbddc->temp_solution);CHKERRQ(ierr);
599   }
600   if (x) {
601     ierr = PetscObjectReference((PetscObject)x);CHKERRQ(ierr);
602     used_vec = x;
603   } else {
604     ierr = PetscObjectReference((PetscObject)pcbddc->temp_solution);CHKERRQ(ierr);
605     used_vec = pcbddc->temp_solution;
606     ierr = VecSet(used_vec,0.0);CHKERRQ(ierr);
607   }
608   /* hack into the KSP data structure: PCPreSolve is called early in KSPSolve (see src/ksp/ksp/interface/itfunc.c), before the initial guess is handled, so zero it here if needed */
609   if (ksp) {
610     ierr = KSPGetInitialGuessNonzero(ksp,&guess_nonzero);CHKERRQ(ierr);
611     if ( !guess_nonzero ) {
612       ierr = VecSet(used_vec,0.0);CHKERRQ(ierr);
613     }
614   }
615 
616   if (rhs) { /* TODO: wiser handling of rhs removal, which is only needed in case of zeroed rows */
617     /* store the original rhs */
618     ierr = VecCopy(rhs,pcbddc->original_rhs);CHKERRQ(ierr);
619 
620     /* Take into account zeroed rows -> change rhs and store solution removed */
621     ierr = MatGetDiagonal(pc->pmat,pcis->vec1_global);CHKERRQ(ierr);
622     ierr = VecPointwiseDivide(pcis->vec1_global,rhs,pcis->vec1_global);CHKERRQ(ierr);
623     ierr = VecScatterBegin(matis->ctx,pcis->vec1_global,pcis->vec2_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
624     ierr = VecScatterEnd(matis->ctx,pcis->vec1_global,pcis->vec2_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
625     ierr = VecScatterBegin(matis->ctx,used_vec,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
626     ierr = VecScatterEnd(matis->ctx,used_vec,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
627     ierr = PCBDDCGetDirichletBoundaries(pc,&dirIS);CHKERRQ(ierr);
628     if (dirIS) {
629       ierr = ISGetSize(dirIS,&dirsize);CHKERRQ(ierr);
630       ierr = VecGetArray(pcis->vec1_N,&array_x);CHKERRQ(ierr);
631       ierr = VecGetArray(pcis->vec2_N,&array_diagonal);CHKERRQ(ierr);
632       ierr = ISGetIndices(dirIS,(const PetscInt**)&is_indices);CHKERRQ(ierr);
633       for (i=0; i<dirsize; i++) array_x[is_indices[i]] = array_diagonal[is_indices[i]];
634       ierr = ISRestoreIndices(dirIS,(const PetscInt**)&is_indices);CHKERRQ(ierr);
635       ierr = VecRestoreArray(pcis->vec2_N,&array_diagonal);CHKERRQ(ierr);
636       ierr = VecRestoreArray(pcis->vec1_N,&array_x);CHKERRQ(ierr);
637     }
638     ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
639     ierr = VecScatterEnd(matis->ctx,pcis->vec1_N,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
640 
641     /* remove the computed solution from the rhs */
642     ierr = VecScale(used_vec,-1.0);CHKERRQ(ierr);
643     ierr = MatMultAdd(pc->pmat,used_vec,rhs,rhs);CHKERRQ(ierr);
644     ierr = VecScale(used_vec,-1.0);CHKERRQ(ierr);
645   }
646 
647   /* store partially computed solution and set initial guess */
648   if (x) {
649     ierr = VecCopy(used_vec,pcbddc->temp_solution);CHKERRQ(ierr);
650     ierr = VecSet(used_vec,0.0);CHKERRQ(ierr);
651     if (pcbddc->use_exact_dirichlet && !pcbddc->coarse_psi_B) {
652       ierr = VecScatterBegin(pcis->global_to_D,rhs,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
653       ierr = VecScatterEnd  (pcis->global_to_D,rhs,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
654       ierr = KSPSolve(pcbddc->ksp_D,pcis->vec1_D,pcis->vec2_D);CHKERRQ(ierr);
655       ierr = VecScatterBegin(pcis->global_to_D,pcis->vec2_D,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
656       ierr = VecScatterEnd  (pcis->global_to_D,pcis->vec2_D,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
657       if (ksp) {
658         ierr = KSPSetInitialGuessNonzero(ksp,PETSC_TRUE);CHKERRQ(ierr);
659       }
660     }
661   }
662 
663   if (pcbddc->use_change_of_basis) {
664     /* swap pointers for local matrices */
665     temp_mat = matis->A;
666     matis->A = pcbddc->local_mat;
667     pcbddc->local_mat = temp_mat;
668   }
669   if (pcbddc->use_change_of_basis && rhs) {
670     /* Get local rhs and apply transformation of basis */
671     ierr = VecScatterBegin(pcis->global_to_B,rhs,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
672     ierr = VecScatterEnd  (pcis->global_to_B,rhs,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
673     /* from original basis to modified basis */
674     ierr = MatMultTranspose(pcbddc->ChangeOfBasisMatrix,pcis->vec1_B,pcis->vec2_B);CHKERRQ(ierr);
675     /* put back modified values into the global vec using INSERT_VALUES copy mode */
676     ierr = VecScatterBegin(pcis->global_to_B,pcis->vec2_B,rhs,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
677     ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec2_B,rhs,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
678   }
679   if (ksp && pcbddc->NullSpace) {
680     ierr = MatNullSpaceRemove(pcbddc->NullSpace,used_vec);CHKERRQ(ierr);
681     ierr = MatNullSpaceRemove(pcbddc->NullSpace,rhs);CHKERRQ(ierr);
682   }
683   ierr = VecDestroy(&used_vec);CHKERRQ(ierr);
684   PetscFunctionReturn(0);
685 }
686 /* -------------------------------------------------------------------------- */
687 #undef __FUNCT__
688 #define __FUNCT__ "PCPostSolve_BDDC"
689 /* -------------------------------------------------------------------------- */
690 /*
691    PCPostSolve_BDDC - Changes the computed solution if a transformation of basis
692                      approach has been selected. Also, restores rhs to its original state.
693 
694    Input Parameter:
695 +  pc - the preconditioner context
696 
697    Application Interface Routine: PCPostSolve()
698 
699    Notes:
700    The interface routine PCPostSolve() is not usually called directly by
701    the user, but instead is called by KSPSolve().
702 */
703 static PetscErrorCode PCPostSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
704 {
705   PetscErrorCode ierr;
706   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
707   PC_IS          *pcis   = (PC_IS*)(pc->data);
708   Mat_IS         *matis = (Mat_IS*)pc->pmat->data;
709   Mat            temp_mat;
710 
711   PetscFunctionBegin;
712   if (pcbddc->use_change_of_basis) {
713     /* swap pointers for local matrices */
714     temp_mat = matis->A;
715     matis->A = pcbddc->local_mat;
716     pcbddc->local_mat = temp_mat;
717   }
718   if (pcbddc->use_change_of_basis && x) {
719     /* Get Local boundary and apply transformation of basis to solution vector */
720     ierr = VecScatterBegin(pcis->global_to_B,x,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
721     ierr = VecScatterEnd  (pcis->global_to_B,x,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
722     /* from modified basis to original basis */
723     ierr = MatMult(pcbddc->ChangeOfBasisMatrix,pcis->vec1_B,pcis->vec2_B);CHKERRQ(ierr);
724     /* put back modified values into the global vec using INSERT_VALUES copy mode */
725     ierr = VecScatterBegin(pcis->global_to_B,pcis->vec2_B,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
726     ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec2_B,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
727   }
728   /* add solution removed in presolve */
729   if (x) {
730     ierr = VecAXPY(x,1.0,pcbddc->temp_solution);CHKERRQ(ierr);
731   }
732   /* restore rhs to its original state */
733   if (rhs) {
734     ierr = VecCopy(pcbddc->original_rhs,rhs);CHKERRQ(ierr);
735   }
736   PetscFunctionReturn(0);
737 }
738 /* -------------------------------------------------------------------------- */
739 #undef __FUNCT__
740 #define __FUNCT__ "PCSetUp_BDDC"
741 /* -------------------------------------------------------------------------- */
742 /*
743    PCSetUp_BDDC - Prepares for the use of the BDDC preconditioner
744                   by setting data structures and options.
745 
746    Input Parameter:
747 +  pc - the preconditioner context
748 
749    Application Interface Routine: PCSetUp()
750 
751    Notes:
752    The interface routine PCSetUp() is not usually called directly by
753    the user, but instead is called by PCApply() if necessary.
754 */
755 PetscErrorCode PCSetUp_BDDC(PC pc)
756 {
757   PetscErrorCode ierr;
758   PC_BDDC*       pcbddc = (PC_BDDC*)pc->data;
759   MatStructure   flag;
760   PetscBool      computeis,computetopography,computesolvers;
761 
762   PetscFunctionBegin;
763   /* the following lines of code should be replaced by better logic between PCIS, PCNN, PCBDDC and other nonoverlapping preconditioners */
764   /* For BDDC we need to define a local "Neumann" problem different from the one defined in PCISSetup,
765      so we set the PC type of the PCIS Neumann problem to none in order to avoid unneeded computation.
766      We also choose to directly build the (same) Dirichlet problem */
767   ierr = PetscOptionsSetValue("-is_localN_pc_type","none");CHKERRQ(ierr);
768   ierr = PetscOptionsSetValue("-is_localD_pc_type","none");CHKERRQ(ierr);
769   /* Get stdout for dbg */
770   if (pcbddc->dbg_flag && !pcbddc->dbg_viewer) {
771     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&pcbddc->dbg_viewer);CHKERRQ(ierr);
772     ierr = PetscViewerASCIISynchronizedAllow(pcbddc->dbg_viewer,PETSC_TRUE);CHKERRQ(ierr);
773   }
774   /* first attempt to split work */
775   if (pc->setupcalled) {
776     computeis = PETSC_FALSE;
777     ierr = PCGetOperators(pc,NULL,NULL,&flag);CHKERRQ(ierr);
778     if (flag == SAME_PRECONDITIONER) {
779       computetopography = PETSC_FALSE;
780       computesolvers = PETSC_FALSE;
781     } else if (flag == SAME_NONZERO_PATTERN) {
782       computetopography = PETSC_FALSE;
783       computesolvers = PETSC_TRUE;
784     } else { /* DIFFERENT_NONZERO_PATTERN */
785       computetopography = PETSC_TRUE;
786       computesolvers = PETSC_TRUE;
787     }
788   } else {
789     computeis = PETSC_TRUE;
790     computetopography = PETSC_TRUE;
791     computesolvers = PETSC_TRUE;
792   }
793   /* Set up all the "iterative substructuring" common block */
794   if (computeis) {
795     ierr = PCISSetUp(pc);CHKERRQ(ierr);
796   }
797   /* Analyze interface and set up local constraint and change of basis matrices */
798   if (computetopography) {
799     /* reset data */
800     ierr = PCBDDCResetTopography(pc);CHKERRQ(ierr);
801     ierr = PCBDDCAnalyzeInterface(pc);CHKERRQ(ierr);
802     ierr = PCBDDCConstraintsSetUp(pc);CHKERRQ(ierr);
803   }
804   if (computesolvers) {
805     /* reset data */
806     ierr = PCBDDCResetSolvers(pc);CHKERRQ(ierr);
807     ierr = PCBDDCScalingDestroy(pc);CHKERRQ(ierr);
808     /* Create coarse and local stuffs used for evaluating action of preconditioner */
809     ierr = PCBDDCCoarseSetUp(pc);CHKERRQ(ierr);
810     ierr = PCBDDCScalingSetUp(pc);CHKERRQ(ierr);
811   }
812   PetscFunctionReturn(0);
813 }
814 
815 /* -------------------------------------------------------------------------- */
816 /*
817    PCApply_BDDC - Applies the BDDC preconditioner to a vector.
818 
819    Input Parameters:
820 .  pc - the preconditioner context
821 .  r - input vector (global)
822 
823    Output Parameter:
824 .  z - output vector (global)
825 
826    Application Interface Routine: PCApply()
827  */
828 #undef __FUNCT__
829 #define __FUNCT__ "PCApply_BDDC"
830 PetscErrorCode PCApply_BDDC(PC pc,Vec r,Vec z)
831 {
832   PC_IS             *pcis = (PC_IS*)(pc->data);
833   PC_BDDC           *pcbddc = (PC_BDDC*)(pc->data);
834   PetscErrorCode    ierr;
835   const PetscScalar one = 1.0;
836   const PetscScalar m_one = -1.0;
837   const PetscScalar zero = 0.0;
838 
839 /* This code is similar to that provided in nn.c for PCNN:
840    the NN interface preconditioner is replaced by BDDC.
841    Support was added for the M_3 preconditioner of the reference article (code is active if pcbddc->inexact_prec_type == PETSC_TRUE) */
842 
843   PetscFunctionBegin;
844   if (!pcbddc->use_exact_dirichlet || pcbddc->coarse_psi_B) {
845     /* First Dirichlet solve */
846     ierr = VecScatterBegin(pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
847     ierr = VecScatterEnd  (pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
848     ierr = KSPSolve(pcbddc->ksp_D,pcis->vec1_D,pcis->vec2_D);CHKERRQ(ierr);
849     /*
850       Assembling right hand side for BDDC operator
851       - pcis->vec1_D for the Dirichlet part (if needed, i.e. inexact_prec_type = PETSC_TRUE)
852       - pcis->vec1_B the interface part of the global vector z
853     */
854     ierr = VecScale(pcis->vec2_D,m_one);CHKERRQ(ierr);
855     ierr = MatMult(pcis->A_BI,pcis->vec2_D,pcis->vec1_B);CHKERRQ(ierr);
856     if (pcbddc->inexact_prec_type) { ierr = MatMultAdd(pcis->A_II,pcis->vec2_D,pcis->vec1_D,pcis->vec1_D);CHKERRQ(ierr); }
857     ierr = VecScale(pcis->vec2_D,m_one);CHKERRQ(ierr);
858     ierr = VecCopy(r,z);CHKERRQ(ierr);
859     ierr = VecScatterBegin(pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
860     ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
861     ierr = PCBDDCScalingRestriction(pc,z,pcis->vec1_B);CHKERRQ(ierr);
862   } else {
863     ierr = VecSet(pcis->vec1_D,zero);CHKERRQ(ierr);
864     ierr = VecSet(pcis->vec2_D,zero);CHKERRQ(ierr);
865     ierr = PCBDDCScalingRestriction(pc,r,pcis->vec1_B);CHKERRQ(ierr);
866   }
867 
868   /* Apply interface preconditioner
869      input/output vecs: pcis->vec1_B and pcis->vec1_D */
870   ierr = PCBDDCApplyInterfacePreconditioner(pc);CHKERRQ(ierr);
871 
872   /* Apply transpose of partition of unity operator */
873   ierr = PCBDDCScalingExtension(pc,pcis->vec1_B,z);CHKERRQ(ierr);
874 
875   /* Second Dirichlet solve and assembling of output */
876   ierr = VecScatterBegin(pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
877   ierr = VecScatterEnd  (pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
878   ierr = MatMult(pcis->A_IB,pcis->vec1_B,pcis->vec3_D);CHKERRQ(ierr);
879   if (pcbddc->inexact_prec_type) { ierr = MatMultAdd(pcis->A_II,pcis->vec1_D,pcis->vec3_D,pcis->vec3_D);CHKERRQ(ierr); }
880   ierr = KSPSolve(pcbddc->ksp_D,pcis->vec3_D,pcbddc->vec4_D);CHKERRQ(ierr);
881   ierr = VecScale(pcbddc->vec4_D,m_one);CHKERRQ(ierr);
882   if (pcbddc->inexact_prec_type) { ierr = VecAXPY (pcbddc->vec4_D,one,pcis->vec1_D);CHKERRQ(ierr); }
883   ierr = VecAXPY (pcis->vec2_D,one,pcbddc->vec4_D);CHKERRQ(ierr);
884   ierr = VecScatterBegin(pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
885   ierr = VecScatterEnd  (pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
886   PetscFunctionReturn(0);
887 }
888 /* -------------------------------------------------------------------------- */
889 
890 #undef __FUNCT__
891 #define __FUNCT__ "PCDestroy_BDDC"
892 PetscErrorCode PCDestroy_BDDC(PC pc)
893 {
894   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
895   PetscErrorCode ierr;
896 
897   PetscFunctionBegin;
898   /* free data created by PCIS */
899   ierr = PCISDestroy(pc);CHKERRQ(ierr);
900   /* free BDDC custom data  */
901   ierr = PCBDDCResetCustomization(pc);CHKERRQ(ierr);
902   /* destroy objects related to topography */
903   ierr = PCBDDCResetTopography(pc);CHKERRQ(ierr);
904   /* free allocated graph structure */
905   ierr = PetscFree(pcbddc->mat_graph);CHKERRQ(ierr);
906   /* free data for scaling operator */
907   ierr = PCBDDCScalingDestroy(pc);CHKERRQ(ierr);
908   /* free solvers stuff */
909   ierr = PCBDDCResetSolvers(pc);CHKERRQ(ierr);
910   ierr = KSPDestroy(&pcbddc->ksp_D);CHKERRQ(ierr);
911   ierr = KSPDestroy(&pcbddc->ksp_R);CHKERRQ(ierr);
912   ierr = MatDestroy(&pcbddc->local_mat);CHKERRQ(ierr);
913   /* free global vectors needed in presolve */
914   ierr = VecDestroy(&pcbddc->temp_solution);CHKERRQ(ierr);
915   ierr = VecDestroy(&pcbddc->original_rhs);CHKERRQ(ierr);
916   /* remove functions */
917   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetPrimalVerticesLocalIS_C",NULL);CHKERRQ(ierr);
918   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseningRatio_C",NULL);CHKERRQ(ierr);
919   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetMaxLevels_C",NULL);CHKERRQ(ierr);
920   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNullSpace_C",NULL);CHKERRQ(ierr);
921   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundaries_C",NULL);CHKERRQ(ierr);
922   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundaries_C",NULL);CHKERRQ(ierr);
923   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundaries_C",NULL);CHKERRQ(ierr);
924   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundaries_C",NULL);CHKERRQ(ierr);
925   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseProblemType_C",NULL);CHKERRQ(ierr);
926   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplitting_C",NULL);CHKERRQ(ierr);
927   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",NULL);CHKERRQ(ierr);
928   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCCreateFETIDPOperators_C",NULL);CHKERRQ(ierr);
929   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetRHS_C",NULL);CHKERRQ(ierr);
930   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetSolution_C",NULL);CHKERRQ(ierr);
931   /* Free the private data structure */
932   ierr = PetscFree(pc->data);CHKERRQ(ierr);
933   PetscFunctionReturn(0);
934 }
935 /* -------------------------------------------------------------------------- */
936 
937 #undef __FUNCT__
938 #define __FUNCT__ "PCBDDCMatFETIDPGetRHS_BDDC"
939 static PetscErrorCode PCBDDCMatFETIDPGetRHS_BDDC(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
940 {
941   FETIDPMat_ctx  mat_ctx;
942   PC_IS*         pcis;
943   PC_BDDC*       pcbddc;
944   PetscErrorCode ierr;
945 
946   PetscFunctionBegin;
947   ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr);
948   pcis = (PC_IS*)mat_ctx->pc->data;
949   pcbddc = (PC_BDDC*)mat_ctx->pc->data;
950 
951   /* change of basis for physical rhs if needed
952      It also changes the rhs in the presence of Dirichlet boundaries */
953   ierr = PCPreSolve_BDDC(mat_ctx->pc,NULL,standard_rhs,NULL);CHKERRQ(ierr);
954   /* store vectors for computation of fetidp final solution */
955   ierr = VecScatterBegin(pcis->global_to_D,standard_rhs,mat_ctx->temp_solution_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
956   ierr = VecScatterEnd(pcis->global_to_D,standard_rhs,mat_ctx->temp_solution_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
957   /* scale rhs since it should be unassembled */
958   /* TODO use counter scaling? (also below) */
959   ierr = VecScatterBegin(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
960   ierr = VecScatterEnd(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
961   /* Apply partition of unity */
962   ierr = VecPointwiseMult(mat_ctx->temp_solution_B,pcis->D,mat_ctx->temp_solution_B);CHKERRQ(ierr);
963   /* ierr = PCBDDCScalingRestriction(mat_ctx->pc,standard_rhs,mat_ctx->temp_solution_B);CHKERRQ(ierr); */
964   if (!pcbddc->inexact_prec_type) {
965     /* compute partially subassembled Schur complement right-hand side */
966     ierr = KSPSolve(pcbddc->ksp_D,mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr);
967     ierr = MatMult(pcis->A_BI,pcis->vec1_D,pcis->vec1_B);CHKERRQ(ierr);
968     ierr = VecAXPY(mat_ctx->temp_solution_B,-1.0,pcis->vec1_B);CHKERRQ(ierr);
969     ierr = VecSet(standard_rhs,0.0);CHKERRQ(ierr);
970     ierr = VecScatterBegin(pcis->global_to_B,mat_ctx->temp_solution_B,standard_rhs,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
971     ierr = VecScatterEnd(pcis->global_to_B,mat_ctx->temp_solution_B,standard_rhs,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
972     /* ierr = PCBDDCScalingRestriction(mat_ctx->pc,standard_rhs,mat_ctx->temp_solution_B);CHKERRQ(ierr); */
973     ierr = VecScatterBegin(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
974     ierr = VecScatterEnd(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
975     ierr = VecPointwiseMult(mat_ctx->temp_solution_B,pcis->D,mat_ctx->temp_solution_B);CHKERRQ(ierr);
976   }
977   /* BDDC rhs */
978   ierr = VecCopy(mat_ctx->temp_solution_B,pcis->vec1_B);CHKERRQ(ierr);
979   if (pcbddc->inexact_prec_type) {
980     ierr = VecCopy(mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr);
981   }
982   /* apply BDDC */
983   ierr = PCBDDCApplyInterfacePreconditioner(mat_ctx->pc);CHKERRQ(ierr);
984   /* Application of B_delta and assembling of rhs for fetidp fluxes */
985   ierr = VecSet(fetidp_flux_rhs,0.0);CHKERRQ(ierr);
986   ierr = MatMult(mat_ctx->B_delta,pcis->vec1_B,mat_ctx->lambda_local);CHKERRQ(ierr);
987   ierr = VecScatterBegin(mat_ctx->l2g_lambda,mat_ctx->lambda_local,fetidp_flux_rhs,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
988   ierr = VecScatterEnd  (mat_ctx->l2g_lambda,mat_ctx->lambda_local,fetidp_flux_rhs,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
989   /* restore original rhs */
990   ierr = VecCopy(pcbddc->original_rhs,standard_rhs);CHKERRQ(ierr);
991   PetscFunctionReturn(0);
992 }
993 
994 #undef __FUNCT__
995 #define __FUNCT__ "PCBDDCMatFETIDPGetRHS"
996 /*@
997  PCBDDCMatFETIDPGetRHS - Get rhs for FETIDP linear system.
998 
999    Collective
1000 
1001    Input Parameters:
1002 +  fetidp_mat   - the FETIDP mat obtained by a call to PCBDDCCreateFETIDPOperators
1003 -  standard_rhs - the rhs of your linear system
1004 
1005    Output Parameter:
1006 .  fetidp_flux_rhs - the rhs of the FETIDP linear system
1007 
1008    Level: developer
1009 
1010    Notes:
1011 
1012 .seealso: PCBDDC
1013 @*/
1014 PetscErrorCode PCBDDCMatFETIDPGetRHS(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
1015 {
1016   FETIDPMat_ctx  mat_ctx;
1017   PetscErrorCode ierr;
1018 
1019   PetscFunctionBegin;
1020   ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr);
1021   ierr = PetscTryMethod(mat_ctx->pc,"PCBDDCMatFETIDPGetRHS_C",(Mat,Vec,Vec),(fetidp_mat,standard_rhs,fetidp_flux_rhs));CHKERRQ(ierr);
1022   PetscFunctionReturn(0);
1023 }
1024 /* -------------------------------------------------------------------------- */
1025 
1026 #undef __FUNCT__
1027 #define __FUNCT__ "PCBDDCMatFETIDPGetSolution_BDDC"
1028 static PetscErrorCode PCBDDCMatFETIDPGetSolution_BDDC(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
1029 {
1030   FETIDPMat_ctx  mat_ctx;
1031   PC_IS*         pcis;
1032   PC_BDDC*       pcbddc;
1033   PetscErrorCode ierr;
1034 
1035   PetscFunctionBegin;
1036   ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr);
1037   pcis = (PC_IS*)mat_ctx->pc->data;
1038   pcbddc = (PC_BDDC*)mat_ctx->pc->data;
1039 
1040   /* apply B_delta^T */
1041   ierr = VecScatterBegin(mat_ctx->l2g_lambda,fetidp_flux_sol,mat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1042   ierr = VecScatterEnd  (mat_ctx->l2g_lambda,fetidp_flux_sol,mat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1043   ierr = MatMultTranspose(mat_ctx->B_delta,mat_ctx->lambda_local,pcis->vec1_B);CHKERRQ(ierr);
1044   /* compute rhs for BDDC application */
1045   ierr = VecAYPX(pcis->vec1_B,-1.0,mat_ctx->temp_solution_B);CHKERRQ(ierr);
1046   if (pcbddc->inexact_prec_type) {
1047     ierr = VecCopy(mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr);
1048   }
1049   /* apply BDDC */
1050   ierr = PCBDDCApplyInterfacePreconditioner(mat_ctx->pc);CHKERRQ(ierr);
1051   /* put values into standard global vector */
1052   ierr = VecScatterBegin(pcis->global_to_B,pcis->vec1_B,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1053   ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec1_B,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1054   if (!pcbddc->inexact_prec_type) {
1055     /* compute the interior values if we solved for the partially subassembled Schur complement */
1056     ierr = MatMult(pcis->A_IB,pcis->vec1_B,pcis->vec1_D);CHKERRQ(ierr);
1057     ierr = VecAXPY(mat_ctx->temp_solution_D,-1.0,pcis->vec1_D);CHKERRQ(ierr);
1058     ierr = KSPSolve(pcbddc->ksp_D,mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr);
1059   }
1060   ierr = VecScatterBegin(pcis->global_to_D,pcis->vec1_D,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1061   ierr = VecScatterEnd  (pcis->global_to_D,pcis->vec1_D,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1062   /* final change of basis if needed
1063      It also adds back the Dirichlet part removed during rhs assembling */
1064   ierr = PCPostSolve_BDDC(mat_ctx->pc,NULL,NULL,standard_sol);CHKERRQ(ierr);
1065   PetscFunctionReturn(0);
1066 }
1067 
1068 #undef __FUNCT__
1069 #define __FUNCT__ "PCBDDCMatFETIDPGetSolution"
1070 /*@
1071  PCBDDCMatFETIDPGetSolution - Get Solution for FETIDP linear system.
1072 
1073    Collective
1074 
1075    Input Parameters:
1076 +  fetidp_mat      - the FETIDP mat obtained by a call to PCBDDCCreateFETIDPOperators
1077 -  fetidp_flux_sol - the solution of the FETIDP linear system
1078 
1079    Output Parameter:
1080 .  standard_sol - the solution on the global domain
1081 
1082    Level: developer
1083 
1084    Notes:
1085 
1086 .seealso: PCBDDC
1087 @*/
1088 PetscErrorCode PCBDDCMatFETIDPGetSolution(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
1089 {
1090   FETIDPMat_ctx  mat_ctx;
1091   PetscErrorCode ierr;
1092 
1093   PetscFunctionBegin;
1094   ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr);
1095   ierr = PetscTryMethod(mat_ctx->pc,"PCBDDCMatFETIDPGetSolution_C",(Mat,Vec,Vec),(fetidp_mat,fetidp_flux_sol,standard_sol));CHKERRQ(ierr);
1096   PetscFunctionReturn(0);
1097 }
1098 /* -------------------------------------------------------------------------- */
1099 
1100 extern PetscErrorCode FETIDPMatMult(Mat,Vec,Vec);
1101 extern PetscErrorCode PCBDDCDestroyFETIDPMat(Mat);
1102 extern PetscErrorCode FETIDPPCApply(PC,Vec,Vec);
1103 extern PetscErrorCode PCBDDCDestroyFETIDPPC(PC);
1104 
1105 #undef __FUNCT__
1106 #define __FUNCT__ "PCBDDCCreateFETIDPOperators_BDDC"
1107 static PetscErrorCode PCBDDCCreateFETIDPOperators_BDDC(PC pc, Mat *fetidp_mat, PC *fetidp_pc)
1108 {
1109 
1110   FETIDPMat_ctx  fetidpmat_ctx;
1111   Mat            newmat;
1112   FETIDPPC_ctx   fetidppc_ctx;
1113   PC             newpc;
1114   MPI_Comm       comm;
1115   PetscErrorCode ierr;
1116 
1117   PetscFunctionBegin;
1118   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
1119   /* FETIDP linear matrix */
1120   ierr = PCBDDCCreateFETIDPMatContext(pc,&fetidpmat_ctx);CHKERRQ(ierr);
1121   ierr = PCBDDCSetupFETIDPMatContext(fetidpmat_ctx);CHKERRQ(ierr);
1122   ierr = MatCreateShell(comm,PETSC_DECIDE,PETSC_DECIDE,fetidpmat_ctx->n_lambda,fetidpmat_ctx->n_lambda,fetidpmat_ctx,&newmat);CHKERRQ(ierr);
1123   ierr = MatShellSetOperation(newmat,MATOP_MULT,(void (*)(void))FETIDPMatMult);CHKERRQ(ierr);
1124   ierr = MatShellSetOperation(newmat,MATOP_DESTROY,(void (*)(void))PCBDDCDestroyFETIDPMat);CHKERRQ(ierr);
1125   ierr = MatSetUp(newmat);CHKERRQ(ierr);
1126   /* FETIDP preconditioner */
1127   ierr = PCBDDCCreateFETIDPPCContext(pc,&fetidppc_ctx);CHKERRQ(ierr);
1128   ierr = PCBDDCSetupFETIDPPCContext(newmat,fetidppc_ctx);CHKERRQ(ierr);
1129   ierr = PCCreate(comm,&newpc);CHKERRQ(ierr);
1130   ierr = PCSetType(newpc,PCSHELL);CHKERRQ(ierr);
1131   ierr = PCShellSetContext(newpc,fetidppc_ctx);CHKERRQ(ierr);
1132   ierr = PCShellSetApply(newpc,FETIDPPCApply);CHKERRQ(ierr);
1133   ierr = PCShellSetDestroy(newpc,PCBDDCDestroyFETIDPPC);CHKERRQ(ierr);
1134   ierr = PCSetOperators(newpc,newmat,newmat,SAME_PRECONDITIONER);CHKERRQ(ierr);
1135   ierr = PCSetUp(newpc);CHKERRQ(ierr);
1136   /* return pointers for objects created */
1137   *fetidp_mat=newmat;
1138   *fetidp_pc=newpc;
1139   PetscFunctionReturn(0);
1140 }
1141 
1142 #undef __FUNCT__
1143 #define __FUNCT__ "PCBDDCCreateFETIDPOperators"
1144 /*@
1145  PCBDDCCreateFETIDPOperators - Create operators for FETIDP.
1146 
1147    Collective
1148 
   Input Parameter:
.  pc - the BDDC preconditioning context (PCSetUp() must have already been called)

   Output Parameters:
+  fetidp_mat - shell Mat implementing the action of the FETIDP operator
-  fetidp_pc - shell PC used to precondition the FETIDP linear system
1151 
1152    Level: developer
1153 
1154    Notes:
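   A sketch of a complete FETI-DP solve built on top of an already set up PCBDDC object
   (ksp_fetidp, flux_rhs, flux_sol, standard_rhs and standard_sol are illustrative names):

.vb
     Mat fetidp_mat;
     PC  fetidp_pc;
     KSP ksp_fetidp;
     Vec flux_rhs,flux_sol;

     ierr = PCBDDCCreateFETIDPOperators(pc,&fetidp_mat,&fetidp_pc);CHKERRQ(ierr);
     ierr = MatGetVecs(fetidp_mat,&flux_sol,&flux_rhs);CHKERRQ(ierr);
     ierr = KSPCreate(PetscObjectComm((PetscObject)pc),&ksp_fetidp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp_fetidp,fetidp_mat,fetidp_mat,SAME_PRECONDITIONER);CHKERRQ(ierr);
     ierr = KSPSetPC(ksp_fetidp,fetidp_pc);CHKERRQ(ierr);
     ierr = PCBDDCMatFETIDPGetRHS(fetidp_mat,standard_rhs,flux_rhs);CHKERRQ(ierr);
     ierr = KSPSolve(ksp_fetidp,flux_rhs,flux_sol);CHKERRQ(ierr);
     ierr = PCBDDCMatFETIDPGetSolution(fetidp_mat,flux_sol,standard_sol);CHKERRQ(ierr);
.ve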
1155 
1156 .seealso: PCBDDC
1157 @*/
1158 PetscErrorCode PCBDDCCreateFETIDPOperators(PC pc, Mat *fetidp_mat, PC *fetidp_pc)
1159 {
1160   PetscErrorCode ierr;
1161 
1162   PetscFunctionBegin;
1163   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1164   if (pc->setupcalled) {
1165     ierr = PetscTryMethod(pc,"PCBDDCCreateFETIDPOperators_C",(PC,Mat*,PC*),(pc,fetidp_mat,fetidp_pc));CHKERRQ(ierr);
1166   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"You must call PCSetUp() first");
1167   PetscFunctionReturn(0);
1168 }
1169 /* -------------------------------------------------------------------------- */
1170 /*MC
1171    PCBDDC - Balancing Domain Decomposition by Constraints.
1172 
   Options Database Keys:
+    -pc_bddc_check_level <0> - set verbosity level of debugging output
.    -pc_bddc_vertices_only - use only vertices in the coarse space
.    -pc_bddc_constraints_only - use only constraints in the coarse space
.    -pc_bddc_faces_only - use only faces among the constraints of the coarse space
.    -pc_bddc_edges_only - use only edges among the constraints of the coarse space
.    -pc_bddc_coarse_problem_type <sequential|replicated|parallel|multilevel> - set the coarse problem type
.    -pc_bddc_switch_preconditioning_type - switch between the M_2 (default) and M_3 preconditioners of Dohrmann
.    -pc_bddc_use_change_of_basis - use a change of basis approach for the primal space
.    -pc_bddc_use_change_on_faces - use a change of basis approach for face constraints
.    -pc_bddc_coarsening_ratio <8> - set the coarsening ratio used in multilevel coarsening
.    -pc_bddc_max_levels <1> - set the maximum number of levels for the multilevel approach
-    -pc_bddc_use_deluxe_scaling - use deluxe scaling
1175 
1176    Level: intermediate
1177 
1178    Notes: The matrix used with this preconditioner must be of type MATIS
1179 
1180           Unlike more 'conventional' interface preconditioners, this iterates over ALL the
1181           degrees of freedom, NOT just those on the interface (this allows the use of approximate solvers
1182           on the subdomains).
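
          A typical command line run (the executable name is illustrative; BDDC is commonly
          paired with CG for symmetric positive definite problems) could be

.vb
          mpiexec -n 4 ./ex -ksp_type cg -pc_type bddc -pc_bddc_coarsening_ratio 4
.ve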
1183 
1184           Options for the coarse grid preconditioner can be set with -
1185           Options for the Dirichlet subproblem can be set with -
1186           Options for the Neumann subproblem can be set with -
1187 
1188    Contributed by Stefano Zampini
1189 
1190 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,  MATIS
1191 M*/
1192 
1193 #undef __FUNCT__
1194 #define __FUNCT__ "PCCreate_BDDC"
1195 PETSC_EXTERN PetscErrorCode PCCreate_BDDC(PC pc)
1196 {
1197   PetscErrorCode      ierr;
1198   PC_BDDC             *pcbddc;
1199 
1200   PetscFunctionBegin;
1201   /* Creates the private data structure for this preconditioner and attaches it to the PC object. */
1202   ierr      = PetscNewLog(pc,PC_BDDC,&pcbddc);CHKERRQ(ierr);
1203   pc->data  = (void*)pcbddc;
1204 
1205   /* create PCIS data structure */
1206   ierr = PCISCreate(pc);CHKERRQ(ierr);
1207 
1208   /* BDDC specific */
1209   pcbddc->user_primal_vertices       = 0;
1210   pcbddc->NullSpace                  = 0;
1211   pcbddc->temp_solution              = 0;
1212   pcbddc->original_rhs               = 0;
1213   pcbddc->local_mat                  = 0;
1214   pcbddc->ChangeOfBasisMatrix        = 0;
1215   pcbddc->use_change_of_basis        = PETSC_TRUE;
1216   pcbddc->use_change_on_faces        = PETSC_FALSE;
1217   pcbddc->coarse_vec                 = 0;
1218   pcbddc->coarse_rhs                 = 0;
1219   pcbddc->coarse_ksp                 = 0;
1220   pcbddc->coarse_phi_B               = 0;
1221   pcbddc->coarse_phi_D               = 0;
1222   pcbddc->coarse_psi_B               = 0;
1223   pcbddc->coarse_psi_D               = 0;
1224   pcbddc->vec1_P                     = 0;
1225   pcbddc->vec1_R                     = 0;
1226   pcbddc->vec2_R                     = 0;
1227   pcbddc->local_auxmat1              = 0;
1228   pcbddc->local_auxmat2              = 0;
1229   pcbddc->R_to_B                     = 0;
1230   pcbddc->R_to_D                     = 0;
1231   pcbddc->ksp_D                      = 0;
1232   pcbddc->ksp_R                      = 0;
1233   pcbddc->local_primal_indices       = 0;
1234   pcbddc->inexact_prec_type          = PETSC_FALSE;
1235   pcbddc->NeumannBoundaries          = 0;
1236   pcbddc->ISForDofs                  = 0;
1237   pcbddc->ConstraintMatrix           = 0;
1238   pcbddc->use_nnsp_true              = PETSC_FALSE;
1239   pcbddc->local_primal_sizes         = 0;
1240   pcbddc->local_primal_displacements = 0;
1241   pcbddc->coarse_loc_to_glob         = 0;
1242   pcbddc->dbg_flag                   = 0;
1243   pcbddc->coarsening_ratio           = 8;
1244   pcbddc->use_exact_dirichlet        = PETSC_TRUE;
1245   pcbddc->current_level              = 0;
1246   pcbddc->max_levels                 = 1;
1247   pcbddc->replicated_local_primal_indices = 0;
1248   pcbddc->replicated_local_primal_values  = 0;
1249 
1250   /* create local graph structure */
1251   ierr = PCBDDCGraphCreate(&pcbddc->mat_graph);CHKERRQ(ierr);
1252 
1253   /* scaling */
1254   pcbddc->use_deluxe_scaling         = PETSC_FALSE;
1255   pcbddc->work_scaling               = 0;
1256 
1257   /* function pointers */
1258   pc->ops->apply               = PCApply_BDDC;
1259   pc->ops->applytranspose      = 0;
1260   pc->ops->setup               = PCSetUp_BDDC;
1261   pc->ops->destroy             = PCDestroy_BDDC;
1262   pc->ops->setfromoptions      = PCSetFromOptions_BDDC;
1263   pc->ops->view                = 0;
1264   pc->ops->applyrichardson     = 0;
1265   pc->ops->applysymmetricleft  = 0;
1266   pc->ops->applysymmetricright = 0;
1267   pc->ops->presolve            = PCPreSolve_BDDC;
1268   pc->ops->postsolve           = PCPostSolve_BDDC;
1269 
1270   /* composing function */
1271   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetPrimalVerticesLocalIS_C",PCBDDCSetPrimalVerticesLocalIS_BDDC);CHKERRQ(ierr);
1272   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseningRatio_C",PCBDDCSetCoarseningRatio_BDDC);CHKERRQ(ierr);
1273   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetMaxLevels_C",PCBDDCSetMaxLevels_BDDC);CHKERRQ(ierr);
1274   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNullSpace_C",PCBDDCSetNullSpace_BDDC);CHKERRQ(ierr);
1275   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundaries_C",PCBDDCSetDirichletBoundaries_BDDC);CHKERRQ(ierr);
1276   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundaries_C",PCBDDCSetNeumannBoundaries_BDDC);CHKERRQ(ierr);
1277   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundaries_C",PCBDDCGetDirichletBoundaries_BDDC);CHKERRQ(ierr);
1278   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundaries_C",PCBDDCGetNeumannBoundaries_BDDC);CHKERRQ(ierr);
1279   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseProblemType_C",PCBDDCSetCoarseProblemType_BDDC);CHKERRQ(ierr);
1280   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplitting_C",PCBDDCSetDofsSplitting_BDDC);CHKERRQ(ierr);
1281   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",PCBDDCSetLocalAdjacencyGraph_BDDC);CHKERRQ(ierr);
1282   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCCreateFETIDPOperators_C",PCBDDCCreateFETIDPOperators_BDDC);CHKERRQ(ierr);
1283   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetRHS_C",PCBDDCMatFETIDPGetRHS_BDDC);CHKERRQ(ierr);
1284   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetSolution_C",PCBDDCMatFETIDPGetSolution_BDDC);CHKERRQ(ierr);
1285   PetscFunctionReturn(0);
1286 }
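
/* Usage sketch: how an application typically selects this preconditioner.
   This is an illustration only, not part of this translation unit; the
   operator A (which must be of type MATIS) and the vectors b and x are
   placeholders.

     KSP ksp;
     PC  pc;
     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A,SAME_PRECONDITIONER);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCBDDC);CHKERRQ(ierr);
     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);

   Run-time options registered above, e.g. -pc_bddc_coarse_problem_type
   multilevel, are then honored by PCSetFromOptions_BDDC. */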
1287 
1288 /* -------------------------------------------------------------------------- */
1289 /* All static functions from now on                                           */
1290 /* -------------------------------------------------------------------------- */
1291 
1292 #undef __FUNCT__
1293 #define __FUNCT__ "PCBDDCSetLevel"
1294 static PetscErrorCode PCBDDCSetLevel(PC pc,PetscInt level)
1295 {
1296   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
1297 
1298   PetscFunctionBegin;
1299   pcbddc->current_level=level;
1300   PetscFunctionReturn(0);
1301 }
1302 
1303 /* -------------------------------------------------------------------------- */
1304 #undef __FUNCT__
1305 #define __FUNCT__ "PCBDDCCoarseSetUp"
1306 static PetscErrorCode PCBDDCCoarseSetUp(PC pc)
1307 {
1308   PC_IS*            pcis = (PC_IS*)(pc->data);
1309   PC_BDDC*          pcbddc = (PC_BDDC*)pc->data;
1310   IS                is_R_local;
1311   PetscErrorCode    ierr;
1312 
1313   PetscFunctionBegin;
1314   /* compute matrix after change of basis and extract local submatrices */
1315   ierr = PCBDDCSetUpLocalMatrices(pc);CHKERRQ(ierr);
1316 
1317   /* Change global null space passed in by the user if change of basis has been requested */
1318   if (pcbddc->NullSpace && pcbddc->use_change_of_basis) {
1319     ierr = PCBDDCNullSpaceAdaptGlobal(pc);CHKERRQ(ierr);
1320   }
1321 
1322   /* Allocate needed vectors */
1323   ierr = PCBDDCCreateWorkVectors(pc);CHKERRQ(ierr);
1324 
1325   /* setup local scatters R_to_B and (optionally) R_to_D : PCBDDCCreateWorkVectors should be called first! */
1326   ierr = PCBDDCSetUpLocalScatters(pc,&is_R_local);CHKERRQ(ierr);
1327 
1328   /* setup local solvers ksp_D and ksp_R */
1329   ierr = PCBDDCSetUpLocalSolvers(pc,pcis->is_I_local,is_R_local);CHKERRQ(ierr);
1330 
1331   /* setup local correction and local part of coarse basis */
1332   ierr = PCBDDCSetUpCorrectionAndBasis(pc,is_R_local);CHKERRQ(ierr);
1333 
1334   /* free memory */
1335   ierr = ISDestroy(&is_R_local);CHKERRQ(ierr);
1336 
1337   PetscFunctionReturn(0);
1338 }
1339 
1340 /* -------------------------------------------------------------------------- */
1341 
1342 /* BDDC requires METIS 5.0.1 for multilevel */
1343 #if defined(PETSC_HAVE_METIS)
1344 #include "metis.h"
1345 #define MetisInt    idx_t
1346 #define MetisScalar real_t
1347 #endif
1348 
1349 #undef __FUNCT__
1350 #define __FUNCT__ "PCBDDCSetUpCoarseEnvironment"
1351 PetscErrorCode PCBDDCSetUpCoarseEnvironment(PC pc,PetscScalar* coarse_submat_vals)
1352 {
1353 
1354 
1355   Mat_IS    *matis    = (Mat_IS*)pc->pmat->data;
1356   PC_BDDC   *pcbddc   = (PC_BDDC*)pc->data;
1357   PC_IS     *pcis     = (PC_IS*)pc->data;
1358   MPI_Comm  prec_comm;
1359   MPI_Comm  coarse_comm;
1360 
1361   MatNullSpace CoarseNullSpace;
1362 
1363   /* common to all choices */
1364   PetscScalar *temp_coarse_mat_vals;
1365   PetscScalar *ins_coarse_mat_vals;
1366   PetscInt    *ins_local_primal_indices;
1367   PetscMPIInt *localsizes2,*localdispl2;
1368   PetscMPIInt size_prec_comm;
1369   PetscMPIInt rank_prec_comm;
1370   PetscMPIInt active_rank=MPI_PROC_NULL;
1371   PetscMPIInt master_proc=0;
1372   PetscInt    ins_local_primal_size;
1373   /* specific to MULTILEVEL_BDDC */
1374   PetscMPIInt *ranks_recv=0;
1375   PetscMPIInt count_recv=0;
1376   PetscMPIInt rank_coarse_proc_send_to=-1;
1377   PetscMPIInt coarse_color = MPI_UNDEFINED;
1378   ISLocalToGlobalMapping coarse_ISLG;
1379   /* some other variables */
1380   PetscErrorCode ierr;
1381   MatType coarse_mat_type;
1382   PCType  coarse_pc_type;
1383   KSPType coarse_ksp_type;
1384   PC pc_temp;
1385   PetscInt i,j,k;
1386   PetscInt max_it_coarse_ksp=1;  /* don't increase this value */
1387   /* verbose output viewer */
1388   PetscViewer viewer=pcbddc->dbg_viewer;
1389   PetscInt    dbg_flag=pcbddc->dbg_flag;
1390 
1391   PetscInt      offset,offset2;
1392   PetscMPIInt   im_active,active_procs;
1393   PetscInt      *dnz,*onz;
1394 
1395   PetscBool     setsym,issym=PETSC_FALSE;
1396 
1397   PetscFunctionBegin;
1398   ierr = PetscObjectGetComm((PetscObject)pc,&prec_comm);CHKERRQ(ierr);
1399   ins_local_primal_indices = 0;
1400   ins_coarse_mat_vals      = 0;
1401   localsizes2              = 0;
1402   localdispl2              = 0;
1403   temp_coarse_mat_vals     = 0;
1404   coarse_ISLG              = 0;
1405 
1406   ierr = MPI_Comm_size(prec_comm,&size_prec_comm);CHKERRQ(ierr);
1407   ierr = MPI_Comm_rank(prec_comm,&rank_prec_comm);CHKERRQ(ierr);
1408   ierr = MatIsSymmetricKnown(pc->pmat,&setsym,&issym);CHKERRQ(ierr);
1409 
1410   /* Assign global numbering to coarse dofs */
1411   {
1412     PetscInt     *auxlocal_primal,*aux_idx;
1413     PetscMPIInt  mpi_local_primal_size;
1414     PetscScalar  coarsesum,*array;
1415 
1416     mpi_local_primal_size = (PetscMPIInt)pcbddc->local_primal_size;
1417 
1418     /* Construct needed data structures for message passing */
1419     j = 0;
1420     if (rank_prec_comm == 0 || pcbddc->coarse_problem_type == REPLICATED_BDDC || pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
1421       j = size_prec_comm;
1422     }
1423     ierr = PetscMalloc(j*sizeof(PetscMPIInt),&pcbddc->local_primal_sizes);CHKERRQ(ierr);
1424     ierr = PetscMalloc(j*sizeof(PetscMPIInt),&pcbddc->local_primal_displacements);CHKERRQ(ierr);
1425     /* Gather local_primal_size information for all processes  */
1426     if (pcbddc->coarse_problem_type == REPLICATED_BDDC || pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
1427       ierr = MPI_Allgather(&mpi_local_primal_size,1,MPIU_INT,&pcbddc->local_primal_sizes[0],1,MPIU_INT,prec_comm);CHKERRQ(ierr);
1428     } else {
1429       ierr = MPI_Gather(&mpi_local_primal_size,1,MPIU_INT,&pcbddc->local_primal_sizes[0],1,MPIU_INT,0,prec_comm);CHKERRQ(ierr);
1430     }
1431     pcbddc->replicated_primal_size = 0;
1432     for (i=0; i<j; i++) {
1433       pcbddc->local_primal_displacements[i] = pcbddc->replicated_primal_size;
1434       pcbddc->replicated_primal_size += pcbddc->local_primal_sizes[i];
1435     }
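    /* Worked example: with local_primal_sizes = {3,2,4} the loop above produces
       local_primal_displacements = {0,3,5} and replicated_primal_size = 9 */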
1436 
1437     /* First let's count coarse dofs.
1438        This code fragment assumes that the number of local constraints per connected component
1439        is not greater than the number of nodes defined for the connected component
1440        (otherwise we will surely have linear dependence between constraints and thus a singular coarse problem) */
1441     ierr = PetscMalloc(pcbddc->local_primal_size*sizeof(PetscInt),&auxlocal_primal);CHKERRQ(ierr);
1442     ierr = PCBDDCGetPrimalVerticesLocalIdx(pc,&i,&aux_idx);CHKERRQ(ierr);
1443     ierr = PetscMemcpy(auxlocal_primal,aux_idx,i*sizeof(PetscInt));CHKERRQ(ierr);
1444     ierr = PetscFree(aux_idx);CHKERRQ(ierr);
1445     ierr = PCBDDCGetPrimalConstraintsLocalIdx(pc,&j,&aux_idx);CHKERRQ(ierr);
1446     ierr = PetscMemcpy(&auxlocal_primal[i],aux_idx,j*sizeof(PetscInt));CHKERRQ(ierr);
1447     ierr = PetscFree(aux_idx);CHKERRQ(ierr);
1448     /* Compute number of coarse dofs */
1449     ierr = PCBDDCSubsetNumbering(prec_comm,matis->mapping,pcbddc->local_primal_size,auxlocal_primal,NULL,&pcbddc->coarse_size,&pcbddc->local_primal_indices);CHKERRQ(ierr);
1450 
1451     if (dbg_flag) {
1452       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1453       ierr = PetscViewerASCIIPrintf(viewer,"--------------------------------------------------\n");CHKERRQ(ierr);
1454       ierr = PetscViewerASCIIPrintf(viewer,"Check coarse indices\n");CHKERRQ(ierr);
1455       ierr = VecSet(pcis->vec1_N,0.0);CHKERRQ(ierr);
1456       ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1457       for (i=0;i<pcbddc->local_primal_size;i++) array[auxlocal_primal[i]]=1.0;
1458       ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1459       ierr = VecSet(pcis->vec1_global,0.0);CHKERRQ(ierr);
1460       ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1461       ierr = VecScatterEnd  (matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1462       ierr = VecScatterBegin(matis->ctx,pcis->vec1_global,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1463       ierr = VecScatterEnd  (matis->ctx,pcis->vec1_global,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1464       ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1465       for (i=0;i<pcis->n;i++) {
1466         if (array[i] == 1.0) {
1467           ierr = ISLocalToGlobalMappingApply(matis->mapping,1,&i,&j);CHKERRQ(ierr);
1468           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d: WRONG COARSE INDEX %d (local %d)\n",PetscGlobalRank,j,i);CHKERRQ(ierr);
1469         }
1470       }
1471       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1472       for (i=0;i<pcis->n;i++) {
1473         if (PetscRealPart(array[i]) > 0.0) array[i] = 1.0/PetscRealPart(array[i]);
1474       }
1475       ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1476       ierr = VecSet(pcis->vec1_global,0.0);CHKERRQ(ierr);
1477       ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1478       ierr = VecScatterEnd  (matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1479       ierr = VecSum(pcis->vec1_global,&coarsesum);CHKERRQ(ierr);
1480       ierr = PetscViewerASCIIPrintf(viewer,"Size of coarse problem SHOULD be %lf\n",coarsesum);CHKERRQ(ierr);
1481       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1482     }
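    /* Note on the check above: the scatter pairs first count, for every marked
       dof, the number m of subdomains sharing it, and then sum the reciprocals
       of those multiplicities; a coarse dof shared by m subdomains thus
       contributes m*(1/m) = 1 to coarsesum, which should therefore match the
       global number of coarse dofs */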
1483     ierr = PetscFree(auxlocal_primal);CHKERRQ(ierr);
1484   }
1485 
1486   if (dbg_flag) {
1487     ierr = PetscViewerASCIIPrintf(viewer,"Size of coarse problem is %d\n",pcbddc->coarse_size);CHKERRQ(ierr);
1488     if (dbg_flag > 1) {
1489       ierr = PetscViewerASCIIPrintf(viewer,"Distribution of local primal indices\n");CHKERRQ(ierr);
1490       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1491       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d\n",PetscGlobalRank);CHKERRQ(ierr);
1492       for (i=0;i<pcbddc->local_primal_size;i++) {
1493         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"local_primal_indices[%d]=%d\n",i,pcbddc->local_primal_indices[i]);CHKERRQ(ierr);
1494       }
1495       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1496     }
1497   }
1498 
1499   im_active = 0;
1500   if (pcis->n) im_active = 1;
1501   ierr = MPI_Allreduce(&im_active,&active_procs,1,MPIU_INT,MPI_SUM,prec_comm);CHKERRQ(ierr);
1502 
1503   /* adapt coarse problem type */
1504 #if defined(PETSC_HAVE_METIS)
1505   if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
1506     if (pcbddc->current_level < pcbddc->max_levels) {
1507       if ( (active_procs/pcbddc->coarsening_ratio) < 2 ) {
1508         if (dbg_flag) {
1509           ierr = PetscViewerASCIIPrintf(viewer,"Not enough active processes on level %d (active %d,ratio %d). Parallel direct solve for coarse problem\n",pcbddc->current_level,active_procs,pcbddc->coarsening_ratio);CHKERRQ(ierr);
1510           ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1511         }
1512         pcbddc->coarse_problem_type = PARALLEL_BDDC;
1513       }
1514     } else {
1515       if (dbg_flag) {
1516         ierr = PetscViewerASCIIPrintf(viewer,"Max number of levels reached. Using parallel direct solve for coarse problem\n",pcbddc->max_levels,active_procs,pcbddc->coarsening_ratio);CHKERRQ(ierr);
1517         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1518       }
1519       pcbddc->coarse_problem_type = PARALLEL_BDDC;
1520     }
1521   }
1522 #else
1523   pcbddc->coarse_problem_type = PARALLEL_BDDC;
1524 #endif
1525 
1526   switch(pcbddc->coarse_problem_type){
1527 
1528     case(MULTILEVEL_BDDC):   /* we define a coarse mesh where subdomains are elements */
1529     {
1530 #if defined(PETSC_HAVE_METIS)
1531       /* we need additional variables */
1532       MetisInt    n_subdomains,n_parts,objval,ncon,faces_nvtxs;
1533       MetisInt    *metis_coarse_subdivision;
1534       MetisInt    options[METIS_NOPTIONS];
1535       PetscMPIInt size_coarse_comm,rank_coarse_comm;
1536       PetscMPIInt procs_jumps_coarse_comm;
1537       PetscMPIInt *coarse_subdivision;
1538       PetscMPIInt *total_count_recv;
1539       PetscMPIInt *total_ranks_recv;
1540       PetscMPIInt *displacements_recv;
1541       PetscMPIInt *my_faces_connectivity;
1542       PetscMPIInt *petsc_faces_adjncy;
1543       MetisInt    *faces_adjncy;
1544       MetisInt    *faces_xadj;
1545       PetscMPIInt *number_of_faces;
1546       PetscMPIInt *faces_displacements;
1547       PetscInt    *array_int;
1548       PetscMPIInt my_faces=0;
1549       PetscMPIInt total_faces=0;
1550       PetscInt    ranks_stretching_ratio;
1551 
1552       /* define some quantities */
1553       pcbddc->coarse_communications_type = SCATTERS_BDDC;
1554       coarse_mat_type = MATIS;
1555       coarse_pc_type  = PCBDDC;
1556       coarse_ksp_type = KSPRICHARDSON;
1557 
1558       /* details of coarse decomposition */
1559       n_subdomains = active_procs;
1560       n_parts      = n_subdomains/pcbddc->coarsening_ratio;
1561       ranks_stretching_ratio = size_prec_comm/active_procs;
1562       procs_jumps_coarse_comm = pcbddc->coarsening_ratio*ranks_stretching_ratio;
1563 
1564 #if 0
1565       PetscMPIInt *old_ranks;
1566       PetscInt    *new_ranks,*jj,*ii;
1567       MatPartitioning mat_part;
1568       IS coarse_new_decomposition,is_numbering;
1569       PetscViewer viewer_test;
1570       MPI_Comm    test_coarse_comm;
1571       PetscMPIInt test_coarse_color;
1572       Mat         mat_adj;
1573       /* Create new communicator for coarse problem splitting the old one */
1574       /* procs with coarse_color = MPI_UNDEFINED will have coarse_comm = MPI_COMM_NULL (from mpi standards)
1575          key = rank_prec_comm -> keep same ordering of ranks from the old to the new communicator */
1576       test_coarse_color = ( im_active ? 0 : MPI_UNDEFINED );
1577       test_coarse_comm = MPI_COMM_NULL;
1578       ierr = MPI_Comm_split(prec_comm,test_coarse_color,rank_prec_comm,&test_coarse_comm);CHKERRQ(ierr);
1579       if (im_active) {
1580         ierr = PetscMalloc(n_subdomains*sizeof(PetscMPIInt),&old_ranks);
1581         ierr = PetscMalloc(size_prec_comm*sizeof(PetscInt),&new_ranks);
1582         ierr = MPI_Comm_rank(test_coarse_comm,&rank_coarse_comm);CHKERRQ(ierr);
1583         ierr = MPI_Comm_size(test_coarse_comm,&j);CHKERRQ(ierr);
1584         ierr = MPI_Allgather(&rank_prec_comm,1,MPIU_INT,old_ranks,1,MPIU_INT,test_coarse_comm);CHKERRQ(ierr);
1585         for (i=0; i<size_prec_comm; i++) new_ranks[i] = -1;
1586         for (i=0; i<n_subdomains; i++) new_ranks[old_ranks[i]] = i;
1587         ierr = PetscViewerASCIIOpen(test_coarse_comm,"test_mat_part.out",&viewer_test);CHKERRQ(ierr);
1588         k = pcis->n_neigh-1;
1589         ierr = PetscMalloc(2*sizeof(PetscInt),&ii);
1590         ii[0]=0;
1591         ii[1]=k;
1592         ierr = PetscMalloc(k*sizeof(PetscInt),&jj);
1593         for (i=0; i<k; i++) jj[i]=new_ranks[pcis->neigh[i+1]];
1594         ierr = PetscSortInt(k,jj);CHKERRQ(ierr);
1595         ierr = MatCreateMPIAdj(test_coarse_comm,1,n_subdomains,ii,jj,NULL,&mat_adj);CHKERRQ(ierr);
1596         ierr = MatView(mat_adj,viewer_test);CHKERRQ(ierr);
1597         ierr = MatPartitioningCreate(test_coarse_comm,&mat_part);CHKERRQ(ierr);
1598         ierr = MatPartitioningSetAdjacency(mat_part,mat_adj);CHKERRQ(ierr);
1599         ierr = MatPartitioningSetFromOptions(mat_part);CHKERRQ(ierr);
1600         printf("Setting Nparts %d\n",n_parts);
1601         ierr = MatPartitioningSetNParts(mat_part,n_parts);CHKERRQ(ierr);
1602         ierr = MatPartitioningView(mat_part,viewer_test);CHKERRQ(ierr);
1603         ierr = MatPartitioningApply(mat_part,&coarse_new_decomposition);CHKERRQ(ierr);
1604         ierr = ISView(coarse_new_decomposition,viewer_test);CHKERRQ(ierr);
1605         ierr = ISPartitioningToNumbering(coarse_new_decomposition,&is_numbering);CHKERRQ(ierr);
1606         ierr = ISView(is_numbering,viewer_test);CHKERRQ(ierr);
1607         ierr = PetscViewerDestroy(&viewer_test);CHKERRQ(ierr);
1608         ierr = ISDestroy(&coarse_new_decomposition);CHKERRQ(ierr);
1609         ierr = ISDestroy(&is_numbering);CHKERRQ(ierr);
1610         ierr = MatPartitioningDestroy(&mat_part);CHKERRQ(ierr);
1611         ierr = PetscFree(old_ranks);CHKERRQ(ierr);
1612         ierr = PetscFree(new_ranks);CHKERRQ(ierr);
1613         ierr = MPI_Comm_free(&test_coarse_comm);CHKERRQ(ierr);
1614       }
1615 #endif
1616 
1617       /* build CSR graph of subdomains' connectivity */
1618       ierr = PetscMalloc (pcis->n*sizeof(PetscInt),&array_int);CHKERRQ(ierr);
1619       ierr = PetscMemzero(array_int,pcis->n*sizeof(PetscInt));CHKERRQ(ierr);
1620       for (i=1;i<pcis->n_neigh;i++){ /* start at i=1 to skip the subdomain itself -> nodes lying on a face end up with count 1 */
1621         for (j=0;j<pcis->n_shared[i];j++){
1622           array_int[ pcis->shared[i][j] ]+=1;
1623         }
1624       }
1625       for (i=1;i<pcis->n_neigh;i++){
1626         for (j=0;j<pcis->n_shared[i];j++){
1627           if (array_int[ pcis->shared[i][j] ] > 0 ){
1628             my_faces++;
1629             break;
1630           }
1631         }
1632       }
1633 
1634       ierr = MPI_Reduce(&my_faces,&total_faces,1,MPIU_INT,MPI_SUM,master_proc,prec_comm);CHKERRQ(ierr);
1635       ierr = PetscMalloc (my_faces*sizeof(PetscInt),&my_faces_connectivity);CHKERRQ(ierr);
1636       my_faces=0;
1637       for (i=1;i<pcis->n_neigh;i++){
1638         for (j=0;j<pcis->n_shared[i];j++){
1639           if (array_int[ pcis->shared[i][j] ] > 0 ){
1640             my_faces_connectivity[my_faces]=pcis->neigh[i];
1641             my_faces++;
1642             break;
1643           }
1644         }
1645       }
1646       if (rank_prec_comm == master_proc) {
1647         ierr = PetscMalloc (total_faces*sizeof(PetscMPIInt),&petsc_faces_adjncy);CHKERRQ(ierr);
1648         ierr = PetscMalloc (size_prec_comm*sizeof(PetscMPIInt),&number_of_faces);CHKERRQ(ierr);
1649         ierr = PetscMalloc (total_faces*sizeof(MetisInt),&faces_adjncy);CHKERRQ(ierr);
1650         ierr = PetscMalloc ((n_subdomains+1)*sizeof(MetisInt),&faces_xadj);CHKERRQ(ierr);
1651         ierr = PetscMalloc ((size_prec_comm+1)*sizeof(PetscMPIInt),&faces_displacements);CHKERRQ(ierr);
1652       }
1653       ierr = MPI_Gather(&my_faces,1,MPIU_INT,&number_of_faces[0],1,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr);
1654       if (rank_prec_comm == master_proc) {
1655         faces_xadj[0]=0;
1656         faces_displacements[0]=0;
1657         j=0;
1658         for (i=1;i<size_prec_comm+1;i++) {
1659           faces_displacements[i]=faces_displacements[i-1]+number_of_faces[i-1];
1660           if (number_of_faces[i-1]) {
1661             j++;
1662             faces_xadj[j]=faces_xadj[j-1]+number_of_faces[i-1];
1663           }
1664         }
1665       }
1666       ierr = MPI_Gatherv(&my_faces_connectivity[0],my_faces,MPIU_INT,&petsc_faces_adjncy[0],number_of_faces,faces_displacements,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr);
1667       ierr = PetscFree(my_faces_connectivity);CHKERRQ(ierr);
1668       ierr = PetscFree(array_int);CHKERRQ(ierr);
1669       if (rank_prec_comm == master_proc) {
1670         for (i=0;i<total_faces;i++) faces_adjncy[i]=(MetisInt)(petsc_faces_adjncy[i]/ranks_stretching_ratio); /* cast to MetisInt */
1671         ierr = PetscFree(faces_displacements);CHKERRQ(ierr);
1672         ierr = PetscFree(number_of_faces);CHKERRQ(ierr);
1673         ierr = PetscFree(petsc_faces_adjncy);CHKERRQ(ierr);
1674       }
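      /* Worked example: four subdomains in a 2x2 grid, each sharing a face with
         its two neighbours, produce on the master process
           faces_xadj   = {0,2,4,6,8}
           faces_adjncy = {1,2, 0,3, 0,3, 1,2}
         (the order within each adjacency list depends on how the per-process
         lists were gathered) */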
1675 
1676       if ( rank_prec_comm == master_proc ) {
1677 
1678         PetscInt heuristic_for_metis=3;
1679 
1680         ncon=1;
1681         faces_nvtxs=n_subdomains;
1682         /* partition graph induced by face connectivity */
1683         ierr = PetscMalloc (n_subdomains*sizeof(MetisInt),&metis_coarse_subdivision);CHKERRQ(ierr);
1684         ierr = METIS_SetDefaultOptions(options);
1685         /* we need a contiguous partition of the coarse mesh */
1686         options[METIS_OPTION_CONTIG]=1;
1687         options[METIS_OPTION_NITER]=30;
1688         if (pcbddc->coarsening_ratio > 1) {
1689           if (n_subdomains>n_parts*heuristic_for_metis) {
1690             options[METIS_OPTION_IPTYPE]=METIS_IPTYPE_EDGE;
1691             options[METIS_OPTION_OBJTYPE]=METIS_OBJTYPE_CUT;
1692             ierr = METIS_PartGraphKway(&faces_nvtxs,&ncon,faces_xadj,faces_adjncy,NULL,NULL,NULL,&n_parts,NULL,NULL,options,&objval,metis_coarse_subdivision);
1693             if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in METIS_PartGraphKway (metis error code %D) called from PCBDDCSetUpCoarseEnvironment\n",ierr);
1694           } else {
1695             ierr = METIS_PartGraphRecursive(&faces_nvtxs,&ncon,faces_xadj,faces_adjncy,NULL,NULL,NULL,&n_parts,NULL,NULL,options,&objval,metis_coarse_subdivision);
1696             if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in METIS_PartGraphRecursive (metis error code %D) called from PCBDDCSetUpCoarseEnvironment\n",ierr);
1697           }
1698         } else {
1699           for (i=0;i<n_subdomains;i++) metis_coarse_subdivision[i]=i;
1700         }
1701         ierr = PetscFree(faces_xadj);CHKERRQ(ierr);
1702         ierr = PetscFree(faces_adjncy);CHKERRQ(ierr);
1703         ierr = PetscMalloc(size_prec_comm*sizeof(PetscMPIInt),&coarse_subdivision);CHKERRQ(ierr);
1704 
1705         /* copy/cast values avoiding possible type conflicts between PETSc, MPI and METIS */
1706         for (i=0;i<size_prec_comm;i++) coarse_subdivision[i]=MPI_PROC_NULL;
1707         for (i=0;i<n_subdomains;i++) coarse_subdivision[ranks_stretching_ratio*i]=(PetscInt)(metis_coarse_subdivision[i]);
1708         ierr = PetscFree(metis_coarse_subdivision);CHKERRQ(ierr);
1709       }
1710 
1711       /* Create new communicator for coarse problem splitting the old one */
1712       if ( !(rank_prec_comm%procs_jumps_coarse_comm) && rank_prec_comm < procs_jumps_coarse_comm*n_parts ){
1713         coarse_color=0;              /* for communicator splitting */
1714         active_rank=rank_prec_comm;  /* for insertion of matrix values */
1715       }
1716       /* procs with coarse_color = MPI_UNDEFINED will get coarse_comm = MPI_COMM_NULL (per the MPI standard);
1717          key = rank_prec_comm -> keep the same ordering of ranks from the old communicator to the new one */
1718       ierr = MPI_Comm_split(prec_comm,coarse_color,rank_prec_comm,&coarse_comm);CHKERRQ(ierr);
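      /* Worked example: with 8 active ranks, coarsening_ratio = 2 and
         ranks_stretching_ratio = 1, we have procs_jumps_coarse_comm = 2 and
         n_parts = 4, so ranks 0,2,4,6 get coarse_color = 0 and enter coarse_comm,
         while the remaining ranks end up with coarse_comm = MPI_COMM_NULL */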
1719 
1720       if ( coarse_color == 0 ) {
1721         ierr = MPI_Comm_size(coarse_comm,&size_coarse_comm);CHKERRQ(ierr);
1722         ierr = MPI_Comm_rank(coarse_comm,&rank_coarse_comm);CHKERRQ(ierr);
1723       } else {
1724         rank_coarse_comm = MPI_PROC_NULL;
1725       }
1726 
1727       /* the master process takes care of arranging and distributing the coarse information */
1728       if (rank_coarse_comm == master_proc) {
1729         ierr = PetscMalloc (size_coarse_comm*sizeof(PetscMPIInt),&displacements_recv);CHKERRQ(ierr);
1730         ierr = PetscMalloc (size_coarse_comm*sizeof(PetscMPIInt),&total_count_recv);CHKERRQ(ierr);
1731         ierr = PetscMalloc (n_subdomains*sizeof(PetscMPIInt),&total_ranks_recv);CHKERRQ(ierr);
1732         /* some initializations */
1733         displacements_recv[0]=0;
1734         ierr = PetscMemzero(total_count_recv,size_coarse_comm*sizeof(PetscMPIInt));CHKERRQ(ierr);
1735         /* count from how many processes the j-th process of the coarse decomposition will receive data */
1736         for (j=0;j<size_coarse_comm;j++) {
1737           for (i=0;i<size_prec_comm;i++) {
1738             if (coarse_subdivision[i]==j) total_count_recv[j]++;
1739           }
1740         }
1741         /* displacements needed for scatterv of total_ranks_recv */
1742         for (i=1; i<size_coarse_comm; i++) displacements_recv[i]=displacements_recv[i-1]+total_count_recv[i-1];
1743 
1744         /* Now fill total_ranks_recv properly -> each coarse process will receive the ranks (in the prec_comm communicator) of the processes that will send data to it */
1745         ierr = PetscMemzero(total_count_recv,size_coarse_comm*sizeof(PetscMPIInt));CHKERRQ(ierr);
1746         for (j=0;j<size_coarse_comm;j++) {
1747           for (i=0;i<size_prec_comm;i++) {
1748             if (coarse_subdivision[i]==j) {
1749               total_ranks_recv[displacements_recv[j]+total_count_recv[j]]=i;
1750               total_count_recv[j]+=1;
1751             }
1752           }
1753         }
1761 
1762         /* identify new decomposition in terms of ranks in the old communicator */
1763         for (i=0;i<n_subdomains;i++) {
1764           coarse_subdivision[ranks_stretching_ratio*i]=coarse_subdivision[ranks_stretching_ratio*i]*procs_jumps_coarse_comm;
1765         }
1766         /*printf("coarse_subdivision in old end new ranks\n");
1767         for (i=0;i<size_prec_comm;i++)
1768           if (coarse_subdivision[i]!=MPI_PROC_NULL) {
1769             printf("%d=(%d %d), ",i,coarse_subdivision[i],coarse_subdivision[i]/procs_jumps_coarse_comm);
1770           } else {
1771             printf("%d=(%d %d), ",i,coarse_subdivision[i],coarse_subdivision[i]);
1772           }
1773         printf("\n");*/
1774       }
1775 
1776       /* Scatter new decomposition for send details */
1777       ierr = MPI_Scatter(&coarse_subdivision[0],1,MPIU_INT,&rank_coarse_proc_send_to,1,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr);
1778       /* Scatter receiving details to members of coarse decomposition */
1779       if ( coarse_color == 0) {
1780         ierr = MPI_Scatter(&total_count_recv[0],1,MPIU_INT,&count_recv,1,MPIU_INT,master_proc,coarse_comm);CHKERRQ(ierr);
1781         ierr = PetscMalloc (count_recv*sizeof(PetscMPIInt),&ranks_recv);CHKERRQ(ierr);
1782         ierr = MPI_Scatterv(&total_ranks_recv[0],total_count_recv,displacements_recv,MPIU_INT,&ranks_recv[0],count_recv,MPIU_INT,master_proc,coarse_comm);CHKERRQ(ierr);
1783       }
1792 
1793       if (rank_prec_comm == master_proc) {
1794         ierr = PetscFree(coarse_subdivision);CHKERRQ(ierr);
1795         ierr = PetscFree(total_count_recv);CHKERRQ(ierr);
1796         ierr = PetscFree(total_ranks_recv);CHKERRQ(ierr);
1797         ierr = PetscFree(displacements_recv);CHKERRQ(ierr);
1798       }
1799 #endif
1800       break;
1801     }
1802 
1803     case(REPLICATED_BDDC):
1804 
1805       pcbddc->coarse_communications_type = GATHERS_BDDC;
1806       coarse_mat_type = MATSEQAIJ;
1807       coarse_pc_type  = PCLU;
1808       coarse_ksp_type  = KSPPREONLY;
1809       coarse_comm = PETSC_COMM_SELF;
1810       active_rank = rank_prec_comm;
1811       break;
1812 
1813     case(PARALLEL_BDDC):
1814 
1815       pcbddc->coarse_communications_type = SCATTERS_BDDC;
1816       coarse_mat_type = MATAIJ;
1817       coarse_pc_type  = PCREDUNDANT;
1818       coarse_ksp_type  = KSPPREONLY;
1819       coarse_comm = prec_comm;
1820       active_rank = rank_prec_comm;
1821       break;
1822 
1823     case(SEQUENTIAL_BDDC):
1824       pcbddc->coarse_communications_type = GATHERS_BDDC;
1825       coarse_mat_type = MATAIJ;
1826       coarse_pc_type = PCLU;
1827       coarse_ksp_type  = KSPPREONLY;
1828       coarse_comm = PETSC_COMM_SELF;
1829       active_rank = master_proc;
1830       break;
1831   }
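  /* Summary of the settings chosen in the switch above:
       MULTILEVEL_BDDC  -> SCATTERS_BDDC, MATIS,     PCBDDC,      KSPRICHARDSON, comm = split of prec_comm
       REPLICATED_BDDC  -> GATHERS_BDDC,  MATSEQAIJ, PCLU,        KSPPREONLY,    comm = PETSC_COMM_SELF (every rank active)
       PARALLEL_BDDC    -> SCATTERS_BDDC, MATAIJ,    PCREDUNDANT, KSPPREONLY,    comm = prec_comm
       SEQUENTIAL_BDDC  -> GATHERS_BDDC,  MATAIJ,    PCLU,        KSPPREONLY,    comm = PETSC_COMM_SELF (master only) */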
1832 
1833   switch(pcbddc->coarse_communications_type){
1834 
1835     case(SCATTERS_BDDC):
1836       {
1837         if (pcbddc->coarse_problem_type==MULTILEVEL_BDDC) {
1838 
1839           IS coarse_IS;
1840 
1841           if (pcbddc->coarsening_ratio == 1) {
1842             ins_local_primal_size = pcbddc->local_primal_size;
1843             ins_local_primal_indices = pcbddc->local_primal_indices;
1844             if (coarse_color == 0) { ierr = PetscFree(ranks_recv);CHKERRQ(ierr); }
1845             /* nonzeros */
1846             ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&dnz);CHKERRQ(ierr);
1847             ierr = PetscMemzero(dnz,ins_local_primal_size*sizeof(PetscInt));CHKERRQ(ierr);
1848             for (i=0;i<ins_local_primal_size;i++) {
1849               dnz[i] = ins_local_primal_size;
1850             }
1851           } else {
1852             PetscMPIInt send_size;
1853             PetscMPIInt *send_buffer;
1854             PetscInt    *aux_ins_indices;
1855             PetscInt    ii,jj;
1856             MPI_Request *requests;
1857 
1858             ierr = PetscMalloc(count_recv*sizeof(PetscMPIInt),&localdispl2);CHKERRQ(ierr);
1859             /* reusing pcbddc->local_primal_displacements and pcbddc->replicated_primal_size */
1860             ierr = PetscFree(pcbddc->local_primal_displacements);CHKERRQ(ierr);
1861             ierr = PetscMalloc((count_recv+1)*sizeof(PetscMPIInt),&pcbddc->local_primal_displacements);CHKERRQ(ierr);
1862             pcbddc->replicated_primal_size = count_recv;
1863             j = 0;
1864             for (i=0;i<count_recv;i++) {
1865               pcbddc->local_primal_displacements[i] = j;
1866               j += pcbddc->local_primal_sizes[ranks_recv[i]];
1867             }
1868             pcbddc->local_primal_displacements[count_recv] = j;
1869             ierr = PetscMalloc(j*sizeof(PetscMPIInt),&pcbddc->replicated_local_primal_indices);CHKERRQ(ierr);
1870             /* allocate auxiliary space */
1871             ierr = PetscMalloc(count_recv*sizeof(PetscMPIInt),&localsizes2);CHKERRQ(ierr);
1872             ierr = PetscMalloc(pcbddc->coarse_size*sizeof(PetscInt),&aux_ins_indices);CHKERRQ(ierr);
1873             ierr = PetscMemzero(aux_ins_indices,pcbddc->coarse_size*sizeof(PetscInt));CHKERRQ(ierr);
1874             /* allocate request array for message passing */
1875             ierr = PetscMalloc((count_recv+1)*sizeof(MPI_Request),&requests);CHKERRQ(ierr);
1876             for (i=0;i<count_recv+1;i++) { requests[i]=MPI_REQUEST_NULL; }
1877             /* send indices to be inserted */
1878             for (i=0;i<count_recv;i++) {
1879               send_size = pcbddc->local_primal_sizes[ranks_recv[i]];
1880               ierr = MPI_Irecv(&pcbddc->replicated_local_primal_indices[pcbddc->local_primal_displacements[i]],send_size,MPIU_INT,ranks_recv[i],999,prec_comm,&requests[i]);CHKERRQ(ierr);
1881             }
1882             if (rank_coarse_proc_send_to != MPI_PROC_NULL ) {
1883               send_size = pcbddc->local_primal_size;
1884               ierr = PetscMalloc(send_size*sizeof(PetscMPIInt),&send_buffer);CHKERRQ(ierr);
1885               for (i=0;i<send_size;i++) {
1886                 send_buffer[i]=(PetscMPIInt)pcbddc->local_primal_indices[i];
1887               }
1888               ierr = MPI_Isend(send_buffer,send_size,MPIU_INT,rank_coarse_proc_send_to,999,prec_comm,&requests[count_recv]);CHKERRQ(ierr);
1889             }
1890             ierr = MPI_Waitall(count_recv+1,requests,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
1891             if (rank_coarse_proc_send_to != MPI_PROC_NULL ) {
1892               ierr = PetscFree(send_buffer);CHKERRQ(ierr);
1893             }
1894             j = 0;
1895             for (i=0;i<count_recv;i++) {
1896               ii = pcbddc->local_primal_displacements[i+1]-pcbddc->local_primal_displacements[i];
1897               localsizes2[i] = ii*ii;
1898               localdispl2[i] = j;
1899               j += localsizes2[i];
1900               jj = pcbddc->local_primal_displacements[i];
1901               /* count how many coarse subdomains share each coarse node */
1902               for (k=0;k<ii;k++) {
1903                 aux_ins_indices[pcbddc->replicated_local_primal_indices[jj+k]] += 1;
1904               }
1905             }
1906             /* temp_coarse_mat_vals used to store matrix values to be received */
1907             ierr = PetscMalloc(j*sizeof(PetscScalar),&temp_coarse_mat_vals);CHKERRQ(ierr);
1908             /* evaluate how many values I will insert in coarse mat */
1909             ins_local_primal_size = 0;
1910             for (i=0;i<pcbddc->coarse_size;i++) {
1911               if (aux_ins_indices[i]) {
1912                 ins_local_primal_size++;
1913               }
1914             }
1915             /* evaluate indices I will insert in coarse mat */
1916             ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
1917             j = 0;
1918             for (i=0;i<pcbddc->coarse_size;i++) {
1919               if (aux_ins_indices[i]) {
1920                 ins_local_primal_indices[j] = i;
1921                 j++;
1922               }
1923             }
1924             /* processes participating in the coarse problem receive matrix data from the processes that send to them */
1925             for (i=0;i<count_recv;i++) {
1926               ierr = MPI_Irecv(&temp_coarse_mat_vals[localdispl2[i]],localsizes2[i],MPIU_SCALAR,ranks_recv[i],666,prec_comm,&requests[i]);CHKERRQ(ierr);
1927             }
1928             if (rank_coarse_proc_send_to != MPI_PROC_NULL ) {
1929               send_size = pcbddc->local_primal_size*pcbddc->local_primal_size;
1930               ierr = MPI_Isend(&coarse_submat_vals[0],send_size,MPIU_SCALAR,rank_coarse_proc_send_to,666,prec_comm,&requests[count_recv]);CHKERRQ(ierr);
1931             }
1932             ierr = MPI_Waitall(count_recv+1,requests,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
1933             /* nonzeros */
1934             ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&dnz);CHKERRQ(ierr);
1935             ierr = PetscMemzero(dnz,ins_local_primal_size*sizeof(PetscInt));CHKERRQ(ierr);
1936             /* use aux_ins_indices to realize a global to local mapping */
1937             j=0;
1938             for (i=0;i<pcbddc->coarse_size;i++) {
1939               if (aux_ins_indices[i]==0) {
1940                 aux_ins_indices[i]=-1;
1941               } else {
1942                 aux_ins_indices[i]=j;
1943                 j++;
1944               }
1945             }
1946             for (i=0;i<count_recv;i++) {
1947               j = pcbddc->local_primal_sizes[ranks_recv[i]];
1948               for (k=0;k<j;k++) {
1949                 dnz[aux_ins_indices[pcbddc->replicated_local_primal_indices[pcbddc->local_primal_displacements[i]+k]]] += j;
1950               }
1951             }
1952             /* check: a row cannot have more nonzeros than the number of local coarse dofs */
1953             for (i=0;i<ins_local_primal_size;i++) {
1954               if (dnz[i] > ins_local_primal_size) {
1955                 dnz[i] = ins_local_primal_size;
1956               }
1957             }
1958             ierr = PetscFree(requests);CHKERRQ(ierr);
1959             ierr = PetscFree(aux_ins_indices);CHKERRQ(ierr);
1960             if (coarse_color == 0) { ierr = PetscFree(ranks_recv);CHKERRQ(ierr); }
1961           }
1962           /* create local to global mapping needed by coarse MATIS */
1963           if (coarse_comm != MPI_COMM_NULL ) {ierr = MPI_Comm_free(&coarse_comm);CHKERRQ(ierr);}
1964           coarse_comm = prec_comm;
1965           active_rank = rank_prec_comm;
1966           ierr = ISCreateGeneral(coarse_comm,ins_local_primal_size,ins_local_primal_indices,PETSC_COPY_VALUES,&coarse_IS);CHKERRQ(ierr);
1967           ierr = ISLocalToGlobalMappingCreateIS(coarse_IS,&coarse_ISLG);CHKERRQ(ierr);
1968           ierr = ISDestroy(&coarse_IS);CHKERRQ(ierr);
1969         } else if (pcbddc->coarse_problem_type==PARALLEL_BDDC) {
1970           /* arrays for values insertion */
1971           ins_local_primal_size = pcbddc->local_primal_size;
1972           ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
1973           ierr = PetscMalloc(ins_local_primal_size*ins_local_primal_size*sizeof(PetscScalar),&ins_coarse_mat_vals);CHKERRQ(ierr);
1974           for (j=0;j<ins_local_primal_size;j++){
1975             ins_local_primal_indices[j]=pcbddc->local_primal_indices[j];
1976             for (i=0;i<ins_local_primal_size;i++) {
1977               ins_coarse_mat_vals[j*ins_local_primal_size+i]=coarse_submat_vals[j*ins_local_primal_size+i];
1978             }
1979           }
1980         }
1981         break;
1982 
1983     }
1984 
1985     case(GATHERS_BDDC):
1986       {
1987 
1988         PetscMPIInt mysize,mysize2;
1989         PetscMPIInt *send_buffer;
1990 
1991         if (rank_prec_comm==active_rank) {
1992           ierr = PetscMalloc ( pcbddc->replicated_primal_size*sizeof(PetscMPIInt),&pcbddc->replicated_local_primal_indices);CHKERRQ(ierr);
1993           ierr = PetscMalloc ( pcbddc->replicated_primal_size*sizeof(PetscScalar),&pcbddc->replicated_local_primal_values);CHKERRQ(ierr);
1994           ierr = PetscMalloc ( size_prec_comm*sizeof(PetscMPIInt),&localsizes2);CHKERRQ(ierr);
1995           ierr = PetscMalloc ( size_prec_comm*sizeof(PetscMPIInt),&localdispl2);CHKERRQ(ierr);
1996           /* arrays for values insertion */
1997           for (i=0;i<size_prec_comm;i++) localsizes2[i]=pcbddc->local_primal_sizes[i]*pcbddc->local_primal_sizes[i];
1998           localdispl2[0]=0;
1999           for (i=1;i<size_prec_comm;i++) localdispl2[i]=localsizes2[i-1]+localdispl2[i-1];
2000           j=0;
2001           for (i=0;i<size_prec_comm;i++) j+=localsizes2[i];
2002           ierr = PetscMalloc ( j*sizeof(PetscScalar),&temp_coarse_mat_vals);CHKERRQ(ierr);
2003         }
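        /* Worked example: local_primal_sizes = {2,3} gives localsizes2 = {4,9}
           (each process contributes its dense local_primal_size^2 block) and
           localdispl2 = {0,4}, so temp_coarse_mat_vals holds 4+9 = 13 scalars */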
2004 
2005         mysize=pcbddc->local_primal_size;
2006         mysize2=pcbddc->local_primal_size*pcbddc->local_primal_size;
2007         ierr = PetscMalloc(mysize*sizeof(PetscMPIInt),&send_buffer);CHKERRQ(ierr);
2008         for (i=0; i<mysize; i++) send_buffer[i]=(PetscMPIInt)pcbddc->local_primal_indices[i];
2009 
2010         if (pcbddc->coarse_problem_type == SEQUENTIAL_BDDC){
2011           ierr = MPI_Gatherv(send_buffer,mysize,MPIU_INT,&pcbddc->replicated_local_primal_indices[0],pcbddc->local_primal_sizes,pcbddc->local_primal_displacements,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr);
2012           ierr = MPI_Gatherv(&coarse_submat_vals[0],mysize2,MPIU_SCALAR,&temp_coarse_mat_vals[0],localsizes2,localdispl2,MPIU_SCALAR,master_proc,prec_comm);CHKERRQ(ierr);
2013         } else {
2014           ierr = MPI_Allgatherv(send_buffer,mysize,MPIU_INT,&pcbddc->replicated_local_primal_indices[0],pcbddc->local_primal_sizes,pcbddc->local_primal_displacements,MPIU_INT,prec_comm);CHKERRQ(ierr);
2015           ierr = MPI_Allgatherv(&coarse_submat_vals[0],mysize2,MPIU_SCALAR,&temp_coarse_mat_vals[0],localsizes2,localdispl2,MPIU_SCALAR,prec_comm);CHKERRQ(ierr);
2016         }
2017         ierr = PetscFree(send_buffer);CHKERRQ(ierr);
2018         break;
2019       } /* end of switch on the communications associated with the coarse problem */
2020   }
2021 
2022   /* Now create and fill up coarse matrix */
2023   if ( rank_prec_comm == active_rank ) {
2024 
2025     Mat matis_coarse_local_mat;
2026 
2027     if (pcbddc->coarse_problem_type != MULTILEVEL_BDDC) {
2028       ierr = MatCreate(coarse_comm,&pcbddc->coarse_mat);CHKERRQ(ierr);
2029       ierr = MatSetSizes(pcbddc->coarse_mat,PETSC_DECIDE,PETSC_DECIDE,pcbddc->coarse_size,pcbddc->coarse_size);CHKERRQ(ierr);
2030       ierr = MatSetType(pcbddc->coarse_mat,coarse_mat_type);CHKERRQ(ierr);
2031       ierr = MatSetOptionsPrefix(pcbddc->coarse_mat,"coarse_");CHKERRQ(ierr);
2032       ierr = MatSetFromOptions(pcbddc->coarse_mat);CHKERRQ(ierr);
2033       ierr = MatSetUp(pcbddc->coarse_mat);CHKERRQ(ierr);
2034       ierr = MatSetOption(pcbddc->coarse_mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr); /* local values stored in column major */
2035       ierr = MatSetOption(pcbddc->coarse_mat,MAT_IGNORE_ZERO_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
2036     } else {
2037       ierr = MatCreateIS(coarse_comm,1,PETSC_DECIDE,PETSC_DECIDE,pcbddc->coarse_size,pcbddc->coarse_size,coarse_ISLG,&pcbddc->coarse_mat);CHKERRQ(ierr);
2038       ierr = MatSetUp(pcbddc->coarse_mat);CHKERRQ(ierr);
2039       ierr = MatISGetLocalMat(pcbddc->coarse_mat,&matis_coarse_local_mat);CHKERRQ(ierr);
2040       ierr = MatSetOptionsPrefix(pcbddc->coarse_mat,"coarse_");CHKERRQ(ierr);
2041       ierr = MatSetFromOptions(pcbddc->coarse_mat);CHKERRQ(ierr);
2042       ierr = MatSetUp(matis_coarse_local_mat);CHKERRQ(ierr);
2043       ierr = MatSetOption(matis_coarse_local_mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr); /* local values stored in column major */
2044       ierr = MatSetOption(matis_coarse_local_mat,MAT_IGNORE_ZERO_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
2045     }
2046     /* preallocation */
2047     if (pcbddc->coarse_problem_type != MULTILEVEL_BDDC) {
2048 
2049       PetscInt lrows,lcols,bs;
2050 
2051       ierr = MatGetLocalSize(pcbddc->coarse_mat,&lrows,&lcols);CHKERRQ(ierr);
2052       ierr = MatPreallocateInitialize(coarse_comm,lrows,lcols,dnz,onz);CHKERRQ(ierr);
2053       ierr = MatGetBlockSize(pcbddc->coarse_mat,&bs);CHKERRQ(ierr);
2054 
2055       if (pcbddc->coarse_problem_type == PARALLEL_BDDC) {
2056 
2057         Vec         vec_dnz,vec_onz;
2058         PetscScalar *my_dnz,*my_onz,*array;
2059         PetscInt    *mat_ranges,*row_ownership;
2060         PetscInt    coarse_index_row,coarse_index_col,owner;
2061 
2062         ierr = VecCreate(prec_comm,&vec_dnz);CHKERRQ(ierr);
2063         ierr = VecSetBlockSize(vec_dnz,bs);CHKERRQ(ierr);
2064         ierr = VecSetSizes(vec_dnz,PETSC_DECIDE,pcbddc->coarse_size);CHKERRQ(ierr);
2065         ierr = VecSetType(vec_dnz,VECMPI);CHKERRQ(ierr);
2066         ierr = VecDuplicate(vec_dnz,&vec_onz);CHKERRQ(ierr);
2067 
2068         ierr = PetscMalloc(pcbddc->local_primal_size*sizeof(PetscScalar),&my_dnz);CHKERRQ(ierr);
2069         ierr = PetscMalloc(pcbddc->local_primal_size*sizeof(PetscScalar),&my_onz);CHKERRQ(ierr);
2070         ierr = PetscMemzero(my_dnz,pcbddc->local_primal_size*sizeof(PetscScalar));CHKERRQ(ierr);
2071         ierr = PetscMemzero(my_onz,pcbddc->local_primal_size*sizeof(PetscScalar));CHKERRQ(ierr);
2072 
2073         ierr = PetscMalloc(pcbddc->coarse_size*sizeof(PetscInt),&row_ownership);CHKERRQ(ierr);
2074         ierr = MatGetOwnershipRanges(pcbddc->coarse_mat,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
2075         for (i=0;i<size_prec_comm;i++) {
2076           for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
2077             row_ownership[j]=i;
2078           }
2079         }
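        /* Worked example: if coarse_size = 6 is split as rows {0,1,2} on rank 0
           and {3,4,5} on rank 1, then row_ownership = {0,0,0,1,1,1}; a coupling
           between coarse dofs 1 and 4 is counted below as off-diagonal (my_onz)
           both for row 1 and, by symmetry, for row 4 */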
2080 
2081         for (i=0;i<pcbddc->local_primal_size;i++) {
2082           coarse_index_row = pcbddc->local_primal_indices[i];
2083           owner = row_ownership[coarse_index_row];
2084           for (j=i;j<pcbddc->local_primal_size;j++) {
2085             owner = row_ownership[coarse_index_row];
2086             coarse_index_col = pcbddc->local_primal_indices[j];
2087             if (coarse_index_col > mat_ranges[owner]-1 && coarse_index_col < mat_ranges[owner+1] ) {
2088               my_dnz[i] += 1.0;
2089             } else {
2090               my_onz[i] += 1.0;
2091             }
2092             if (i != j) {
2093               owner = row_ownership[coarse_index_col];
2094               if (coarse_index_row > mat_ranges[owner]-1 && coarse_index_row < mat_ranges[owner+1] ) {
2095                 my_dnz[j] += 1.0;
2096               } else {
2097                 my_onz[j] += 1.0;
2098               }
2099             }
2100           }
2101         }
2102         ierr = VecSet(vec_dnz,0.0);CHKERRQ(ierr);
2103         ierr = VecSet(vec_onz,0.0);CHKERRQ(ierr);
2104         if (pcbddc->local_primal_size) {
2105           ierr = VecSetValues(vec_dnz,pcbddc->local_primal_size,pcbddc->local_primal_indices,my_dnz,ADD_VALUES);CHKERRQ(ierr);
2106           ierr = VecSetValues(vec_onz,pcbddc->local_primal_size,pcbddc->local_primal_indices,my_onz,ADD_VALUES);CHKERRQ(ierr);
2107         }
2108         ierr = VecAssemblyBegin(vec_dnz);CHKERRQ(ierr);
2109         ierr = VecAssemblyBegin(vec_onz);CHKERRQ(ierr);
2110         ierr = VecAssemblyEnd(vec_dnz);CHKERRQ(ierr);
2111         ierr = VecAssemblyEnd(vec_onz);CHKERRQ(ierr);
2112         j = mat_ranges[rank_prec_comm+1]-mat_ranges[rank_prec_comm];
2113         ierr = VecGetArray(vec_dnz,&array);CHKERRQ(ierr);
2114         for (i=0; i<j; i++) dnz[i] = (PetscInt)PetscRealPart(array[i]);
2115 
2116         ierr = VecRestoreArray(vec_dnz,&array);CHKERRQ(ierr);
2117         ierr = VecGetArray(vec_onz,&array);CHKERRQ(ierr);
2118         for (i=0;i<j;i++) onz[i] = (PetscInt)PetscRealPart(array[i]);
2119 
2120         ierr = VecRestoreArray(vec_onz,&array);CHKERRQ(ierr);
2121         ierr = PetscFree(my_dnz);CHKERRQ(ierr);
2122         ierr = PetscFree(my_onz);CHKERRQ(ierr);
2123         ierr = PetscFree(row_ownership);CHKERRQ(ierr);
2124         ierr = VecDestroy(&vec_dnz);CHKERRQ(ierr);
2125         ierr = VecDestroy(&vec_onz);CHKERRQ(ierr);
2126       } else {
2127         for (k=0;k<size_prec_comm;k++){
2128           offset=pcbddc->local_primal_displacements[k];
2129           offset2=localdispl2[k];
2130           ins_local_primal_size = pcbddc->local_primal_sizes[k];
2131           ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
2132           for (j=0;j<ins_local_primal_size;j++){
2133             ins_local_primal_indices[j]=(PetscInt)pcbddc->replicated_local_primal_indices[offset+j];
2134           }
2135           for (j=0;j<ins_local_primal_size;j++) {
2136             ierr = MatPreallocateSet(ins_local_primal_indices[j],ins_local_primal_size,ins_local_primal_indices,dnz,onz);CHKERRQ(ierr);
2137           }
2138           ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr);
2139         }
2140       }
2141 
2142       /* check: clamp overestimated counts to the admissible ranges */
2143       for (i=0;i<lrows;i++) {
2144         if (dnz[i]>lcols) dnz[i]=lcols;
2145         if (onz[i]>pcbddc->coarse_size-lcols) onz[i]=pcbddc->coarse_size-lcols;
2146       }
2147       ierr = MatSeqAIJSetPreallocation(pcbddc->coarse_mat,0,dnz);CHKERRQ(ierr);
2148       ierr = MatMPIAIJSetPreallocation(pcbddc->coarse_mat,0,dnz,0,onz);CHKERRQ(ierr);
2149       ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
2150     } else {
2151       ierr = MatSeqAIJSetPreallocation(matis_coarse_local_mat,0,dnz);CHKERRQ(ierr);
2152       ierr = PetscFree(dnz);CHKERRQ(ierr);
2153     }
2154     /* insert values */
2155     if (pcbddc->coarse_problem_type == PARALLEL_BDDC) {
2156       ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,ADD_VALUES);CHKERRQ(ierr);
2157     } else if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
2158       if (pcbddc->coarsening_ratio == 1) {
2159         ins_coarse_mat_vals = coarse_submat_vals;
2160         ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,INSERT_VALUES);CHKERRQ(ierr);
2161       } else {
2162         ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr);
2163         for (k=0;k<pcbddc->replicated_primal_size;k++) {
2164           offset = pcbddc->local_primal_displacements[k];
2165           offset2 = localdispl2[k];
2166           ins_local_primal_size = pcbddc->local_primal_displacements[k+1]-pcbddc->local_primal_displacements[k];
2167           ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
2168           for (j=0;j<ins_local_primal_size;j++){
2169             ins_local_primal_indices[j]=(PetscInt)pcbddc->replicated_local_primal_indices[offset+j];
2170           }
2171           ins_coarse_mat_vals = &temp_coarse_mat_vals[offset2];
2172           ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,ADD_VALUES);CHKERRQ(ierr);
2173           ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr);
2174         }
2175       }
2176       ins_local_primal_indices = 0;
2177       ins_coarse_mat_vals = 0;
2178     } else {
2179       for (k=0;k<size_prec_comm;k++){
2180         offset=pcbddc->local_primal_displacements[k];
2181         offset2=localdispl2[k];
2182         ins_local_primal_size = pcbddc->local_primal_sizes[k];
2183         ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
2184         for (j=0;j<ins_local_primal_size;j++){
2185           ins_local_primal_indices[j]=(PetscInt)pcbddc->replicated_local_primal_indices[offset+j];
2186         }
2187         ins_coarse_mat_vals = &temp_coarse_mat_vals[offset2];
2188         ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,ADD_VALUES);CHKERRQ(ierr);
2189         ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr);
2190       }
2191       ins_local_primal_indices = 0;
2192       ins_coarse_mat_vals = 0;
2193     }
2194     ierr = MatAssemblyBegin(pcbddc->coarse_mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2195     ierr = MatAssemblyEnd(pcbddc->coarse_mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2196     /* symmetry of coarse matrix */
2197     if (issym) {
2198       ierr = MatSetOption(pcbddc->coarse_mat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2199     }
2200     ierr = MatGetVecs(pcbddc->coarse_mat,&pcbddc->coarse_vec,&pcbddc->coarse_rhs);CHKERRQ(ierr);
2201   }
2202 
2203   /* create loc to glob scatters if needed */
2204   if (pcbddc->coarse_communications_type == SCATTERS_BDDC) {
2205      IS local_IS,global_IS;
2206      ierr = ISCreateStride(PETSC_COMM_SELF,pcbddc->local_primal_size,0,1,&local_IS);CHKERRQ(ierr);
2207      ierr = ISCreateGeneral(PETSC_COMM_SELF,pcbddc->local_primal_size,pcbddc->local_primal_indices,PETSC_COPY_VALUES,&global_IS);CHKERRQ(ierr);
2208      ierr = VecScatterCreate(pcbddc->vec1_P,local_IS,pcbddc->coarse_vec,global_IS,&pcbddc->coarse_loc_to_glob);CHKERRQ(ierr);
2209      ierr = ISDestroy(&local_IS);CHKERRQ(ierr);
2210      ierr = ISDestroy(&global_IS);CHKERRQ(ierr);
2211   }
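  /* The scatter just created maps the locally ordered coarse residual (entries
     0..local_primal_size-1 of vec1_P) to the global rows listed in
     local_primal_indices of coarse_vec and back; e.g. with
     local_primal_indices = {7,9}, local entries 0 and 1 correspond to global
     rows 7 and 9 */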
2212 
2213   /* free memory no longer needed */
2214   if (coarse_ISLG)              { ierr = ISLocalToGlobalMappingDestroy(&coarse_ISLG);CHKERRQ(ierr); }
2215   if (ins_local_primal_indices) { ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr); }
2216   if (ins_coarse_mat_vals)      { ierr = PetscFree(ins_coarse_mat_vals);CHKERRQ(ierr); }
2217   if (localsizes2)              { ierr = PetscFree(localsizes2);CHKERRQ(ierr); }
2218   if (localdispl2)              { ierr = PetscFree(localdispl2);CHKERRQ(ierr); }
2219   if (temp_coarse_mat_vals)     { ierr = PetscFree(temp_coarse_mat_vals);CHKERRQ(ierr); }
2220 
2221   /* Compute coarse null space */
2222   CoarseNullSpace = 0;
2223   if (pcbddc->NullSpace) {
2224     ierr = PCBDDCNullSpaceAssembleCoarse(pc,&CoarseNullSpace);CHKERRQ(ierr);
2225   }
2226 
2227   /* KSP for coarse problem */
2228   if (rank_prec_comm == active_rank) {
2229     PetscBool isbddc=PETSC_FALSE;
2230 
2231     ierr = KSPCreate(coarse_comm,&pcbddc->coarse_ksp);CHKERRQ(ierr);
2232     ierr = PetscObjectIncrementTabLevel((PetscObject)pcbddc->coarse_ksp,(PetscObject)pc,1);CHKERRQ(ierr);
2233     ierr = KSPSetOperators(pcbddc->coarse_ksp,pcbddc->coarse_mat,pcbddc->coarse_mat,SAME_PRECONDITIONER);CHKERRQ(ierr);
2234     ierr = KSPSetTolerances(pcbddc->coarse_ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,max_it_coarse_ksp);CHKERRQ(ierr);
2235     ierr = KSPSetType(pcbddc->coarse_ksp,coarse_ksp_type);CHKERRQ(ierr);
2236     ierr = KSPGetPC(pcbddc->coarse_ksp,&pc_temp);CHKERRQ(ierr);
2237     ierr = PCSetType(pc_temp,coarse_pc_type);CHKERRQ(ierr);
2238     /* Allow user's customization */
2239     ierr = KSPSetOptionsPrefix(pcbddc->coarse_ksp,"coarse_");CHKERRQ(ierr);
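    /* With this prefix, run-time options such as
         -coarse_ksp_type preonly -coarse_pc_type cholesky
       will be picked up by the KSPSetFromOptions call below */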
2240     /* Set Up PC for coarse problem BDDC */
2241     if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
2242       i = pcbddc->current_level+1;
2243       ierr = PCBDDCSetLevel(pc_temp,i);CHKERRQ(ierr);
2244       ierr = PCBDDCSetCoarseningRatio(pc_temp,pcbddc->coarsening_ratio);CHKERRQ(ierr);
2245       ierr = PCBDDCSetMaxLevels(pc_temp,pcbddc->max_levels);CHKERRQ(ierr);
2246       ierr = PCBDDCSetCoarseProblemType(pc_temp,MULTILEVEL_BDDC);CHKERRQ(ierr);
2247       if (CoarseNullSpace) {
2248         ierr = PCBDDCSetNullSpace(pc_temp,CoarseNullSpace);CHKERRQ(ierr);
2249       }
2250       if (dbg_flag) {
2251         ierr = PetscViewerASCIIPrintf(viewer,"----------------Level %d: Setting up level %d---------------\n",pcbddc->current_level,i);CHKERRQ(ierr);
2252         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
2253       }
2254     } else {
2255       if (CoarseNullSpace) {
2256         ierr = KSPSetNullSpace(pcbddc->coarse_ksp,CoarseNullSpace);CHKERRQ(ierr);
2257       }
2258     }
2259     ierr = KSPSetFromOptions(pcbddc->coarse_ksp);CHKERRQ(ierr);
2260     ierr = KSPSetUp(pcbddc->coarse_ksp);CHKERRQ(ierr);
2261 
2262     ierr = KSPGetTolerances(pcbddc->coarse_ksp,NULL,NULL,NULL,&j);CHKERRQ(ierr);
2263     ierr = KSPGetPC(pcbddc->coarse_ksp,&pc_temp);CHKERRQ(ierr);
2264     ierr = PetscObjectTypeCompare((PetscObject)pc_temp,PCBDDC,&isbddc);CHKERRQ(ierr);
2265     if (j == 1) {
2266       ierr = KSPSetNormType(pcbddc->coarse_ksp,KSP_NORM_NONE);CHKERRQ(ierr);
2267       if (isbddc) {
2268         ierr = PCBDDCSetUseExactDirichlet(pc_temp,PETSC_FALSE);CHKERRQ(ierr);
2269       }
2270     }
2271   }
2272   /* Check coarse problem if requested */
2273   if ( dbg_flag && rank_prec_comm == active_rank ) {
2274     KSP check_ksp;
2275     PC  check_pc;
2276     Vec check_vec;
2277     PetscReal   abs_infty_error,infty_error,lambda_min,lambda_max;
2278     KSPType check_ksp_type;
2279 
2280     /* Create a KSP object suitable for estimating the extreme eigenvalues */
2281     ierr = KSPCreate(coarse_comm,&check_ksp);CHKERRQ(ierr);
2282     ierr = KSPSetOperators(check_ksp,pcbddc->coarse_mat,pcbddc->coarse_mat,SAME_PRECONDITIONER);CHKERRQ(ierr);
2283     ierr = KSPSetTolerances(check_ksp,1.e-12,1.e-12,PETSC_DEFAULT,pcbddc->coarse_size);CHKERRQ(ierr);
2284     if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
2285       if (issym) check_ksp_type = KSPCG;
2286       else check_ksp_type = KSPGMRES;
2287       ierr = KSPSetComputeSingularValues(check_ksp,PETSC_TRUE);CHKERRQ(ierr);
2288     } else {
2289       check_ksp_type = KSPPREONLY;
2290     }
2291     ierr = KSPSetType(check_ksp,check_ksp_type);CHKERRQ(ierr);
2292     ierr = KSPGetPC(pcbddc->coarse_ksp,&check_pc);CHKERRQ(ierr);
2293     ierr = KSPSetPC(check_ksp,check_pc);CHKERRQ(ierr);
2294     ierr = KSPSetUp(check_ksp);CHKERRQ(ierr);
2295     /* create random vec */
2296     ierr = VecDuplicate(pcbddc->coarse_vec,&check_vec);CHKERRQ(ierr);
2297     ierr = VecSetRandom(check_vec,NULL);CHKERRQ(ierr);
2298     if (CoarseNullSpace) {
2299       ierr = MatNullSpaceRemove(CoarseNullSpace,check_vec);CHKERRQ(ierr);
2300     }
2301     ierr = MatMult(pcbddc->coarse_mat,check_vec,pcbddc->coarse_rhs);CHKERRQ(ierr);
2302     /* solve coarse problem */
2303     ierr = KSPSolve(check_ksp,pcbddc->coarse_rhs,pcbddc->coarse_vec);CHKERRQ(ierr);
2304     if (CoarseNullSpace) {
2305       ierr = MatNullSpaceRemove(CoarseNullSpace,pcbddc->coarse_vec);CHKERRQ(ierr);
2306     }
2307     /* check coarse problem residual error */
2308     ierr = VecAXPY(check_vec,-1.0,pcbddc->coarse_vec);CHKERRQ(ierr);
2309     ierr = VecNorm(check_vec,NORM_INFINITY,&infty_error);CHKERRQ(ierr);
2310     ierr = MatMult(pcbddc->coarse_mat,check_vec,pcbddc->coarse_rhs);CHKERRQ(ierr);
2311     ierr = VecNorm(pcbddc->coarse_rhs,NORM_INFINITY,&abs_infty_error);CHKERRQ(ierr);
2312     ierr = VecDestroy(&check_vec);CHKERRQ(ierr);
2313     /* get eigenvalue estimation if inexact */
2314     if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
2315       ierr = KSPComputeExtremeSingularValues(check_ksp,&lambda_max,&lambda_min);CHKERRQ(ierr);
2316       ierr = KSPGetIterationNumber(check_ksp,&k);CHKERRQ(ierr);
2317       ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem eigenvalues estimated with %d iterations of %s.\n",k,check_ksp_type);CHKERRQ(ierr);
2318       ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem eigenvalues: % 1.14e %1.14e\n",lambda_min,lambda_max);CHKERRQ(ierr);
2319     }
2320     ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem exact infty_error   : %1.14e\n",infty_error);CHKERRQ(ierr);
2321     ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem residual infty_error: %1.14e\n",abs_infty_error);CHKERRQ(ierr);
2322     ierr = KSPDestroy(&check_ksp);CHKERRQ(ierr);
2323   }
2324   if (dbg_flag) {
2325     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
2326   }
2327   ierr = MatNullSpaceDestroy(&CoarseNullSpace);CHKERRQ(ierr);
2328 
2329   PetscFunctionReturn(0);
2330 }
2331 
2332