xref: /petsc/src/ksp/pc/impls/bddc/bddc.c (revision 5ce978aba59d24001dc7096786ee80b307d7800e)
1 /* TODOLIST
2    DofSplitting and DM attached to pc?
3    Change SetNeumannBoundaries to SetNeumannBoundariesLocal and provide new SetNeumannBoundaries (same Dirichlet)
4    change how to deal with the coarse problem (PCBDDCSetCoarseEnvironment):
5      - simplify coarse problem structure -> PCBDDC or PCREDUNDANT, nothing else -> same comm for all levels?
6      - remove coarse enums and allow use of PCBDDCGetCoarseKSP
7      - remove metis dependency -> use MatPartitioning for multilevel -> Assemble serial adjacency in PCBDDCAnalyzeInterface?
8    code refactoring:
9      - pick up better names for static functions
10    change options structure:
11      - insert BDDC into MG framework?
12    provide other ops? Ask to developers
13    remove all unused printf
14    man pages
15 */
16 
17 /* ----------------------------------------------------------------------------------------------------------------------------------------------
18    Implementation of BDDC preconditioner based on:
19    C. Dohrmann "An approximate BDDC preconditioner", Numerical Linear Algebra with Applications Volume 14, Issue 2, pages 149-168, March 2007
20    ---------------------------------------------------------------------------------------------------------------------------------------------- */
21 
22 #include "bddc.h" /*I "petscpc.h" I*/  /* includes for fortran wrappers */
23 #include "bddcprivate.h"
24 #include <petscblaslapack.h>
25 
26 /* prototypes for static functions contained in bddc.c */
27 static PetscErrorCode PCBDDCSetUseExactDirichlet(PC,PetscBool);
28 static PetscErrorCode PCBDDCSetLevel(PC,PetscInt);
29 static PetscErrorCode PCBDDCCoarseSetUp(PC);
30 static PetscErrorCode PCBDDCSetUpCoarseEnvironment(PC,PetscScalar*);
31 
32 /* -------------------------------------------------------------------------- */
33 #undef __FUNCT__
34 #define __FUNCT__ "PCSetFromOptions_BDDC"
35 PetscErrorCode PCSetFromOptions_BDDC(PC pc)
36 {
37   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
38   PetscErrorCode ierr;
39 
40   PetscFunctionBegin;
41   ierr = PetscOptionsHead("BDDC options");CHKERRQ(ierr);
42   /* Verbose debugging of main data structures */
43   ierr = PetscOptionsBool("-pc_bddc_check_all"       ,"Verbose (debugging) output for PCBDDC"                       ,"none",pcbddc->dbg_flag      ,&pcbddc->dbg_flag      ,NULL);CHKERRQ(ierr);
44   /* Some customization for default primal space */
45   ierr = PetscOptionsBool("-pc_bddc_vertices_only"   ,"Use only vertices in coarse space (i.e. discard constraints)","none",pcbddc->vertices_flag   ,&pcbddc->vertices_flag   ,NULL);CHKERRQ(ierr);
46   ierr = PetscOptionsBool("-pc_bddc_constraints_only","Use only constraints in coarse space (i.e. discard vertices)","none",pcbddc->constraints_flag,&pcbddc->constraints_flag,NULL);CHKERRQ(ierr);
47   ierr = PetscOptionsBool("-pc_bddc_faces_only"      ,"Use only faces among constraints of coarse space (i.e. discard edges)"         ,"none",pcbddc->faces_flag      ,&pcbddc->faces_flag      ,NULL);CHKERRQ(ierr);
48   ierr = PetscOptionsBool("-pc_bddc_edges_only"      ,"Use only edges among constraints of coarse space (i.e. discard faces)"         ,"none",pcbddc->edges_flag      ,&pcbddc->edges_flag      ,NULL);CHKERRQ(ierr);
49   /* Coarse solver context */
50   static const char * const avail_coarse_problems[] = {"sequential","replicated","parallel","multilevel","CoarseProblemType","PC_BDDC_",0}; /*order of choiches depends on ENUM defined in bddc.h */
51   ierr = PetscOptionsEnum("-pc_bddc_coarse_problem_type","Set coarse problem type","none",avail_coarse_problems,(PetscEnum)pcbddc->coarse_problem_type,(PetscEnum*)&pcbddc->coarse_problem_type,NULL);CHKERRQ(ierr);
52   /* Two different application of BDDC to the whole set of dofs, internal and interface */
53   ierr = PetscOptionsBool("-pc_bddc_switch_preconditioning_type","Switch between M_2 (default) and M_3 preconditioners (as defined by Dohrmann)","none",pcbddc->inexact_prec_type,&pcbddc->inexact_prec_type,NULL);CHKERRQ(ierr);
54   ierr = PetscOptionsBool("-pc_bddc_use_change_of_basis","Use change of basis approach for primal space","none",pcbddc->use_change_of_basis,&pcbddc->use_change_of_basis,NULL);CHKERRQ(ierr);
55   ierr = PetscOptionsBool("-pc_bddc_use_change_on_faces","Use change of basis approach for face constraints","none",pcbddc->use_change_on_faces,&pcbddc->use_change_on_faces,NULL);CHKERRQ(ierr);
56   if (!pcbddc->use_change_of_basis) {
57     pcbddc->use_change_on_faces = PETSC_FALSE;
58   }
59   ierr = PetscOptionsInt("-pc_bddc_coarsening_ratio","Set coarsening ratio used in multilevel coarsening","none",pcbddc->coarsening_ratio,&pcbddc->coarsening_ratio,NULL);CHKERRQ(ierr);
60   ierr = PetscOptionsInt("-pc_bddc_max_levels","Set maximum number of levels for multilevel","none",pcbddc->max_levels,&pcbddc->max_levels,NULL);CHKERRQ(ierr);
61   ierr = PetscOptionsBool("-pc_bddc_use_deluxe_scaling","Use deluxe scaling for BDDC","none",pcbddc->use_deluxe_scaling,&pcbddc->use_deluxe_scaling,NULL);CHKERRQ(ierr);
62   ierr = PetscOptionsTail();CHKERRQ(ierr);
63   PetscFunctionReturn(0);
64 }
65 /* -------------------------------------------------------------------------- */
66 #undef __FUNCT__
67 #define __FUNCT__ "PCBDDCSetPrimalVerticesLocalIS_BDDC"
68 static PetscErrorCode PCBDDCSetPrimalVerticesLocalIS_BDDC(PC pc, IS PrimalVertices)
69 {
70   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
71   PetscErrorCode ierr;
72 
73   PetscFunctionBegin;
74   ierr = ISDestroy(&pcbddc->user_primal_vertices);CHKERRQ(ierr);
75   ierr = PetscObjectReference((PetscObject)PrimalVertices);CHKERRQ(ierr);
76   pcbddc->user_primal_vertices = PrimalVertices;
77   PetscFunctionReturn(0);
78 }
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetPrimalVerticesLocalIS"
/*@
 PCBDDCSetPrimalVerticesLocalIS - Set user defined primal vertices in PCBDDC.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  PrimalVertices - index set of primal vertices in local numbering

   Level: intermediate

   Notes:
   The IS is referenced, not copied; the caller retains ownership of PrimalVertices.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCSetPrimalVerticesLocalIS(PC pc, IS PrimalVertices)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(PrimalVertices,IS_CLASSID,2);
  /* dispatch to the PCBDDC implementation; silently a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCBDDCSetPrimalVerticesLocalIS_C",(PC,IS),(pc,PrimalVertices));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
106 /* -------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetCoarseProblemType_BDDC"
/* Store the requested coarse problem type in the PCBDDC context */
static PetscErrorCode PCBDDCSetCoarseProblemType_BDDC(PC pc, CoarseProblemType CPT)
{
  PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  pcbddc->coarse_problem_type = CPT;
  PetscFunctionReturn(0);
}
117 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetCoarseProblemType"
/*@
 PCBDDCSetCoarseProblemType - Set coarse problem type in PCBDDC.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  CPT - the coarse problem type (see the CoarseProblemType enum in bddc.h:
         sequential, replicated, parallel or multilevel)

   Level: intermediate

   Notes:
   Not collective but all procs must call with same arguments.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCSetCoarseProblemType(PC pc, CoarseProblemType CPT)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the PCBDDC implementation; silently a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCBDDCSetCoarseProblemType_C",(PC,CoarseProblemType),(pc,CPT));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
145 /* -------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetCoarseningRatio_BDDC"
/* Store the multilevel coarsening ratio in the PCBDDC context */
static PetscErrorCode PCBDDCSetCoarseningRatio_BDDC(PC pc,PetscInt k)
{
  PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  pcbddc->coarsening_ratio=k;
  PetscFunctionReturn(0);
}
156 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetCoarseningRatio"
/*@
 PCBDDCSetCoarseningRatio - Set coarsening ratio used in multilevel coarsening

   Logically collective on PC

   Input Parameters:
+  pc - the preconditioning context
-  k - coarsening ratio

   Level: intermediate

   Notes:
   Approximately k subdomains at the finer level will be aggregated into a
   single subdomain at the coarser level.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCSetCoarseningRatio(PC pc,PetscInt k)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the PCBDDC implementation; silently a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCBDDCSetCoarseningRatio_C",(PC,PetscInt),(pc,k));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
185 /* -------------------------------------------------------------------------- */
186 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetMaxLevels_BDDC"
/* Store the maximum number of multilevel levels in the PCBDDC context */
static PetscErrorCode PCBDDCSetMaxLevels_BDDC(PC pc,PetscInt max_levels)
{
  PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  pcbddc->max_levels=max_levels;
  PetscFunctionReturn(0);
}
197 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetMaxLevels"
/*@
 PCBDDCSetMaxLevels - Sets the maximum number of levels within the multilevel approach.

   Logically collective on PC

   Input Parameters:
+  pc - the preconditioning context
-  max_levels - the maximum number of levels

   Level: intermediate

   Notes:
   Default value is 1, i.e. the coarse problem will be solved inexactly with one
   application of the PCBDDC preconditioner if the multilevel approach is requested.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCSetMaxLevels(PC pc,PetscInt max_levels)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the PCBDDC implementation; silently a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCBDDCSetMaxLevels_C",(PC,PetscInt),(pc,max_levels));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
227 /* -------------------------------------------------------------------------- */
228 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetNullSpace_BDDC"
/* Store (a reference to) the null space of the global operator in the PCBDDC context.
   The new object is referenced before the old one is destroyed, so passing in the
   currently stored null space is safe. */
static PetscErrorCode PCBDDCSetNullSpace_BDDC(PC pc,MatNullSpace NullSpace)
{
  PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)NullSpace);CHKERRQ(ierr);
  ierr = MatNullSpaceDestroy(&pcbddc->NullSpace);CHKERRQ(ierr);
  pcbddc->NullSpace=NullSpace;
  PetscFunctionReturn(0);
}
242 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetNullSpace"
/*@
 PCBDDCSetNullSpace - Set NullSpace of global operator of BDDC preconditioned mat.

   Logically collective on PC and MatNullSpace

   Input Parameters:
+  pc - the preconditioning context
-  NullSpace - Null space of the linear operator to be preconditioned.

   Level: intermediate

   Notes:
   The null space object is referenced, not copied; the caller retains ownership.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCSetNullSpace(PC pc,MatNullSpace NullSpace)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(NullSpace,MAT_NULLSPACE_CLASSID,2);
  /* dispatch to the PCBDDC implementation; silently a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCBDDCSetNullSpace_C",(PC,MatNullSpace),(pc,NullSpace));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
270 /* -------------------------------------------------------------------------- */
271 
272 #undef __FUNCT__
273 #define __FUNCT__ "PCBDDCSetDirichletBoundaries_BDDC"
274 static PetscErrorCode PCBDDCSetDirichletBoundaries_BDDC(PC pc,IS DirichletBoundaries)
275 {
276   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
277   PetscErrorCode ierr;
278 
279   PetscFunctionBegin;
280   ierr = ISDestroy(&pcbddc->DirichletBoundaries);CHKERRQ(ierr);
281   ierr = PetscObjectReference((PetscObject)DirichletBoundaries);CHKERRQ(ierr);
282   pcbddc->DirichletBoundaries=DirichletBoundaries;
283   PetscFunctionReturn(0);
284 }
285 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetDirichletBoundaries"
/*@
 PCBDDCSetDirichletBoundaries - Set index set defining subdomain part (in local ordering)
                              of Dirichlet boundaries for the global problem.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  DirichletBoundaries - sequential index set defining the subdomain part of Dirichlet boundaries

   Level: intermediate

   Notes:
   NOTE(review): earlier text claimed the IS could be NULL, but the
   PetscValidHeaderSpecific check below rejects a NULL argument — confirm the
   intended contract. The IS is referenced, not copied.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCSetDirichletBoundaries(PC pc,IS DirichletBoundaries)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(DirichletBoundaries,IS_CLASSID,2);
  /* dispatch to the PCBDDC implementation; silently a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCBDDCSetDirichletBoundaries_C",(PC,IS),(pc,DirichletBoundaries));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
314 /* -------------------------------------------------------------------------- */
315 
316 #undef __FUNCT__
317 #define __FUNCT__ "PCBDDCSetNeumannBoundaries_BDDC"
318 static PetscErrorCode PCBDDCSetNeumannBoundaries_BDDC(PC pc,IS NeumannBoundaries)
319 {
320   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
321   PetscErrorCode ierr;
322 
323   PetscFunctionBegin;
324   ierr = ISDestroy(&pcbddc->NeumannBoundaries);CHKERRQ(ierr);
325   ierr = PetscObjectReference((PetscObject)NeumannBoundaries);CHKERRQ(ierr);
326   pcbddc->NeumannBoundaries=NeumannBoundaries;
327   PetscFunctionReturn(0);
328 }
329 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetNeumannBoundaries"
/*@
 PCBDDCSetNeumannBoundaries - Set index set defining subdomain part (in local ordering)
                              of Neumann boundaries for the global problem.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  NeumannBoundaries - sequential index set defining the subdomain part of Neumann boundaries

   Level: intermediate

   Notes:
   NOTE(review): earlier text claimed the IS could be NULL, but the
   PetscValidHeaderSpecific check below rejects a NULL argument — confirm the
   intended contract. The IS is referenced, not copied.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCSetNeumannBoundaries(PC pc,IS NeumannBoundaries)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(NeumannBoundaries,IS_CLASSID,2);
  /* dispatch to the PCBDDC implementation; silently a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCBDDCSetNeumannBoundaries_C",(PC,IS),(pc,NeumannBoundaries));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
358 /* -------------------------------------------------------------------------- */
359 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCGetDirichletBoundaries_BDDC"
/* Return the stored Dirichlet boundary IS (borrowed reference; may be NULL if unset) */
static PetscErrorCode PCBDDCGetDirichletBoundaries_BDDC(PC pc,IS *DirichletBoundaries)
{
  PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  *DirichletBoundaries = pcbddc->DirichletBoundaries;
  PetscFunctionReturn(0);
}
370 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCGetDirichletBoundaries"
/*@
 PCBDDCGetDirichletBoundaries - Get index set defining subdomain part (in local ordering)
                                of Dirichlet boundaries for the global problem.

   Not collective

   Input Parameters:
+  pc - the preconditioning context

   Output Parameters:
+  DirichletBoundaries - index set defining the subdomain part of Dirichlet boundaries

   Level: intermediate

   Notes:
   The returned IS is a borrowed reference; the caller must not destroy it.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCGetDirichletBoundaries(PC pc,IS *DirichletBoundaries)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod (unlike PetscTryMethod) errors if pc is not of type PCBDDC */
  ierr = PetscUseMethod(pc,"PCBDDCGetDirichletBoundaries_C",(PC,IS*),(pc,DirichletBoundaries));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
400 /* -------------------------------------------------------------------------- */
401 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCGetNeumannBoundaries_BDDC"
/* Return the stored Neumann boundary IS (borrowed reference; may be NULL if unset) */
static PetscErrorCode PCBDDCGetNeumannBoundaries_BDDC(PC pc,IS *NeumannBoundaries)
{
  PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  *NeumannBoundaries = pcbddc->NeumannBoundaries;
  PetscFunctionReturn(0);
}
412 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCGetNeumannBoundaries"
/*@
 PCBDDCGetNeumannBoundaries - Get index set defining subdomain part (in local ordering)
                              of Neumann boundaries for the global problem.

   Not collective

   Input Parameters:
+  pc - the preconditioning context

   Output Parameters:
+  NeumannBoundaries - index set defining the subdomain part of Neumann boundaries

   Level: intermediate

   Notes:
   The returned IS is a borrowed reference; the caller must not destroy it.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCGetNeumannBoundaries(PC pc,IS *NeumannBoundaries)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod (unlike PetscTryMethod) errors if pc is not of type PCBDDC */
  ierr = PetscUseMethod(pc,"PCBDDCGetNeumannBoundaries_C",(PC,IS*),(pc,NeumannBoundaries));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
442 /* -------------------------------------------------------------------------- */
443 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetLocalAdjacencyGraph_BDDC"
/* Store the user-provided CSR adjacency of the local matrix in the graph structure.
   With PETSC_COPY_VALUES the arrays are duplicated; with PETSC_OWN_POINTER the
   pointers are taken over (the wrapper guarantees no other copymode reaches here). */
static PetscErrorCode PCBDDCSetLocalAdjacencyGraph_BDDC(PC pc, PetscInt nvtxs,const PetscInt xadj[],const PetscInt adjncy[], PetscCopyMode copymode)
{
  PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
  PCBDDCGraph    mat_graph = pcbddc->mat_graph;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* free old CSR */
  ierr = PCBDDCGraphResetCSR(mat_graph);CHKERRQ(ierr);
  /* get CSR into graph structure */
  if (copymode == PETSC_COPY_VALUES) {
    /* xadj has nvtxs+1 entries; xadj[nvtxs] is the total number of adjacency entries */
    ierr = PetscMalloc((nvtxs+1)*sizeof(PetscInt),&mat_graph->xadj);CHKERRQ(ierr);
    ierr = PetscMalloc(xadj[nvtxs]*sizeof(PetscInt),&mat_graph->adjncy);CHKERRQ(ierr);
    ierr = PetscMemcpy(mat_graph->xadj,xadj,(nvtxs+1)*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(mat_graph->adjncy,adjncy,xadj[nvtxs]*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (copymode == PETSC_OWN_POINTER) {
    /* take ownership of the caller's arrays (const is cast away by design) */
    mat_graph->xadj = (PetscInt*)xadj;
    mat_graph->adjncy = (PetscInt*)adjncy;
  }
  PetscFunctionReturn(0);
}
467 
468 #undef __FUNCT__
469 #define __FUNCT__ "PCBDDCSetLocalAdjacencyGraph"
470 /*@
471  PCBDDCSetLocalAdjacencyGraph - Set CSR graph of local matrix for use of PCBDDC.
472 
473    Not collective
474 
475    Input Parameters:
476 +  pc - the preconditioning context
477 -  nvtxs - number of local vertices of the graph
478 -  xadj, adjncy - the CSR graph
479 -  copymode - either PETSC_COPY_VALUES or PETSC_OWN_POINTER. In the former case the user must free the array passed in;
480                                                              in the latter case, memory must be obtained with PetscMalloc.
481 
482    Level: intermediate
483 
484    Notes:
485 
486 .seealso: PCBDDC
487 @*/
488 PetscErrorCode PCBDDCSetLocalAdjacencyGraph(PC pc,PetscInt nvtxs,const PetscInt xadj[],const PetscInt adjncy[], PetscCopyMode copymode)
489 {
490   PetscInt       nrows,ncols;
491   Mat_IS         *matis = (Mat_IS*)pc->pmat->data;
492   PetscErrorCode ierr;
493 
494   PetscFunctionBegin;
495   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
496   PetscValidIntPointer(xadj,3);
497   PetscValidIntPointer(xadj,4);
498   if (copymode != PETSC_COPY_VALUES && copymode != PETSC_OWN_POINTER) {
499     SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unsupported copy mode %d in %s\n",copymode,__FUNCT__);
500   }
501   /* pcis info could not be available at this point */
502   ierr = MatGetSize(matis->A,&nrows,&ncols);CHKERRQ(ierr);
503   if (nvtxs != nrows) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local adjacency size %d passed in %s differs from local problem size %d!\n",nvtxs,__FUNCT__,nrows);
504   ierr = PetscTryMethod(pc,"PCBDDCSetLocalAdjacencyGraph_C",(PC,PetscInt,const PetscInt[],const PetscInt[],PetscCopyMode),(pc,nvtxs,xadj,adjncy,copymode));CHKERRQ(ierr);
505   PetscFunctionReturn(0);
506 }
507 /* -------------------------------------------------------------------------- */
508 
509 #undef __FUNCT__
510 #define __FUNCT__ "PCBDDCSetDofsSplitting_BDDC"
511 static PetscErrorCode PCBDDCSetDofsSplitting_BDDC(PC pc,PetscInt n_is, IS ISForDofs[])
512 {
513   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
514   PetscInt i;
515   PetscErrorCode ierr;
516 
517   PetscFunctionBegin;
518   /* Destroy ISes if they were already set */
519   for (i=0;i<pcbddc->n_ISForDofs;i++) {
520     ierr = ISDestroy(&pcbddc->ISForDofs[i]);CHKERRQ(ierr);
521   }
522   ierr = PetscFree(pcbddc->ISForDofs);CHKERRQ(ierr);
523   /* allocate space then set */
524   ierr = PetscMalloc(n_is*sizeof(IS),&pcbddc->ISForDofs);CHKERRQ(ierr);
525   for (i=0;i<n_is;i++) {
526     ierr = PetscObjectReference((PetscObject)ISForDofs[i]);CHKERRQ(ierr);
527     pcbddc->ISForDofs[i]=ISForDofs[i];
528   }
529   pcbddc->n_ISForDofs=n_is;
530   PetscFunctionReturn(0);
531 }
532 
#undef __FUNCT__
#define __FUNCT__ "PCBDDCSetDofsSplitting"
/*@
 PCBDDCSetDofsSplitting - Set index sets defining fields of local mat.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
.  n_is - number of index sets defining the fields
-  ISForDofs - array of IS describing the fields

   Level: intermediate

   Notes:
   The ISes are referenced, not copied; the caller retains ownership of the array
   and of each IS.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCSetDofsSplitting(PC pc,PetscInt n_is, IS ISForDofs[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the PCBDDC implementation; silently a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCBDDCSetDofsSplitting_C",(PC,PetscInt,IS[]),(pc,n_is,ISForDofs));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
560 /* -------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCPreSolve_BDDC"
/* -------------------------------------------------------------------------- */
/*
   PCPreSolve_BDDC - Changes the right hand side and (if necessary) the initial
                     guess if a transformation of basis approach has been selected.

   Input Parameter:
+  pc - the preconditioner context

   Application Interface Routine: PCPreSolve()

   Notes:
   The interface routine PCPreSolve() is not usually called directly by
   the user, but instead is called by KSPSolve().
*/
static PetscErrorCode PCPreSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
{
  PetscErrorCode ierr;
  PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
  PC_IS          *pcis = (PC_IS*)(pc->data);
  Mat_IS         *matis = (Mat_IS*)pc->pmat->data;
  Mat            temp_mat;
  IS             dirIS;
  PetscInt       dirsize,i,*is_indices;
  PetscScalar    *array_x,*array_diagonal;
  Vec            used_vec;
  PetscBool      guess_nonzero;

  PetscFunctionBegin;
  /* work on the caller's solution vector if given, otherwise on the internal
     temp_solution (both are referenced so the single VecDestroy below is correct) */
  if (x) {
    ierr = PetscObjectReference((PetscObject)x);CHKERRQ(ierr);
    used_vec = x;
  } else {
    ierr = PetscObjectReference((PetscObject)pcbddc->temp_solution);CHKERRQ(ierr);
    used_vec = pcbddc->temp_solution;
    ierr = VecSet(used_vec,0.0);CHKERRQ(ierr);
  }
  /* hack into ksp data structure PCPreSolve comes earlier in src/ksp/ksp/interface/itfunc.c */
  if (ksp) {
    ierr = KSPGetInitialGuessNonzero(ksp,&guess_nonzero);CHKERRQ(ierr);
    if ( !guess_nonzero ) {
      ierr = VecSet(used_vec,0.0);CHKERRQ(ierr);
    }
  }
  /* store the original rhs (restored in PCPostSolve_BDDC) */
  ierr = VecCopy(rhs,pcbddc->original_rhs);CHKERRQ(ierr);

  /* Take into account zeroed rows -> change rhs and store solution removed */
  /* vec2_N receives rhs_i/a_ii in local (subdomain) numbering; vec1_N receives the current guess */
  ierr = MatGetDiagonal(pc->pmat,pcis->vec1_global);CHKERRQ(ierr);
  ierr = VecPointwiseDivide(pcis->vec1_global,rhs,pcis->vec1_global);CHKERRQ(ierr);
  ierr = VecScatterBegin(matis->ctx,pcis->vec1_global,pcis->vec2_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(matis->ctx,pcis->vec1_global,pcis->vec2_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterBegin(matis->ctx,used_vec,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(matis->ctx,used_vec,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = PCBDDCGetDirichletBoundaries(pc,&dirIS);CHKERRQ(ierr);
  if (dirIS) {
    /* on Dirichlet dofs overwrite the guess with rhs_i/a_ii computed above */
    ierr = ISGetSize(dirIS,&dirsize);CHKERRQ(ierr);
    ierr = VecGetArray(pcis->vec1_N,&array_x);CHKERRQ(ierr);
    ierr = VecGetArray(pcis->vec2_N,&array_diagonal);CHKERRQ(ierr);
    ierr = ISGetIndices(dirIS,(const PetscInt**)&is_indices);CHKERRQ(ierr);
    for (i=0; i<dirsize; i++) array_x[is_indices[i]] = array_diagonal[is_indices[i]];
    ierr = ISRestoreIndices(dirIS,(const PetscInt**)&is_indices);CHKERRQ(ierr);
    ierr = VecRestoreArray(pcis->vec2_N,&array_diagonal);CHKERRQ(ierr);
    ierr = VecRestoreArray(pcis->vec1_N,&array_x);CHKERRQ(ierr);
  }
  /* scatter the (possibly modified) guess back to the global vector */
  ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(matis->ctx,pcis->vec1_N,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);

  /* remove the computed solution from the rhs: rhs <- rhs - A*used_vec */
  ierr = VecScale(used_vec,-1.0);CHKERRQ(ierr);
  ierr = MatMultAdd(pc->pmat,used_vec,rhs,rhs);CHKERRQ(ierr);
  ierr = VecScale(used_vec,-1.0);CHKERRQ(ierr);

  /* store partially computed solution and set initial guess */
  if (x) {
    ierr = VecCopy(used_vec,pcbddc->temp_solution);CHKERRQ(ierr);
    ierr = VecSet(used_vec,0.0);CHKERRQ(ierr);
    if (pcbddc->use_exact_dirichlet) {
      /* solve the interior (Dirichlet) problem exactly and use it as the initial guess */
      ierr = VecScatterBegin(pcis->global_to_D,rhs,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd  (pcis->global_to_D,rhs,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = KSPSolve(pcbddc->ksp_D,pcis->vec1_D,pcis->vec2_D);CHKERRQ(ierr);
      ierr = VecScatterBegin(pcis->global_to_D,pcis->vec2_D,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
      ierr = VecScatterEnd  (pcis->global_to_D,pcis->vec2_D,used_vec,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
      if (ksp) {
        ierr = KSPSetInitialGuessNonzero(ksp,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }

  /* rhs change of basis */
  if (pcbddc->use_change_of_basis) {
    /* swap pointers for local matrices (swapped back in PCPostSolve_BDDC) */
    temp_mat = matis->A;
    matis->A = pcbddc->local_mat;
    pcbddc->local_mat = temp_mat;
    /* Get local rhs and apply transformation of basis */
    ierr = VecScatterBegin(pcis->global_to_B,rhs,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd  (pcis->global_to_B,rhs,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* from original basis to modified basis */
    ierr = MatMultTranspose(pcbddc->ChangeOfBasisMatrix,pcis->vec1_B,pcis->vec2_B);CHKERRQ(ierr);
    /* put back modified values into the global vec using INSERT_VALUES copy mode */
    ierr = VecScatterBegin(pcis->global_to_B,pcis->vec2_B,rhs,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec2_B,rhs,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  }
  /* project out the null space from both the rhs and the guess */
  if (ksp && pcbddc->NullSpace) {
    ierr = MatNullSpaceRemove(pcbddc->NullSpace,used_vec,NULL);CHKERRQ(ierr);
    ierr = MatNullSpaceRemove(pcbddc->NullSpace,rhs,NULL);CHKERRQ(ierr);
  }
  /* drop the extra reference taken at the top */
  ierr = VecDestroy(&used_vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
673 /* -------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCPostSolve_BDDC"
/* -------------------------------------------------------------------------- */
/*
   PCPostSolve_BDDC - Changes the computed solution if a transformation of basis
                     approach has been selected. Also, restores rhs to its original state.

   Input Parameter:
+  pc - the preconditioner context

   Application Interface Routine: PCPostSolve()

   Notes:
   The interface routine PCPostSolve() is not usually called directly by
   the user, but instead is called by KSPSolve().
*/
static PetscErrorCode PCPostSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
{
  PetscErrorCode ierr;
  PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
  PC_IS          *pcis   = (PC_IS*)(pc->data);
  Mat_IS         *matis = (Mat_IS*)pc->pmat->data;
  Mat            temp_mat;

  PetscFunctionBegin;
  if (pcbddc->use_change_of_basis) {
    /* swap pointers for local matrices (undoes the swap done in PCPreSolve_BDDC) */
    temp_mat = matis->A;
    matis->A = pcbddc->local_mat;
    pcbddc->local_mat = temp_mat;
    /* restore rhs to its original state (saved in pcbddc->original_rhs by PCPreSolve_BDDC) */
    if (rhs) {
      ierr = VecCopy(pcbddc->original_rhs,rhs);CHKERRQ(ierr);
    }
    /* Get Local boundary and apply transformation of basis to solution vector */
    ierr = VecScatterBegin(pcis->global_to_B,x,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd  (pcis->global_to_B,x,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* from modified basis to original basis */
    ierr = MatMult(pcbddc->ChangeOfBasisMatrix,pcis->vec1_B,pcis->vec2_B);CHKERRQ(ierr);
    /* put back modified values into the global vec using INSERT_VALUES copy mode */
    ierr = VecScatterBegin(pcis->global_to_B,pcis->vec2_B,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec2_B,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  }
  /* add solution removed in presolve (stored in pcbddc->temp_solution) */
  if (x) {
    ierr = VecAXPY(x,1.0,pcbddc->temp_solution);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
723 /* -------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BDDC"
/* -------------------------------------------------------------------------- */
/*
   PCSetUp_BDDC - Prepares for the use of the BDDC preconditioner
                  by setting data structures and options.

   Input Parameter:
+  pc - the preconditioner context

   Application Interface Routine: PCSetUp()

   Notes:
   The interface routine PCSetUp() is not usually called directly by
   the user, but instead is called by PCApply() if necessary.
*/
PetscErrorCode PCSetUp_BDDC(PC pc)
{
  PetscErrorCode ierr;
  PC_BDDC*       pcbddc = (PC_BDDC*)pc->data;
  MatStructure   flag;
  /* which of the three setup phases must (re)run, decided from the operator's MatStructure */
  PetscBool      computeis,computetopography,computesolvers;

  PetscFunctionBegin;
  /* the following lines of code should be replaced by a better logic between PCIS, PCNN, PCBDDC and other nonoverlapping preconditioners */
  /* For BDDC we need to define a local "Neumann" problem different to that defined in PCISSetup
     So, we set to pcnone the Neumann problem of pcis in order to avoid unneeded computation
     Also, we decide to directly build the (same) Dirichlet problem */
  ierr = PetscOptionsSetValue("-is_localN_pc_type","none");CHKERRQ(ierr);
  ierr = PetscOptionsSetValue("-is_localD_pc_type","none");CHKERRQ(ierr);
  /* Get stdout for dbg */
  if (pcbddc->dbg_flag && !pcbddc->dbg_viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&pcbddc->dbg_viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(pcbddc->dbg_viewer,PETSC_TRUE);CHKERRQ(ierr);
  }
  /* first attempt to split work: on repeated setups, redo only what the
     operator change requires */
  if (pc->setupcalled) {
    computeis = PETSC_FALSE;
    ierr = PCGetOperators(pc,NULL,NULL,&flag);CHKERRQ(ierr);
    if (flag == SAME_PRECONDITIONER) {
      computetopography = PETSC_FALSE;
      computesolvers = PETSC_FALSE;
    } else if (flag == SAME_NONZERO_PATTERN) {
      /* values changed but structure did not: topology analysis stays valid */
      computetopography = PETSC_FALSE;
      computesolvers = PETSC_TRUE;
    } else { /* DIFFERENT_NONZERO_PATTERN */
      computetopography = PETSC_TRUE;
      computesolvers = PETSC_TRUE;
    }
  } else {
    /* first setup: everything must be built */
    computeis = PETSC_TRUE;
    computetopography = PETSC_TRUE;
    computesolvers = PETSC_TRUE;
  }
  /* Set up all the "iterative substructuring" common block */
  if (computeis) {
    ierr = PCISSetUp(pc);CHKERRQ(ierr);
  }
  /* Analyze interface and set up local constraint and change of basis matrices */
  if (computetopography) {
    /* reset data */
    ierr = PCBDDCResetTopography(pc);CHKERRQ(ierr);
    ierr = PCBDDCAnalyzeInterface(pc);CHKERRQ(ierr);
    ierr = PCBDDCConstraintsSetUp(pc);CHKERRQ(ierr);
  }
  if (computesolvers) {
    /* reset data */
    ierr = PCBDDCResetSolvers(pc);CHKERRQ(ierr);
    ierr = PCBDDCScalingDestroy(pc);CHKERRQ(ierr);
    /* Create coarse and local stuffs used for evaluating action of preconditioner */
    ierr = PCBDDCCoarseSetUp(pc);CHKERRQ(ierr);
    ierr = PCBDDCScalingSetUp(pc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
799 
800 /* -------------------------------------------------------------------------- */
801 /*
802    PCApply_BDDC - Applies the BDDC preconditioner to a vector.
803 
804    Input Parameters:
805 .  pc - the preconditioner context
806 .  r - input vector (global)
807 
808    Output Parameter:
809 .  z - output vector (global)
810 
811    Application Interface Routine: PCApply()
812  */
813 #undef __FUNCT__
814 #define __FUNCT__ "PCApply_BDDC"
PetscErrorCode PCApply_BDDC(PC pc,Vec r,Vec z)
{
  PC_IS             *pcis = (PC_IS*)(pc->data);
  PC_BDDC           *pcbddc = (PC_BDDC*)(pc->data);
  PetscErrorCode    ierr;
  const PetscScalar one = 1.0;
  const PetscScalar m_one = -1.0;
  const PetscScalar zero = 0.0;

/* This code is similar to that provided in nn.c for PCNN
   NN interface preconditioner changed to BDDC
   Added support for M_3 preconditioner in the reference article (code is active if pcbddc->inexact_prec_type = PETSC_TRUE) */

  PetscFunctionBegin;
  if (!pcbddc->use_exact_dirichlet) {
    /* First Dirichlet solve: solve on the interior dofs with the residual restricted there */
    ierr = VecScatterBegin(pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd  (pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = KSPSolve(pcbddc->ksp_D,pcis->vec1_D,pcis->vec2_D);CHKERRQ(ierr);
    /*
      Assembling right hand side for BDDC operator
      - pcis->vec1_D for the Dirichlet part (if needed, i.e. prec_flag=PETSC_TRUE)
      - pcis->vec1_B the interface part of the global vector z
    */
    /* temporarily negate vec2_D so the single MatMult below computes -A_BI*vec2_D;
       the sign is restored by the second VecScale */
    ierr = VecScale(pcis->vec2_D,m_one);CHKERRQ(ierr);
    ierr = MatMult(pcis->A_BI,pcis->vec2_D,pcis->vec1_B);CHKERRQ(ierr);
    if (pcbddc->inexact_prec_type) { ierr = MatMultAdd(pcis->A_II,pcis->vec2_D,pcis->vec1_D,pcis->vec1_D);CHKERRQ(ierr); }
    ierr = VecScale(pcis->vec2_D,m_one);CHKERRQ(ierr);
    /* accumulate the condensed interface residual into z, then restrict and scale it */
    ierr = VecCopy(r,z);CHKERRQ(ierr);
    ierr = VecScatterBegin(pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = PCBDDCScalingRestriction(pc,z,pcis->vec1_B);CHKERRQ(ierr);
  } else {
    /* NOTE(review): with use_exact_dirichlet the interior part is presumably handled
       outside this apply — TODO confirm; here only the scaled interface part of r is needed */
    ierr = VecSet(pcis->vec1_D,zero);CHKERRQ(ierr);
    ierr = VecSet(pcis->vec2_D,zero);CHKERRQ(ierr);
    ierr = PCBDDCScalingRestriction(pc,r,pcis->vec1_B);CHKERRQ(ierr);
  }

  /* Apply interface preconditioner
     input/output vecs: pcis->vec1_B and pcis->vec1_D */
  ierr = PCBDDCApplyInterfacePreconditioner(pc);CHKERRQ(ierr);

  /* Apply transpose of partition of unity operator: scaled extension of vec1_B into z */
  ierr = PCBDDCScalingExtension(pc,pcis->vec1_B,z);CHKERRQ(ierr);

  /* Second Dirichlet solve and assembling of output:
     correct the interior values given the interface values now stored in z */
  ierr = VecScatterBegin(pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = MatMult(pcis->A_IB,pcis->vec1_B,pcis->vec3_D);CHKERRQ(ierr);
  if (pcbddc->inexact_prec_type) { ierr = MatMultAdd(pcis->A_II,pcis->vec1_D,pcis->vec3_D,pcis->vec3_D);CHKERRQ(ierr); }
  ierr = KSPSolve(pcbddc->ksp_D,pcis->vec3_D,pcbddc->vec4_D);CHKERRQ(ierr);
  ierr = VecScale(pcbddc->vec4_D,m_one);CHKERRQ(ierr);
  if (pcbddc->inexact_prec_type) { ierr = VecAXPY (pcbddc->vec4_D,one,pcis->vec1_D);CHKERRQ(ierr); }
  ierr = VecAXPY (pcis->vec2_D,one,pcbddc->vec4_D);CHKERRQ(ierr);
  /* scatter the corrected interior values back into the global output vector */
  ierr = VecScatterBegin(pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
873 /* -------------------------------------------------------------------------- */
874 
875 #undef __FUNCT__
876 #define __FUNCT__ "PCDestroy_BDDC"
877 PetscErrorCode PCDestroy_BDDC(PC pc)
878 {
879   PC_BDDC        *pcbddc = (PC_BDDC*)pc->data;
880   PetscErrorCode ierr;
881 
882   PetscFunctionBegin;
883   /* free data created by PCIS */
884   ierr = PCISDestroy(pc);CHKERRQ(ierr);
885   /* free BDDC custom data  */
886   ierr = PCBDDCResetCustomization(pc);CHKERRQ(ierr);
887   /* destroy objects related to topography */
888   ierr = PCBDDCResetTopography(pc);CHKERRQ(ierr);
889   /* free allocated graph structure */
890   ierr = PetscFree(pcbddc->mat_graph);CHKERRQ(ierr);
891   /* free data for scaling operator */
892   ierr = PCBDDCScalingDestroy(pc);CHKERRQ(ierr);
893   /* free solvers stuff */
894   ierr = PCBDDCResetSolvers(pc);CHKERRQ(ierr);
895   /* remove functions */
896   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetPrimalVerticesLocalIS_C",NULL);CHKERRQ(ierr);
897   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseningRatio_C",NULL);CHKERRQ(ierr);
898   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetMaxLevels_C",NULL);CHKERRQ(ierr);
899   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNullSpace_C",NULL);CHKERRQ(ierr);
900   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundaries_C",NULL);CHKERRQ(ierr);
901   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundaries_C",NULL);CHKERRQ(ierr);
902   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundaries_C",NULL);CHKERRQ(ierr);
903   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundaries_C",NULL);CHKERRQ(ierr);
904   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseProblemType_C",NULL);CHKERRQ(ierr);
905   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplitting_C",NULL);CHKERRQ(ierr);
906   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",NULL);CHKERRQ(ierr);
907   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCCreateFETIDPOperators_C",NULL);CHKERRQ(ierr);
908   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetRHS_C",NULL);CHKERRQ(ierr);
909   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetSolution_C",NULL);CHKERRQ(ierr);
910   /* Free the private data structure */
911   ierr = PetscFree(pc->data);CHKERRQ(ierr);
912   PetscFunctionReturn(0);
913 }
914 /* -------------------------------------------------------------------------- */
915 
916 #undef __FUNCT__
917 #define __FUNCT__ "PCBDDCMatFETIDPGetRHS_BDDC"
/* Builds the right-hand side of the FETIDP flux system from the rhs of the
   original global linear system; standard_rhs is used as workspace and restored
   from pcbddc->original_rhs before returning. */
static PetscErrorCode PCBDDCMatFETIDPGetRHS_BDDC(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
{
  FETIDPMat_ctx  mat_ctx;
  PC_IS*         pcis;
  PC_BDDC*       pcbddc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr);
  /* PC_IS and PC_BDDC are two views of the same pc->data */
  pcis = (PC_IS*)mat_ctx->pc->data;
  pcbddc = (PC_BDDC*)mat_ctx->pc->data;

  /* change of basis for physical rhs if needed
     It also changes the rhs in case of dirichlet boundaries */
  (*mat_ctx->pc->ops->presolve)(mat_ctx->pc,NULL,standard_rhs,NULL);
  /* store vectors for computation of fetidp final solution */
  ierr = VecScatterBegin(pcis->global_to_D,standard_rhs,mat_ctx->temp_solution_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(pcis->global_to_D,standard_rhs,mat_ctx->temp_solution_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* scale rhs since it should be unassembled : TODO use counter scaling? (also below) */
  ierr = VecScatterBegin(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* Apply partition of unity */
  ierr = VecPointwiseMult(mat_ctx->temp_solution_B,pcis->D,mat_ctx->temp_solution_B);CHKERRQ(ierr);
  /* ierr = PCBDDCScalingRestriction(mat_ctx->pc,standard_rhs,mat_ctx->temp_solution_B);CHKERRQ(ierr); */
  if (!pcbddc->inexact_prec_type) {
    /* compute partially subassembled Schur complement right-hand side:
       f_B - A_BI * A_II^-1 * f_D, reassembled and rescaled on the interface */
    ierr = KSPSolve(pcbddc->ksp_D,mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr);
    ierr = MatMult(pcis->A_BI,pcis->vec1_D,pcis->vec1_B);CHKERRQ(ierr);
    ierr = VecAXPY(mat_ctx->temp_solution_B,-1.0,pcis->vec1_B);CHKERRQ(ierr);
    /* standard_rhs is reused here as assembly workspace; restored below */
    ierr = VecSet(standard_rhs,0.0);CHKERRQ(ierr);
    ierr = VecScatterBegin(pcis->global_to_B,mat_ctx->temp_solution_B,standard_rhs,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(pcis->global_to_B,mat_ctx->temp_solution_B,standard_rhs,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    /* ierr = PCBDDCScalingRestriction(mat_ctx->pc,standard_rhs,mat_ctx->temp_solution_B);CHKERRQ(ierr); */
    ierr = VecScatterBegin(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(pcis->global_to_B,standard_rhs,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecPointwiseMult(mat_ctx->temp_solution_B,pcis->D,mat_ctx->temp_solution_B);CHKERRQ(ierr);
  }
  /* BDDC rhs */
  ierr = VecCopy(mat_ctx->temp_solution_B,pcis->vec1_B);CHKERRQ(ierr);
  if (pcbddc->inexact_prec_type) {
    ierr = VecCopy(mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr);
  }
  /* apply BDDC */
  ierr = PCBDDCApplyInterfacePreconditioner(mat_ctx->pc);CHKERRQ(ierr);
  /* Application of B_delta and assembling of rhs for fetidp fluxes */
  ierr = VecSet(fetidp_flux_rhs,0.0);CHKERRQ(ierr);
  ierr = MatMult(mat_ctx->B_delta,pcis->vec1_B,mat_ctx->lambda_local);CHKERRQ(ierr);
  ierr = VecScatterBegin(mat_ctx->l2g_lambda,mat_ctx->lambda_local,fetidp_flux_rhs,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd  (mat_ctx->l2g_lambda,mat_ctx->lambda_local,fetidp_flux_rhs,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* restore original rhs (presumably saved into pcbddc->original_rhs by presolve — TODO confirm) */
  ierr = VecCopy(pcbddc->original_rhs,standard_rhs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
971 
972 #undef __FUNCT__
973 #define __FUNCT__ "PCBDDCMatFETIDPGetRHS"
974 /*@
975  PCBDDCMatFETIDPGetRHS - Get rhs for FETIDP linear system.
976 
977    Collective
978 
979    Input Parameters:
+  fetidp_mat   - the FETIDP mat obtained by a call to PCBDDCCreateFETIDPOperators
-  standard_rhs - the rhs of your linear system

   Output Parameters:
.  fetidp_flux_rhs   - the rhs of the FETIDP linear system
985 
986    Level: developer
987 
988    Notes:
989 
990 .seealso: PCBDDC
991 @*/
992 PetscErrorCode PCBDDCMatFETIDPGetRHS(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
993 {
994   FETIDPMat_ctx  mat_ctx;
995   PetscErrorCode ierr;
996 
997   PetscFunctionBegin;
998   ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr);
999   ierr = PetscTryMethod(mat_ctx->pc,"PCBDDCMatFETIDPGetRHS_C",(Mat,Vec,Vec),(fetidp_mat,standard_rhs,fetidp_flux_rhs));CHKERRQ(ierr);
1000   PetscFunctionReturn(0);
1001 }
1002 /* -------------------------------------------------------------------------- */
1003 
1004 #undef __FUNCT__
1005 #define __FUNCT__ "PCBDDCMatFETIDPGetSolution_BDDC"
/* Recovers the solution of the original global linear system from the
   FETIDP flux (multiplier) solution. */
static PetscErrorCode PCBDDCMatFETIDPGetSolution_BDDC(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
{
  FETIDPMat_ctx  mat_ctx;
  PC_IS*         pcis;
  PC_BDDC*       pcbddc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr);
  /* PC_IS and PC_BDDC are two views of the same pc->data */
  pcis = (PC_IS*)mat_ctx->pc->data;
  pcbddc = (PC_BDDC*)mat_ctx->pc->data;

  /* apply B_delta^T: bring the fluxes back to the local interface dofs */
  ierr = VecScatterBegin(mat_ctx->l2g_lambda,fetidp_flux_sol,mat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (mat_ctx->l2g_lambda,fetidp_flux_sol,mat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = MatMultTranspose(mat_ctx->B_delta,mat_ctx->lambda_local,pcis->vec1_B);CHKERRQ(ierr);
  /* compute rhs for BDDC application: temp_solution_B - B_delta^T lambda */
  ierr = VecAYPX(pcis->vec1_B,-1.0,mat_ctx->temp_solution_B);CHKERRQ(ierr);
  if (pcbddc->inexact_prec_type) {
    ierr = VecCopy(mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr);
  }
  /* apply BDDC */
  ierr = PCBDDCApplyInterfacePreconditioner(mat_ctx->pc);CHKERRQ(ierr);
  /* put interface values into standard global vector */
  ierr = VecScatterBegin(pcis->global_to_B,pcis->vec1_B,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec1_B,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  if (!pcbddc->inexact_prec_type) {
    /* compute values into the interior if solved for the partially subassembled Schur complement:
       u_I = A_II^-1 (f_D - A_IB u_B) */
    ierr = MatMult(pcis->A_IB,pcis->vec1_B,pcis->vec1_D);CHKERRQ(ierr);
    ierr = VecAXPY(mat_ctx->temp_solution_D,-1.0,pcis->vec1_D);CHKERRQ(ierr);
    ierr = KSPSolve(pcbddc->ksp_D,mat_ctx->temp_solution_D,pcis->vec1_D);CHKERRQ(ierr);
  }
  ierr = VecScatterBegin(pcis->global_to_D,pcis->vec1_D,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->global_to_D,pcis->vec1_D,standard_sol,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  /* final change of basis if needed
     It also sums the dirichlet part removed during RHS assembling */
  (*mat_ctx->pc->ops->postsolve)(mat_ctx->pc,NULL,NULL,standard_sol);
  PetscFunctionReturn(0);

}
1046 
1047 #undef __FUNCT__
1048 #define __FUNCT__ "PCBDDCMatFETIDPGetSolution"
1049 /*@
1050  PCBDDCMatFETIDPGetSolution - Get Solution for FETIDP linear system.
1051 
1052    Collective
1053 
1054    Input Parameters:
+  fetidp_mat      - the FETIDP mat obtained by a call to PCBDDCCreateFETIDPOperators
-  fetidp_flux_sol - the solution of the FETIDP linear system

   Output Parameters:
.  standard_sol      - the solution on the global domain
1060 
1061    Level: developer
1062 
1063    Notes:
1064 
1065 .seealso: PCBDDC
1066 @*/
1067 PetscErrorCode PCBDDCMatFETIDPGetSolution(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
1068 {
1069   FETIDPMat_ctx  mat_ctx;
1070   PetscErrorCode ierr;
1071 
1072   PetscFunctionBegin;
1073   ierr = MatShellGetContext(fetidp_mat,&mat_ctx);CHKERRQ(ierr);
1074   ierr = PetscTryMethod(mat_ctx->pc,"PCBDDCMatFETIDPGetSolution_C",(Mat,Vec,Vec),(fetidp_mat,fetidp_flux_sol,standard_sol));CHKERRQ(ierr);
1075   PetscFunctionReturn(0);
1076 }
1077 /* -------------------------------------------------------------------------- */
1078 
1079 extern PetscErrorCode FETIDPMatMult(Mat,Vec,Vec);
1080 extern PetscErrorCode PCBDDCDestroyFETIDPMat(Mat);
1081 extern PetscErrorCode FETIDPPCApply(PC,Vec,Vec);
1082 extern PetscErrorCode PCBDDCDestroyFETIDPPC(PC);
1083 
1084 #undef __FUNCT__
1085 #define __FUNCT__ "PCBDDCCreateFETIDPOperators_BDDC"
1086 static PetscErrorCode PCBDDCCreateFETIDPOperators_BDDC(PC pc, Mat *fetidp_mat, PC *fetidp_pc)
1087 {
1088 
1089   FETIDPMat_ctx  fetidpmat_ctx;
1090   Mat            newmat;
1091   FETIDPPC_ctx   fetidppc_ctx;
1092   PC             newpc;
1093   MPI_Comm       comm;
1094   PetscErrorCode ierr;
1095 
1096   PetscFunctionBegin;
1097   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
1098   /* FETIDP linear matrix */
1099   ierr = PCBDDCCreateFETIDPMatContext(pc,&fetidpmat_ctx);CHKERRQ(ierr);
1100   ierr = PCBDDCSetupFETIDPMatContext(fetidpmat_ctx);CHKERRQ(ierr);
1101   ierr = MatCreateShell(comm,PETSC_DECIDE,PETSC_DECIDE,fetidpmat_ctx->n_lambda,fetidpmat_ctx->n_lambda,fetidpmat_ctx,&newmat);CHKERRQ(ierr);
1102   ierr = MatShellSetOperation(newmat,MATOP_MULT,(void (*)(void))FETIDPMatMult);CHKERRQ(ierr);
1103   ierr = MatShellSetOperation(newmat,MATOP_DESTROY,(void (*)(void))PCBDDCDestroyFETIDPMat);CHKERRQ(ierr);
1104   ierr = MatSetUp(newmat);CHKERRQ(ierr);
1105   /* FETIDP preconditioner */
1106   ierr = PCBDDCCreateFETIDPPCContext(pc,&fetidppc_ctx);CHKERRQ(ierr);
1107   ierr = PCBDDCSetupFETIDPPCContext(newmat,fetidppc_ctx);CHKERRQ(ierr);
1108   ierr = PCCreate(comm,&newpc);CHKERRQ(ierr);
1109   ierr = PCSetType(newpc,PCSHELL);CHKERRQ(ierr);
1110   ierr = PCShellSetContext(newpc,fetidppc_ctx);CHKERRQ(ierr);
1111   ierr = PCShellSetApply(newpc,FETIDPPCApply);CHKERRQ(ierr);
1112   ierr = PCShellSetDestroy(newpc,PCBDDCDestroyFETIDPPC);CHKERRQ(ierr);
1113   ierr = PCSetOperators(newpc,newmat,newmat,SAME_PRECONDITIONER);CHKERRQ(ierr);
1114   ierr = PCSetUp(newpc);CHKERRQ(ierr);
1115   /* return pointers for objects created */
1116   *fetidp_mat=newmat;
1117   *fetidp_pc=newpc;
1118   PetscFunctionReturn(0);
1119 }
1120 
1121 #undef __FUNCT__
1122 #define __FUNCT__ "PCBDDCCreateFETIDPOperators"
1123 /*@
1124  PCBDDCCreateFETIDPOperators - Create operators for FETIDP.
1125 
1126    Collective
1127 
1128    Input Parameters:
.  pc - the BDDC preconditioning context (setup must be already called)
1130 
1131    Level: developer
1132 
1133    Notes:
1134 
1135 .seealso: PCBDDC
1136 @*/
1137 PetscErrorCode PCBDDCCreateFETIDPOperators(PC pc, Mat *fetidp_mat, PC *fetidp_pc)
1138 {
1139   PetscErrorCode ierr;
1140 
1141   PetscFunctionBegin;
1142   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1143   if (pc->setupcalled) {
1144     ierr = PetscTryMethod(pc,"PCBDDCCreateFETIDPOperators_C",(PC,Mat*,PC*),(pc,fetidp_mat,fetidp_pc));CHKERRQ(ierr);
1145   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"You must call PCSetup_BDDC() first \n");
1146   PetscFunctionReturn(0);
1147 }
1148 /* -------------------------------------------------------------------------- */
1149 /*MC
1150    PCBDDC - Balancing Domain Decomposition by Constraints.
1151 
1152    Options Database Keys:
1153 .    -pcbddc ??? -
1154 
1155    Level: intermediate
1156 
1157    Notes: The matrix used with this preconditioner must be of type MATIS
1158 
1159           Unlike more 'conventional' interface preconditioners, this iterates over ALL the
1160           degrees of freedom, NOT just those on the interface (this allows the use of approximate solvers
1161           on the subdomains).
1162 
1163           Options for the coarse grid preconditioner can be set with -
1164           Options for the Dirichlet subproblem can be set with -
1165           Options for the Neumann subproblem can be set with -
1166 
1167    Contributed by Stefano Zampini
1168 
1169 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,  MATIS
1170 M*/
1171 
1172 #undef __FUNCT__
1173 #define __FUNCT__ "PCCreate_BDDC"
1174 PETSC_EXTERN PetscErrorCode PCCreate_BDDC(PC pc)
1175 {
1176   PetscErrorCode      ierr;
1177   PC_BDDC             *pcbddc;
1178 
1179   PetscFunctionBegin;
1180   /* Creates the private data structure for this preconditioner and attach it to the PC object. */
1181   ierr      = PetscNewLog(pc,PC_BDDC,&pcbddc);CHKERRQ(ierr);
1182   pc->data  = (void*)pcbddc;
1183 
1184   /* create PCIS data structure */
1185   ierr = PCISCreate(pc);CHKERRQ(ierr);
1186 
1187   /* BDDC specific */
1188   pcbddc->user_primal_vertices       = 0;
1189   pcbddc->NullSpace                  = 0;
1190   pcbddc->temp_solution              = 0;
1191   pcbddc->original_rhs               = 0;
1192   pcbddc->local_mat                  = 0;
1193   pcbddc->ChangeOfBasisMatrix        = 0;
1194   pcbddc->use_change_of_basis        = PETSC_TRUE;
1195   pcbddc->use_change_on_faces        = PETSC_FALSE;
1196   pcbddc->coarse_vec                 = 0;
1197   pcbddc->coarse_rhs                 = 0;
1198   pcbddc->coarse_ksp                 = 0;
1199   pcbddc->coarse_phi_B               = 0;
1200   pcbddc->coarse_phi_D               = 0;
1201   pcbddc->vec1_P                     = 0;
1202   pcbddc->vec1_R                     = 0;
1203   pcbddc->vec2_R                     = 0;
1204   pcbddc->local_auxmat1              = 0;
1205   pcbddc->local_auxmat2              = 0;
1206   pcbddc->R_to_B                     = 0;
1207   pcbddc->R_to_D                     = 0;
1208   pcbddc->ksp_D                      = 0;
1209   pcbddc->ksp_R                      = 0;
1210   pcbddc->local_primal_indices       = 0;
1211   pcbddc->inexact_prec_type          = PETSC_FALSE;
1212   pcbddc->NeumannBoundaries          = 0;
1213   pcbddc->ISForDofs                  = 0;
1214   pcbddc->ConstraintMatrix           = 0;
1215   pcbddc->use_nnsp_true              = PETSC_FALSE;
1216   pcbddc->local_primal_sizes         = 0;
1217   pcbddc->local_primal_displacements = 0;
1218   pcbddc->coarse_loc_to_glob         = 0;
1219   pcbddc->dbg_flag                   = PETSC_FALSE;
1220   pcbddc->coarsening_ratio           = 8;
1221   pcbddc->use_exact_dirichlet        = PETSC_TRUE;
1222   pcbddc->current_level              = 0;
1223   pcbddc->max_levels                 = 1;
1224   pcbddc->replicated_local_primal_indices = 0;
1225   pcbddc->replicated_local_primal_values  = 0;
1226 
1227   /* create local graph structure */
1228   ierr = PCBDDCGraphCreate(&pcbddc->mat_graph);CHKERRQ(ierr);
1229 
1230   /* scaling */
1231   pcbddc->use_deluxe_scaling         = PETSC_FALSE;
1232   pcbddc->work_scaling               = 0;
1233 
1234   /* function pointers */
1235   pc->ops->apply               = PCApply_BDDC;
1236   pc->ops->applytranspose      = 0;
1237   pc->ops->setup               = PCSetUp_BDDC;
1238   pc->ops->destroy             = PCDestroy_BDDC;
1239   pc->ops->setfromoptions      = PCSetFromOptions_BDDC;
1240   pc->ops->view                = 0;
1241   pc->ops->applyrichardson     = 0;
1242   pc->ops->applysymmetricleft  = 0;
1243   pc->ops->applysymmetricright = 0;
1244   pc->ops->presolve            = PCPreSolve_BDDC;
1245   pc->ops->postsolve           = PCPostSolve_BDDC;
1246 
1247   /* composing function */
1248 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetPrimalVerticesLocalIS_C",PCBDDCSetPrimalVerticesLocalIS_BDDC);CHKERRQ(ierr);
1249   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseningRatio_C",PCBDDCSetCoarseningRatio_BDDC);CHKERRQ(ierr);
1250   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetMaxLevels_C",PCBDDCSetMaxLevels_BDDC);CHKERRQ(ierr);
1251   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNullSpace_C",PCBDDCSetNullSpace_BDDC);CHKERRQ(ierr);
1252   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundaries_C",PCBDDCSetDirichletBoundaries_BDDC);CHKERRQ(ierr);
1253   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundaries_C",PCBDDCSetNeumannBoundaries_BDDC);CHKERRQ(ierr);
1254   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundaries_C",PCBDDCGetDirichletBoundaries_BDDC);CHKERRQ(ierr);
1255   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundaries_C",PCBDDCGetNeumannBoundaries_BDDC);CHKERRQ(ierr);
1256   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseProblemType_C",PCBDDCSetCoarseProblemType_BDDC);CHKERRQ(ierr);
1257   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplitting_C",PCBDDCSetDofsSplitting_BDDC);CHKERRQ(ierr);
1258   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",PCBDDCSetLocalAdjacencyGraph_BDDC);CHKERRQ(ierr);
1259   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCCreateFETIDPOperators_C",PCBDDCCreateFETIDPOperators_BDDC);CHKERRQ(ierr);
1260   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetRHS_C",PCBDDCMatFETIDPGetRHS_BDDC);CHKERRQ(ierr);
1261   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetSolution_C",PCBDDCMatFETIDPGetSolution_BDDC);CHKERRQ(ierr);
1262   PetscFunctionReturn(0);
1263 }
1264 
1265 /* -------------------------------------------------------------------------- */
1266 /* All static functions from now on                                           */
1267 /* -------------------------------------------------------------------------- */
1268 
1269 #undef __FUNCT__
1270 #define __FUNCT__ "PCBDDCSetUseExactDirichlet"
1271 static PetscErrorCode PCBDDCSetUseExactDirichlet(PC pc,PetscBool use)
1272 {
1273   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
1274 
1275   PetscFunctionBegin;
1276   pcbddc->use_exact_dirichlet=use;
1277   PetscFunctionReturn(0);
1278 }
1279 
1280 #undef __FUNCT__
1281 #define __FUNCT__ "PCBDDCSetLevel"
1282 static PetscErrorCode PCBDDCSetLevel(PC pc,PetscInt level)
1283 {
1284   PC_BDDC  *pcbddc = (PC_BDDC*)pc->data;
1285 
1286   PetscFunctionBegin;
1287   pcbddc->current_level=level;
1288   PetscFunctionReturn(0);
1289 }
1290 
1291 /* -------------------------------------------------------------------------- */
1292 #undef __FUNCT__
1293 #define __FUNCT__ "PCBDDCCoarseSetUp"
1294 static PetscErrorCode PCBDDCCoarseSetUp(PC pc)
1295 {
1296   PetscErrorCode  ierr;
1297 
1298   PC_IS*            pcis = (PC_IS*)(pc->data);
1299   PC_BDDC*          pcbddc = (PC_BDDC*)pc->data;
1300   Mat_IS            *matis = (Mat_IS*)pc->pmat->data;
1301   Mat               change_mat_all;
1302   IS                is_R_local;
1303   IS                is_V_local;
1304   IS                is_C_local;
1305   IS                is_aux1;
1306   IS                is_aux2;
1307   VecType           impVecType;
1308   MatType           impMatType;
1309   PetscInt          n_R=0;
1310   PetscInt          n_D=0;
1311   PetscInt          n_B=0;
1312   PetscScalar       zero=0.0;
1313   PetscScalar       one=1.0;
1314   PetscScalar       m_one=-1.0;
1315   PetscScalar*      array;
1316   PetscScalar       *coarse_submat_vals;
1317   PetscInt          *idx_R_local;
1318   PetscInt          *idx_V_B;
1319   PetscScalar       *coarsefunctions_errors;
1320   PetscScalar       *constraints_errors;
1321   /* auxiliary indices */
1322   PetscInt          i,j,k;
1323   /* for verbose output of bddc */
1324   PetscViewer       viewer=pcbddc->dbg_viewer;
1325   PetscBool         dbg_flag=pcbddc->dbg_flag;
1326   /* for counting coarse dofs */
1327   PetscInt          n_vertices,n_constraints;
1328   PetscInt          size_of_constraint;
1329   PetscInt          *row_cmat_indices;
1330   PetscScalar       *row_cmat_values;
1331   PetscInt          *vertices,*nnz,*is_indices,*temp_indices;
1332   ISLocalToGlobalMapping BtoNmap;
1333 
1334   PetscFunctionBegin;
1335   /* Set Non-overlapping dimensions */
1336   n_B = pcis->n_B; n_D = pcis->n - n_B;
1337   /* Set types for local objects needed by BDDC precondtioner */
1338   impMatType = MATSEQDENSE;
1339   impVecType = VECSEQ;
1340   /* get vertex indices from constraint matrix */
1341   ierr = PCBDDCGetPrimalVerticesLocalIdx(pc,&n_vertices,&vertices);CHKERRQ(ierr);
1342   /* Set number of constraints */
1343   n_constraints = pcbddc->local_primal_size-n_vertices;
1344 
1345   /* vertices in boundary numbering */
1346   ierr = PetscMalloc(n_vertices*sizeof(PetscInt),&idx_V_B);CHKERRQ(ierr);
1347   ierr = ISLocalToGlobalMappingCreateIS(pcis->is_B_local,&BtoNmap);CHKERRQ(ierr);
1348   ierr = ISGlobalToLocalMappingApply(BtoNmap,IS_GTOLM_DROP,n_vertices,vertices,&i,idx_V_B);CHKERRQ(ierr);
1349   if (i != n_vertices) {
1350     SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Error in boundary numbering for BDDC vertices! %d != %d\n",n_vertices,i);
1351   }
1352 
1353   /* transform local matrices if needed */
1354   if (pcbddc->use_change_of_basis) {
1355     ierr = PetscMalloc(pcis->n*sizeof(PetscInt),&nnz);CHKERRQ(ierr);
1356     ierr = ISGetIndices(pcis->is_I_local,(const PetscInt**)&is_indices);CHKERRQ(ierr);
1357     for (i=0;i<n_D;i++) nnz[is_indices[i]] = 1;
1358     ierr = ISRestoreIndices(pcis->is_I_local,(const PetscInt**)&is_indices);CHKERRQ(ierr);
1359     ierr = ISGetIndices(pcis->is_B_local,(const PetscInt**)&is_indices);CHKERRQ(ierr);
1360     k=1;
1361     for (i=0;i<n_B;i++) {
1362       ierr = MatGetRow(pcbddc->ChangeOfBasisMatrix,i,&j,NULL,NULL);CHKERRQ(ierr);
1363       nnz[is_indices[i]]=j;
1364       if (k < j) k = j;
1365       ierr = MatRestoreRow(pcbddc->ChangeOfBasisMatrix,i,&j,NULL,NULL);CHKERRQ(ierr);
1366     }
1367     ierr = ISRestoreIndices(pcis->is_B_local,(const PetscInt**)&is_indices);CHKERRQ(ierr);
1368     /* assemble change of basis matrix on the whole set of local dofs */
1369     ierr = PetscMalloc(k*sizeof(PetscInt),&temp_indices);CHKERRQ(ierr);
1370     ierr = MatCreate(PETSC_COMM_SELF,&change_mat_all);CHKERRQ(ierr);
1371     ierr = MatSetSizes(change_mat_all,pcis->n,pcis->n,pcis->n,pcis->n);CHKERRQ(ierr);
1372     ierr = MatSetType(change_mat_all,MATSEQAIJ);CHKERRQ(ierr);
1373     ierr = MatSeqAIJSetPreallocation(change_mat_all,0,nnz);CHKERRQ(ierr);
1374     ierr = ISGetIndices(pcis->is_I_local,(const PetscInt**)&is_indices);CHKERRQ(ierr);
1375     for (i=0;i<n_D;i++) {
1376       ierr = MatSetValue(change_mat_all,is_indices[i],is_indices[i],1.0,INSERT_VALUES);CHKERRQ(ierr);
1377     }
1378     ierr = ISRestoreIndices(pcis->is_I_local,(const PetscInt**)&is_indices);CHKERRQ(ierr);
1379     ierr = ISGetIndices(pcis->is_B_local,(const PetscInt**)&is_indices);CHKERRQ(ierr);
1380     for (i=0;i<n_B;i++) {
1381       ierr = MatGetRow(pcbddc->ChangeOfBasisMatrix,i,&j,(const PetscInt**)&row_cmat_indices,(const PetscScalar**)&row_cmat_values);CHKERRQ(ierr);
1382       for (k=0; k<j; k++) temp_indices[k]=is_indices[row_cmat_indices[k]];
1383       ierr = MatSetValues(change_mat_all,1,&is_indices[i],j,temp_indices,row_cmat_values,INSERT_VALUES);CHKERRQ(ierr);
1384       ierr = MatRestoreRow(pcbddc->ChangeOfBasisMatrix,i,&j,(const PetscInt**)&row_cmat_indices,(const PetscScalar**)&row_cmat_values);CHKERRQ(ierr);
1385     }
1386     ierr = MatAssemblyBegin(change_mat_all,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1387     ierr = MatAssemblyEnd(change_mat_all,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1388     /* TODO: HOW TO WORK WITH BAIJ? PtAP not provided */
1389     ierr = MatGetBlockSize(matis->A,&i);CHKERRQ(ierr);
1390     if (i==1) {
1391       ierr = MatPtAP(matis->A,change_mat_all,MAT_INITIAL_MATRIX,1.0,&pcbddc->local_mat);CHKERRQ(ierr);
1392     } else {
1393       Mat work_mat;
1394       ierr = MatConvert(matis->A,MATSEQAIJ,MAT_INITIAL_MATRIX,&work_mat);CHKERRQ(ierr);
1395       ierr = MatPtAP(work_mat,change_mat_all,MAT_INITIAL_MATRIX,1.0,&pcbddc->local_mat);CHKERRQ(ierr);
1396       ierr = MatDestroy(&work_mat);CHKERRQ(ierr);
1397     }
1398     ierr = MatDestroy(&pcis->A_IB);CHKERRQ(ierr);
1399     ierr = MatDestroy(&pcis->A_BI);CHKERRQ(ierr);
1400     ierr = MatDestroy(&pcis->A_BB);CHKERRQ(ierr);
1401     ierr = MatGetSubMatrix(pcbddc->local_mat,pcis->is_I_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_IB);CHKERRQ(ierr);
1402     ierr = MatGetSubMatrix(pcbddc->local_mat,pcis->is_B_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_BI);CHKERRQ(ierr);
1403     ierr = MatGetSubMatrix(pcbddc->local_mat,pcis->is_B_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_BB);CHKERRQ(ierr);
1404     ierr = MatDestroy(&change_mat_all);CHKERRQ(ierr);
1405     ierr = PetscFree(nnz);CHKERRQ(ierr);
1406     ierr = PetscFree(temp_indices);CHKERRQ(ierr);
1407   } else {
1408     /* without change of basis, the local matrix is unchanged */
1409     ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
1410     pcbddc->local_mat = matis->A;
1411   }
1412   /* Change global null space passed in by the user if change of basis has been requested */
1413   if (pcbddc->NullSpace && pcbddc->use_change_of_basis) {
1414     ierr = PCBDDCNullSpaceAdaptGlobal(pc);CHKERRQ(ierr);
1415   }
1416 
1417   /* Dohrmann's notation: dofs split into R (Remaining: all dofs but the vertices) and V (Vertices) */
1418   ierr = VecSet(pcis->vec1_N,one);CHKERRQ(ierr);
1419   ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1420   for (i=0;i<n_vertices;i++) array[vertices[i]] = zero;
1421   ierr = PetscMalloc(( pcis->n - n_vertices )*sizeof(PetscInt),&idx_R_local);CHKERRQ(ierr);
1422   for (i=0, n_R=0; i<pcis->n; i++) {
1423     if (array[i] == one) {
1424       idx_R_local[n_R] = i;
1425       n_R++;
1426     }
1427   }
1428   ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1429   if (dbg_flag) {
1430     ierr = PetscViewerASCIIPrintf(viewer,"--------------------------------------------------\n");CHKERRQ(ierr);
1431     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1432     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d local dimensions\n",PetscGlobalRank);CHKERRQ(ierr);
1433     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"local_size = %d, dirichlet_size = %d, boundary_size = %d\n",pcis->n,n_D,n_B);CHKERRQ(ierr);
1434     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"r_size = %d, v_size = %d, constraints = %d, local_primal_size = %d\n",n_R,n_vertices,n_constraints,pcbddc->local_primal_size);CHKERRQ(ierr);
1435     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"pcbddc->n_vertices = %d, pcbddc->n_constraints = %d\n",pcbddc->n_vertices,pcbddc->n_constraints);CHKERRQ(ierr);
1436     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1437   }
1438 
1439   /* Allocate needed vectors */
1440   ierr = VecDuplicate(pcis->vec1_global,&pcbddc->original_rhs);CHKERRQ(ierr);
1441   ierr = VecDuplicate(pcis->vec1_global,&pcbddc->temp_solution);CHKERRQ(ierr);
1442   ierr = VecDuplicate(pcis->vec1_D,&pcbddc->vec4_D);CHKERRQ(ierr);
1443   ierr = VecCreate(PETSC_COMM_SELF,&pcbddc->vec1_R);CHKERRQ(ierr);
1444   ierr = VecSetSizes(pcbddc->vec1_R,n_R,n_R);CHKERRQ(ierr);
1445   ierr = VecSetType(pcbddc->vec1_R,impVecType);CHKERRQ(ierr);
1446   ierr = VecDuplicate(pcbddc->vec1_R,&pcbddc->vec2_R);CHKERRQ(ierr);
1447   ierr = VecCreate(PETSC_COMM_SELF,&pcbddc->vec1_P);CHKERRQ(ierr);
1448   ierr = VecSetSizes(pcbddc->vec1_P,pcbddc->local_primal_size,pcbddc->local_primal_size);CHKERRQ(ierr);
1449   ierr = VecSetType(pcbddc->vec1_P,impVecType);CHKERRQ(ierr);
1450 
1451   /* Creating some index sets needed  */
1452   /* For submatrices */
1453   ierr = ISCreateGeneral(PETSC_COMM_SELF,n_R,idx_R_local,PETSC_OWN_POINTER,&is_R_local);CHKERRQ(ierr);
1454   if (n_vertices)    {
1455     ierr = ISCreateGeneral(PETSC_COMM_SELF,n_vertices,vertices,PETSC_OWN_POINTER,&is_V_local);CHKERRQ(ierr);
1456   }
1457   if (n_constraints) {
1458     ierr = ISCreateStride(PETSC_COMM_SELF,n_constraints,n_vertices,1,&is_C_local);CHKERRQ(ierr);
1459   }
1460 
1461   /* For VecScatters pcbddc->R_to_B and (optionally) pcbddc->R_to_D */
1462   {
1463     PetscInt   *aux_array1;
1464     PetscInt   *aux_array2;
1465     PetscInt   *idx_I_local;
1466 
1467     ierr = PetscMalloc( (pcis->n_B-n_vertices)*sizeof(PetscInt),&aux_array1);CHKERRQ(ierr);
1468     ierr = PetscMalloc( (pcis->n_B-n_vertices)*sizeof(PetscInt),&aux_array2);CHKERRQ(ierr);
1469 
1470     ierr = ISGetIndices(pcis->is_I_local,(const PetscInt**)&idx_I_local);CHKERRQ(ierr);
1471     ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1472     for (i=0; i<n_D; i++) array[idx_I_local[i]] = 0;
1473     ierr = ISRestoreIndices(pcis->is_I_local,(const PetscInt**)&idx_I_local);CHKERRQ(ierr);
1474     for (i=0, j=0; i<n_R; i++) {
1475       if (array[idx_R_local[i]] == one) {
1476         aux_array1[j] = i;
1477         j++;
1478       }
1479     }
1480     ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1481     ierr = ISCreateGeneral(PETSC_COMM_SELF,j,aux_array1,PETSC_COPY_VALUES,&is_aux1);CHKERRQ(ierr);
1482     ierr = VecScatterBegin(pcis->N_to_B,pcis->vec1_N,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1483     ierr = VecScatterEnd  (pcis->N_to_B,pcis->vec1_N,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1484     ierr = VecGetArray(pcis->vec1_B,&array);CHKERRQ(ierr);
1485     for (i=0, j=0; i<n_B; i++) {
1486       if (array[i] == one) {
1487         aux_array2[j] = i; j++;
1488       }
1489     }
1490     ierr = VecRestoreArray(pcis->vec1_B,&array);CHKERRQ(ierr);
1491     ierr = ISCreateGeneral(PETSC_COMM_SELF,j,aux_array2,PETSC_COPY_VALUES,&is_aux2);CHKERRQ(ierr);
1492     ierr = VecScatterCreate(pcbddc->vec1_R,is_aux1,pcis->vec1_B,is_aux2,&pcbddc->R_to_B);CHKERRQ(ierr);
1493     ierr = PetscFree(aux_array1);CHKERRQ(ierr);
1494     ierr = PetscFree(aux_array2);CHKERRQ(ierr);
1495     ierr = ISDestroy(&is_aux1);CHKERRQ(ierr);
1496     ierr = ISDestroy(&is_aux2);CHKERRQ(ierr);
1497 
1498     if (pcbddc->inexact_prec_type || dbg_flag ) {
1499       ierr = PetscMalloc(n_D*sizeof(PetscInt),&aux_array1);CHKERRQ(ierr);
1500       ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1501       for (i=0, j=0; i<n_R; i++) {
1502         if (array[idx_R_local[i]] == zero) {
1503           aux_array1[j] = i;
1504           j++;
1505         }
1506       }
1507       ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1508       ierr = ISCreateGeneral(PETSC_COMM_SELF,j,aux_array1,PETSC_COPY_VALUES,&is_aux1);CHKERRQ(ierr);
1509       ierr = VecScatterCreate(pcbddc->vec1_R,is_aux1,pcis->vec1_D,(IS)0,&pcbddc->R_to_D);CHKERRQ(ierr);
1510       ierr = PetscFree(aux_array1);CHKERRQ(ierr);
1511       ierr = ISDestroy(&is_aux1);CHKERRQ(ierr);
1512     }
1513   }
1514 
1515   /* Creating PC contexts for local Dirichlet and Neumann problems */
1516   {
1517     Mat          A_RR;
1518     PC           pc_temp;
1519     MatStructure matstruct;
1520     /* Matrix for Dirichlet problem is A_II */
1521     /* HACK (TODO) A_II can be changed between nonlinear iterations */
1522     ierr = PCGetOperators(pc,NULL,NULL,&matstruct);CHKERRQ(ierr);
1523     if (matstruct == SAME_NONZERO_PATTERN) {
1524       ierr = MatGetSubMatrix(pcbddc->local_mat,pcis->is_I_local,pcis->is_I_local,MAT_REUSE_MATRIX,&pcis->A_II);CHKERRQ(ierr);
1525     } else {
1526       ierr = MatDestroy(&pcis->A_II);CHKERRQ(ierr);
1527       ierr = MatGetSubMatrix(pcbddc->local_mat,pcis->is_I_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_II);CHKERRQ(ierr);
1528     }
1529     ierr = KSPCreate(PETSC_COMM_SELF,&pcbddc->ksp_D);CHKERRQ(ierr);
1530     ierr = PetscObjectIncrementTabLevel((PetscObject)pcbddc->ksp_D,(PetscObject)pc,1);CHKERRQ(ierr);
1531     ierr = KSPSetOperators(pcbddc->ksp_D,pcis->A_II,pcis->A_II,SAME_PRECONDITIONER);CHKERRQ(ierr);
1532     ierr = KSPSetType(pcbddc->ksp_D,KSPPREONLY);CHKERRQ(ierr);
1533     ierr = KSPSetOptionsPrefix(pcbddc->ksp_D,"dirichlet_");CHKERRQ(ierr);
1534     /* default */
1535     ierr = KSPGetPC(pcbddc->ksp_D,&pc_temp);CHKERRQ(ierr);
1536     ierr = PCSetType(pc_temp,PCLU);CHKERRQ(ierr);
1537     /* Allow user's customization */
1538     ierr = KSPSetFromOptions(pcbddc->ksp_D);CHKERRQ(ierr);
1539     /* umfpack interface has a bug when matrix dimension is zero */
1540     if (!n_D) {
1541       ierr = PCSetType(pc_temp,PCNONE);CHKERRQ(ierr);
1542     }
1543     /* Set Up KSP for Dirichlet problem of BDDC */
1544     ierr = KSPSetUp(pcbddc->ksp_D);CHKERRQ(ierr);
1545     /* set ksp_D into pcis data */
1546     ierr = KSPDestroy(&pcis->ksp_D);CHKERRQ(ierr);
1547     ierr = PetscObjectReference((PetscObject)pcbddc->ksp_D);CHKERRQ(ierr);
1548     pcis->ksp_D = pcbddc->ksp_D;
1549     /* Matrix for Neumann problem is A_RR -> we need to create it */
1550     ierr = MatGetSubMatrix(pcbddc->local_mat,is_R_local,is_R_local,MAT_INITIAL_MATRIX,&A_RR);CHKERRQ(ierr);
1551     ierr = KSPCreate(PETSC_COMM_SELF,&pcbddc->ksp_R);CHKERRQ(ierr);
1552     ierr = PetscObjectIncrementTabLevel((PetscObject)pcbddc->ksp_R,(PetscObject)pc,1);CHKERRQ(ierr);
1553     ierr = KSPSetOperators(pcbddc->ksp_R,A_RR,A_RR,SAME_PRECONDITIONER);CHKERRQ(ierr);
1554     ierr = KSPSetType(pcbddc->ksp_R,KSPPREONLY);CHKERRQ(ierr);
1555     ierr = KSPSetOptionsPrefix(pcbddc->ksp_R,"neumann_");CHKERRQ(ierr);
1556     /* default */
1557     ierr = KSPGetPC(pcbddc->ksp_R,&pc_temp);CHKERRQ(ierr);
1558     ierr = PCSetType(pc_temp,PCLU);CHKERRQ(ierr);
1559     /* Allow user's customization */
1560     ierr = KSPSetFromOptions(pcbddc->ksp_R);CHKERRQ(ierr);
1561     /* umfpack interface has a bug when matrix dimension is zero */
1562     if (!n_R) {
1563       ierr = PCSetType(pc_temp,PCNONE);CHKERRQ(ierr);
1564     }
1565     /* Set Up KSP for Neumann problem of BDDC */
1566     ierr = KSPSetUp(pcbddc->ksp_R);CHKERRQ(ierr);
1567     /* check Dirichlet and Neumann solvers and adapt them if a nullspace correction is needed */
1568     {
1569       Vec         temp_vec;
1570       PetscReal   value;
1571       PetscMPIInt use_exact,use_exact_reduced;
1572 
1573       ierr = VecDuplicate(pcis->vec1_D,&temp_vec);CHKERRQ(ierr);
1574       ierr = VecSetRandom(pcis->vec1_D,NULL);CHKERRQ(ierr);
1575       ierr = MatMult(pcis->A_II,pcis->vec1_D,pcis->vec2_D);CHKERRQ(ierr);
1576       ierr = KSPSolve(pcbddc->ksp_D,pcis->vec2_D,temp_vec);CHKERRQ(ierr);
1577       ierr = VecAXPY(temp_vec,m_one,pcis->vec1_D);CHKERRQ(ierr);
1578       ierr = VecNorm(temp_vec,NORM_INFINITY,&value);CHKERRQ(ierr);
1579       ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
1580       use_exact = 1;
1581       if (PetscAbsReal(value) > 1.e-4) use_exact = 0;
1582 
1583       ierr = MPI_Allreduce(&use_exact,&use_exact_reduced,1,MPIU_INT,MPI_LAND,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
1584       pcbddc->use_exact_dirichlet = (PetscBool) use_exact_reduced;
1585       if (dbg_flag) {
1586         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1587         ierr = PetscViewerASCIIPrintf(viewer,"--------------------------------------------------\n");CHKERRQ(ierr);
1588         ierr = PetscViewerASCIIPrintf(viewer,"Checking solution of Dirichlet and Neumann problems\n");CHKERRQ(ierr);
1589         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d infinity error for Dirichlet solve = % 1.14e \n",PetscGlobalRank,value);CHKERRQ(ierr);
1590         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1591       }
1592       if (n_D && pcbddc->NullSpace && !use_exact_reduced && !pcbddc->inexact_prec_type) {
1593         ierr = PCBDDCNullSpaceAssembleCorrection(pc,pcis->is_I_local);
1594       }
1595       ierr = VecDuplicate(pcbddc->vec1_R,&temp_vec);CHKERRQ(ierr);
1596       ierr = VecSetRandom(pcbddc->vec1_R,NULL);CHKERRQ(ierr);
1597       ierr = MatMult(A_RR,pcbddc->vec1_R,pcbddc->vec2_R);CHKERRQ(ierr);
1598       ierr = KSPSolve(pcbddc->ksp_R,pcbddc->vec2_R,temp_vec);CHKERRQ(ierr);
1599       ierr = VecAXPY(temp_vec,m_one,pcbddc->vec1_R);CHKERRQ(ierr);
1600       ierr = VecNorm(temp_vec,NORM_INFINITY,&value);CHKERRQ(ierr);
1601       ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
1602       use_exact = 1;
1603       if (PetscAbsReal(value) > 1.e-4) use_exact = 0;
1604       ierr = MPI_Allreduce(&use_exact,&use_exact_reduced,1,MPIU_INT,MPI_LAND,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
1605       if (dbg_flag) {
1606         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d infinity error for  Neumann  solve = % 1.14e \n",PetscGlobalRank,value);CHKERRQ(ierr);
1607         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1608       }
1609       if (n_R && pcbddc->NullSpace && !use_exact_reduced) {
1610         ierr = PCBDDCNullSpaceAssembleCorrection(pc,is_R_local);
1611       }
1612     }
1613     /* free Neumann problem's matrix */
1614     ierr = MatDestroy(&A_RR);CHKERRQ(ierr);
1615   }
1616 
1617   /* Assemble all remaining stuff needed to apply BDDC  */
1618   {
1619     Mat          A_RV,A_VR,A_VV;
1620     Mat          M1;
1621     Mat          C_CR;
1622     Mat          AUXMAT;
1623     Vec          vec1_C;
1624     Vec          vec2_C;
1625     Vec          vec1_V;
1626     Vec          vec2_V;
1627     PetscInt     *nnz;
1628     PetscInt     *auxindices;
1629     PetscInt     index;
1630     PetscScalar* array2;
1631     MatFactorInfo matinfo;
1632 
1633     /* Allocating some extra storage just to be safe */
1634     ierr = PetscMalloc (pcis->n*sizeof(PetscInt),&nnz);CHKERRQ(ierr);
1635     ierr = PetscMalloc (pcis->n*sizeof(PetscInt),&auxindices);CHKERRQ(ierr);
1636     for (i=0;i<pcis->n;i++) auxindices[i]=i;
1637 
1638     /* some work vectors on vertices and/or constraints */
1639     if (n_vertices) {
1640       ierr = VecCreate(PETSC_COMM_SELF,&vec1_V);CHKERRQ(ierr);
1641       ierr = VecSetSizes(vec1_V,n_vertices,n_vertices);CHKERRQ(ierr);
1642       ierr = VecSetType(vec1_V,impVecType);CHKERRQ(ierr);
1643       ierr = VecDuplicate(vec1_V,&vec2_V);CHKERRQ(ierr);
1644     }
1645     if (n_constraints) {
1646       ierr = VecCreate(PETSC_COMM_SELF,&vec1_C);CHKERRQ(ierr);
1647       ierr = VecSetSizes(vec1_C,n_constraints,n_constraints);CHKERRQ(ierr);
1648       ierr = VecSetType(vec1_C,impVecType);CHKERRQ(ierr);
1649       ierr = VecDuplicate(vec1_C,&vec2_C);CHKERRQ(ierr);
1650       ierr = VecDuplicate(vec1_C,&pcbddc->vec1_C);CHKERRQ(ierr);
1651     }
1652     /* Precompute quantities needed for preprocessing and application of BDDC */
1653     if (n_constraints) {
1654       ierr = MatCreate(PETSC_COMM_SELF,&pcbddc->local_auxmat2);CHKERRQ(ierr);
1655       ierr = MatSetSizes(pcbddc->local_auxmat2,n_R,n_constraints,n_R,n_constraints);CHKERRQ(ierr);
1656       ierr = MatSetType(pcbddc->local_auxmat2,impMatType);CHKERRQ(ierr);
1657       ierr = MatSeqDenseSetPreallocation(pcbddc->local_auxmat2,NULL);CHKERRQ(ierr);
1658 
1659       /* Create Constraint matrix on R nodes: C_{CR}  */
1660       ierr = MatGetSubMatrix(pcbddc->ConstraintMatrix,is_C_local,is_R_local,MAT_INITIAL_MATRIX,&C_CR);CHKERRQ(ierr);
1661       ierr = ISDestroy(&is_C_local);CHKERRQ(ierr);
1662 
1663       /* Assemble local_auxmat2 = - A_{RR}^{-1} C^T_{CR} needed by BDDC application */
1664       for (i=0;i<n_constraints;i++) {
1665         ierr = VecSet(pcbddc->vec1_R,zero);CHKERRQ(ierr);
1666         /* Get row of constraint matrix in R numbering */
1667         ierr = VecGetArray(pcbddc->vec1_R,&array);CHKERRQ(ierr);
1668         ierr = MatGetRow(C_CR,i,&size_of_constraint,(const PetscInt**)&row_cmat_indices,(const PetscScalar**)&row_cmat_values);CHKERRQ(ierr);
1669         for (j=0;j<size_of_constraint;j++) array[row_cmat_indices[j]] = -row_cmat_values[j];
1670         ierr = MatRestoreRow(C_CR,i,&size_of_constraint,(const PetscInt**)&row_cmat_indices,(const PetscScalar**)&row_cmat_values);CHKERRQ(ierr);
1671         ierr = VecRestoreArray(pcbddc->vec1_R,&array);CHKERRQ(ierr);
1672 
1673         /* Solve for row of constraint matrix in R numbering */
1674         ierr = KSPSolve(pcbddc->ksp_R,pcbddc->vec1_R,pcbddc->vec2_R);CHKERRQ(ierr);
1675 
1676         /* Set values */
1677         ierr = VecGetArray(pcbddc->vec2_R,&array);CHKERRQ(ierr);
1678         ierr = MatSetValues(pcbddc->local_auxmat2,n_R,auxindices,1,&i,array,INSERT_VALUES);CHKERRQ(ierr);
1679         ierr = VecRestoreArray(pcbddc->vec2_R,&array);CHKERRQ(ierr);
1680       }
1681       ierr = MatAssemblyBegin(pcbddc->local_auxmat2,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1682       ierr = MatAssemblyEnd(pcbddc->local_auxmat2,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1683 
1684       /* Assemble AUXMAT = ( LUFactor )( -C_{CR} A_{RR}^{-1} C^T_{CR} )^{-1} */
1685       ierr = MatMatMult(C_CR,pcbddc->local_auxmat2,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AUXMAT);CHKERRQ(ierr);
1686       ierr = MatFactorInfoInitialize(&matinfo);CHKERRQ(ierr);
1687       ierr = ISCreateStride(PETSC_COMM_SELF,n_constraints,0,1,&is_aux1);CHKERRQ(ierr);
1688       ierr = MatLUFactor(AUXMAT,is_aux1,is_aux1,&matinfo);CHKERRQ(ierr);
1689       ierr = ISDestroy(&is_aux1);CHKERRQ(ierr);
1690 
1691       /* Assemble explicitly M1 = ( C_{CR} A_{RR}^{-1} C^T_{CR} )^{-1} needed in preproc  */
1692       ierr = MatCreate(PETSC_COMM_SELF,&M1);CHKERRQ(ierr);
1693       ierr = MatSetSizes(M1,n_constraints,n_constraints,n_constraints,n_constraints);CHKERRQ(ierr);
1694       ierr = MatSetType(M1,impMatType);CHKERRQ(ierr);
1695       ierr = MatSeqDenseSetPreallocation(M1,NULL);CHKERRQ(ierr);
1696       for (i=0;i<n_constraints;i++) {
1697         ierr = VecSet(vec1_C,zero);CHKERRQ(ierr);
1698         ierr = VecSetValue(vec1_C,i,one,INSERT_VALUES);CHKERRQ(ierr);
1699         ierr = VecAssemblyBegin(vec1_C);CHKERRQ(ierr);
1700         ierr = VecAssemblyEnd(vec1_C);CHKERRQ(ierr);
1701         ierr = MatSolve(AUXMAT,vec1_C,vec2_C);CHKERRQ(ierr);
1702         ierr = VecScale(vec2_C,m_one);CHKERRQ(ierr);
1703         ierr = VecGetArray(vec2_C,&array);CHKERRQ(ierr);
1704         ierr = MatSetValues(M1,n_constraints,auxindices,1,&i,array,INSERT_VALUES);CHKERRQ(ierr);
1705         ierr = VecRestoreArray(vec2_C,&array);CHKERRQ(ierr);
1706       }
1707       ierr = MatAssemblyBegin(M1,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1708       ierr = MatAssemblyEnd(M1,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1709       ierr = MatDestroy(&AUXMAT);CHKERRQ(ierr);
1710       /* Assemble local_auxmat1 = M1*C_{CR} needed by BDDC application in KSP and in preproc */
1711       ierr = MatMatMult(M1,C_CR,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&pcbddc->local_auxmat1);CHKERRQ(ierr);
1712 
1713     }
1714 
1715     /* Get submatrices from subdomain matrix */
1716     if (n_vertices){
1717       ierr = MatGetSubMatrix(pcbddc->local_mat,is_R_local,is_V_local,MAT_INITIAL_MATRIX,&A_RV);CHKERRQ(ierr);
1718       ierr = MatGetSubMatrix(pcbddc->local_mat,is_V_local,is_R_local,MAT_INITIAL_MATRIX,&A_VR);CHKERRQ(ierr);
1719       ierr = MatGetSubMatrix(pcbddc->local_mat,is_V_local,is_V_local,MAT_INITIAL_MATRIX,&A_VV);CHKERRQ(ierr);
1720     }
1721 
1722     /* Matrix of coarse basis functions (local) */
1723     ierr = MatCreate(PETSC_COMM_SELF,&pcbddc->coarse_phi_B);CHKERRQ(ierr);
1724     ierr = MatSetSizes(pcbddc->coarse_phi_B,n_B,pcbddc->local_primal_size,n_B,pcbddc->local_primal_size);CHKERRQ(ierr);
1725     ierr = MatSetType(pcbddc->coarse_phi_B,impMatType);CHKERRQ(ierr);
1726     ierr = MatSeqDenseSetPreallocation(pcbddc->coarse_phi_B,NULL);CHKERRQ(ierr);
1727     if (pcbddc->inexact_prec_type || dbg_flag ) {
1728       ierr = MatCreate(PETSC_COMM_SELF,&pcbddc->coarse_phi_D);CHKERRQ(ierr);
1729       ierr = MatSetSizes(pcbddc->coarse_phi_D,n_D,pcbddc->local_primal_size,n_D,pcbddc->local_primal_size);CHKERRQ(ierr);
1730       ierr = MatSetType(pcbddc->coarse_phi_D,impMatType);CHKERRQ(ierr);
1731       ierr = MatSeqDenseSetPreallocation(pcbddc->coarse_phi_D,NULL);CHKERRQ(ierr);
1732     }
1733 
1734     if (dbg_flag) {
1735       ierr = PetscMalloc( pcbddc->local_primal_size*sizeof(PetscScalar),&coarsefunctions_errors);CHKERRQ(ierr);
1736       ierr = PetscMalloc( pcbddc->local_primal_size*sizeof(PetscScalar),&constraints_errors);CHKERRQ(ierr);
1737     }
1738     /* Subdomain contribution (Non-overlapping) to coarse matrix  */
1739     ierr = PetscMalloc ((pcbddc->local_primal_size)*(pcbddc->local_primal_size)*sizeof(PetscScalar),&coarse_submat_vals);CHKERRQ(ierr);
1740 
1741     /* We are now ready to evaluate coarse basis functions and subdomain contribution to coarse problem */
1742     for (i=0;i<n_vertices;i++){
1743       ierr = VecSet(vec1_V,zero);CHKERRQ(ierr);
1744       ierr = VecSetValue(vec1_V,i,one,INSERT_VALUES);CHKERRQ(ierr);
1745       ierr = VecAssemblyBegin(vec1_V);CHKERRQ(ierr);
1746       ierr = VecAssemblyEnd(vec1_V);CHKERRQ(ierr);
1747       /* solution of saddle point problem */
1748       ierr = MatMult(A_RV,vec1_V,pcbddc->vec1_R);CHKERRQ(ierr);
1749       ierr = KSPSolve(pcbddc->ksp_R,pcbddc->vec1_R,pcbddc->vec1_R);CHKERRQ(ierr);
1750       ierr = VecScale(pcbddc->vec1_R,m_one);CHKERRQ(ierr);
1751       if (n_constraints) {
1752         ierr = MatMult(pcbddc->local_auxmat1,pcbddc->vec1_R,vec1_C);CHKERRQ(ierr);
1753         ierr = MatMultAdd(pcbddc->local_auxmat2,vec1_C,pcbddc->vec1_R,pcbddc->vec1_R);CHKERRQ(ierr);
1754         ierr = VecScale(vec1_C,m_one);CHKERRQ(ierr);
1755       }
1756       ierr = MatMult(A_VR,pcbddc->vec1_R,vec2_V);CHKERRQ(ierr);
1757       ierr = MatMultAdd(A_VV,vec1_V,vec2_V,vec2_V);CHKERRQ(ierr);
1758 
1759       /* Set values in coarse basis function and subdomain part of coarse_mat */
1760       /* coarse basis functions */
1761       ierr = VecSet(pcis->vec1_B,zero);CHKERRQ(ierr);
1762       ierr = VecScatterBegin(pcbddc->R_to_B,pcbddc->vec1_R,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1763       ierr = VecScatterEnd  (pcbddc->R_to_B,pcbddc->vec1_R,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1764       ierr = VecGetArray(pcis->vec1_B,&array);CHKERRQ(ierr);
1765       ierr = MatSetValues(pcbddc->coarse_phi_B,n_B,auxindices,1,&i,array,INSERT_VALUES);CHKERRQ(ierr);
1766       ierr = VecRestoreArray(pcis->vec1_B,&array);CHKERRQ(ierr);
1767       ierr = MatSetValue(pcbddc->coarse_phi_B,idx_V_B[i],i,one,INSERT_VALUES);CHKERRQ(ierr);
1768       if ( pcbddc->inexact_prec_type || dbg_flag  ) {
1769         ierr = VecScatterBegin(pcbddc->R_to_D,pcbddc->vec1_R,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1770         ierr = VecScatterEnd(pcbddc->R_to_D,pcbddc->vec1_R,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1771         ierr = VecGetArray(pcis->vec1_D,&array);CHKERRQ(ierr);
1772         ierr = MatSetValues(pcbddc->coarse_phi_D,n_D,auxindices,1,&i,array,INSERT_VALUES);CHKERRQ(ierr);
1773         ierr = VecRestoreArray(pcis->vec1_D,&array);CHKERRQ(ierr);
1774       }
1775       /* subdomain contribution to coarse matrix */
1776       ierr = VecGetArray(vec2_V,&array);CHKERRQ(ierr);
1777       for (j=0; j<n_vertices; j++) coarse_submat_vals[i*pcbddc->local_primal_size+j] = array[j];   /* WARNING -> column major ordering */
1778       ierr = VecRestoreArray(vec2_V,&array);CHKERRQ(ierr);
1779       if (n_constraints) {
1780         ierr = VecGetArray(vec1_C,&array);CHKERRQ(ierr);
1781         for (j=0; j<n_constraints; j++) coarse_submat_vals[i*pcbddc->local_primal_size+j+n_vertices] = array[j];   /* WARNING -> column major ordering */
1782         ierr = VecRestoreArray(vec1_C,&array);CHKERRQ(ierr);
1783       }
1784 
1785       if ( dbg_flag ) {
1786         /* assemble subdomain vector on nodes */
1787         ierr = VecSet(pcis->vec1_N,zero);CHKERRQ(ierr);
1788         ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1789         ierr = VecGetArray(pcbddc->vec1_R,&array2);CHKERRQ(ierr);
1790         for (j=0;j<n_R;j++) array[idx_R_local[j]] = array2[j];
1791         array[ vertices[i] ] = one;
1792         ierr = VecRestoreArray(pcbddc->vec1_R,&array2);CHKERRQ(ierr);
1793         ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1794         /* assemble subdomain vector of lagrange multipliers (i.e. primal nodes) */
1795         ierr = VecSet(pcbddc->vec1_P,zero);CHKERRQ(ierr);
1796         ierr = VecGetArray(pcbddc->vec1_P,&array2);CHKERRQ(ierr);
1797         ierr = VecGetArray(vec2_V,&array);CHKERRQ(ierr);
1798         for (j=0;j<n_vertices;j++) array2[j]=array[j];
1799         ierr = VecRestoreArray(vec2_V,&array);CHKERRQ(ierr);
1800         if (n_constraints) {
1801           ierr = VecGetArray(vec1_C,&array);CHKERRQ(ierr);
1802           for (j=0;j<n_constraints;j++) array2[j+n_vertices]=array[j];
1803           ierr = VecRestoreArray(vec1_C,&array);CHKERRQ(ierr);
1804         }
1805         ierr = VecRestoreArray(pcbddc->vec1_P,&array2);CHKERRQ(ierr);
1806         ierr = VecScale(pcbddc->vec1_P,m_one);CHKERRQ(ierr);
1807         /* check saddle point solution */
1808         ierr = MatMult(pcbddc->local_mat,pcis->vec1_N,pcis->vec2_N);CHKERRQ(ierr);
1809         ierr = MatMultTransposeAdd(pcbddc->ConstraintMatrix,pcbddc->vec1_P,pcis->vec2_N,pcis->vec2_N);CHKERRQ(ierr);
1810         ierr = VecNorm(pcis->vec2_N,NORM_INFINITY,&coarsefunctions_errors[i]);CHKERRQ(ierr);
1811         ierr = MatMult(pcbddc->ConstraintMatrix,pcis->vec1_N,pcbddc->vec1_P);CHKERRQ(ierr);
1812         ierr = VecGetArray(pcbddc->vec1_P,&array);CHKERRQ(ierr);
1813         array[i]=array[i]+m_one;  /* shift by the identity matrix */
1814         ierr = VecRestoreArray(pcbddc->vec1_P,&array);CHKERRQ(ierr);
1815         ierr = VecNorm(pcbddc->vec1_P,NORM_INFINITY,&constraints_errors[i]);CHKERRQ(ierr);
1816       }
1817     }
1818 
1819     for (i=0;i<n_constraints;i++){
1820       ierr = VecSet(vec2_C,zero);CHKERRQ(ierr);
1821       ierr = VecSetValue(vec2_C,i,m_one,INSERT_VALUES);CHKERRQ(ierr);
1822       ierr = VecAssemblyBegin(vec2_C);CHKERRQ(ierr);
1823       ierr = VecAssemblyEnd(vec2_C);CHKERRQ(ierr);
1824       /* solution of saddle point problem */
1825       ierr = MatMult(M1,vec2_C,vec1_C);CHKERRQ(ierr);
1826       ierr = MatMult(pcbddc->local_auxmat2,vec1_C,pcbddc->vec1_R);CHKERRQ(ierr);
1827       ierr = VecScale(vec1_C,m_one);CHKERRQ(ierr);
1828       if (n_vertices) { ierr = MatMult(A_VR,pcbddc->vec1_R,vec2_V);CHKERRQ(ierr); }
1829       /* Set values in coarse basis function and subdomain part of coarse_mat */
1830       /* coarse basis functions */
1831       index=i+n_vertices;
1832       ierr = VecSet(pcis->vec1_B,zero);CHKERRQ(ierr);
1833       ierr = VecScatterBegin(pcbddc->R_to_B,pcbddc->vec1_R,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1834       ierr = VecScatterEnd  (pcbddc->R_to_B,pcbddc->vec1_R,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1835       ierr = VecGetArray(pcis->vec1_B,&array);CHKERRQ(ierr);
1836       ierr = MatSetValues(pcbddc->coarse_phi_B,n_B,auxindices,1,&index,array,INSERT_VALUES);CHKERRQ(ierr);
1837       ierr = VecRestoreArray(pcis->vec1_B,&array);CHKERRQ(ierr);
1838       if ( pcbddc->inexact_prec_type || dbg_flag ) {
1839         ierr = VecScatterBegin(pcbddc->R_to_D,pcbddc->vec1_R,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1840         ierr = VecScatterEnd(pcbddc->R_to_D,pcbddc->vec1_R,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1841         ierr = VecGetArray(pcis->vec1_D,&array);CHKERRQ(ierr);
1842         ierr = MatSetValues(pcbddc->coarse_phi_D,n_D,auxindices,1,&index,array,INSERT_VALUES);CHKERRQ(ierr);
1843         ierr = VecRestoreArray(pcis->vec1_D,&array);CHKERRQ(ierr);
1844       }
1845       /* subdomain contribution to coarse matrix */
1846       if (n_vertices) {
1847         ierr = VecGetArray(vec2_V,&array);CHKERRQ(ierr);
1848         for (j=0; j<n_vertices; j++) coarse_submat_vals[index*pcbddc->local_primal_size+j]=array[j]; /* WARNING -> column major ordering */
1849         ierr = VecRestoreArray(vec2_V,&array);CHKERRQ(ierr);
1850       }
1851       ierr = VecGetArray(vec1_C,&array);CHKERRQ(ierr);
1852       for (j=0; j<n_constraints; j++) coarse_submat_vals[index*pcbddc->local_primal_size+j+n_vertices]=array[j]; /* WARNING -> column major ordering */
1853       ierr = VecRestoreArray(vec1_C,&array);CHKERRQ(ierr);
1854 
1855       if ( dbg_flag ) {
1856         /* assemble subdomain vector on nodes */
1857         ierr = VecSet(pcis->vec1_N,zero);CHKERRQ(ierr);
1858         ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1859         ierr = VecGetArray(pcbddc->vec1_R,&array2);CHKERRQ(ierr);
1860         for (j=0;j<n_R;j++) array[idx_R_local[j]] = array2[j];
1861         ierr = VecRestoreArray(pcbddc->vec1_R,&array2);CHKERRQ(ierr);
1862         ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
1863         /* assemble subdomain vector of lagrange multipliers */
1864         ierr = VecSet(pcbddc->vec1_P,zero);CHKERRQ(ierr);
1865         ierr = VecGetArray(pcbddc->vec1_P,&array2);CHKERRQ(ierr);
1866         if ( n_vertices) {
1867           ierr = VecGetArray(vec2_V,&array);CHKERRQ(ierr);
1868           for (j=0;j<n_vertices;j++) array2[j]=-array[j];
1869           ierr = VecRestoreArray(vec2_V,&array);CHKERRQ(ierr);
1870         }
1871         ierr = VecGetArray(vec1_C,&array);CHKERRQ(ierr);
1872         for (j=0;j<n_constraints;j++) {array2[j+n_vertices]=-array[j];}
1873         ierr = VecRestoreArray(vec1_C,&array);CHKERRQ(ierr);
1874         ierr = VecRestoreArray(pcbddc->vec1_P,&array2);CHKERRQ(ierr);
1875         /* check saddle point solution */
1876         ierr = MatMult(pcbddc->local_mat,pcis->vec1_N,pcis->vec2_N);CHKERRQ(ierr);
1877         ierr = MatMultTransposeAdd(pcbddc->ConstraintMatrix,pcbddc->vec1_P,pcis->vec2_N,pcis->vec2_N);CHKERRQ(ierr);
1878         ierr = VecNorm(pcis->vec2_N,NORM_INFINITY,&coarsefunctions_errors[index]);CHKERRQ(ierr);
1879         ierr = MatMult(pcbddc->ConstraintMatrix,pcis->vec1_N,pcbddc->vec1_P);CHKERRQ(ierr);
1880         ierr = VecGetArray(pcbddc->vec1_P,&array);CHKERRQ(ierr);
1881         array[index]=array[index]+m_one; /* shift by the identity matrix */
1882         ierr = VecRestoreArray(pcbddc->vec1_P,&array);CHKERRQ(ierr);
1883         ierr = VecNorm(pcbddc->vec1_P,NORM_INFINITY,&constraints_errors[index]);CHKERRQ(ierr);
1884       }
1885     }
1886     ierr = MatAssemblyBegin(pcbddc->coarse_phi_B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1887     ierr = MatAssemblyEnd  (pcbddc->coarse_phi_B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1888     if ( pcbddc->inexact_prec_type || dbg_flag ) {
1889       ierr = MatAssemblyBegin(pcbddc->coarse_phi_D,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1890       ierr = MatAssemblyEnd  (pcbddc->coarse_phi_D,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1891     }
1892     /* Check coarse_sub_mat and coarse basis functions */
1893     /* It should be \Phi^{(j)^T} A^{(j)} \Phi^{(j)} = coarse_sub_mat */
1894     if (dbg_flag) {
1895       Mat         coarse_sub_mat;
1896       Mat         TM1,TM2,TM3,TM4;
1897       Mat         coarse_phi_D,coarse_phi_B,A_II,A_BB,A_IB,A_BI;
1898       MatType     checkmattype=MATSEQAIJ;
1899       PetscScalar value;
1900 
1901       ierr = MatConvert(pcis->A_II,checkmattype,MAT_INITIAL_MATRIX,&A_II);CHKERRQ(ierr);
1902       ierr = MatConvert(pcis->A_IB,checkmattype,MAT_INITIAL_MATRIX,&A_IB);CHKERRQ(ierr);
1903       ierr = MatConvert(pcis->A_BI,checkmattype,MAT_INITIAL_MATRIX,&A_BI);CHKERRQ(ierr);
1904       ierr = MatConvert(pcis->A_BB,checkmattype,MAT_INITIAL_MATRIX,&A_BB);CHKERRQ(ierr);
1905       ierr = MatConvert(pcbddc->coarse_phi_D,checkmattype,MAT_INITIAL_MATRIX,&coarse_phi_D);CHKERRQ(ierr);
1906       ierr = MatConvert(pcbddc->coarse_phi_B,checkmattype,MAT_INITIAL_MATRIX,&coarse_phi_B);CHKERRQ(ierr);
1907       ierr = MatCreateSeqDense(PETSC_COMM_SELF,pcbddc->local_primal_size,pcbddc->local_primal_size,coarse_submat_vals,&coarse_sub_mat);CHKERRQ(ierr);
1908       ierr = MatConvert(coarse_sub_mat,checkmattype,MAT_REUSE_MATRIX,&coarse_sub_mat);CHKERRQ(ierr);
1909 
1910       ierr = PetscViewerASCIIPrintf(viewer,"--------------------------------------------------\n");CHKERRQ(ierr);
1911       ierr = PetscViewerASCIIPrintf(viewer,"Check coarse sub mat and local basis functions\n");CHKERRQ(ierr);
1912       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1913       ierr = MatPtAP(A_II,coarse_phi_D,MAT_INITIAL_MATRIX,1.0,&TM1);CHKERRQ(ierr);
1914       ierr = MatPtAP(A_BB,coarse_phi_B,MAT_INITIAL_MATRIX,1.0,&TM2);CHKERRQ(ierr);
1915       ierr = MatMatMult(A_IB,coarse_phi_B,MAT_INITIAL_MATRIX,1.0,&AUXMAT);CHKERRQ(ierr);
1916       ierr = MatTransposeMatMult(coarse_phi_D,AUXMAT,MAT_INITIAL_MATRIX,1.0,&TM3);CHKERRQ(ierr);
1917       ierr = MatDestroy(&AUXMAT);CHKERRQ(ierr);
1918       ierr = MatMatMult(A_BI,coarse_phi_D,MAT_INITIAL_MATRIX,1.0,&AUXMAT);CHKERRQ(ierr);
1919       ierr = MatTransposeMatMult(coarse_phi_B,AUXMAT,MAT_INITIAL_MATRIX,1.0,&TM4);CHKERRQ(ierr);
1920       ierr = MatDestroy(&AUXMAT);CHKERRQ(ierr);
1921       ierr = MatAXPY(TM1,one,TM2,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
1922       ierr = MatAXPY(TM1,one,TM3,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
1923       ierr = MatAXPY(TM1,one,TM4,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
1924       ierr = MatAXPY(TM1,m_one,coarse_sub_mat,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
1925       ierr = MatNorm(TM1,NORM_INFINITY,&value);CHKERRQ(ierr);
1926       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"----------------------------------\n");CHKERRQ(ierr);
1927       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d \n",PetscGlobalRank);CHKERRQ(ierr);
1928       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"matrix error = % 1.14e\n",value);CHKERRQ(ierr);
1929       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"coarse functions errors\n");CHKERRQ(ierr);
1930       for (i=0;i<pcbddc->local_primal_size;i++) { ierr = PetscViewerASCIISynchronizedPrintf(viewer,"local %02d-th function error = % 1.14e\n",i,coarsefunctions_errors[i]);CHKERRQ(ierr); }
1931       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"constraints errors\n");CHKERRQ(ierr);
1932       for (i=0;i<pcbddc->local_primal_size;i++) { ierr = PetscViewerASCIISynchronizedPrintf(viewer,"local %02d-th function error = % 1.14e\n",i,constraints_errors[i]);CHKERRQ(ierr); }
1933       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1934       ierr = MatDestroy(&A_II);CHKERRQ(ierr);
1935       ierr = MatDestroy(&A_BB);CHKERRQ(ierr);
1936       ierr = MatDestroy(&A_IB);CHKERRQ(ierr);
1937       ierr = MatDestroy(&A_BI);CHKERRQ(ierr);
1938       ierr = MatDestroy(&TM1);CHKERRQ(ierr);
1939       ierr = MatDestroy(&TM2);CHKERRQ(ierr);
1940       ierr = MatDestroy(&TM3);CHKERRQ(ierr);
1941       ierr = MatDestroy(&TM4);CHKERRQ(ierr);
1942       ierr = MatDestroy(&coarse_phi_D);CHKERRQ(ierr);
1943       ierr = MatDestroy(&coarse_sub_mat);CHKERRQ(ierr);
1944       ierr = MatDestroy(&coarse_phi_B);CHKERRQ(ierr);
1945       ierr = PetscFree(coarsefunctions_errors);CHKERRQ(ierr);
1946       ierr = PetscFree(constraints_errors);CHKERRQ(ierr);
1947     }
1948     /* free memory */
1949     if (n_vertices) {
1950       ierr = VecDestroy(&vec1_V);CHKERRQ(ierr);
1951       ierr = VecDestroy(&vec2_V);CHKERRQ(ierr);
1952       ierr = MatDestroy(&A_RV);CHKERRQ(ierr);
1953       ierr = MatDestroy(&A_VR);CHKERRQ(ierr);
1954       ierr = MatDestroy(&A_VV);CHKERRQ(ierr);
1955     }
1956     if (n_constraints) {
1957       ierr = VecDestroy(&vec1_C);CHKERRQ(ierr);
1958       ierr = VecDestroy(&vec2_C);CHKERRQ(ierr);
1959       ierr = MatDestroy(&M1);CHKERRQ(ierr);
1960       ierr = MatDestroy(&C_CR);CHKERRQ(ierr);
1961     }
1962     ierr = PetscFree(auxindices);CHKERRQ(ierr);
1963     ierr = PetscFree(nnz);CHKERRQ(ierr);
1964     /* create coarse matrix and data structures for message passing associated with the actual choice of coarse problem type */
1965     ierr = PCBDDCSetUpCoarseEnvironment(pc,coarse_submat_vals);CHKERRQ(ierr);
1966     ierr = PetscFree(coarse_submat_vals);CHKERRQ(ierr);
1967   }
1968   /* free memory */
1969   if (n_vertices) {
1970     ierr = ISDestroy(&is_V_local);CHKERRQ(ierr);
1971   }
1972   ierr = PetscFree(idx_V_B);CHKERRQ(ierr);
1973   ierr = ISLocalToGlobalMappingDestroy(&BtoNmap);CHKERRQ(ierr);
1974   ierr = ISDestroy(&is_R_local);CHKERRQ(ierr);
1975 
1976   PetscFunctionReturn(0);
1977 }
1978 
1979 /* -------------------------------------------------------------------------- */
1980 
1981 #undef __FUNCT__
1982 #define __FUNCT__ "PCBDDCSetUpCoarseEnvironment"
1983 static PetscErrorCode PCBDDCSetUpCoarseEnvironment(PC pc,PetscScalar* coarse_submat_vals)
1984 {
1985 
1986 
1987   Mat_IS    *matis    = (Mat_IS*)pc->pmat->data;
1988   PC_BDDC   *pcbddc   = (PC_BDDC*)pc->data;
1989   PC_IS     *pcis     = (PC_IS*)pc->data;
1990   MPI_Comm  prec_comm;
1991   MPI_Comm  coarse_comm;
1992 
1993   MatNullSpace CoarseNullSpace;
1994 
1995   /* common to all choices */
1996   PetscScalar *temp_coarse_mat_vals;
1997   PetscScalar *ins_coarse_mat_vals;
1998   PetscInt    *ins_local_primal_indices;
1999   PetscMPIInt *localsizes2,*localdispl2;
2000   PetscMPIInt size_prec_comm;
2001   PetscMPIInt rank_prec_comm;
2002   PetscMPIInt active_rank=MPI_PROC_NULL;
2003   PetscMPIInt master_proc=0;
2004   PetscInt    ins_local_primal_size;
2005   /* specific to MULTILEVEL_BDDC */
2006   PetscMPIInt *ranks_recv;
2007   PetscMPIInt count_recv=0;
2008   PetscMPIInt rank_coarse_proc_send_to;
2009   PetscMPIInt coarse_color = MPI_UNDEFINED;
2010   ISLocalToGlobalMapping coarse_ISLG;
2011   /* some other variables */
2012   PetscErrorCode ierr;
2013   MatType coarse_mat_type;
2014   PCType  coarse_pc_type;
2015   KSPType coarse_ksp_type;
2016   PC pc_temp;
2017   PetscInt i,j,k;
2018   PetscInt max_it_coarse_ksp=1;  /* don't increase this value */
2019   /* verbose output viewer */
2020   PetscViewer viewer=pcbddc->dbg_viewer;
2021   PetscBool   dbg_flag=pcbddc->dbg_flag;
2022 
2023   PetscInt      offset,offset2;
2024   PetscMPIInt   im_active,active_procs;
2025   PetscInt      *dnz,*onz;
2026 
2027   PetscBool     setsym,issym=PETSC_FALSE;
2028 
2029   PetscFunctionBegin;
2030   ierr = PetscObjectGetComm((PetscObject)pc,&prec_comm);CHKERRQ(ierr);
2031   ins_local_primal_indices = 0;
2032   ins_coarse_mat_vals      = 0;
2033   localsizes2              = 0;
2034   localdispl2              = 0;
2035   temp_coarse_mat_vals     = 0;
2036   coarse_ISLG              = 0;
2037 
2038   ierr = MPI_Comm_size(prec_comm,&size_prec_comm);CHKERRQ(ierr);
2039   ierr = MPI_Comm_rank(prec_comm,&rank_prec_comm);CHKERRQ(ierr);
2040   ierr = MatIsSymmetricKnown(pc->pmat,&setsym,&issym);CHKERRQ(ierr);
2041 
2042   /* Assign global numbering to coarse dofs */
2043   {
2044     PetscInt     *auxlocal_primal,*aux_idx;
2045     PetscMPIInt  mpi_local_primal_size;
2046     PetscScalar  coarsesum,*array;
2047 
2048     mpi_local_primal_size = (PetscMPIInt)pcbddc->local_primal_size;
2049 
2050     /* Construct needed data structures for message passing */
2051     j = 0;
2052     if (rank_prec_comm == 0 || pcbddc->coarse_problem_type == REPLICATED_BDDC || pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
2053       j = size_prec_comm;
2054     }
2055     ierr = PetscMalloc(j*sizeof(PetscMPIInt),&pcbddc->local_primal_sizes);CHKERRQ(ierr);
2056     ierr = PetscMalloc(j*sizeof(PetscMPIInt),&pcbddc->local_primal_displacements);CHKERRQ(ierr);
2057     /* Gather local_primal_size information for all processes  */
2058     if (pcbddc->coarse_problem_type == REPLICATED_BDDC || pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
2059       ierr = MPI_Allgather(&mpi_local_primal_size,1,MPIU_INT,&pcbddc->local_primal_sizes[0],1,MPIU_INT,prec_comm);CHKERRQ(ierr);
2060     } else {
2061       ierr = MPI_Gather(&mpi_local_primal_size,1,MPIU_INT,&pcbddc->local_primal_sizes[0],1,MPIU_INT,0,prec_comm);CHKERRQ(ierr);
2062     }
2063     pcbddc->replicated_primal_size = 0;
2064     for (i=0; i<j; i++) {
2065       pcbddc->local_primal_displacements[i] = pcbddc->replicated_primal_size ;
2066       pcbddc->replicated_primal_size += pcbddc->local_primal_sizes[i];
2067     }
2068 
2069     /* First let's count coarse dofs.
2070        This code fragment assumes that the number of local constraints per connected component
2071        is not greater than the number of nodes defined for the connected component
2072        (otherwise we will surely have linear dependence between constraints and thus a singular coarse problem) */
2073     ierr = PetscMalloc(pcbddc->local_primal_size*sizeof(PetscInt),&auxlocal_primal);CHKERRQ(ierr);
2074     ierr = PCBDDCGetPrimalVerticesLocalIdx(pc,&i,&aux_idx);CHKERRQ(ierr);
2075     ierr = PetscMemcpy(auxlocal_primal,aux_idx,i*sizeof(PetscInt));CHKERRQ(ierr);
2076     ierr = PetscFree(aux_idx);CHKERRQ(ierr);
2077     ierr = PCBDDCGetPrimalConstraintsLocalIdx(pc,&j,&aux_idx);CHKERRQ(ierr);
2078     ierr = PetscMemcpy(&auxlocal_primal[i],aux_idx,j*sizeof(PetscInt));CHKERRQ(ierr);
2079     ierr = PetscFree(aux_idx);CHKERRQ(ierr);
2080     /* Compute number of coarse dofs */
2081     ierr = PCBDDCSubsetNumbering(prec_comm,matis->mapping,pcbddc->local_primal_size,auxlocal_primal,NULL,&pcbddc->coarse_size,&pcbddc->local_primal_indices);CHKERRQ(ierr);
2082 
2083     if (dbg_flag) {
2084       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
2085       ierr = PetscViewerASCIIPrintf(viewer,"--------------------------------------------------\n");CHKERRQ(ierr);
2086       ierr = PetscViewerASCIIPrintf(viewer,"Check coarse indices\n");CHKERRQ(ierr);
2087       ierr = VecSet(pcis->vec1_N,0.0);CHKERRQ(ierr);
2088       ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
2089       for (i=0;i<pcbddc->local_primal_size;i++) array[auxlocal_primal[i]]=1.0;
2090       ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
2091       ierr = VecSet(pcis->vec1_global,0.0);CHKERRQ(ierr);
2092       ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
2093       ierr = VecScatterEnd  (matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
2094       ierr = VecScatterBegin(matis->ctx,pcis->vec1_global,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2095       ierr = VecScatterEnd  (matis->ctx,pcis->vec1_global,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2096       ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
2097       for (i=0;i<pcis->n;i++) {
2098         if (array[i] == 1.0) {
2099           ierr = ISLocalToGlobalMappingApply(matis->mapping,1,&i,&j);CHKERRQ(ierr);
2100           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d: WRONG COARSE INDEX %d (local %d)\n",PetscGlobalRank,j,i);CHKERRQ(ierr);
2101         }
2102       }
2103       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
2104       for (i=0;i<pcis->n;i++) {
2105         if (array[i] > 0.0) array[i] = 1.0/array[i];
2106       }
2107       ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
2108       ierr = VecSet(pcis->vec1_global,0.0);CHKERRQ(ierr);
2109       ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
2110       ierr = VecScatterEnd  (matis->ctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
2111       ierr = VecSum(pcis->vec1_global,&coarsesum);CHKERRQ(ierr);
2112       ierr = PetscViewerASCIIPrintf(viewer,"Size of coarse problem SHOULD be %lf\n",coarsesum);CHKERRQ(ierr);
2113       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
2114     }
2115     ierr = PetscFree(auxlocal_primal);CHKERRQ(ierr);
2116   }
2117 
2118   if (dbg_flag) {
2119     ierr = PetscViewerASCIIPrintf(viewer,"Size of coarse problem is %d\n",pcbddc->coarse_size);CHKERRQ(ierr);
2120     ierr = PetscViewerASCIIPrintf(viewer,"Distribution of local primal indices\n");CHKERRQ(ierr);
2121     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
2122     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d\n",PetscGlobalRank);CHKERRQ(ierr);
2123     for (i=0;i<pcbddc->local_primal_size;i++) {
2124       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"local_primal_indices[%d]=%d \n",i,pcbddc->local_primal_indices[i]);
2125     }
2126     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
2127   }
2128 
2129   im_active = 0;
2130   if (pcis->n) im_active = 1;
2131   ierr = MPI_Allreduce(&im_active,&active_procs,1,MPIU_INT,MPI_SUM,prec_comm);CHKERRQ(ierr);
2132 
2133   /* adapt coarse problem type */
2134   if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
2135     if (pcbddc->current_level < pcbddc->max_levels) {
2136       if ( (active_procs/pcbddc->coarsening_ratio) < 2 ) {
2137         if (dbg_flag) {
2138           ierr = PetscViewerASCIIPrintf(viewer,"Not enough active processes on level %d (active %d,ratio %d). Parallel direct solve for coarse problem\n",pcbddc->current_level,active_procs,pcbddc->coarsening_ratio);CHKERRQ(ierr);
2139          ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
2140         }
2141         pcbddc->coarse_problem_type = PARALLEL_BDDC;
2142       }
2143     } else {
2144       if (dbg_flag) {
2145         ierr = PetscViewerASCIIPrintf(viewer,"Max number of levels reached. Using parallel direct solve for coarse problem\n",pcbddc->max_levels,active_procs,pcbddc->coarsening_ratio);CHKERRQ(ierr);
2146         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
2147       }
2148       pcbddc->coarse_problem_type = PARALLEL_BDDC;
2149     }
2150   }
2151 
2152   switch(pcbddc->coarse_problem_type){
2153 
2154     case(MULTILEVEL_BDDC):   /* we define a coarse mesh where subdomains are elements */
2155     {
2156       /* we need additional variables */
2157       MetisInt    n_subdomains,n_parts,objval,ncon,faces_nvtxs;
2158       MetisInt    *metis_coarse_subdivision;
2159       MetisInt    options[METIS_NOPTIONS];
2160       PetscMPIInt size_coarse_comm,rank_coarse_comm;
2161       PetscMPIInt procs_jumps_coarse_comm;
2162       PetscMPIInt *coarse_subdivision;
2163       PetscMPIInt *total_count_recv;
2164       PetscMPIInt *total_ranks_recv;
2165       PetscMPIInt *displacements_recv;
2166       PetscMPIInt *my_faces_connectivity;
2167       PetscMPIInt *petsc_faces_adjncy;
2168       MetisInt    *faces_adjncy;
2169       MetisInt    *faces_xadj;
2170       PetscMPIInt *number_of_faces;
2171       PetscMPIInt *faces_displacements;
2172       PetscInt    *array_int;
2173       PetscMPIInt my_faces=0;
2174       PetscMPIInt total_faces=0;
2175       PetscInt    ranks_stretching_ratio;
2176 
2177       /* define some quantities */
2178       pcbddc->coarse_communications_type = SCATTERS_BDDC;
2179       coarse_mat_type = MATIS;
2180       coarse_pc_type  = PCBDDC;
2181       coarse_ksp_type = KSPRICHARDSON;
2182 
2183       /* details of coarse decomposition */
2184       n_subdomains = active_procs;
2185       n_parts      = n_subdomains/pcbddc->coarsening_ratio;
2186       ranks_stretching_ratio = size_prec_comm/active_procs;
2187       procs_jumps_coarse_comm = pcbddc->coarsening_ratio*ranks_stretching_ratio;
2188 
2189 #if 0
2190       PetscMPIInt *old_ranks;
2191       PetscInt    *new_ranks,*jj,*ii;
2192       MatPartitioning mat_part;
2193       IS coarse_new_decomposition,is_numbering;
2194       PetscViewer viewer_test;
2195       MPI_Comm    test_coarse_comm;
2196       PetscMPIInt test_coarse_color;
2197       Mat         mat_adj;
2198       /* Create new communicator for coarse problem splitting the old one */
2199       /* procs with coarse_color = MPI_UNDEFINED will have coarse_comm = MPI_COMM_NULL (from mpi standards)
2200          key = rank_prec_comm -> keep same ordering of ranks from the old to the new communicator */
2201       test_coarse_color = ( im_active ? 0 : MPI_UNDEFINED );
2202       test_coarse_comm = MPI_COMM_NULL;
2203       ierr = MPI_Comm_split(prec_comm,test_coarse_color,rank_prec_comm,&test_coarse_comm);CHKERRQ(ierr);
2204       if (im_active) {
2205         ierr = PetscMalloc(n_subdomains*sizeof(PetscMPIInt),&old_ranks);
2206         ierr = PetscMalloc(size_prec_comm*sizeof(PetscInt),&new_ranks);
2207         ierr = MPI_Comm_rank(test_coarse_comm,&rank_coarse_comm);CHKERRQ(ierr);
2208         ierr = MPI_Comm_size(test_coarse_comm,&j);CHKERRQ(ierr);
2209         ierr = MPI_Allgather(&rank_prec_comm,1,MPIU_INT,old_ranks,1,MPIU_INT,test_coarse_comm);CHKERRQ(ierr);
2210         for (i=0; i<size_prec_comm; i++) new_ranks[i] = -1;
2211         for (i=0; i<n_subdomains; i++) new_ranks[old_ranks[i]] = i;
2212         ierr = PetscViewerASCIIOpen(test_coarse_comm,"test_mat_part.out",&viewer_test);CHKERRQ(ierr);
2213         k = pcis->n_neigh-1;
2214         ierr = PetscMalloc(2*sizeof(PetscInt),&ii);
2215         ii[0]=0;
2216         ii[1]=k;
2217         ierr = PetscMalloc(k*sizeof(PetscInt),&jj);
2218         for (i=0; i<k; i++) jj[i]=new_ranks[pcis->neigh[i+1]];
2219         ierr = PetscSortInt(k,jj);CHKERRQ(ierr);
2220         ierr = MatCreateMPIAdj(test_coarse_comm,1,n_subdomains,ii,jj,NULL,&mat_adj);CHKERRQ(ierr);
2221         ierr = MatView(mat_adj,viewer_test);CHKERRQ(ierr);
2222         ierr = MatPartitioningCreate(test_coarse_comm,&mat_part);CHKERRQ(ierr);
2223         ierr = MatPartitioningSetAdjacency(mat_part,mat_adj);CHKERRQ(ierr);
2224         ierr = MatPartitioningSetFromOptions(mat_part);CHKERRQ(ierr);
2225         printf("Setting Nparts %d\n",n_parts);
2226         ierr = MatPartitioningSetNParts(mat_part,n_parts);CHKERRQ(ierr);
2227         ierr = MatPartitioningView(mat_part,viewer_test);CHKERRQ(ierr);
2228         ierr = MatPartitioningApply(mat_part,&coarse_new_decomposition);CHKERRQ(ierr);
2229         ierr = ISView(coarse_new_decomposition,viewer_test);CHKERRQ(ierr);
2230         ierr = ISPartitioningToNumbering(coarse_new_decomposition,&is_numbering);CHKERRQ(ierr);
2231         ierr = ISView(is_numbering,viewer_test);CHKERRQ(ierr);
2232         ierr = PetscViewerDestroy(&viewer_test);CHKERRQ(ierr);
2233         ierr = ISDestroy(&coarse_new_decomposition);CHKERRQ(ierr);
2234         ierr = ISDestroy(&is_numbering);CHKERRQ(ierr);
2235         ierr = MatPartitioningDestroy(&mat_part);CHKERRQ(ierr);
2236         ierr = PetscFree(old_ranks);CHKERRQ(ierr);
2237         ierr = PetscFree(new_ranks);CHKERRQ(ierr);
2238         ierr = MPI_Comm_free(&test_coarse_comm);CHKERRQ(ierr);
2239       }
2240 #endif
2241 
2242       /* build CSR graph of subdomains' connectivity */
2243       ierr = PetscMalloc (pcis->n*sizeof(PetscInt),&array_int);CHKERRQ(ierr);
2244       ierr = PetscMemzero(array_int,pcis->n*sizeof(PetscInt));CHKERRQ(ierr);
2245       for (i=1;i<pcis->n_neigh;i++){/* i=1 so I don't count myself -> faces nodes counts to 1 */
2246         for (j=0;j<pcis->n_shared[i];j++){
2247           array_int[ pcis->shared[i][j] ]+=1;
2248         }
2249       }
2250       for (i=1;i<pcis->n_neigh;i++){
2251         for (j=0;j<pcis->n_shared[i];j++){
2252           if (array_int[ pcis->shared[i][j] ] > 0 ){
2253             my_faces++;
2254             break;
2255           }
2256         }
2257       }
2258 
2259       ierr = MPI_Reduce(&my_faces,&total_faces,1,MPIU_INT,MPI_SUM,master_proc,prec_comm);CHKERRQ(ierr);
2260       ierr = PetscMalloc (my_faces*sizeof(PetscInt),&my_faces_connectivity);CHKERRQ(ierr);
2261       my_faces=0;
2262       for (i=1;i<pcis->n_neigh;i++){
2263         for (j=0;j<pcis->n_shared[i];j++){
2264           if (array_int[ pcis->shared[i][j] ] > 0 ){
2265             my_faces_connectivity[my_faces]=pcis->neigh[i];
2266             my_faces++;
2267             break;
2268           }
2269         }
2270       }
2271       if (rank_prec_comm == master_proc) {
2272         ierr = PetscMalloc (total_faces*sizeof(PetscMPIInt),&petsc_faces_adjncy);CHKERRQ(ierr);
2273         ierr = PetscMalloc (size_prec_comm*sizeof(PetscMPIInt),&number_of_faces);CHKERRQ(ierr);
2274         ierr = PetscMalloc (total_faces*sizeof(MetisInt),&faces_adjncy);CHKERRQ(ierr);
2275         ierr = PetscMalloc ((n_subdomains+1)*sizeof(MetisInt),&faces_xadj);CHKERRQ(ierr);
2276         ierr = PetscMalloc ((size_prec_comm+1)*sizeof(PetscMPIInt),&faces_displacements);CHKERRQ(ierr);
2277       }
2278       ierr = MPI_Gather(&my_faces,1,MPIU_INT,&number_of_faces[0],1,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr);
2279       if (rank_prec_comm == master_proc) {
2280         faces_xadj[0]=0;
2281         faces_displacements[0]=0;
2282         j=0;
2283         for (i=1;i<size_prec_comm+1;i++) {
2284           faces_displacements[i]=faces_displacements[i-1]+number_of_faces[i-1];
2285           if (number_of_faces[i-1]) {
2286             j++;
2287             faces_xadj[j]=faces_xadj[j-1]+number_of_faces[i-1];
2288           }
2289         }
2290       }
2291       ierr = MPI_Gatherv(&my_faces_connectivity[0],my_faces,MPIU_INT,&petsc_faces_adjncy[0],number_of_faces,faces_displacements,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr);
2292       ierr = PetscFree(my_faces_connectivity);CHKERRQ(ierr);
2293       ierr = PetscFree(array_int);CHKERRQ(ierr);
2294       if (rank_prec_comm == master_proc) {
2295         for (i=0;i<total_faces;i++) faces_adjncy[i]=(MetisInt)(petsc_faces_adjncy[i]/ranks_stretching_ratio); /* cast to MetisInt */
2296         ierr = PetscFree(faces_displacements);CHKERRQ(ierr);
2297         ierr = PetscFree(number_of_faces);CHKERRQ(ierr);
2298         ierr = PetscFree(petsc_faces_adjncy);CHKERRQ(ierr);
2299       }
2300 
2301       if ( rank_prec_comm == master_proc ) {
2302 
2303         PetscInt heuristic_for_metis=3;
2304 
2305         ncon=1;
2306         faces_nvtxs=n_subdomains;
2307         /* partition graph induced by face connectivity */
2308         ierr = PetscMalloc (n_subdomains*sizeof(MetisInt),&metis_coarse_subdivision);CHKERRQ(ierr);
2309         ierr = METIS_SetDefaultOptions(options);
2310         /* we need a contiguous partition of the coarse mesh */
2311         options[METIS_OPTION_CONTIG]=1;
2312         options[METIS_OPTION_NITER]=30;
2313         if (pcbddc->coarsening_ratio > 1) {
2314           if (n_subdomains>n_parts*heuristic_for_metis) {
2315             options[METIS_OPTION_IPTYPE]=METIS_IPTYPE_EDGE;
2316             options[METIS_OPTION_OBJTYPE]=METIS_OBJTYPE_CUT;
2317             ierr = METIS_PartGraphKway(&faces_nvtxs,&ncon,faces_xadj,faces_adjncy,NULL,NULL,NULL,&n_parts,NULL,NULL,options,&objval,metis_coarse_subdivision);
2318             if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in METIS_PartGraphKway (metis error code %D) called from PCBDDCSetUpCoarseEnvironment\n",ierr);
2319           } else {
2320             ierr = METIS_PartGraphRecursive(&faces_nvtxs,&ncon,faces_xadj,faces_adjncy,NULL,NULL,NULL,&n_parts,NULL,NULL,options,&objval,metis_coarse_subdivision);
2321             if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in METIS_PartGraphRecursive (metis error code %D) called from PCBDDCSetUpCoarseEnvironment\n",ierr);
2322           }
2323         } else {
2324           for (i=0;i<n_subdomains;i++) metis_coarse_subdivision[i]=i;
2325         }
2326         ierr = PetscFree(faces_xadj);CHKERRQ(ierr);
2327         ierr = PetscFree(faces_adjncy);CHKERRQ(ierr);
2328         ierr = PetscMalloc(size_prec_comm*sizeof(PetscMPIInt),&coarse_subdivision);CHKERRQ(ierr);
2329 
2330         /* copy/cast values avoiding possible type conflicts between PETSc, MPI and METIS */
2331         for (i=0;i<size_prec_comm;i++) coarse_subdivision[i]=MPI_PROC_NULL;
2332         for (i=0;i<n_subdomains;i++) coarse_subdivision[ranks_stretching_ratio*i]=(PetscInt)(metis_coarse_subdivision[i]);
2333         ierr = PetscFree(metis_coarse_subdivision);CHKERRQ(ierr);
2334       }
2335 
2336       /* Create new communicator for coarse problem splitting the old one */
2337       if ( !(rank_prec_comm%procs_jumps_coarse_comm) && rank_prec_comm < procs_jumps_coarse_comm*n_parts ){
2338         coarse_color=0;              /* for communicator splitting */
2339         active_rank=rank_prec_comm;  /* for insertion of matrix values */
2340       }
2341       /* procs with coarse_color = MPI_UNDEFINED will have coarse_comm = MPI_COMM_NULL (from mpi standards)
2342          key = rank_prec_comm -> keep same ordering of ranks from the old to the new communicator */
2343       ierr = MPI_Comm_split(prec_comm,coarse_color,rank_prec_comm,&coarse_comm);CHKERRQ(ierr);
2344 
2345       if ( coarse_color == 0 ) {
2346         ierr = MPI_Comm_size(coarse_comm,&size_coarse_comm);CHKERRQ(ierr);
2347         ierr = MPI_Comm_rank(coarse_comm,&rank_coarse_comm);CHKERRQ(ierr);
2348       } else {
2349         rank_coarse_comm = MPI_PROC_NULL;
2350       }
2351 
2352       /* master proc take care of arranging and distributing coarse information */
2353       if (rank_coarse_comm == master_proc) {
2354         ierr = PetscMalloc (size_coarse_comm*sizeof(PetscMPIInt),&displacements_recv);CHKERRQ(ierr);
2355         ierr = PetscMalloc (size_coarse_comm*sizeof(PetscMPIInt),&total_count_recv);CHKERRQ(ierr);
2356         ierr = PetscMalloc (n_subdomains*sizeof(PetscMPIInt),&total_ranks_recv);CHKERRQ(ierr);
2357         /* some initializations */
2358         displacements_recv[0]=0;
2359         ierr = PetscMemzero(total_count_recv,size_coarse_comm*sizeof(PetscMPIInt));CHKERRQ(ierr);
2360         /* count from how many processes the j-th process of the coarse decomposition will receive data */
2361         for (j=0;j<size_coarse_comm;j++) {
2362           for (i=0;i<size_prec_comm;i++) {
2363           if (coarse_subdivision[i]==j) total_count_recv[j]++;
2364           }
2365         }
2366         /* displacements needed for scatterv of total_ranks_recv */
2367       for (i=1; i<size_coarse_comm; i++) displacements_recv[i]=displacements_recv[i-1]+total_count_recv[i-1];
2368 
2369         /* Now fill properly total_ranks_recv -> each coarse process will receive the ranks (in prec_comm communicator) of its friend (sending) processes */
2370         ierr = PetscMemzero(total_count_recv,size_coarse_comm*sizeof(PetscMPIInt));CHKERRQ(ierr);
2371         for (j=0;j<size_coarse_comm;j++) {
2372           for (i=0;i<size_prec_comm;i++) {
2373             if (coarse_subdivision[i]==j) {
2374               total_ranks_recv[displacements_recv[j]+total_count_recv[j]]=i;
2375               total_count_recv[j]+=1;
2376             }
2377           }
2378         }
2379         /*for (j=0;j<size_coarse_comm;j++) {
2380           printf("process %d in new rank will receive from %d processes (original ranks follows)\n",j,total_count_recv[j]);
2381           for (i=0;i<total_count_recv[j];i++) {
2382             printf("%d ",total_ranks_recv[displacements_recv[j]+i]);
2383           }
2384           printf("\n");
2385         }*/
2386 
2387         /* identify new decomposition in terms of ranks in the old communicator */
2388         for (i=0;i<n_subdomains;i++) {
2389           coarse_subdivision[ranks_stretching_ratio*i]=coarse_subdivision[ranks_stretching_ratio*i]*procs_jumps_coarse_comm;
2390         }
2391         /*printf("coarse_subdivision in old end new ranks\n");
2392         for (i=0;i<size_prec_comm;i++)
2393           if (coarse_subdivision[i]!=MPI_PROC_NULL) {
2394             printf("%d=(%d %d), ",i,coarse_subdivision[i],coarse_subdivision[i]/procs_jumps_coarse_comm);
2395           } else {
2396             printf("%d=(%d %d), ",i,coarse_subdivision[i],coarse_subdivision[i]);
2397           }
2398         printf("\n");*/
2399       }
2400 
2401       /* Scatter new decomposition for send details */
2402       ierr = MPI_Scatter(&coarse_subdivision[0],1,MPIU_INT,&rank_coarse_proc_send_to,1,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr);
2403       /* Scatter receiving details to members of coarse decomposition */
2404       if ( coarse_color == 0) {
2405         ierr = MPI_Scatter(&total_count_recv[0],1,MPIU_INT,&count_recv,1,MPIU_INT,master_proc,coarse_comm);CHKERRQ(ierr);
2406         ierr = PetscMalloc (count_recv*sizeof(PetscMPIInt),&ranks_recv);CHKERRQ(ierr);
2407         ierr = MPI_Scatterv(&total_ranks_recv[0],total_count_recv,displacements_recv,MPIU_INT,&ranks_recv[0],count_recv,MPIU_INT,master_proc,coarse_comm);CHKERRQ(ierr);
2408       }
2409 
2410       /*printf("I will send my matrix data to proc  %d\n",rank_coarse_proc_send_to);
2411       if (coarse_color == 0) {
2412         printf("I will receive some matrix data from %d processes (ranks follows)\n",count_recv);
2413         for (i=0;i<count_recv;i++)
2414           printf("%d ",ranks_recv[i]);
2415         printf("\n");
2416       }*/
2417 
2418       if (rank_prec_comm == master_proc) {
2419         ierr = PetscFree(coarse_subdivision);CHKERRQ(ierr);
2420         ierr = PetscFree(total_count_recv);CHKERRQ(ierr);
2421         ierr = PetscFree(total_ranks_recv);CHKERRQ(ierr);
2422         ierr = PetscFree(displacements_recv);CHKERRQ(ierr);
2423       }
2424       break;
2425     }
2426 
2427     case(REPLICATED_BDDC):
2428 
2429       pcbddc->coarse_communications_type = GATHERS_BDDC;
2430       coarse_mat_type = MATSEQAIJ;
2431       coarse_pc_type  = PCLU;
2432       coarse_ksp_type  = KSPPREONLY;
2433       coarse_comm = PETSC_COMM_SELF;
2434       active_rank = rank_prec_comm;
2435       break;
2436 
2437     case(PARALLEL_BDDC):
2438 
2439       pcbddc->coarse_communications_type = SCATTERS_BDDC;
2440       coarse_mat_type = MATAIJ;
2441       coarse_pc_type  = PCREDUNDANT;
2442       coarse_ksp_type  = KSPPREONLY;
2443       coarse_comm = prec_comm;
2444       active_rank = rank_prec_comm;
2445       break;
2446 
2447     case(SEQUENTIAL_BDDC):
2448       pcbddc->coarse_communications_type = GATHERS_BDDC;
2449       coarse_mat_type = MATAIJ;
2450       coarse_pc_type = PCLU;
2451       coarse_ksp_type  = KSPPREONLY;
2452       coarse_comm = PETSC_COMM_SELF;
2453       active_rank = master_proc;
2454       break;
2455   }
2456 
2457   switch(pcbddc->coarse_communications_type){
2458 
2459     case(SCATTERS_BDDC):
2460       {
2461         if (pcbddc->coarse_problem_type==MULTILEVEL_BDDC) {
2462 
2463           IS coarse_IS;
2464 
2465           if(pcbddc->coarsening_ratio == 1) {
2466             ins_local_primal_size = pcbddc->local_primal_size;
2467             ins_local_primal_indices = pcbddc->local_primal_indices;
2468             if (coarse_color == 0) { ierr = PetscFree(ranks_recv);CHKERRQ(ierr); }
2469             /* nonzeros */
2470             ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&dnz);CHKERRQ(ierr);
2471             ierr = PetscMemzero(dnz,ins_local_primal_size*sizeof(PetscInt));CHKERRQ(ierr);
2472             for (i=0;i<ins_local_primal_size;i++) {
2473               dnz[i] = ins_local_primal_size;
2474             }
2475           } else {
2476             PetscMPIInt send_size;
2477             PetscMPIInt *send_buffer;
2478             PetscInt    *aux_ins_indices;
2479             PetscInt    ii,jj;
2480             MPI_Request *requests;
2481 
2482             ierr = PetscMalloc(count_recv*sizeof(PetscMPIInt),&localdispl2);CHKERRQ(ierr);
2483             /* reusing pcbddc->local_primal_displacements and pcbddc->replicated_primal_size */
2484             ierr = PetscFree(pcbddc->local_primal_displacements);CHKERRQ(ierr);
2485             ierr = PetscMalloc((count_recv+1)*sizeof(PetscMPIInt),&pcbddc->local_primal_displacements);CHKERRQ(ierr);
2486             pcbddc->replicated_primal_size = count_recv;
2487             j = 0;
2488             for (i=0;i<count_recv;i++) {
2489               pcbddc->local_primal_displacements[i] = j;
2490               j += pcbddc->local_primal_sizes[ranks_recv[i]];
2491             }
2492             pcbddc->local_primal_displacements[count_recv] = j;
2493             ierr = PetscMalloc(j*sizeof(PetscMPIInt),&pcbddc->replicated_local_primal_indices);CHKERRQ(ierr);
2494             /* allocate auxiliary space */
2495             ierr = PetscMalloc(count_recv*sizeof(PetscMPIInt),&localsizes2);CHKERRQ(ierr);
2496             ierr = PetscMalloc(pcbddc->coarse_size*sizeof(PetscInt),&aux_ins_indices);CHKERRQ(ierr);
2497             ierr = PetscMemzero(aux_ins_indices,pcbddc->coarse_size*sizeof(PetscInt));CHKERRQ(ierr);
2498             /* allocate requests for message passing */
2499             ierr = PetscMalloc((count_recv+1)*sizeof(MPI_Request),&requests);CHKERRQ(ierr);
2500             for (i=0;i<count_recv+1;i++) { requests[i]=MPI_REQUEST_NULL; }
2501             /* send indices to be inserted */
2502             for (i=0;i<count_recv;i++) {
2503               send_size = pcbddc->local_primal_sizes[ranks_recv[i]];
2504               ierr = MPI_Irecv(&pcbddc->replicated_local_primal_indices[pcbddc->local_primal_displacements[i]],send_size,MPIU_INT,ranks_recv[i],999,prec_comm,&requests[i]);CHKERRQ(ierr);
2505             }
2506             if (rank_coarse_proc_send_to != MPI_PROC_NULL ) {
2507               send_size = pcbddc->local_primal_size;
2508               ierr = PetscMalloc(send_size*sizeof(PetscMPIInt),&send_buffer);CHKERRQ(ierr);
2509               for (i=0;i<send_size;i++) {
2510                 send_buffer[i]=(PetscMPIInt)pcbddc->local_primal_indices[i];
2511               }
2512               ierr = MPI_Isend(send_buffer,send_size,MPIU_INT,rank_coarse_proc_send_to,999,prec_comm,&requests[count_recv]);CHKERRQ(ierr);
2513             }
2514             ierr = MPI_Waitall(count_recv+1,requests,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
2515             if (rank_coarse_proc_send_to != MPI_PROC_NULL ) {
2516               ierr = PetscFree(send_buffer);CHKERRQ(ierr);
2517             }
2518             j = 0;
2519             for (i=0;i<count_recv;i++) {
2520               ii = pcbddc->local_primal_displacements[i+1]-pcbddc->local_primal_displacements[i];
2521               localsizes2[i] = ii*ii;
2522               localdispl2[i] = j;
2523               j += localsizes2[i];
2524               jj = pcbddc->local_primal_displacements[i];
2525               /* it counts the coarse subdomains sharing the coarse node */
2526               for (k=0;k<ii;k++) {
2527                 aux_ins_indices[pcbddc->replicated_local_primal_indices[jj+k]] += 1;
2528               }
2529             }
2530             /* temp_coarse_mat_vals used to store matrix values to be received */
2531             ierr = PetscMalloc(j*sizeof(PetscScalar),&temp_coarse_mat_vals);CHKERRQ(ierr);
2532             /* evaluate how many values I will insert in coarse mat */
2533             ins_local_primal_size = 0;
2534             for (i=0;i<pcbddc->coarse_size;i++) {
2535               if (aux_ins_indices[i]) {
2536                 ins_local_primal_size++;
2537               }
2538             }
2539             /* evaluate indices I will insert in coarse mat */
2540             ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
2541             j = 0;
2542             for(i=0;i<pcbddc->coarse_size;i++) {
2543               if(aux_ins_indices[i]) {
2544                 ins_local_primal_indices[j] = i;
2545                 j++;
2546               }
2547             }
2548             /* processes participating in coarse problem receive matrix data from their friends */
2549             for (i=0;i<count_recv;i++) {
2550               ierr = MPI_Irecv(&temp_coarse_mat_vals[localdispl2[i]],localsizes2[i],MPIU_SCALAR,ranks_recv[i],666,prec_comm,&requests[i]);CHKERRQ(ierr);
2551             }
2552             if (rank_coarse_proc_send_to != MPI_PROC_NULL ) {
2553               send_size = pcbddc->local_primal_size*pcbddc->local_primal_size;
2554               ierr = MPI_Isend(&coarse_submat_vals[0],send_size,MPIU_SCALAR,rank_coarse_proc_send_to,666,prec_comm,&requests[count_recv]);CHKERRQ(ierr);
2555             }
2556             ierr = MPI_Waitall(count_recv+1,requests,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
2557             /* nonzeros */
2558             ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&dnz);CHKERRQ(ierr);
2559             ierr = PetscMemzero(dnz,ins_local_primal_size*sizeof(PetscInt));CHKERRQ(ierr);
2560             /* use aux_ins_indices to realize a global to local mapping */
2561             j=0;
2562             for(i=0;i<pcbddc->coarse_size;i++){
2563               if(aux_ins_indices[i]==0){
2564                 aux_ins_indices[i]=-1;
2565               } else {
2566                 aux_ins_indices[i]=j;
2567                 j++;
2568               }
2569             }
2570             for (i=0;i<count_recv;i++) {
2571               j = pcbddc->local_primal_sizes[ranks_recv[i]];
2572               for (k=0;k<j;k++) {
2573                 dnz[aux_ins_indices[pcbddc->replicated_local_primal_indices[pcbddc->local_primal_displacements[i]+k]]] += j;
2574               }
2575             }
2576             /* check */
2577             for (i=0;i<ins_local_primal_size;i++) {
2578               if (dnz[i] > ins_local_primal_size) {
2579                 dnz[i] = ins_local_primal_size;
2580               }
2581             }
2582             ierr = PetscFree(requests);CHKERRQ(ierr);
2583             ierr = PetscFree(aux_ins_indices);CHKERRQ(ierr);
2584             if (coarse_color == 0) { ierr = PetscFree(ranks_recv);CHKERRQ(ierr); }
2585           }
2586           /* create local to global mapping needed by coarse MATIS */
2587           if (coarse_comm != MPI_COMM_NULL ) {ierr = MPI_Comm_free(&coarse_comm);CHKERRQ(ierr);}
2588           coarse_comm = prec_comm;
2589           active_rank = rank_prec_comm;
2590           ierr = ISCreateGeneral(coarse_comm,ins_local_primal_size,ins_local_primal_indices,PETSC_COPY_VALUES,&coarse_IS);CHKERRQ(ierr);
2591           ierr = ISLocalToGlobalMappingCreateIS(coarse_IS,&coarse_ISLG);CHKERRQ(ierr);
2592           ierr = ISDestroy(&coarse_IS);CHKERRQ(ierr);
2593         } else if (pcbddc->coarse_problem_type==PARALLEL_BDDC) {
2594           /* arrays for values insertion */
2595           ins_local_primal_size = pcbddc->local_primal_size;
2596           ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
2597           ierr = PetscMalloc(ins_local_primal_size*ins_local_primal_size*sizeof(PetscScalar),&ins_coarse_mat_vals);CHKERRQ(ierr);
2598           for (j=0;j<ins_local_primal_size;j++){
2599             ins_local_primal_indices[j]=pcbddc->local_primal_indices[j];
2600             for (i=0;i<ins_local_primal_size;i++) {
2601               ins_coarse_mat_vals[j*ins_local_primal_size+i]=coarse_submat_vals[j*ins_local_primal_size+i];
2602             }
2603           }
2604         }
2605         break;
2606 
2607     }
2608 
2609     case(GATHERS_BDDC):
2610       {
2611 
2612         PetscMPIInt mysize,mysize2;
2613         PetscMPIInt *send_buffer;
2614 
2615         if (rank_prec_comm==active_rank) {
2616           ierr = PetscMalloc ( pcbddc->replicated_primal_size*sizeof(PetscMPIInt),&pcbddc->replicated_local_primal_indices);CHKERRQ(ierr);
2617           ierr = PetscMalloc ( pcbddc->replicated_primal_size*sizeof(PetscScalar),&pcbddc->replicated_local_primal_values);CHKERRQ(ierr);
2618           ierr = PetscMalloc ( size_prec_comm*sizeof(PetscMPIInt),&localsizes2);CHKERRQ(ierr);
2619           ierr = PetscMalloc ( size_prec_comm*sizeof(PetscMPIInt),&localdispl2);CHKERRQ(ierr);
2620           /* arrays for values insertion */
2621       for (i=0;i<size_prec_comm;i++) localsizes2[i]=pcbddc->local_primal_sizes[i]*pcbddc->local_primal_sizes[i];
2622           localdispl2[0]=0;
2623       for (i=1;i<size_prec_comm;i++) localdispl2[i]=localsizes2[i-1]+localdispl2[i-1];
2624           j=0;
2625       for (i=0;i<size_prec_comm;i++) j+=localsizes2[i];
2626           ierr = PetscMalloc ( j*sizeof(PetscScalar),&temp_coarse_mat_vals);CHKERRQ(ierr);
2627         }
2628 
2629         mysize=pcbddc->local_primal_size;
2630         mysize2=pcbddc->local_primal_size*pcbddc->local_primal_size;
2631         ierr = PetscMalloc(mysize*sizeof(PetscMPIInt),&send_buffer);CHKERRQ(ierr);
2632     for (i=0; i<mysize; i++) send_buffer[i]=(PetscMPIInt)pcbddc->local_primal_indices[i];
2633 
2634         if (pcbddc->coarse_problem_type == SEQUENTIAL_BDDC){
2635           ierr = MPI_Gatherv(send_buffer,mysize,MPIU_INT,&pcbddc->replicated_local_primal_indices[0],pcbddc->local_primal_sizes,pcbddc->local_primal_displacements,MPIU_INT,master_proc,prec_comm);CHKERRQ(ierr);
2636           ierr = MPI_Gatherv(&coarse_submat_vals[0],mysize2,MPIU_SCALAR,&temp_coarse_mat_vals[0],localsizes2,localdispl2,MPIU_SCALAR,master_proc,prec_comm);CHKERRQ(ierr);
2637         } else {
2638           ierr = MPI_Allgatherv(send_buffer,mysize,MPIU_INT,&pcbddc->replicated_local_primal_indices[0],pcbddc->local_primal_sizes,pcbddc->local_primal_displacements,MPIU_INT,prec_comm);CHKERRQ(ierr);
2639           ierr = MPI_Allgatherv(&coarse_submat_vals[0],mysize2,MPIU_SCALAR,&temp_coarse_mat_vals[0],localsizes2,localdispl2,MPIU_SCALAR,prec_comm);CHKERRQ(ierr);
2640         }
2641         ierr = PetscFree(send_buffer);CHKERRQ(ierr);
2642         break;
2643       }/* switch on coarse problem and communications associated with finished */
2644   }
2645 
2646   /* Now create and fill up coarse matrix */
2647   if ( rank_prec_comm == active_rank ) {
2648 
2649     Mat matis_coarse_local_mat;
2650 
2651     if (pcbddc->coarse_problem_type != MULTILEVEL_BDDC) {
2652       ierr = MatCreate(coarse_comm,&pcbddc->coarse_mat);CHKERRQ(ierr);
2653       ierr = MatSetSizes(pcbddc->coarse_mat,PETSC_DECIDE,PETSC_DECIDE,pcbddc->coarse_size,pcbddc->coarse_size);CHKERRQ(ierr);
2654       ierr = MatSetType(pcbddc->coarse_mat,coarse_mat_type);CHKERRQ(ierr);
2655       ierr = MatSetOptionsPrefix(pcbddc->coarse_mat,"coarse_");CHKERRQ(ierr);
2656       ierr = MatSetFromOptions(pcbddc->coarse_mat);CHKERRQ(ierr);
2657       ierr = MatSetUp(pcbddc->coarse_mat);CHKERRQ(ierr);
2658       ierr = MatSetOption(pcbddc->coarse_mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr); /* local values stored in column major */
2659       ierr = MatSetOption(pcbddc->coarse_mat,MAT_IGNORE_ZERO_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
2660     } else {
2661       ierr = MatCreateIS(coarse_comm,1,PETSC_DECIDE,PETSC_DECIDE,pcbddc->coarse_size,pcbddc->coarse_size,coarse_ISLG,&pcbddc->coarse_mat);CHKERRQ(ierr);
2662       ierr = MatSetUp(pcbddc->coarse_mat);CHKERRQ(ierr);
2663       ierr = MatISGetLocalMat(pcbddc->coarse_mat,&matis_coarse_local_mat);CHKERRQ(ierr);
2664       ierr = MatSetOptionsPrefix(pcbddc->coarse_mat,"coarse_");CHKERRQ(ierr);
2665       ierr = MatSetFromOptions(pcbddc->coarse_mat);CHKERRQ(ierr);
2666       ierr = MatSetUp(matis_coarse_local_mat);CHKERRQ(ierr);
2667       ierr = MatSetOption(matis_coarse_local_mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr); /* local values stored in column major */
2668       ierr = MatSetOption(matis_coarse_local_mat,MAT_IGNORE_ZERO_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
2669     }
2670     /* preallocation */
2671     if (pcbddc->coarse_problem_type != MULTILEVEL_BDDC) {
2672 
2673       PetscInt lrows,lcols,bs;
2674 
2675       ierr = MatGetLocalSize(pcbddc->coarse_mat,&lrows,&lcols);CHKERRQ(ierr);
2676       ierr = MatPreallocateInitialize(coarse_comm,lrows,lcols,dnz,onz);CHKERRQ(ierr);
2677       ierr = MatGetBlockSize(pcbddc->coarse_mat,&bs);CHKERRQ(ierr);
2678 
2679       if (pcbddc->coarse_problem_type == PARALLEL_BDDC) {
2680 
2681         Vec         vec_dnz,vec_onz;
2682         PetscScalar *my_dnz,*my_onz,*array;
2683         PetscInt    *mat_ranges,*row_ownership;
2684         PetscInt    coarse_index_row,coarse_index_col,owner;
2685 
2686         ierr = VecCreate(prec_comm,&vec_dnz);CHKERRQ(ierr);
2687         ierr = VecSetBlockSize(vec_dnz,bs);CHKERRQ(ierr);
2688         ierr = VecSetSizes(vec_dnz,PETSC_DECIDE,pcbddc->coarse_size);CHKERRQ(ierr);
2689         ierr = VecSetType(vec_dnz,VECMPI);CHKERRQ(ierr);
2690         ierr = VecDuplicate(vec_dnz,&vec_onz);CHKERRQ(ierr);
2691 
2692         ierr = PetscMalloc(pcbddc->local_primal_size*sizeof(PetscScalar),&my_dnz);CHKERRQ(ierr);
2693         ierr = PetscMalloc(pcbddc->local_primal_size*sizeof(PetscScalar),&my_onz);CHKERRQ(ierr);
2694         ierr = PetscMemzero(my_dnz,pcbddc->local_primal_size*sizeof(PetscScalar));CHKERRQ(ierr);
2695         ierr = PetscMemzero(my_onz,pcbddc->local_primal_size*sizeof(PetscScalar));CHKERRQ(ierr);
2696 
2697         ierr = PetscMalloc(pcbddc->coarse_size*sizeof(PetscInt),&row_ownership);CHKERRQ(ierr);
2698         ierr = MatGetOwnershipRanges(pcbddc->coarse_mat,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
2699         for (i=0;i<size_prec_comm;i++) {
2700           for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
2701             row_ownership[j]=i;
2702           }
2703         }
2704 
2705         for (i=0;i<pcbddc->local_primal_size;i++) {
2706           coarse_index_row = pcbddc->local_primal_indices[i];
2707           owner = row_ownership[coarse_index_row];
2708           for (j=i;j<pcbddc->local_primal_size;j++) {
2709             owner = row_ownership[coarse_index_row];
2710             coarse_index_col = pcbddc->local_primal_indices[j];
2711             if (coarse_index_col > mat_ranges[owner]-1 && coarse_index_col < mat_ranges[owner+1] ) {
2712               my_dnz[i] += 1.0;
2713             } else {
2714               my_onz[i] += 1.0;
2715             }
2716             if (i != j) {
2717               owner = row_ownership[coarse_index_col];
2718               if (coarse_index_row > mat_ranges[owner]-1 && coarse_index_row < mat_ranges[owner+1] ) {
2719                 my_dnz[j] += 1.0;
2720               } else {
2721                 my_onz[j] += 1.0;
2722               }
2723             }
2724           }
2725         }
2726         ierr = VecSet(vec_dnz,0.0);CHKERRQ(ierr);
2727         ierr = VecSet(vec_onz,0.0);CHKERRQ(ierr);
2728         if (pcbddc->local_primal_size) {
2729           ierr = VecSetValues(vec_dnz,pcbddc->local_primal_size,pcbddc->local_primal_indices,my_dnz,ADD_VALUES);CHKERRQ(ierr);
2730           ierr = VecSetValues(vec_onz,pcbddc->local_primal_size,pcbddc->local_primal_indices,my_onz,ADD_VALUES);CHKERRQ(ierr);
2731         }
2732         ierr = VecAssemblyBegin(vec_dnz);CHKERRQ(ierr);
2733         ierr = VecAssemblyBegin(vec_onz);CHKERRQ(ierr);
2734         ierr = VecAssemblyEnd(vec_dnz);CHKERRQ(ierr);
2735         ierr = VecAssemblyEnd(vec_onz);CHKERRQ(ierr);
2736         j = mat_ranges[rank_prec_comm+1]-mat_ranges[rank_prec_comm];
2737         ierr = VecGetArray(vec_dnz,&array);CHKERRQ(ierr);
2738         for (i=0; i<j; i++) dnz[i] = (PetscInt)array[i];
2739 
2740         ierr = VecRestoreArray(vec_dnz,&array);CHKERRQ(ierr);
2741         ierr = VecGetArray(vec_onz,&array);CHKERRQ(ierr);
2742         for (i=0;i<j;i++) onz[i] = (PetscInt)array[i];
2743 
2744         ierr = VecRestoreArray(vec_onz,&array);CHKERRQ(ierr);
2745         ierr = PetscFree(my_dnz);CHKERRQ(ierr);
2746         ierr = PetscFree(my_onz);CHKERRQ(ierr);
2747         ierr = PetscFree(row_ownership);CHKERRQ(ierr);
2748         ierr = VecDestroy(&vec_dnz);CHKERRQ(ierr);
2749         ierr = VecDestroy(&vec_onz);CHKERRQ(ierr);
2750       } else {
2751         for (k=0;k<size_prec_comm;k++){
2752           offset=pcbddc->local_primal_displacements[k];
2753           offset2=localdispl2[k];
2754           ins_local_primal_size = pcbddc->local_primal_sizes[k];
2755           ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
2756           for (j=0;j<ins_local_primal_size;j++){
2757             ins_local_primal_indices[j]=(PetscInt)pcbddc->replicated_local_primal_indices[offset+j];
2758           }
2759           for (j=0;j<ins_local_primal_size;j++) {
2760             ierr = MatPreallocateSet(ins_local_primal_indices[j],ins_local_primal_size,ins_local_primal_indices,dnz,onz);CHKERRQ(ierr);
2761           }
2762           ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr);
2763         }
2764       }
2765 
2766       /* check */
2767       for (i=0;i<lrows;i++) {
2768         if (dnz[i]>lcols) dnz[i]=lcols;
2769         if (onz[i]>pcbddc->coarse_size-lcols) onz[i]=pcbddc->coarse_size-lcols;
2770       }
2771       ierr = MatSeqAIJSetPreallocation(pcbddc->coarse_mat,0,dnz);CHKERRQ(ierr);
2772       ierr = MatMPIAIJSetPreallocation(pcbddc->coarse_mat,0,dnz,0,onz);CHKERRQ(ierr);
2773       ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
2774     } else {
2775       ierr = MatSeqAIJSetPreallocation(matis_coarse_local_mat,0,dnz);CHKERRQ(ierr);
2776       ierr = PetscFree(dnz);CHKERRQ(ierr);
2777     }
2778     /* insert values */
2779     if (pcbddc->coarse_problem_type == PARALLEL_BDDC) {
2780       ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,ADD_VALUES);CHKERRQ(ierr);
2781     } else if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
2782       if (pcbddc->coarsening_ratio == 1) {
2783         ins_coarse_mat_vals = coarse_submat_vals;
2784         ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,INSERT_VALUES);CHKERRQ(ierr);
2785       } else {
2786         ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr);
2787         for (k=0;k<pcbddc->replicated_primal_size;k++) {
2788           offset = pcbddc->local_primal_displacements[k];
2789           offset2 = localdispl2[k];
2790           ins_local_primal_size = pcbddc->local_primal_displacements[k+1]-pcbddc->local_primal_displacements[k];
2791           ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
2792           for (j=0;j<ins_local_primal_size;j++){
2793             ins_local_primal_indices[j]=(PetscInt)pcbddc->replicated_local_primal_indices[offset+j];
2794           }
2795           ins_coarse_mat_vals = &temp_coarse_mat_vals[offset2];
2796           ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,ADD_VALUES);CHKERRQ(ierr);
2797           ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr);
2798         }
2799       }
2800       ins_local_primal_indices = 0;
2801       ins_coarse_mat_vals = 0;
2802     } else {
2803       for (k=0;k<size_prec_comm;k++){
2804         offset=pcbddc->local_primal_displacements[k];
2805         offset2=localdispl2[k];
2806         ins_local_primal_size = pcbddc->local_primal_sizes[k];
2807         ierr = PetscMalloc(ins_local_primal_size*sizeof(PetscInt),&ins_local_primal_indices);CHKERRQ(ierr);
2808         for (j=0;j<ins_local_primal_size;j++){
2809           ins_local_primal_indices[j]=(PetscInt)pcbddc->replicated_local_primal_indices[offset+j];
2810         }
2811         ins_coarse_mat_vals = &temp_coarse_mat_vals[offset2];
2812         ierr = MatSetValues(pcbddc->coarse_mat,ins_local_primal_size,ins_local_primal_indices,ins_local_primal_size,ins_local_primal_indices,ins_coarse_mat_vals,ADD_VALUES);CHKERRQ(ierr);
2813         ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr);
2814       }
2815       ins_local_primal_indices = 0;
2816       ins_coarse_mat_vals = 0;
2817     }
2818     ierr = MatAssemblyBegin(pcbddc->coarse_mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2819     ierr = MatAssemblyEnd(pcbddc->coarse_mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2820     /* symmetry of coarse matrix */
2821     if (issym) {
2822       ierr = MatSetOption(pcbddc->coarse_mat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2823     }
2824     ierr = MatGetVecs(pcbddc->coarse_mat,&pcbddc->coarse_vec,&pcbddc->coarse_rhs);CHKERRQ(ierr);
2825   }
2826 
2827   /* create loc to glob scatters if needed */
2828   if (pcbddc->coarse_communications_type == SCATTERS_BDDC) {
2829      IS local_IS,global_IS;
2830      ierr = ISCreateStride(PETSC_COMM_SELF,pcbddc->local_primal_size,0,1,&local_IS);CHKERRQ(ierr);
2831      ierr = ISCreateGeneral(PETSC_COMM_SELF,pcbddc->local_primal_size,pcbddc->local_primal_indices,PETSC_COPY_VALUES,&global_IS);CHKERRQ(ierr);
2832      ierr = VecScatterCreate(pcbddc->vec1_P,local_IS,pcbddc->coarse_vec,global_IS,&pcbddc->coarse_loc_to_glob);CHKERRQ(ierr);
2833      ierr = ISDestroy(&local_IS);CHKERRQ(ierr);
2834      ierr = ISDestroy(&global_IS);CHKERRQ(ierr);
2835   }
2836 
2837   /* free memory no longer needed */
2838   if (coarse_ISLG)              { ierr = ISLocalToGlobalMappingDestroy(&coarse_ISLG);CHKERRQ(ierr); }
2839   if (ins_local_primal_indices) { ierr = PetscFree(ins_local_primal_indices);CHKERRQ(ierr); }
2840   if (ins_coarse_mat_vals)      { ierr = PetscFree(ins_coarse_mat_vals);CHKERRQ(ierr); }
2841   if (localsizes2)              { ierr = PetscFree(localsizes2);CHKERRQ(ierr); }
2842   if (localdispl2)              { ierr = PetscFree(localdispl2);CHKERRQ(ierr); }
2843   if (temp_coarse_mat_vals)     { ierr = PetscFree(temp_coarse_mat_vals);CHKERRQ(ierr); }
2844 
2845   /* Compute coarse null space */
2846   CoarseNullSpace = 0;
2847   if (pcbddc->NullSpace) {
2848     ierr = PCBDDCNullSpaceAssembleCoarse(pc,&CoarseNullSpace);CHKERRQ(ierr);
2849   }
2850 
2851   /* KSP for coarse problem */
2852   if (rank_prec_comm == active_rank) {
2853     PetscBool isbddc=PETSC_FALSE;
2854 
2855     ierr = KSPCreate(coarse_comm,&pcbddc->coarse_ksp);CHKERRQ(ierr);
2856     ierr = PetscObjectIncrementTabLevel((PetscObject)pcbddc->coarse_ksp,(PetscObject)pc,1);CHKERRQ(ierr);
2857     ierr = KSPSetOperators(pcbddc->coarse_ksp,pcbddc->coarse_mat,pcbddc->coarse_mat,SAME_PRECONDITIONER);CHKERRQ(ierr);
2858     ierr = KSPSetTolerances(pcbddc->coarse_ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,max_it_coarse_ksp);CHKERRQ(ierr);
2859     ierr = KSPSetType(pcbddc->coarse_ksp,coarse_ksp_type);CHKERRQ(ierr);
2860     ierr = KSPGetPC(pcbddc->coarse_ksp,&pc_temp);CHKERRQ(ierr);
2861     ierr = PCSetType(pc_temp,coarse_pc_type);CHKERRQ(ierr);
2862     /* Allow user's customization */
2863     ierr = KSPSetOptionsPrefix(pcbddc->coarse_ksp,"coarse_");CHKERRQ(ierr);
2864     /* Set Up PC for coarse problem BDDC */
2865     if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
2866       i = pcbddc->current_level+1;
2867       ierr = PCBDDCSetLevel(pc_temp,i);CHKERRQ(ierr);
2868       ierr = PCBDDCSetCoarseningRatio(pc_temp,pcbddc->coarsening_ratio);CHKERRQ(ierr);
2869       ierr = PCBDDCSetMaxLevels(pc_temp,pcbddc->max_levels);CHKERRQ(ierr);
2870       ierr = PCBDDCSetCoarseProblemType(pc_temp,MULTILEVEL_BDDC);CHKERRQ(ierr);
2871       if (CoarseNullSpace) {
2872         ierr = PCBDDCSetNullSpace(pc_temp,CoarseNullSpace);CHKERRQ(ierr);
2873       }
2874       if (dbg_flag) {
2875         ierr = PetscViewerASCIIPrintf(viewer,"----------------Level %d: Setting up level %d---------------\n",pcbddc->current_level,i);CHKERRQ(ierr);
2876         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
2877       }
2878     } else {
2879       if (CoarseNullSpace) {
2880         ierr = KSPSetNullSpace(pcbddc->coarse_ksp,CoarseNullSpace);CHKERRQ(ierr);
2881       }
2882     }
2883     ierr = KSPSetFromOptions(pcbddc->coarse_ksp);CHKERRQ(ierr);
2884     ierr = KSPSetUp(pcbddc->coarse_ksp);CHKERRQ(ierr);
2885 
2886     ierr = KSPGetTolerances(pcbddc->coarse_ksp,NULL,NULL,NULL,&j);CHKERRQ(ierr);
2887     ierr = KSPGetPC(pcbddc->coarse_ksp,&pc_temp);CHKERRQ(ierr);
2888     ierr = PetscObjectTypeCompare((PetscObject)pc_temp,PCBDDC,&isbddc);CHKERRQ(ierr);
2889     if (j == 1) {
2890       ierr = KSPSetNormType(pcbddc->coarse_ksp,KSP_NORM_NONE);CHKERRQ(ierr);
2891       if (isbddc) {
2892         ierr = PCBDDCSetUseExactDirichlet(pc_temp,PETSC_FALSE);CHKERRQ(ierr);
2893       }
2894     }
2895   }
2896   /* Check coarse problem if requested */
2897   if ( dbg_flag && rank_prec_comm == active_rank ) {
2898     KSP check_ksp;
2899     PC  check_pc;
2900     Vec check_vec;
2901     PetscReal   abs_infty_error,infty_error,lambda_min,lambda_max;
2902     KSPType check_ksp_type;
2903 
2904     /* Create ksp object suitable for extreme eigenvalues' estimation */
2905     ierr = KSPCreate(coarse_comm,&check_ksp);CHKERRQ(ierr);
2906     ierr = KSPSetOperators(check_ksp,pcbddc->coarse_mat,pcbddc->coarse_mat,SAME_PRECONDITIONER);CHKERRQ(ierr);
2907     ierr = KSPSetTolerances(check_ksp,1.e-12,1.e-12,PETSC_DEFAULT,pcbddc->coarse_size);CHKERRQ(ierr);
2908     if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
2909       if (issym) check_ksp_type = KSPCG;
2910       else check_ksp_type = KSPGMRES;
2911       ierr = KSPSetComputeSingularValues(check_ksp,PETSC_TRUE);CHKERRQ(ierr);
2912     } else {
2913       check_ksp_type = KSPPREONLY;
2914     }
2915     ierr = KSPSetType(check_ksp,check_ksp_type);CHKERRQ(ierr);
2916     ierr = KSPGetPC(pcbddc->coarse_ksp,&check_pc);CHKERRQ(ierr);
2917     ierr = KSPSetPC(check_ksp,check_pc);CHKERRQ(ierr);
2918     ierr = KSPSetUp(check_ksp);CHKERRQ(ierr);
2919     /* create random vec */
2920     ierr = VecDuplicate(pcbddc->coarse_vec,&check_vec);CHKERRQ(ierr);
2921     ierr = VecSetRandom(check_vec,NULL);CHKERRQ(ierr);
2922     if (CoarseNullSpace) {
2923       ierr = MatNullSpaceRemove(CoarseNullSpace,check_vec,NULL);CHKERRQ(ierr);
2924     }
2925     ierr = MatMult(pcbddc->coarse_mat,check_vec,pcbddc->coarse_rhs);CHKERRQ(ierr);
2926     /* solve coarse problem */
2927     ierr = KSPSolve(check_ksp,pcbddc->coarse_rhs,pcbddc->coarse_vec);CHKERRQ(ierr);
2928     if (CoarseNullSpace) {
2929       ierr = MatNullSpaceRemove(CoarseNullSpace,pcbddc->coarse_vec,NULL);CHKERRQ(ierr);
2930     }
2931     /* check coarse problem residual error */
2932     ierr = VecAXPY(check_vec,-1.0,pcbddc->coarse_vec);CHKERRQ(ierr);
2933     ierr = VecNorm(check_vec,NORM_INFINITY,&infty_error);CHKERRQ(ierr);
2934     ierr = MatMult(pcbddc->coarse_mat,check_vec,pcbddc->coarse_rhs);CHKERRQ(ierr);
2935     ierr = VecNorm(pcbddc->coarse_rhs,NORM_INFINITY,&abs_infty_error);CHKERRQ(ierr);
2936     ierr = VecDestroy(&check_vec);CHKERRQ(ierr);
2937     /* get eigenvalue estimation if inexact */
2938     if (pcbddc->coarse_problem_type == MULTILEVEL_BDDC) {
2939       ierr = KSPComputeExtremeSingularValues(check_ksp,&lambda_max,&lambda_min);CHKERRQ(ierr);
2940       ierr = KSPGetIterationNumber(check_ksp,&k);CHKERRQ(ierr);
2941       ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem eigenvalues estimated with %d iterations of %s.\n",k,check_ksp_type);CHKERRQ(ierr);
2942       ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem eigenvalues: % 1.14e %1.14e\n",lambda_min,lambda_max);CHKERRQ(ierr);
2943     }
2944     ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem exact infty_error   : %1.14e\n",infty_error);CHKERRQ(ierr);
2945     ierr = PetscViewerASCIIPrintf(viewer,"Coarse problem residual infty_error: %1.14e\n",abs_infty_error);CHKERRQ(ierr);
2946     ierr = KSPDestroy(&check_ksp);CHKERRQ(ierr);
2947   }
2948   if (dbg_flag) {
2949     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
2950   }
2951   ierr = MatNullSpaceDestroy(&CoarseNullSpace);CHKERRQ(ierr);
2952 
2953   PetscFunctionReturn(0);
2954 }
2955 
2956