xref: /petsc/src/ksp/pc/impls/is/pcis.c (revision 443836d0654b40846fccabc3a79cd82bae4da55d)
1 
2 #include "../src/ksp/pc/impls/is/pcis.h" /*I "petscpc.h" I*/
3 
4 EXTERN_C_BEGIN
5 #undef __FUNCT__
6 #define __FUNCT__ "PCISSetUseStiffnessScaling_IS"
7 static PetscErrorCode PCISSetUseStiffnessScaling_IS(PC pc, PetscBool use)
8 {
9   PetscErrorCode ierr;
10   PC_IS  *pcis = (PC_IS*)pc->data;
11 
12   PetscFunctionBegin;
13   pcis->use_stiffness_scaling = use;
14   PetscFunctionReturn(0);
15 }
16 EXTERN_C_END
17 
#undef __FUNCT__
#define __FUNCT__ "PCISSetUseStiffnessScaling"
/*@
 PCISSetUseStiffnessScaling - Tells PCIS to construct the partition of unity using
                              the diagonal of the local (subdomain) matrices.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  use - whether or not PCIS uses the matrix diagonal to build the partition of unity

   Level: intermediate

   Notes:
   The same behavior can be requested at run time with the option
   -pc_is_use_stiffness_scaling (read in PCISSetUp()).

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetUseStiffnessScaling(PC pc, PetscBool use)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the implementation composed in PCISCreate(); a no-op for PCs
     that do not provide "PCISSetUseStiffnessScaling_C" (PetscTryMethod) */
  ierr = PetscTryMethod(pc,"PCISSetUseStiffnessScaling_C",(PC,PetscBool),(pc,use));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
45 
46 EXTERN_C_BEGIN
47 #undef __FUNCT__
48 #define __FUNCT__ "PCISSetSubdomainDiagonalScaling_IS"
49 static PetscErrorCode PCISSetSubdomainDiagonalScaling_IS(PC pc, Vec scaling_factors)
50 {
51   PetscErrorCode ierr;
52   PC_IS  *pcis = (PC_IS*)pc->data;
53 
54   PetscFunctionBegin;
55   ierr = VecDestroy(&pcis->D);CHKERRQ(ierr);
56   ierr = PetscObjectReference((PetscObject)scaling_factors);CHKERRQ(ierr);
57   pcis->D = scaling_factors;
58   PetscFunctionReturn(0);
59 }
60 EXTERN_C_END
61 
#undef __FUNCT__
#define __FUNCT__ "PCISSetSubdomainDiagonalScaling"
/*@
 PCISSetSubdomainDiagonalScaling - Set diagonal scaling for PCIS.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  scaling_factors - scaling factors for the subdomain

   Level: intermediate

   Notes:
   Intended to use with jumping coefficients cases.
   The preconditioner keeps its own reference to the vector, so the caller
   may destroy its reference afterwards.

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetSubdomainDiagonalScaling(PC pc, Vec scaling_factors)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the implementation composed in PCISCreate(); a no-op for PCs
     that do not provide "PCISSetSubdomainDiagonalScaling_C" */
  ierr = PetscTryMethod(pc,"PCISSetSubdomainDiagonalScaling_C",(PC,Vec),(pc,scaling_factors));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
89 
90 EXTERN_C_BEGIN
91 #undef __FUNCT__
92 #define __FUNCT__ "PCISSetSubdomainScalingFactor_IS"
93 static PetscErrorCode PCISSetSubdomainScalingFactor_IS(PC pc, PetscScalar scal)
94 {
95   PC_IS  *pcis = (PC_IS*)pc->data;
96 
97   PetscFunctionBegin;
98   pcis->scaling_factor = scal;
99   PetscFunctionReturn(0);
100 }
101 EXTERN_C_END
102 
#undef __FUNCT__
#define __FUNCT__ "PCISSetSubdomainScalingFactor"
/*@
 PCISSetSubdomainScalingFactor - Set scaling factor for PCIS.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  scal - scaling factor for the subdomain

   Level: intermediate

   Notes:
   Intended to use with jumping coefficients cases.
   The default value is 1.0 (set in PCISCreate()); the factor is used to fill
   the scaling vector D when stiffness scaling is not requested.

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the implementation composed in PCISCreate(); a no-op for PCs
     that do not provide "PCISSetSubdomainScalingFactor_C" */
  ierr = PetscTryMethod(pc,"PCISSetSubdomainScalingFactor_C",(PC,PetscScalar),(pc,scal));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
130 
131 
132 /* -------------------------------------------------------------------------- */
133 /*
134    PCISSetUp -
135 */
136 #undef __FUNCT__
137 #define __FUNCT__ "PCISSetUp"
138 PetscErrorCode  PCISSetUp(PC pc)
139 {
140   PC_IS           *pcis = (PC_IS*)(pc->data);
141   Mat_IS          *matis = (Mat_IS*)pc->mat->data;
142   PetscInt        i;
143   PetscErrorCode  ierr;
144   PetscBool       flg;
145   Vec    counter;
146 
147   PetscFunctionBegin;
148   ierr = PetscObjectTypeCompare((PetscObject)pc->mat,MATIS,&flg);CHKERRQ(ierr);
149   if (!flg) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONG,"Preconditioner type of Neumann Neumman requires matrix of type MATIS");
150 
151   pcis->pure_neumann = matis->pure_neumann;
152 
153   /*
154     Creating the local vector vec1_N, containing the inverse of the number
155     of subdomains to which each local node (either owned or ghost)
156     pertains. To accomplish that, we scatter local vectors of 1's to
157     a global vector (adding the values); scatter the result back to
158     local vectors and finally invert the result.
159   */
160   ierr = VecDuplicate(matis->x,&pcis->vec1_N);CHKERRQ(ierr);
161   ierr = MatGetVecs(pc->pmat,&counter,0);CHKERRQ(ierr); /* temporary auxiliar vector */
162   ierr = VecSet(counter,0.0);CHKERRQ(ierr);
163   ierr = VecSet(pcis->vec1_N,1.0);CHKERRQ(ierr);
164   ierr = VecScatterBegin(matis->ctx,pcis->vec1_N,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
165   ierr = VecScatterEnd  (matis->ctx,pcis->vec1_N,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
166   ierr = VecScatterBegin(matis->ctx,counter,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
167   ierr = VecScatterEnd  (matis->ctx,counter,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
168   /*
169     Creating local and global index sets for interior and
170     inteface nodes. Notice that interior nodes have D[i]==1.0.
171   */
172   {
173     PetscInt     n_I;
174     PetscInt    *idx_I_local,*idx_B_local,*idx_I_global,*idx_B_global;
175     PetscScalar *array;
176     /* Identifying interior and interface nodes, in local numbering */
177     ierr = VecGetSize(pcis->vec1_N,&pcis->n);CHKERRQ(ierr);
178     ierr = VecGetArray(pcis->vec1_N,&array);CHKERRQ(ierr);
179     ierr = PetscMalloc(pcis->n*sizeof(PetscInt),&idx_I_local);CHKERRQ(ierr);
180     ierr = PetscMalloc(pcis->n*sizeof(PetscInt),&idx_B_local);CHKERRQ(ierr);
181     for (i=0, pcis->n_B=0, n_I=0; i<pcis->n; i++) {
182       if (array[i] == 1.0) { idx_I_local[n_I]       = i; n_I++;       }
183       else                 { idx_B_local[pcis->n_B] = i; pcis->n_B++; }
184     }
185     /* Getting the global numbering */
186     idx_B_global = idx_I_local + n_I; /* Just avoiding allocating extra memory, since we have vacant space */
187     idx_I_global = idx_B_local + pcis->n_B;
188     ierr = ISLocalToGlobalMappingApply(matis->mapping,pcis->n_B,idx_B_local,idx_B_global);CHKERRQ(ierr);
189     ierr = ISLocalToGlobalMappingApply(matis->mapping,n_I,      idx_I_local,idx_I_global);CHKERRQ(ierr);
190     /* Creating the index sets. */
191     ierr = ISCreateGeneral(MPI_COMM_SELF,pcis->n_B,idx_B_local,PETSC_COPY_VALUES, &pcis->is_B_local);CHKERRQ(ierr);
192     ierr = ISCreateGeneral(MPI_COMM_SELF,pcis->n_B,idx_B_global,PETSC_COPY_VALUES,&pcis->is_B_global);CHKERRQ(ierr);
193     ierr = ISCreateGeneral(MPI_COMM_SELF,n_I      ,idx_I_local,PETSC_COPY_VALUES, &pcis->is_I_local);CHKERRQ(ierr);
194     ierr = ISCreateGeneral(MPI_COMM_SELF,n_I      ,idx_I_global,PETSC_COPY_VALUES,&pcis->is_I_global);CHKERRQ(ierr);
195     /* Freeing memory and restoring arrays */
196     ierr = PetscFree(idx_B_local);CHKERRQ(ierr);
197     ierr = PetscFree(idx_I_local);CHKERRQ(ierr);
198     ierr = VecRestoreArray(pcis->vec1_N,&array);CHKERRQ(ierr);
199   }
200 
201   /*
202     Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
203     is such that interior nodes come first than the interface ones, we have
204 
205     [           |      ]
206     [    A_II   | A_IB ]
207     A = [           |      ]
208     [-----------+------]
209     [    A_BI   | A_BB ]
210   */
211 
212   ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_II);CHKERRQ(ierr);
213   ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_IB);CHKERRQ(ierr);
214   ierr = MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_BI);CHKERRQ(ierr);
215   ierr = MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_BB);CHKERRQ(ierr);
216 
217   /*
218     Creating work vectors and arrays
219   */
220   /* pcis->vec1_N has already been created */
221   ierr = VecDuplicate(pcis->vec1_N,&pcis->vec2_N);CHKERRQ(ierr);
222   ierr = VecCreateSeq(PETSC_COMM_SELF,pcis->n-pcis->n_B,&pcis->vec1_D);CHKERRQ(ierr);
223   ierr = VecDuplicate(pcis->vec1_D,&pcis->vec2_D);CHKERRQ(ierr);
224   ierr = VecDuplicate(pcis->vec1_D,&pcis->vec3_D);CHKERRQ(ierr);
225   ierr = VecCreateSeq(PETSC_COMM_SELF,pcis->n_B,&pcis->vec1_B);CHKERRQ(ierr);
226   ierr = VecDuplicate(pcis->vec1_B,&pcis->vec2_B);CHKERRQ(ierr);
227   ierr = VecDuplicate(pcis->vec1_B,&pcis->vec3_B);CHKERRQ(ierr);
228   ierr = MatGetVecs(pc->pmat,&pcis->vec1_global,0);CHKERRQ(ierr);
229   ierr = PetscMalloc((pcis->n)*sizeof(PetscScalar),&pcis->work_N);CHKERRQ(ierr);
230 
231   /* Creating the scatter contexts */
232   ierr = VecScatterCreate(pcis->vec1_global,pcis->is_I_global,pcis->vec1_D,(IS)0,&pcis->global_to_D);CHKERRQ(ierr);
233   ierr = VecScatterCreate(pcis->vec1_N,pcis->is_B_local,pcis->vec1_B,(IS)0,&pcis->N_to_B);CHKERRQ(ierr);
234   ierr = VecScatterCreate(pcis->vec1_global,pcis->is_B_global,pcis->vec1_B,(IS)0,&pcis->global_to_B);CHKERRQ(ierr);
235 
236   /* Creating scaling "matrix" D */
237   ierr = PetscOptionsGetBool(((PetscObject)pc)->prefix,"-pc_is_use_stiffness_scaling",&pcis->use_stiffness_scaling,PETSC_NULL);CHKERRQ(ierr);
238   if ( !pcis->D ) {
239     ierr = VecDuplicate(pcis->vec1_B,&pcis->D);CHKERRQ(ierr);
240     if (!pcis->use_stiffness_scaling) {
241       ierr = VecSet(pcis->D,pcis->scaling_factor);CHKERRQ(ierr);
242     } else {
243       ierr = MatGetDiagonal(matis->A,pcis->vec1_N);CHKERRQ(ierr);
244       ierr = VecScatterBegin(pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
245       ierr = VecScatterEnd  (pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
246     }
247   }
248   ierr = VecCopy(pcis->D,pcis->vec1_B);CHKERRQ(ierr);
249   ierr = VecSet(counter,0.0);CHKERRQ(ierr);
250   ierr = VecScatterBegin(pcis->global_to_B,pcis->vec1_B,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
251   ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec1_B,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
252   ierr = VecScatterBegin(pcis->global_to_B,counter,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
253   ierr = VecScatterEnd  (pcis->global_to_B,counter,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
254   ierr = VecPointwiseDivide(pcis->D,pcis->D,pcis->vec1_B);CHKERRQ(ierr);
255 
256   /* See historical note 01, at the bottom of this file. */
257 
258   /*
259     Creating the KSP contexts for the local Dirichlet and Neumann problems.
260   */
261   {
262     PC  pc_ctx;
263     /* Dirichlet */
264     ierr = KSPCreate(PETSC_COMM_SELF,&pcis->ksp_D);CHKERRQ(ierr);
265     ierr = PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D,(PetscObject)pc,1);CHKERRQ(ierr);
266     ierr = KSPSetOperators(pcis->ksp_D,pcis->A_II,pcis->A_II,SAME_PRECONDITIONER);CHKERRQ(ierr);
267     ierr = KSPSetOptionsPrefix(pcis->ksp_D,"is_localD_");CHKERRQ(ierr);
268     ierr = KSPGetPC(pcis->ksp_D,&pc_ctx);CHKERRQ(ierr);
269     ierr = PCSetType(pc_ctx,PCLU);CHKERRQ(ierr);
270     ierr = KSPSetType(pcis->ksp_D,KSPPREONLY);CHKERRQ(ierr);
271     ierr = KSPSetFromOptions(pcis->ksp_D);CHKERRQ(ierr);
272     /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
273     ierr = KSPSetUp(pcis->ksp_D);CHKERRQ(ierr);
274     /* Neumann */
275     ierr = KSPCreate(PETSC_COMM_SELF,&pcis->ksp_N);CHKERRQ(ierr);
276     ierr = PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N,(PetscObject)pc,1);CHKERRQ(ierr);
277     ierr = KSPSetOperators(pcis->ksp_N,matis->A,matis->A,SAME_PRECONDITIONER);CHKERRQ(ierr);
278     ierr = KSPSetOptionsPrefix(pcis->ksp_N,"is_localN_");CHKERRQ(ierr);
279     ierr = KSPGetPC(pcis->ksp_N,&pc_ctx);CHKERRQ(ierr);
280     ierr = PCSetType(pc_ctx,PCLU);CHKERRQ(ierr);
281     ierr = KSPSetType(pcis->ksp_N,KSPPREONLY);CHKERRQ(ierr);
282     ierr = KSPSetFromOptions(pcis->ksp_N);CHKERRQ(ierr);
283     {
284       PetscBool  damp_fixed = PETSC_FALSE,
285                  remove_nullspace_fixed = PETSC_FALSE,
286                  set_damping_factor_floating = PETSC_FALSE,
287                  not_damp_floating = PETSC_FALSE,
288                  not_remove_nullspace_floating = PETSC_FALSE;
289       PetscReal  fixed_factor,
290                  floating_factor;
291 
292       ierr = PetscOptionsGetReal(((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&fixed_factor,&damp_fixed);CHKERRQ(ierr);
293       if (!damp_fixed) { fixed_factor = 0.0; }
294       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&damp_fixed,PETSC_NULL);CHKERRQ(ierr);
295 
296       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_remove_nullspace_fixed",&remove_nullspace_fixed,PETSC_NULL);CHKERRQ(ierr);
297 
298       ierr = PetscOptionsGetReal(((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",
299 			      &floating_factor,&set_damping_factor_floating);CHKERRQ(ierr);
300       if (!set_damping_factor_floating) { floating_factor = 0.0; }
301       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",&set_damping_factor_floating,PETSC_NULL);CHKERRQ(ierr);
302       if (!set_damping_factor_floating) { floating_factor = 1.e-12; }
303 
304       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_not_damp_floating",&not_damp_floating,PETSC_NULL);CHKERRQ(ierr);
305 
306       ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_not_remove_nullspace_floating",&not_remove_nullspace_floating,PETSC_NULL);CHKERRQ(ierr);
307 
308       if (pcis->pure_neumann) {  /* floating subdomain */
309 	if (!(not_damp_floating)) {
310           ierr = PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);CHKERRQ(ierr);
311           ierr = PCFactorSetShiftAmount(pc_ctx,floating_factor);CHKERRQ(ierr);
312 	}
313 	if (!(not_remove_nullspace_floating)){
314 	  MatNullSpace nullsp;
315 	  ierr = MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,PETSC_NULL,&nullsp);CHKERRQ(ierr);
316 	  ierr = KSPSetNullSpace(pcis->ksp_N,nullsp);CHKERRQ(ierr);
317 	  ierr = MatNullSpaceDestroy(&nullsp);CHKERRQ(ierr);
318 	}
319       } else {  /* fixed subdomain */
320 	if (damp_fixed) {
321           ierr = PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);CHKERRQ(ierr);
322           ierr = PCFactorSetShiftAmount(pc_ctx,floating_factor);CHKERRQ(ierr);
323 	}
324 	if (remove_nullspace_fixed) {
325 	  MatNullSpace nullsp;
326 	  ierr = MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,PETSC_NULL,&nullsp);CHKERRQ(ierr);
327 	  ierr = KSPSetNullSpace(pcis->ksp_N,nullsp);CHKERRQ(ierr);
328 	  ierr = MatNullSpaceDestroy(&nullsp);CHKERRQ(ierr);
329 	}
330       }
331     }
332     /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
333     ierr = KSPSetUp(pcis->ksp_N);CHKERRQ(ierr);
334   }
335 
336   ierr = ISLocalToGlobalMappingGetInfo(((Mat_IS*)(pc->mat->data))->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));CHKERRQ(ierr);
337   pcis->ISLocalToGlobalMappingGetInfoWasCalled = PETSC_TRUE;
338   ierr = VecDestroy(&counter);CHKERRQ(ierr);
339   PetscFunctionReturn(0);
340 }
341 
/* -------------------------------------------------------------------------- */
/*
   PCISDestroy - Releases every object created in PCISSetUp()/PCISCreate():
   index sets, submatrices, the scaling vector D, the local KSPs, work
   vectors, scatter contexts and the composed set-functions.
*/
#undef __FUNCT__
#define __FUNCT__ "PCISDestroy"
PetscErrorCode  PCISDestroy(PC pc)
{
  PC_IS          *pcis = (PC_IS*)(pc->data);
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = ISDestroy(&pcis->is_B_local);CHKERRQ(ierr);
  ierr = ISDestroy(&pcis->is_I_local);CHKERRQ(ierr);
  ierr = ISDestroy(&pcis->is_B_global);CHKERRQ(ierr);
  ierr = ISDestroy(&pcis->is_I_global);CHKERRQ(ierr);
  ierr = MatDestroy(&pcis->A_II);CHKERRQ(ierr);
  ierr = MatDestroy(&pcis->A_IB);CHKERRQ(ierr);
  ierr = MatDestroy(&pcis->A_BI);CHKERRQ(ierr);
  ierr = MatDestroy(&pcis->A_BB);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->D);CHKERRQ(ierr);
  ierr = KSPDestroy(&pcis->ksp_N);CHKERRQ(ierr);
  ierr = KSPDestroy(&pcis->ksp_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec1_N);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec2_N);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec1_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec2_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec3_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec1_B);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec2_B);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec3_B);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec1_global);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&pcis->global_to_D);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&pcis->N_to_B);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&pcis->global_to_B);CHKERRQ(ierr);
  ierr = PetscFree(pcis->work_N);CHKERRQ(ierr);
  /* give back the neighbor/shared-node arrays obtained in PCISSetUp() */
  if (pcis->ISLocalToGlobalMappingGetInfoWasCalled) {
    ierr = ISLocalToGlobalMappingRestoreInfo((ISLocalToGlobalMapping)0,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));CHKERRQ(ierr);
  }
  /* un-compose the setter functions registered in PCISCreate() */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCISSetUseStiffnessScaling_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCISSetSubdomainScalingFactor_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
386 
/* -------------------------------------------------------------------------- */
/*
   PCISCreate - Initializes the PC_IS portion of pc->data to a clean state
   (all objects null, default scaling factor 1.0) and composes the PCIS
   setter functions on the PC object.
*/
#undef __FUNCT__
#define __FUNCT__ "PCISCreate"
PetscErrorCode  PCISCreate(PC pc)
{
  PC_IS *pcis = (PC_IS*)(pc->data);
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* index sets, matrices and vectors are created later, in PCISSetUp() */
  pcis->is_B_local  = 0;
  pcis->is_I_local  = 0;
  pcis->is_B_global = 0;
  pcis->is_I_global = 0;
  pcis->A_II        = 0;
  pcis->A_IB        = 0;
  pcis->A_BI        = 0;
  pcis->A_BB        = 0;
  pcis->D           = 0;
  pcis->ksp_N      = 0;
  pcis->ksp_D      = 0;
  pcis->vec1_N      = 0;
  pcis->vec2_N      = 0;
  pcis->vec1_D      = 0;
  pcis->vec2_D      = 0;
  pcis->vec3_D      = 0;
  pcis->vec1_B      = 0;
  pcis->vec2_B      = 0;
  pcis->vec3_B      = 0;
  pcis->vec1_global = 0;
  pcis->work_N      = 0;
  pcis->global_to_D = 0;
  pcis->N_to_B      = 0;
  pcis->global_to_B = 0;
  pcis->ISLocalToGlobalMappingGetInfoWasCalled = PETSC_FALSE;
  pcis->scaling_factor = 1.0;   /* default constant scaling */
  /* composing functions (the targets of the PetscTryMethod calls in the public setters) */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCISSetUseStiffnessScaling_C","PCISSetUseStiffnessScaling_IS",
                    PCISSetUseStiffnessScaling_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCISSetSubdomainScalingFactor_C","PCISSetSubdomainScalingFactor_IS",
                    PCISSetSubdomainScalingFactor_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C","PCISSetSubdomainDiagonalScaling_IS",
                    PCISSetSubdomainDiagonalScaling_IS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
434 
/* -------------------------------------------------------------------------- */
/*
   PCISApplySchur - Applies the interface Schur complement,
   vec1_B = (A_BB - A_BI * inv(A_II) * A_IB) * v, using the local Dirichlet
   solver ksp_D for the inv(A_II) application.

   Input parameters:
.  pc - preconditioner context
.  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)

   Output parameters:
.  vec1_B - result of Schur complement applied to chunk
.  vec2_B - garbage (used as work space), or null (and v is used as workspace)
.  vec1_D - garbage (used as work space)
.  vec2_D - garbage (used as work space)

*/
#undef __FUNCT__
#define __FUNCT__ "PCISApplySchur"
PetscErrorCode  PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
{
  PetscErrorCode ierr;
  PC_IS          *pcis = (PC_IS*)(pc->data);

  PetscFunctionBegin;
  if (!vec2_B) { vec2_B = v; }   /* caller allows v to be clobbered as work space */

  ierr = MatMult(pcis->A_BB,v,vec1_B);CHKERRQ(ierr);            /* vec1_B = A_BB v            */
  ierr = MatMult(pcis->A_IB,v,vec1_D);CHKERRQ(ierr);            /* vec1_D = A_IB v            */
  ierr = KSPSolve(pcis->ksp_D,vec1_D,vec2_D);CHKERRQ(ierr);     /* vec2_D = inv(A_II) A_IB v  */
  ierr = MatMult(pcis->A_BI,vec2_D,vec2_B);CHKERRQ(ierr);       /* vec2_B = A_BI inv(A_II) A_IB v */
  ierr = VecAXPY(vec1_B,-1.0,vec2_B);CHKERRQ(ierr);             /* vec1_B = S v               */
  PetscFunctionReturn(0);
}
467 
468 /* -------------------------------------------------------------------------- */
469 /*
470    PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior or interface,
471    including ghosts) into an interface vector, when in SCATTER_FORWARD mode, or vice-versa, when in SCATTER_REVERSE
472    mode.
473 
474    Input parameters:
475 .  pc - preconditioner context
476 .  array_N - [when in SCATTER_FORWARD mode] Array to be scattered into the vector
477 .  v_B - [when in SCATTER_REVERSE mode] Vector to be scattered into the array
478 
479    Output parameter:
480 .  array_N - [when in SCATTER_REVERSE mode] Array to receive the scattered vector
481 .  v_B - [when in SCATTER_FORWARD mode] Vector to receive the scattered array
482 
483    Notes:
484    The entries in the array that do not correspond to interface nodes remain unaltered.
485 */
486 #undef __FUNCT__
487 #define __FUNCT__ "PCISScatterArrayNToVecB"
488 PetscErrorCode  PCISScatterArrayNToVecB (PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc)
489 {
490   PetscInt       i;
491   const PetscInt *idex;
492   PetscErrorCode ierr;
493   PetscScalar    *array_B;
494   PC_IS          *pcis = (PC_IS*)(pc->data);
495 
496   PetscFunctionBegin;
497   ierr = VecGetArray(v_B,&array_B);CHKERRQ(ierr);
498   ierr = ISGetIndices(pcis->is_B_local,&idex);CHKERRQ(ierr);
499 
500   if (smode == SCATTER_FORWARD) {
501     if (imode == INSERT_VALUES) {
502       for (i=0; i<pcis->n_B; i++) { array_B[i]  = array_N[idex[i]]; }
503     } else {  /* ADD_VALUES */
504       for (i=0; i<pcis->n_B; i++) { array_B[i] += array_N[idex[i]]; }
505     }
506   } else {  /* SCATTER_REVERSE */
507     if (imode == INSERT_VALUES) {
508       for (i=0; i<pcis->n_B; i++) { array_N[idex[i]]  = array_B[i]; }
509     } else {  /* ADD_VALUES */
510       for (i=0; i<pcis->n_B; i++) { array_N[idex[i]] += array_B[i]; }
511     }
512   }
513   ierr = ISRestoreIndices(pcis->is_B_local,&idex);CHKERRQ(ierr);
514   ierr = VecRestoreArray(v_B,&array_B);CHKERRQ(ierr);
515   PetscFunctionReturn(0);
516 }
517 
/* -------------------------------------------------------------------------- */
/*
   PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
   More precisely, solves the problem:
                                        [ A_II  A_IB ] [ . ]   [ 0 ]
                                        [            ] [   ] = [   ]
                                        [ A_BI  A_BB ] [ x ]   [ b ]

   Input parameters:
.  pc - preconditioner context
.  b - vector of local interface nodes (including ghosts)

   Output parameters:
.  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
       complement to b
.  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
.  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)

*/
#undef __FUNCT__
#define __FUNCT__ "PCISApplyInvSchur"
PetscErrorCode  PCISApplyInvSchur (PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
{
  PetscErrorCode ierr;
  PC_IS          *pcis = (PC_IS*)(pc->data);

  PetscFunctionBegin;
  /*
    Neumann solvers.
    Applying the inverse of the local Schur complement, i.e, solving a Neumann
    Problem with zero at the interior nodes of the RHS and extracting the interface
    part of the solution. inverse Schur complement is applied to b and the result
    is stored in x.
  */
  /* Setting the RHS vec1_N: zero everywhere, b at the interface nodes */
  ierr = VecSet(vec1_N,0.0);CHKERRQ(ierr);
  ierr = VecScatterBegin(pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  /* Checking for consistency of the RHS (diagnostic only, off by default) */
  {
    PetscBool  flg = PETSC_FALSE;
    ierr = PetscOptionsGetBool(PETSC_NULL,"-pc_is_check_consistency",&flg,PETSC_NULL);CHKERRQ(ierr);
    if (flg) {
      PetscScalar average;
      PetscViewer viewer;
      ierr = PetscViewerASCIIGetStdout(((PetscObject)pc)->comm,&viewer);CHKERRQ(ierr);

      /* for a floating (pure Neumann) subdomain a consistent RHS should have a
         small average (orthogonal to the constant null space) */
      ierr = VecSum(vec1_N,&average);CHKERRQ(ierr);
      average = average / ((PetscReal)pcis->n);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
      if (pcis->pure_neumann) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is floating. Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));CHKERRQ(ierr);
      } else {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is fixed.    Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));CHKERRQ(ierr);
      }
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
    }
  }
  /* Solving the system for vec2_N */
  ierr = KSPSolve(pcis->ksp_N,vec1_N,vec2_N);CHKERRQ(ierr);
  /* Extracting the local interface vector out of the solution */
  ierr = VecScatterBegin(pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
584 
585 
586 
587 
588 
589 
590 
591 
592 
593