xref: /petsc/src/mat/impls/aij/mpi/mpiaijpc.c (revision 3a40ed3dce77c081171d005ae1a6ff4bb9d13b6f)
1 #ifdef PETSC_RCS_HEADER
2 static char vcid[] = "$Id: mpiaijpc.c,v 1.32 1997/08/22 15:13:39 bsmith Exp bsmith $";
3 #endif
/*
   Defines a block Jacobi preconditioner for the SeqAIJ/MPIAIJ format.
   Handles the special case of a single block per processor.
   This file knows about storage formats for SeqAIJ/MPIAIJ matrices.
   The general case is handled in aijpc.c
*/
10 #include "src/mat/impls/aij/mpi/mpiaij.h"
11 #include "src/pc/pcimpl.h"
12 #include "src/vec/vecimpl.h"
13 #include "src/pc/impls/bjacobi/bjacobi.h"
14 #include "sles.h"
15 
/*
   Private context for the single-block-per-processor case of block Jacobi.
   Holds two sequential work vectors so the (sequential) sub-solver can be
   applied to the local diagonal block; see PCApply_BJacobi_MPIAIJ, which
   temporarily places the parallel vectors' local arrays into these.
*/
typedef struct {
  Vec  x, y;   /* sequential work vectors sized to the local diagonal block */
} PC_BJacobi_MPIAIJ;
19 
20 #undef __FUNC__
21 #define __FUNC__ "PCDestroy_BJacobi_MPIAIJ"
22 int PCDestroy_BJacobi_MPIAIJ(PetscObject obj)
23 {
24   PC                pc = (PC) obj;
25   PC_BJacobi        *jac = (PC_BJacobi *) pc->data;
26   PC_BJacobi_MPIAIJ *bjac = (PC_BJacobi_MPIAIJ *) jac->data;
27   int               ierr;
28 
29   PetscFunctionBegin;
30   ierr = SLESDestroy(jac->sles[0]); CHKERRQ(ierr);
31   PetscFree(jac->sles);
32   ierr = VecDestroy(bjac->x); CHKERRQ(ierr);
33   ierr = VecDestroy(bjac->y); CHKERRQ(ierr);
34   if (jac->l_lens) PetscFree(jac->l_lens);
35   if (jac->g_lens) PetscFree(jac->g_lens);
36   PetscFree(bjac); PetscFree(jac);
37   PetscFunctionReturn(0);
38 }
39 
40 
41 #undef __FUNC__
42 #define __FUNC__ "PCSetUpOnBlocks_BJacobi_MPIAIJ"
43 int PCSetUpOnBlocks_BJacobi_MPIAIJ(PC pc)
44 {
45   int               ierr;
46   PC_BJacobi        *jac = (PC_BJacobi *) pc->data;
47   PC_BJacobi_MPIAIJ *bjac = (PC_BJacobi_MPIAIJ *) jac->data;
48 
49   PetscFunctionBegin;
50   ierr = SLESSetUp(jac->sles[0],bjac->x,bjac->y); CHKERRQ(ierr);
51   PetscFunctionReturn(0);
52 }
53 
54 #undef __FUNC__
55 #define __FUNC__ "PCApply_BJacobi_MPIAIJ"
56 int PCApply_BJacobi_MPIAIJ(PC pc,Vec x, Vec y)
57 {
58   int               ierr,its;
59   PC_BJacobi        *jac = (PC_BJacobi *) pc->data;
60   PC_BJacobi_MPIAIJ *bjac = (PC_BJacobi_MPIAIJ *) jac->data;
61   Scalar            *x_array,*x_true_array, *y_array,*y_true_array;
62 
63   PetscFunctionBegin;
64   /*
65       The VecPlaceArray() is to avoid having to copy the
66     y vector into the bjac->x vector. The reason for
67     the bjac->x vector is that we need a sequential vector
68     for the sequential solve.
69   */
70   VecGetArray_Fast(x,x_array);
71   VecGetArray_Fast(y,y_array);
72   VecGetArray_Fast(bjac->x,x_true_array);
73   VecGetArray_Fast(bjac->y,y_true_array);
74   VecPlaceArray_Fast(bjac->x,x_array);
75   VecPlaceArray_Fast(bjac->y,y_array);
76   ierr = SLESSolve(jac->sles[0],bjac->x,bjac->y,&its);
77   VecPlaceArray_Fast(bjac->x,x_true_array);
78   VecPlaceArray_Fast(bjac->y,y_true_array);
79   PetscFunctionReturn(0);
80 }
81 
#undef __FUNC__
#define __FUNC__ "PCSetUp_BJacobi_MPIAIJ"
/*
   PCSetUp_BJacobi_MPIAIJ - Sets up block Jacobi for the special case of
   one block per processor with an MPIAIJ preconditioning matrix.

   On the first call (pc->setupcalled == 0) this creates the sequential
   sub-solver (default: preonly KSP + ILU PC, prefixed "sub_"), two
   sequential work vectors, and the private context; it also installs the
   MPIAIJ-specific apply/destroy/setuponblocks function pointers.  On
   subsequent calls it only refreshes the sub-solver's operators.
*/
int PCSetUp_BJacobi_MPIAIJ(PC pc)
{
  PC_BJacobi        *jac = (PC_BJacobi *) pc->data;
  Mat               mat = pc->mat, pmat = pc->pmat;
  Mat_MPIAIJ        *pmatin = (Mat_MPIAIJ *) pmat->data;
  Mat_MPIAIJ        *matin = 0;
  int               ierr, m;
  SLES              sles;
  Vec               x,y;
  PC_BJacobi_MPIAIJ *bjac;
  KSP               subksp;
  PC                subpc;
  MatType           type;

  PetscFunctionBegin;
  /* if the true (not just preconditioning) local matrix is to be used,
     it must also be stored in MPIAIJ format so its diagonal block A exists */
  if (jac->use_true_local) {
    MatGetType(pc->mat,&type,PETSC_NULL);
    if (type != MATMPIAIJ) SETERRQ(1,0,"Incompatible matrix type.");
    matin = (Mat_MPIAIJ *) mat->data;
  }

  /* set default direct solver with no Krylov method */
  if (!pc->setupcalled) {
    char *prefix;
    ierr = SLESCreate(PETSC_COMM_SELF,&sles); CHKERRQ(ierr);
    PLogObjectParent(pc,sles);
    ierr = SLESGetKSP(sles,&subksp); CHKERRQ(ierr);
    ierr = KSPSetType(subksp,KSPPREONLY); CHKERRQ(ierr);
    ierr = SLESGetPC(sles,&subpc); CHKERRQ(ierr);
    ierr = PCSetType(subpc,PCILU); CHKERRQ(ierr);
    /* sub-solver options are selected with the "<prefix>sub_" option prefix */
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = SLESSetOptionsPrefix(sles,prefix); CHKERRQ(ierr);
    ierr = SLESAppendOptionsPrefix(sles,"sub_"); CHKERRQ(ierr);
    ierr = SLESSetFromOptions(sles); CHKERRQ(ierr);
/*
   This is not so good. The only reason we need to generate this vector
  is so KSP may generate seq vectors for the local solves
*/
    /* the local diagonal block is square, so one m serves as both dimensions */
    ierr = MatGetSize(pmatin->A,&m,&m); CHKERRQ(ierr);
    ierr = VecCreateSeq(PETSC_COMM_SELF,m,&x); CHKERRQ(ierr);
    ierr = VecCreateSeq(PETSC_COMM_SELF,m,&y); CHKERRQ(ierr);
    PLogObjectParent(pc,x);
    PLogObjectParent(pc,y);

    /* install the single-block-per-processor implementations */
    pc->destroy       = PCDestroy_BJacobi_MPIAIJ;
    pc->apply         = PCApply_BJacobi_MPIAIJ;
    pc->setuponblocks = PCSetUpOnBlocks_BJacobi_MPIAIJ;

    bjac         = (PC_BJacobi_MPIAIJ *) PetscMalloc(sizeof(PC_BJacobi_MPIAIJ));CHKPTRQ(bjac);
    PLogObjectMemory(pc,sizeof(PC_BJacobi_MPIAIJ));
    bjac->x      = x;
    bjac->y      = y;

    /* the sles array has length 1: exactly one block on this processor */
    jac->sles    = (SLES*) PetscMalloc( sizeof(SLES) ); CHKPTRQ(jac->sles);
    jac->sles[0] = sles;
    jac->data    = (void *) bjac;
  } else {
    /* already set up: reuse the existing sub-solver and context */
    sles = jac->sles[0];
    bjac = (PC_BJacobi_MPIAIJ *)jac->data;
  }
  /* (re)set the operators on every call so matrix changes are picked up */
  if (jac->use_true_local) {
    ierr = SLESSetOperators(sles,matin->A,pmatin->A,pc->flag); CHKERRQ(ierr);
  }  else {
    ierr = SLESSetOperators(sles,pmatin->A,pmatin->A,pc->flag); CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
151 
#undef __FUNC__
#define __FUNC__ "PCSetUp_BJacobi_SeqAIJ"
/*
   PCSetUp_BJacobi_SeqAIJ - Sets up block Jacobi for the special case of
   one block on a single processor with a SeqAIJ preconditioning matrix.

   Mirrors PCSetUp_BJacobi_MPIAIJ, but since the matrix is already
   sequential the sub-solver operates on the full matrix directly (no
   diagonal-block extraction).  It reuses the MPIAIJ apply/destroy/
   setuponblocks routines and the PC_BJacobi_MPIAIJ context, which are
   format-independent once the operators are set.
*/
int PCSetUp_BJacobi_SeqAIJ(PC pc)
{
  PC_BJacobi        *jac = (PC_BJacobi *) pc->data;
  Mat               mat = pc->mat, pmat = pc->pmat;
  int               ierr, m;
  SLES              sles;
  Vec               x,y;
  PC_BJacobi_MPIAIJ *bjac;
  KSP               subksp;
  PC                subpc;
  MatType           type;

  PetscFunctionBegin;
  /* the true local matrix can only be used if it is also SeqAIJ */
  if (jac->use_true_local) {
    MatGetType(mat,&type,PETSC_NULL);
    if (type != MATSEQAIJ) SETERRQ(1,0,"Incompatible matrix type.");
  }

  /* set default direct solver with no Krylov method */
  if (!pc->setupcalled) {
    char *prefix;
    ierr = SLESCreate(PETSC_COMM_SELF,&sles); CHKERRQ(ierr);
    PLogObjectParent(pc,sles);
    ierr = SLESGetKSP(sles,&subksp); CHKERRQ(ierr);
    ierr = KSPSetType(subksp,KSPPREONLY); CHKERRQ(ierr);
    ierr = SLESGetPC(sles,&subpc); CHKERRQ(ierr);
    ierr = PCSetType(subpc,PCILU); CHKERRQ(ierr);
    /* sub-solver options are selected with the "<prefix>sub_" option prefix */
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = SLESSetOptionsPrefix(sles,prefix); CHKERRQ(ierr);
    ierr = SLESAppendOptionsPrefix(sles,"sub_"); CHKERRQ(ierr);
    ierr = SLESSetFromOptions(sles); CHKERRQ(ierr);
/*
   This is not so good. The only reason we need to generate this vector
  is so KSP may generate seq vectors for the local solves
*/
    /* the matrix is square, so one m serves as both dimensions */
    ierr = MatGetSize(pmat,&m,&m); CHKERRQ(ierr);
    ierr = VecCreateSeq(PETSC_COMM_SELF,m,&x); CHKERRQ(ierr);
    ierr = VecCreateSeq(PETSC_COMM_SELF,m,&y); CHKERRQ(ierr);
    PLogObjectParent(pc,x);
    PLogObjectParent(pc,y);

    /* the MPIAIJ routines are reused; they only touch sles[0] and bjac */
    pc->destroy       = PCDestroy_BJacobi_MPIAIJ;
    pc->apply         = PCApply_BJacobi_MPIAIJ;
    pc->setuponblocks = PCSetUpOnBlocks_BJacobi_MPIAIJ;

    bjac         = (PC_BJacobi_MPIAIJ *) PetscMalloc(sizeof(PC_BJacobi_MPIAIJ));CHKPTRQ(bjac);
    PLogObjectMemory(pc,sizeof(PC_BJacobi_MPIAIJ));
    bjac->x      = x;
    bjac->y      = y;

    /* the sles array has length 1: exactly one block */
    jac->sles    = (SLES*) PetscMalloc( sizeof(SLES) ); CHKPTRQ(jac->sles);
    jac->sles[0] = sles;
    jac->data    = (void *) bjac;
  } else {
    /* already set up: reuse the existing sub-solver and context */
    sles = jac->sles[0];
    bjac = (PC_BJacobi_MPIAIJ *)jac->data;
  }
  /* (re)set the operators on every call so matrix changes are picked up */
  if (jac->use_true_local) {
    ierr = SLESSetOperators(sles,mat,pmat,pc->flag); CHKERRQ(ierr);
  }  else {
    ierr = SLESSetOperators(sles,pmat,pmat,pc->flag); CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
218 
219