xref: /petsc/src/mat/impls/aij/mpi/mpiaijpc.c (revision e24b481b4a15e258c09dabebcc19da3246e723e1)
1 #ifdef PETSC_RCS_HEADER
2 static char vcid[] = "$Id: mpiaijpc.c,v 1.38 1998/10/09 19:22:12 bsmith Exp bsmith $";
3 #endif
4 /*
5    Defines a block Jacobi preconditioner for the SeqAIJ/MPIAIJ format.
6    Handles the special case of a single block per processor.
7    This file knows about storage formats for SeqAIJ/MPIAIJ matrices.
8    The general case is handled in aijpc.c
9 */
10 #include "src/mat/impls/aij/mpi/mpiaij.h"
11 #include "src/pc/pcimpl.h"
12 #include "src/vec/vecimpl.h"
13 #include "src/pc/impls/bjacobi/bjacobi.h"
14 #include "sles.h"
15 
/*
   Private context for block Jacobi with exactly one block per processor.
   x and y are sequential work vectors used as right-hand side and solution
   for the block solve; they are created with VecCreateSeqWithArray() and
   own no storage -- the arrays of the parallel vectors are attached with
   VecPlaceArray() just before each block solve.
*/
typedef struct {
  Vec  x, y;
} PC_BJacobi_MPIAIJ;
19 
20 #undef __FUNC__
21 #define __FUNC__ "PCDestroy_BJacobi_MPIAIJ"
22 int PCDestroy_BJacobi_MPIAIJ(PC pc)
23 {
24   PC_BJacobi        *jac = (PC_BJacobi *) pc->data;
25   PC_BJacobi_MPIAIJ *bjac = (PC_BJacobi_MPIAIJ *) jac->data;
26   int               ierr;
27 
28   PetscFunctionBegin;
29   ierr = SLESDestroy(jac->sles[0]); CHKERRQ(ierr);
30   PetscFree(jac->sles);
31   ierr = VecDestroy(bjac->x); CHKERRQ(ierr);
32   ierr = VecDestroy(bjac->y); CHKERRQ(ierr);
33   if (jac->l_lens) PetscFree(jac->l_lens);
34   if (jac->g_lens) PetscFree(jac->g_lens);
35   PetscFree(bjac); PetscFree(jac);
36   PetscFunctionReturn(0);
37 }
38 
39 
40 #undef __FUNC__
41 #define __FUNC__ "PCSetUpOnBlocks_BJacobi_MPIAIJ"
42 int PCSetUpOnBlocks_BJacobi_MPIAIJ(PC pc)
43 {
44   int               ierr;
45   PC_BJacobi        *jac = (PC_BJacobi *) pc->data;
46   PC_BJacobi_MPIAIJ *bjac = (PC_BJacobi_MPIAIJ *) jac->data;
47 
48   PetscFunctionBegin;
49   ierr = SLESSetUp(jac->sles[0],bjac->x,bjac->y); CHKERRQ(ierr);
50   PetscFunctionReturn(0);
51 }
52 
53 #undef __FUNC__
54 #define __FUNC__ "PCApply_BJacobi_MPIAIJ"
55 int PCApply_BJacobi_MPIAIJ(PC pc,Vec x, Vec y)
56 {
57   int               ierr,its;
58   PC_BJacobi        *jac = (PC_BJacobi *) pc->data;
59   PC_BJacobi_MPIAIJ *bjac = (PC_BJacobi_MPIAIJ *) jac->data;
60   Scalar            *x_array,*y_array;
61 
62   PetscFunctionBegin;
63   /*
64       The VecPlaceArray() is to avoid having to copy the
65     y vector into the bjac->x vector. The reason for
66     the bjac->x vector is that we need a sequential vector
67     for the sequential solve.
68   */
69   ierr = VecGetArray(x,&x_array); CHKERRQ(ierr);
70   ierr = VecGetArray(y,&y_array); CHKERRQ(ierr);
71   ierr = VecPlaceArray(bjac->x,x_array); CHKERRQ(ierr);
72   ierr = VecPlaceArray(bjac->y,y_array); CHKERRQ(ierr);
73   ierr = SLESSolve(jac->sles[0],bjac->x,bjac->y,&its); CHKERRQ(ierr);
74   ierr = VecRestoreArray(x,&x_array); CHKERRQ(ierr);
75   ierr = VecRestoreArray(y,&y_array); CHKERRQ(ierr);
76   PetscFunctionReturn(0);
77 }
78 
79 #undef __FUNC__
80 #define __FUNC__ "PCApplyTrans_BJacobi_MPIAIJ"
81 int PCApplyTrans_BJacobi_MPIAIJ(PC pc,Vec x, Vec y)
82 {
83   int               ierr,its;
84   PC_BJacobi        *jac = (PC_BJacobi *) pc->data;
85   PC_BJacobi_MPIAIJ *bjac = (PC_BJacobi_MPIAIJ *) jac->data;
86   Scalar            *x_array, *y_array;
87 
88   PetscFunctionBegin;
89   /*
90       The VecPlaceArray() is to avoid having to copy the
91     y vector into the bjac->x vector. The reason for
92     the bjac->x vector is that we need a sequential vector
93     for the sequential solve.
94   */
95   ierr = VecGetArray(x,&x_array); CHKERRQ(ierr);
96   ierr = VecGetArray(y,&y_array); CHKERRQ(ierr);
97   ierr = VecPlaceArray(bjac->x,x_array); CHKERRQ(ierr);
98   ierr = VecPlaceArray(bjac->y,y_array); CHKERRQ(ierr);
99   ierr = SLESSolveTrans(jac->sles[0],bjac->x,bjac->y,&its); CHKERRQ(ierr);
100   ierr = VecRestoreArray(x,&x_array); CHKERRQ(ierr);
101   ierr = VecRestoreArray(y,&y_array); CHKERRQ(ierr);
102   PetscFunctionReturn(0);
103 }
104 
#undef __FUNC__
#define __FUNC__ "PCSetUp_BJacobi_MPIAIJ"
/*
   Sets up block Jacobi for the MPIAIJ case with one block per processor.
   On first call it builds a sequential SLES (default: preonly + ILU,
   overridable via the "sub_" options prefix) and two storage-less work
   vectors; on later calls it only refreshes the operators on the block
   solver.  The block is the local diagonal part pmatin->A (and matin->A
   when the true local matrix is requested).
*/
int PCSetUp_BJacobi_MPIAIJ(PC pc)
{
  PC_BJacobi        *jac = (PC_BJacobi *) pc->data;
  Mat               mat = pc->mat, pmat = pc->pmat;
  Mat_MPIAIJ        *pmatin = (Mat_MPIAIJ *) pmat->data;
  Mat_MPIAIJ        *matin = 0;   /* only set/used when use_true_local */
  int               ierr, m;
  SLES              sles;
  Vec               x,y;
  PC_BJacobi_MPIAIJ *bjac;
  KSP               subksp;
  PC                subpc;
  MatType           type;

  PetscFunctionBegin;
  /* if the true local matrix is to be used, it must also be MPIAIJ */
  if (jac->use_true_local) {
    ierr = MatGetType(pc->mat,&type,PETSC_NULL);CHKERRQ(ierr);
    if (type != MATMPIAIJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Incompatible matrix type.");
    matin = (Mat_MPIAIJ *) mat->data;
  }

  /* set default direct solver with no Krylov method */
  if (!pc->setupcalled) {
    char *prefix;
    ierr = SLESCreate(PETSC_COMM_SELF,&sles); CHKERRQ(ierr);
    PLogObjectParent(pc,sles);
    ierr = SLESGetKSP(sles,&subksp); CHKERRQ(ierr);
    ierr = KSPSetType(subksp,KSPPREONLY); CHKERRQ(ierr);
    ierr = SLESGetPC(sles,&subpc); CHKERRQ(ierr);
    ierr = PCSetType(subpc,PCILU); CHKERRQ(ierr);
    /* inherit the outer prefix and add "sub_" so the block solver is
       controllable from the options database */
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = SLESSetOptionsPrefix(sles,prefix); CHKERRQ(ierr);
    ierr = SLESAppendOptionsPrefix(sles,"sub_"); CHKERRQ(ierr);
    ierr = SLESSetFromOptions(sles); CHKERRQ(ierr);
    /*
      The reason we need to generate these vectors is to serve
      as the right-hand side and solution vector for the solve on the
      block. We do not need to allocate space for the vectors since
      that is provided via VecPlaceArray() just before the call to
      SLESSolve() on the block.
    */
    ierr = MatGetSize(pmatin->A,&m,&m); CHKERRQ(ierr);   /* block is square */
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&x); CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&y); CHKERRQ(ierr);
    PLogObjectParent(pc,x);
    PLogObjectParent(pc,y);

    /* install the single-block implementations of the PC operations */
    pc->destroy       = PCDestroy_BJacobi_MPIAIJ;
    pc->apply         = PCApply_BJacobi_MPIAIJ;
    pc->applytrans    = PCApplyTrans_BJacobi_MPIAIJ;
    pc->setuponblocks = PCSetUpOnBlocks_BJacobi_MPIAIJ;

    bjac         = (PC_BJacobi_MPIAIJ *) PetscMalloc(sizeof(PC_BJacobi_MPIAIJ));CHKPTRQ(bjac);
    PLogObjectMemory(pc,sizeof(PC_BJacobi_MPIAIJ));
    bjac->x      = x;
    bjac->y      = y;

    jac->sles    = (SLES*) PetscMalloc( sizeof(SLES) ); CHKPTRQ(jac->sles);
    jac->sles[0] = sles;
    jac->data    = (void *) bjac;
  } else {
    /* repeated setup: reuse the existing block solver and work vectors */
    sles = jac->sles[0];
    bjac = (PC_BJacobi_MPIAIJ *)jac->data;
  }
  /* (re)set the operators on the block solver; the preconditioner matrix
     is always the local diagonal block of pmat */
  if (jac->use_true_local) {
    ierr = SLESSetOperators(sles,matin->A,pmatin->A,pc->flag); CHKERRQ(ierr);
  }  else {
    ierr = SLESSetOperators(sles,pmatin->A,pmatin->A,pc->flag); CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
178 
179 #undef __FUNC__
180 #define __FUNC__ "PCSetUp_BJacobi_SeqAIJ"
181 int PCSetUp_BJacobi_SeqAIJ(PC pc)
182 {
183   PC_BJacobi        *jac = (PC_BJacobi *) pc->data;
184   Mat               mat = pc->mat, pmat = pc->pmat;
185   int               ierr, m;
186   SLES              sles;
187   Vec               x,y;
188   PC_BJacobi_MPIAIJ *bjac;
189   KSP               subksp;
190   PC                subpc;
191   MatType           type;
192 
193   PetscFunctionBegin;
194   if (jac->use_true_local) {
195     MatGetType(mat,&type,PETSC_NULL);
196     if (type != MATSEQAIJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Incompatible matrix type.");
197   }
198 
199   /* set default direct solver with no Krylov method */
200   if (!pc->setupcalled) {
201     char *prefix;
202     ierr = SLESCreate(PETSC_COMM_SELF,&sles); CHKERRQ(ierr);
203     PLogObjectParent(pc,sles);
204     ierr = SLESGetKSP(sles,&subksp); CHKERRQ(ierr);
205     ierr = KSPSetType(subksp,KSPPREONLY); CHKERRQ(ierr);
206     ierr = SLESGetPC(sles,&subpc); CHKERRQ(ierr);
207     ierr = PCSetType(subpc,PCILU); CHKERRQ(ierr);
208     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
209     ierr = SLESSetOptionsPrefix(sles,prefix); CHKERRQ(ierr);
210     ierr = SLESAppendOptionsPrefix(sles,"sub_"); CHKERRQ(ierr);
211     ierr = SLESSetFromOptions(sles); CHKERRQ(ierr);
212     /*
213       The reason we need to generate these vectors is to serve
214       as the right-hand side and solution vector for the solve on the
215       block. We do not need to allocate space for the vectors since
216       that is provided via VecPlaceArray() just before the call to
217       SLESSolve() on the block.
218     */
219     ierr = MatGetSize(pmat,&m,&m); CHKERRQ(ierr);
220     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&x); CHKERRQ(ierr);
221     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&y); CHKERRQ(ierr);
222     PLogObjectParent(pc,x);
223     PLogObjectParent(pc,y);
224 
225     pc->destroy       = PCDestroy_BJacobi_MPIAIJ;
226     pc->apply         = PCApply_BJacobi_MPIAIJ;
227     pc->applytrans    = PCApplyTrans_BJacobi_MPIAIJ;
228     pc->setuponblocks = PCSetUpOnBlocks_BJacobi_MPIAIJ;
229 
230     bjac         = (PC_BJacobi_MPIAIJ *) PetscMalloc(sizeof(PC_BJacobi_MPIAIJ));CHKPTRQ(bjac);
231     PLogObjectMemory(pc,sizeof(PC_BJacobi_MPIAIJ));
232     bjac->x      = x;
233     bjac->y      = y;
234 
235     jac->sles    = (SLES*) PetscMalloc( sizeof(SLES) ); CHKPTRQ(jac->sles);
236     jac->sles[0] = sles;
237     jac->data    = (void *) bjac;
238   } else {
239     sles = jac->sles[0];
240     bjac = (PC_BJacobi_MPIAIJ *)jac->data;
241   }
242   if (jac->use_true_local) {
243     ierr = SLESSetOperators(sles,mat,pmat,pc->flag); CHKERRQ(ierr);
244   }  else {
245     ierr = SLESSetOperators(sles,pmat,pmat,pc->flag); CHKERRQ(ierr);
246   }
247   PetscFunctionReturn(0);
248 }
249 
250