#ifndef lint
static char vcid[] = "$Id: mpiaijpc.c,v 1.1 1995/10/04 03:24:11 bsmith Exp bsmith $";
#endif
/*
   Defines a block Jacobi preconditioner for the MPIAIJ format.
   At the moment it supports only a single block per processor.
   This file knows about the storage format for MPIAIJ matrices.
   The code is nearly identical to that for the MPIROW format.
*/
#include "mpiaij.h"
#include "src/pc/pcimpl.h"
#include "src/pc/impls/bjacobi/bjacobi.h"
#include "sles.h"

typedef struct {
  Vec x;    /* sequential work vector holding the local solution */
} PC_BJacobiMPIAIJ;

int PCDestroy_BJacobiMPIAIJ(PetscObject obj)
{
  PC               pc   = (PC) obj;
  PC_BJacobi       *jac  = (PC_BJacobi *) pc->data;
  PC_BJacobiMPIAIJ *bjac = (PC_BJacobiMPIAIJ *) jac->data;
  int              i,ierr;

  /* destroy the local solver context for each block, then the work vector */
  for ( i=0; i<jac->n_local; i++ ) {
    ierr = SLESDestroy(jac->sles[i]); CHKERRQ(ierr);
  }
  PETSCFREE(jac->sles);
  ierr = VecDestroy(bjac->x); CHKERRQ(ierr);
  PETSCFREE(bjac); PETSCFREE(jac);
  return 0;
}

int PCApply_BJacobiMPIAIJ(PC pc,Vec x,Vec y)
{
  int              ierr,its;
  PC_BJacobi       *jac  = (PC_BJacobi *) pc->data;
  PC_BJacobiMPIAIJ *bjac = (PC_BJacobiMPIAIJ *) jac->data;

  /* solve with the local diagonal block, then copy the result into the output vector */
  ierr = SLESSolve(jac->sles[0],x,bjac->x,&its); CHKERRQ(ierr);
  ierr = VecCopy(bjac->x,y); CHKERRQ(ierr);
  return 0;
}

int PCSetUp_BJacobiMPIAIJ(PC pc)
{
  PC_BJacobi       *jac    = (PC_BJacobi *) pc->data;
  Mat              mat     = pc->mat, pmat = pc->pmat;
  Mat_MPIAIJ       *pmatin = (Mat_MPIAIJ *) pmat->data;
  Mat_MPIAIJ       *matin  = 0;
  int              ierr, numtids = pmatin->numtids,m;
  SLES             sles;
  Vec              x;
  PC_BJacobiMPIAIJ *bjac;
  KSP              subksp;
  PC               subpc;
  MatType          type;

  if (jac->use_true_local) {
    MatGetType(pc->mat,&type);
    if (type != MATMPIAIJ) SETERRQ(1,"PCSetUp_BJacobiMPIAIJ:Incompatible matrix type.");
    matin = (Mat_MPIAIJ *) mat->data;
  }

  if (numtids != jac->n) {
    SETERRQ(1,"PCSetUp_BJacobiMPIAIJ:Can do only 1 block per processor");
  }

  /* set default direct solver with no Krylov method */
  if (!pc->setupcalled) {
    ierr = SLESCreate(MPI_COMM_SELF,&sles); CHKERRQ(ierr);
    PLogObjectParent(pc,sles);
    ierr = SLESGetKSP(sles,&subksp); CHKERRQ(ierr);
    ierr = KSPSetMethod(subksp,KSPPREONLY); CHKERRQ(ierr);
    ierr = SLESGetPC(sles,&subpc); CHKERRQ(ierr);
    ierr = PCSetMethod(subpc,PCLU); CHKERRQ(ierr);
    ierr = SLESSetOptionsPrefix(sles,"-sub_"); CHKERRQ(ierr);
    ierr = SLESSetFromOptions(sles); CHKERRQ(ierr);
    /*
       This is not so good. The only reason we need to generate this vector
       is so that KSP may generate sequential vectors for the local solves.
    */
    ierr = MatGetSize(pmatin->A,&m,&m); CHKERRQ(ierr);
    ierr = VecCreateSeq(MPI_COMM_SELF,m,&x); CHKERRQ(ierr);
    PLogObjectParent(pmat,x);

    pc->destroy = PCDestroy_BJacobiMPIAIJ;
    pc->apply   = PCApply_BJacobiMPIAIJ;

    bjac = (PC_BJacobiMPIAIJ *) PETSCMALLOC(sizeof(PC_BJacobiMPIAIJ));
    CHKPTRQ(bjac);
    PLogObjectMemory(pc,sizeof(PC_BJacobiMPIAIJ));
    bjac->x = x;

    jac->sles    = (SLES*) PETSCMALLOC( sizeof(SLES) ); CHKPTRQ(jac->sles);
    jac->sles[0] = sles;
    jac->data    = (void *) bjac;
  }
  else {
    sles = jac->sles[0];
  }
  /* use the true local diagonal block as the operator if requested;
     otherwise use the diagonal block of the preconditioner matrix */
  if (jac->use_true_local)
    ierr = SLESSetOperators(sles,matin->A,pmatin->A,pc->flag);
  else
    ierr = SLESSetOperators(sles,pmatin->A,pmatin->A,pc->flag);
  CHKERRQ(ierr);

  return 0;
}
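
/*
   Usage sketch (illustrative only; kept out of the build with #if 0): shows
   how an application might select this block Jacobi preconditioner through
   the SLES interface. The method name PCBJACOBI and the final argument to
   SLESSetOperators are assumptions for this release; SLESCreate,
   SLESSetOperators, SLESGetPC, PCSetMethod, SLESSetFromOptions, SLESSolve,
   and SLESDestroy all appear in this file or its headers. Because
   SLESSetFromOptions is called, the local solves can then be controlled at
   run time through the "-sub_" options prefix registered in
   PCSetUp_BJacobiMPIAIJ above.
*/
#if 0
#include "sles.h"

int SolveWithBJacobi(MPI_Comm comm,Mat A,Vec b,Vec x)
{
  SLES sles;
  PC   pc;
  int  ierr,its;

  ierr = SLESCreate(comm,&sles); CHKERRQ(ierr);
  /* the 0 flag is a placeholder; pass the operator-structure flag
     appropriate for the release in use */
  ierr = SLESSetOperators(sles,A,A,0); CHKERRQ(ierr);
  ierr = SLESGetPC(sles,&pc); CHKERRQ(ierr);
  ierr = PCSetMethod(pc,PCBJACOBI); CHKERRQ(ierr);  /* one block per processor */
  ierr = SLESSetFromOptions(sles); CHKERRQ(ierr);   /* honors -sub_ options */
  ierr = SLESSolve(sles,b,x,&its); CHKERRQ(ierr);
  ierr = SLESDestroy(sles); CHKERRQ(ierr);
  return 0;
}
#endif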