#define PETSCKSP_DLL

#include "petscpc.h"   /*I "petscpc.h" I*/
#include "petscmg.h"   /*I "petscmg.h" I*/
#include "petscda.h"   /*I "petscda.h" I*/
#include "../src/ksp/pc/impls/mg/mgimpl.h"

/* Context for the PCEXOTIC preconditioner: two-level overlapping Schwarz with
   a face- or wirebasket-based coarse space built on a 3d DA. */
typedef struct {
  DA           da;           /* grid the preconditioner operates on; set with PCSetDA() */
  PCExoticType type;         /* PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET */
  Mat          P;            /* the constructed interpolation matrix */
  PetscTruth   directSolve;  /* use direct LU factorization to construct interpolation */
  KSP          ksp;          /* solver for the interior problems when !directSolve */
} PC_Exotic;

/* Names matching the PCExoticType enum, for options processing and viewing */
const char *PCExoticTypes[] = {"face","wirebasket","PCExoticType","PC_Exotic",0};


#undef __FUNCT__
#define __FUNCT__ "DAGetWireBasketInterpolation"
/*
      DAGetWireBasketInterpolation - Gets the interpolation for a wirebasket based coarse space

      One coarse degree of freedom per subdomain vertex, edge and face
      (8 + 12 + 6 = 26 columns per subdomain).
*/
PetscErrorCode DAGetWireBasketInterpolation(PC_Exotic *exotic,Mat Aglobal,MatReuse reuse,Mat *P)
{
  DA                     da = exotic->da;
  PetscErrorCode         ierr;
  PetscInt               dim,i,j,k,m,n,p,dof,Nint,Nface,Nwire,Nsurf,*Iint,*Isurf,cint = 0,csurf = 0,istart,jstart,kstart,*II,N,c = 0;
  PetscInt               mwidth,nwidth,pwidth,cnt,mp,np,pp,Ntotal,gl[26],*globals,Ng,*IIint,*IIsurf;
  Mat                    Xint, Xsurf,Xint_tmp;
  IS                     isint,issurf,is,row,col;
  ISLocalToGlobalMapping ltg;
  MPI_Comm               comm;
  Mat                    A,Aii,Ais,Asi,*Aholder,iAii;
  MatFactorInfo          info;
  PetscScalar            *xsurf,*xint;
#if defined(PETSC_USE_DEBUG_foo)
  PetscScalar            tmp;
#endif
  PetscTable             ht;

  PetscFunctionBegin;
  ierr = DAGetInfo(da,&dim,0,0,0,&mp,&np,&pp,&dof,0,0,0);CHKERRQ(ierr);
  if (dof != 1) SETERRQ(PETSC_ERR_SUP,"Only for single field problems");
  if (dim != 3) SETERRQ(PETSC_ERR_SUP,"Only coded for 3d problems");
  ierr = DAGetCorners(da,0,0,0,&m,&n,&p);CHKERRQ(ierr);
  ierr = DAGetGhostCorners(da,&istart,&jstart,&kstart,&mwidth,&nwidth,&pwidth);CHKERRQ(ierr);
  /* istart/jstart/kstart become -1 when the ghost region extends below the owned
     region in that direction (there is a neighboring process), 0 otherwise; the
     (m-istart)-style expressions below then include that one ghost layer */
  istart = istart ? -1 : 0;
  jstart = jstart ? -1 : 0;
  kstart = kstart ? -1 : 0;

  /*
    the columns of P are the interpolation of each coarse grid point (one for each vertex and edge)
    to all the local degrees of freedom (this includes the vertices, edges and faces).

    Xint are the subset of the interpolation into the interior

    Xface are the interpolation onto faces but not into the interior

    Xsurf are the interpolation onto the vertices and edges (the surfbasket)
                                         Xint
    Symbolically one could write P = (  Xface  ) after interchanging the rows to match the natural ordering on the domain
                                         Xsurf
  */
  N     = (m - istart)*(n - jstart)*(p - kstart);
  Nint  = (m-2-istart)*(n-2-jstart)*(p-2-kstart);
  Nface = 2*( (m-2-istart)*(n-2-jstart) + (m-2-istart)*(p-2-kstart) + (n-2-jstart)*(p-2-kstart) );
  Nwire = 4*( (m-2-istart) + (n-2-jstart) + (p-2-kstart) ) + 8;
  Nsurf = Nface + Nwire;
  /* 26 columns: one coarse basis function per subdomain vertex (8), edge (12) and face (6) */
  ierr = MatCreateSeqDense(MPI_COMM_SELF,Nint,26,PETSC_NULL,&Xint);CHKERRQ(ierr);
  ierr = MatCreateSeqDense(MPI_COMM_SELF,Nsurf,26,PETSC_NULL,&Xsurf);CHKERRQ(ierr);
  ierr = MatGetArray(Xsurf,&xsurf);CHKERRQ(ierr);

  /*
     Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
     Xsurf will be all zero (thus making the coarse matrix singular).
  */
  if (m-istart < 3) SETERRQ(PETSC_ERR_SUP,"Number of grid points per process in X direction must be at least 3");
  if (n-jstart < 3) SETERRQ(PETSC_ERR_SUP,"Number of grid points per process in Y direction must be at least 3");
  if (p-kstart < 3) SETERRQ(PETSC_ERR_SUP,"Number of grid points per process in Z direction must be at least 3");

  /* Walk the surface nodes in the local ordering, placing a 1 in the column
     (0..25) of the vertex, edge or face that each node belongs to; cnt is the
     running surface-row index */
  cnt = 0;
  xsurf[cnt++] = 1; for (i=1; i<m-istart-1; i++) { xsurf[cnt++ + Nsurf] = 1;} xsurf[cnt++ + 2*Nsurf] = 1;
  for (j=1;j<n-1-jstart;j++) { xsurf[cnt++ + 3*Nsurf] = 1; for (i=1; i<m-istart-1; i++) { xsurf[cnt++ + 4*Nsurf] = 1;} xsurf[cnt++ + 5*Nsurf] = 1;}
  xsurf[cnt++ + 6*Nsurf] = 1; for (i=1; i<m-istart-1; i++) { xsurf[cnt++ + 7*Nsurf] = 1;} xsurf[cnt++ + 8*Nsurf] = 1;
  for (k=1;k<p-1-kstart;k++) {
    xsurf[cnt++ + 9*Nsurf] = 1; for (i=1; i<m-istart-1; i++) { xsurf[cnt++ + 10*Nsurf] = 1;} xsurf[cnt++ + 11*Nsurf] = 1;
    for (j=1;j<n-1-jstart;j++) { xsurf[cnt++ + 12*Nsurf] = 1; /* these are the interior nodes */ xsurf[cnt++ + 13*Nsurf] = 1;}
    xsurf[cnt++ + 14*Nsurf] = 1; for (i=1; i<m-istart-1; i++) { xsurf[cnt++ + 15*Nsurf] = 1;} xsurf[cnt++ + 16*Nsurf] = 1;
  }
  xsurf[cnt++ + 17*Nsurf] = 1; for (i=1; i<m-istart-1; i++) { xsurf[cnt++ + 18*Nsurf] = 1;} xsurf[cnt++ + 19*Nsurf] = 1;
  for (j=1;j<n-1-jstart;j++) { xsurf[cnt++ + 20*Nsurf] = 1; for (i=1; i<m-istart-1; i++) { xsurf[cnt++ + 21*Nsurf] = 1;} xsurf[cnt++ + 22*Nsurf] = 1;}
  xsurf[cnt++ + 23*Nsurf] = 1; for (i=1; i<m-istart-1; i++) { xsurf[cnt++ + 24*Nsurf] = 1;} xsurf[cnt++ + 25*Nsurf] = 1;

  /* interpolations only sum to 1 when using direct solver */
#if defined(PETSC_USE_DEBUG_foo)
  for (i=0; i<Nsurf; i++) {
    tmp = 0.0;
    for (j=0; j<26; j++) {
      tmp += xsurf[i+j*Nsurf];
    }
    if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_ERR_PLIB,"Wrong Xsurf interpolation at i %D value %G",i,PetscAbsScalar(tmp));
  }
#endif
  ierr = MatRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
  /* ierr = MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);*/


  /*
       I are the indices for all the needed vertices (in global numbering)
       Iint are the indices for the interior values, I surf for the surface values
            (This is just for the part of the global matrix obtained with MatGetSubMatrix(), it
             is NOT the local DA ordering.)
       IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
  */
#define Endpoint(a,start,b) (a == 0 || a == (b-1-start))
  ierr = PetscMalloc3(N,PetscInt,&II,Nint,PetscInt,&Iint,Nsurf,PetscInt,&Isurf);CHKERRQ(ierr);
  ierr = PetscMalloc2(Nint,PetscInt,&IIint,Nsurf,PetscInt,&IIsurf);CHKERRQ(ierr);
  for (k=0; k<p-kstart; k++) {
    for (j=0; j<n-jstart; j++) {
      for (i=0; i<m-istart; i++) {
        II[c++] = i + j*mwidth + k*mwidth*nwidth;

        if (!Endpoint(i,istart,m) && !Endpoint(j,jstart,n) && !Endpoint(k,kstart,p)) {
          IIint[cint]  = i + j*mwidth + k*mwidth*nwidth;
          Iint[cint++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
        } else {
          IIsurf[csurf]  = i + j*mwidth + k*mwidth*nwidth;
          Isurf[csurf++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
        }
      }
    }
  }
  /* sanity checks that the counting above matched the predicted sizes */
  if (c != N) SETERRQ(PETSC_ERR_PLIB,"c != N");
  if (cint != Nint) SETERRQ(PETSC_ERR_PLIB,"cint != Nint");
  if (csurf != Nsurf) SETERRQ(PETSC_ERR_PLIB,"csurf != Nsurf");
  ierr = DAGetISLocalToGlobalMapping(da,&ltg);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(ltg,N,II,II);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(ltg,Nint,IIint,IIint);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(ltg,Nsurf,IIsurf,IIsurf);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
  ierr = ISCreateGeneral(comm,N,II,&is);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,Nint,Iint,&isint);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,Nsurf,Isurf,&issurf);CHKERRQ(ierr);
  ierr = PetscFree3(II,Iint,Isurf);CHKERRQ(ierr);

  /* pull the local (overlapping) piece of the global operator onto this process */
  ierr = MatGetSubMatrices(Aglobal,1,&is,&is,MAT_INITIAL_MATRIX,&Aholder);CHKERRQ(ierr);
  A    = *Aholder;
  ierr = PetscFree(Aholder);CHKERRQ(ierr);

  ierr = MatGetSubMatrix(A,isint,isint,MAT_INITIAL_MATRIX,&Aii);CHKERRQ(ierr);
  ierr = MatGetSubMatrix(A,isint,issurf,MAT_INITIAL_MATRIX,&Ais);CHKERRQ(ierr);
  ierr = MatGetSubMatrix(A,issurf,isint,MAT_INITIAL_MATRIX,&Asi);CHKERRQ(ierr);

  /*
     Solve for the interpolation onto the interior Xint, i.e. Aii*Xint = -Ais*Xsurf
  */
  ierr = MatDuplicate(Xint,MAT_DO_NOT_COPY_VALUES,&Xint_tmp);CHKERRQ(ierr);
  ierr = MatMatMult(Ais,Xsurf,MAT_REUSE_MATRIX,PETSC_DETERMINE,&Xint_tmp);CHKERRQ(ierr);
  ierr = MatScale(Xint_tmp,-1.0);CHKERRQ(ierr);
  if (exotic->directSolve) {
    /* LU factor Aii once, then solve for all 26 right hand sides at once */
    ierr = MatGetFactor(Aii,MAT_SOLVER_PETSC,MAT_FACTOR_LU,&iAii);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    ierr = MatGetOrdering(Aii,MATORDERING_ND,&row,&col);CHKERRQ(ierr);
    ierr = MatLUFactorSymbolic(iAii,Aii,row,col,&info);CHKERRQ(ierr);
    ierr = ISDestroy(row);CHKERRQ(ierr);
    ierr = ISDestroy(col);CHKERRQ(ierr);
    ierr = MatLUFactorNumeric(iAii,Aii,&info);CHKERRQ(ierr);
    ierr = MatMatSolve(iAii,Xint_tmp,Xint);CHKERRQ(ierr);
    ierr = MatDestroy(iAii);CHKERRQ(ierr);
  } else {
    Vec         b,x;
    PetscScalar *xint_tmp;

    /* iterative solve, one KSPSolve() per column, aliasing the dense
       column storage via VecPlaceArray() to avoid copies */
    ierr = MatGetArray(Xint,&xint);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,Nint,0,&x);CHKERRQ(ierr);
    ierr = MatGetArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,Nint,0,&b);CHKERRQ(ierr);
    ierr = KSPSetOperators(exotic->ksp,Aii,Aii,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    for (i=0; i<26; i++) {
      ierr = VecPlaceArray(x,xint+i*Nint);CHKERRQ(ierr);
      ierr = VecPlaceArray(b,xint_tmp+i*Nint);CHKERRQ(ierr);
      ierr = KSPSolve(exotic->ksp,b,x);CHKERRQ(ierr);
      ierr = VecResetArray(x);CHKERRQ(ierr);
      ierr = VecResetArray(b);CHKERRQ(ierr);
    }
    ierr = MatRestoreArray(Xint,&xint);CHKERRQ(ierr);
    ierr = MatRestoreArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
    ierr = VecDestroy(x);CHKERRQ(ierr);
    ierr = VecDestroy(b);CHKERRQ(ierr);
  }
  ierr = MatDestroy(Xint_tmp);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG_foo)
  ierr = MatGetArray(Xint,&xint);CHKERRQ(ierr);
  for (i=0; i<Nint; i++) {
    tmp = 0.0;
    for (j=0; j<26; j++) {
      tmp += xint[i+j*Nint];
    }
    if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_ERR_PLIB,"Wrong Xint interpolation at i %D value %G",i,PetscAbsScalar(tmp));
  }
  ierr = MatRestoreArray(Xint,&xint);CHKERRQ(ierr);
  /* ierr =MatView(Xint,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); */
#endif


  /*         total vertices              total faces                                       total edges */
  Ntotal = (mp + 1)*(np + 1)*(pp + 1) + mp*np*(pp+1) + mp*pp*(np+1) + np*pp*(mp+1) + mp*(np+1)*(pp+1) + np*(mp+1)*(pp+1) + pp*(mp+1)*(np+1);

  /*
      For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
  */
  cnt = 0;
  gl[cnt++] = 0;  { gl[cnt++] = 1;} gl[cnt++] = m-istart-1;
  { gl[cnt++] = mwidth;  { gl[cnt++] = mwidth+1;} gl[cnt++] = mwidth + m-istart-1;}
  gl[cnt++] = mwidth*(n-jstart-1);  { gl[cnt++] = mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*(n-jstart-1) + m-istart-1;
  {
    gl[cnt++] = mwidth*nwidth;  { gl[cnt++] = mwidth*nwidth+1;} gl[cnt++] = mwidth*nwidth+ m-istart-1;
    { gl[cnt++] = mwidth*nwidth + mwidth; /* these are the interior nodes */ gl[cnt++] = mwidth*nwidth + mwidth+m-istart-1;}
    gl[cnt++] = mwidth*nwidth+ mwidth*(n-jstart-1);  { gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1) + m-istart-1;
  }
  gl[cnt++] = mwidth*nwidth*(p-kstart-1);  { gl[cnt++] = mwidth*nwidth*(p-kstart-1)+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1) + m-istart-1;
  { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth;  { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1)+mwidth+m-istart-1;}
  gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth*(n-jstart-1);  { gl[cnt++] = mwidth*nwidth*(p-kstart-1)+ mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth*(n-jstart-1) + m-istart-1;

  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
  /* convert that to global numbering and get them on all processes */
  ierr = ISLocalToGlobalMappingApply(ltg,26,gl,gl);CHKERRQ(ierr);
  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
  ierr = PetscMalloc(26*mp*np*pp*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  ierr = MPI_Allgather(gl,26,MPIU_INT,globals,26,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);

  /* Number the coarse grid points from 0 to Ntotal */
  ierr = PetscTableCreate(Ntotal/3,&ht);CHKERRQ(ierr);
  for (i=0; i<26*mp*np*pp; i++){
    /* +1 because PetscTable cannot store the key 0 */
    ierr = PetscTableAddCount(ht,globals[i]+1);CHKERRQ(ierr);
  }
  ierr = PetscTableGetCount(ht,&cnt);CHKERRQ(ierr);
  if (cnt != Ntotal) SETERRQ2(PETSC_ERR_PLIB,"Hash table size %D not equal to total number coarse grid points %D",cnt,Ntotal);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  for (i=0; i<26; i++) {
    ierr = PetscTableFind(ht,gl[i]+1,&gl[i]);CHKERRQ(ierr);
    gl[i]--;
  }
  ierr = PetscTableDestroy(ht);CHKERRQ(ierr);
  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */

  /* construct global interpolation matrix */
  ierr = MatGetLocalSize(Aglobal,&Ng,PETSC_NULL);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = MatCreateMPIAIJ(((PetscObject)da)->comm,Ng,PETSC_DECIDE,PETSC_DECIDE,Ntotal,Nint+Nsurf,PETSC_NULL,Nint+Nsurf,PETSC_NULL,P);CHKERRQ(ierr);
  } else {
    ierr = MatZeroEntries(*P);CHKERRQ(ierr);
  }
  /* the dense Xint/Xsurf arrays are column oriented */
  ierr = MatSetOption(*P,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
  ierr = MatGetArray(Xint,&xint);CHKERRQ(ierr);
  ierr = MatSetValues(*P,Nint,IIint,26,gl,xint,INSERT_VALUES);CHKERRQ(ierr);
  ierr = MatRestoreArray(Xint,&xint);CHKERRQ(ierr);
  ierr = MatGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
  ierr = MatSetValues(*P,Nsurf,IIsurf,26,gl,xsurf,INSERT_VALUES);CHKERRQ(ierr);
  ierr = MatRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = PetscFree2(IIint,IIsurf);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG_foo)
  {
    Vec         x,y;
    PetscScalar *yy;
    ierr = VecCreateMPI(((PetscObject)da)->comm,Ng,PETSC_DETERMINE,&y);CHKERRQ(ierr);
    ierr = VecCreateMPI(((PetscObject)da)->comm,PETSC_DETERMINE,Ntotal,&x);CHKERRQ(ierr);
    ierr = VecSet(x,1.0);CHKERRQ(ierr);
    ierr = MatMult(*P,x,y);CHKERRQ(ierr);
    ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
    for (i=0; i<Ng; i++) {
      if (PetscAbsScalar(yy[i]-1.0) > 1.e-10) SETERRQ2(PETSC_ERR_PLIB,"Wrong p interpolation at i %D value %G",i,PetscAbsScalar(yy[i]));
    }
    ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
    ierr = VecDestroy(x);CHKERRQ(ierr);
    ierr = VecDestroy(y);CHKERRQ(ierr);
  }
#endif

  ierr = MatDestroy(Aii);CHKERRQ(ierr);
  ierr = MatDestroy(Ais);CHKERRQ(ierr);
  ierr = MatDestroy(Asi);CHKERRQ(ierr);
  ierr = MatDestroy(A);CHKERRQ(ierr);
  ierr = ISDestroy(is);CHKERRQ(ierr);
  ierr = ISDestroy(isint);CHKERRQ(ierr);
  ierr = ISDestroy(issurf);CHKERRQ(ierr);
  ierr = MatDestroy(Xint);CHKERRQ(ierr);
  ierr = MatDestroy(Xsurf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DAGetFaceInterpolation"
/*
      DAGetFaceInterpolation - Gets the interpolation for a face based coarse space

      One coarse degree of freedom per subdomain face (6 columns per subdomain);
      vertex and edge values are ignored in the coarse problem.
*/
PetscErrorCode DAGetFaceInterpolation(PC_Exotic *exotic,Mat Aglobal,MatReuse reuse,Mat *P)
{
  DA                     da = exotic->da;
  PetscErrorCode         ierr;
  PetscInt               dim,i,j,k,m,n,p,dof,Nint,Nface,Nwire,Nsurf,*Iint,*Isurf,cint = 0,csurf = 0,istart,jstart,kstart,*II,N,c = 0;
  PetscInt               mwidth,nwidth,pwidth,cnt,mp,np,pp,Ntotal,gl[6],*globals,Ng,*IIint,*IIsurf;
  Mat                    Xint, Xsurf,Xint_tmp;
  IS
isint,issurf,is,row,col;
  ISLocalToGlobalMapping ltg;
  MPI_Comm               comm;
  Mat                    A,Aii,Ais,Asi,*Aholder,iAii;
  MatFactorInfo          info;
  PetscScalar            *xsurf,*xint;
#if defined(PETSC_USE_DEBUG_foo)
  PetscScalar            tmp;
#endif
  PetscTable             ht;

  PetscFunctionBegin;
  ierr = DAGetInfo(da,&dim,0,0,0,&mp,&np,&pp,&dof,0,0,0);CHKERRQ(ierr);
  if (dof != 1) SETERRQ(PETSC_ERR_SUP,"Only for single field problems");
  if (dim != 3) SETERRQ(PETSC_ERR_SUP,"Only coded for 3d problems");
  ierr = DAGetCorners(da,0,0,0,&m,&n,&p);CHKERRQ(ierr);
  ierr = DAGetGhostCorners(da,&istart,&jstart,&kstart,&mwidth,&nwidth,&pwidth);CHKERRQ(ierr);
  /* -1 when the ghost region extends below the owned region in that direction
     (there is a neighboring process), 0 otherwise; the (m-istart)-style
     expressions below then include that one ghost layer */
  istart = istart ? -1 : 0;
  jstart = jstart ? -1 : 0;
  kstart = kstart ? -1 : 0;

  /*
    the columns of P are the interpolation of each coarse grid point (one for each vertex and edge)
    to all the local degrees of freedom (this includes the vertices, edges and faces).

    Xint are the subset of the interpolation into the interior

    Xface are the interpolation onto faces but not into the interior

    Xsurf are the interpolation onto the vertices and edges (the surfbasket)
                                         Xint
    Symbolically one could write P = (  Xface  ) after interchanging the rows to match the natural ordering on the domain
                                         Xsurf
  */
  N     = (m - istart)*(n - jstart)*(p - kstart);
  Nint  = (m-2-istart)*(n-2-jstart)*(p-2-kstart);
  Nface = 2*( (m-2-istart)*(n-2-jstart) + (m-2-istart)*(p-2-kstart) + (n-2-jstart)*(p-2-kstart) );
  Nwire = 4*( (m-2-istart) + (n-2-jstart) + (p-2-kstart) ) + 8;
  Nsurf = Nface + Nwire;
  /* 6 columns: one coarse basis function per subdomain face */
  ierr = MatCreateSeqDense(MPI_COMM_SELF,Nint,6,PETSC_NULL,&Xint);CHKERRQ(ierr);
  ierr = MatCreateSeqDense(MPI_COMM_SELF,Nsurf,6,PETSC_NULL,&Xsurf);CHKERRQ(ierr);
  ierr = MatGetArray(Xsurf,&xsurf);CHKERRQ(ierr);

  /*
     Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
     Xsurf will be all zero (thus making the coarse matrix singular).
  */
  if (m-istart < 3) SETERRQ(PETSC_ERR_SUP,"Number of grid points per process in X direction must be at least 3");
  if (n-jstart < 3) SETERRQ(PETSC_ERR_SUP,"Number of grid points per process in Y direction must be at least 3");
  if (p-kstart < 3) SETERRQ(PETSC_ERR_SUP,"Number of grid points per process in Z direction must be at least 3");

  /* Walk the face nodes (edge/vertex rows stay zero), placing a 1 in the
     column (0..5) of the face each node belongs to */
  cnt = 0;
  for (j=1;j<n-1-jstart;j++) { for (i=1; i<m-istart-1; i++) { xsurf[cnt++ + 0*Nsurf] = 1;} }
  for (k=1;k<p-1-kstart;k++) {
    for (i=1; i<m-istart-1; i++) { xsurf[cnt++ + 1*Nsurf] = 1;}
    for (j=1;j<n-1-jstart;j++) { xsurf[cnt++ + 2*Nsurf] = 1; /* these are the interior nodes */ xsurf[cnt++ + 3*Nsurf] = 1;}
    for (i=1; i<m-istart-1; i++) { xsurf[cnt++ + 4*Nsurf] = 1;}
  }
  for (j=1;j<n-1-jstart;j++) {for (i=1; i<m-istart-1; i++) { xsurf[cnt++ + 5*Nsurf] = 1;} }

#if defined(PETSC_USE_DEBUG_foo)
  for (i=0; i<Nsurf; i++) {
    tmp = 0.0;
    for (j=0; j<6; j++) {
      tmp += xsurf[i+j*Nsurf];
    }
    if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_ERR_PLIB,"Wrong Xsurf interpolation at i %D value %G",i,PetscAbsScalar(tmp));
  }
#endif
  ierr = MatRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
  /* ierr = MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);*/


  /*
       I are the indices for all the needed vertices (in global numbering)
       Iint are the indices for the interior values, I surf for the surface values
            (This is just for the part of the global matrix obtained with MatGetSubMatrix(), it
             is NOT the local DA ordering.)
       IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
  */
#define Endpoint(a,start,b) (a == 0 || a == (b-1-start))
  ierr = PetscMalloc3(N,PetscInt,&II,Nint,PetscInt,&Iint,Nsurf,PetscInt,&Isurf);CHKERRQ(ierr);
  ierr = PetscMalloc2(Nint,PetscInt,&IIint,Nsurf,PetscInt,&IIsurf);CHKERRQ(ierr);
  for (k=0; k<p-kstart; k++) {
    for (j=0; j<n-jstart; j++) {
      for (i=0; i<m-istart; i++) {
        II[c++] = i + j*mwidth + k*mwidth*nwidth;

        if (!Endpoint(i,istart,m) && !Endpoint(j,jstart,n) && !Endpoint(k,kstart,p)) {
          IIint[cint]  = i + j*mwidth + k*mwidth*nwidth;
          Iint[cint++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
        } else {
          IIsurf[csurf]  = i + j*mwidth + k*mwidth*nwidth;
          Isurf[csurf++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
        }
      }
    }
  }
  /* sanity checks that the counting above matched the predicted sizes */
  if (c != N) SETERRQ(PETSC_ERR_PLIB,"c != N");
  if (cint != Nint) SETERRQ(PETSC_ERR_PLIB,"cint != Nint");
  if (csurf != Nsurf) SETERRQ(PETSC_ERR_PLIB,"csurf != Nsurf");
  ierr = DAGetISLocalToGlobalMapping(da,&ltg);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(ltg,N,II,II);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(ltg,Nint,IIint,IIint);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(ltg,Nsurf,IIsurf,IIsurf);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
  ierr = ISCreateGeneral(comm,N,II,&is);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,Nint,Iint,&isint);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,Nsurf,Isurf,&issurf);CHKERRQ(ierr);
  ierr = PetscFree3(II,Iint,Isurf);CHKERRQ(ierr);

  /* pull the local (overlapping) piece of the global operator onto this process */
  ierr = MatGetSubMatrices(Aglobal,1,&is,&is,MAT_INITIAL_MATRIX,&Aholder);CHKERRQ(ierr);
  A    = *Aholder;
  ierr = PetscFree(Aholder);CHKERRQ(ierr);

  ierr = MatGetSubMatrix(A,isint,isint,MAT_INITIAL_MATRIX,&Aii);CHKERRQ(ierr);
  ierr = MatGetSubMatrix(A,isint,issurf,MAT_INITIAL_MATRIX,&Ais);CHKERRQ(ierr);
  ierr = MatGetSubMatrix(A,issurf,isint,MAT_INITIAL_MATRIX,&Asi);CHKERRQ(ierr);

  /*
     Solve for the interpolation onto the interior Xint, i.e. Aii*Xint = -Ais*Xsurf
  */
  ierr = MatDuplicate(Xint,MAT_DO_NOT_COPY_VALUES,&Xint_tmp);CHKERRQ(ierr);
  ierr = MatMatMult(Ais,Xsurf,MAT_REUSE_MATRIX,PETSC_DETERMINE,&Xint_tmp);CHKERRQ(ierr);
  ierr = MatScale(Xint_tmp,-1.0);CHKERRQ(ierr);

  if (exotic->directSolve) {
    /* LU factor Aii once, then solve for all 6 right hand sides at once */
    ierr = MatGetFactor(Aii,MAT_SOLVER_PETSC,MAT_FACTOR_LU,&iAii);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    ierr = MatGetOrdering(Aii,MATORDERING_ND,&row,&col);CHKERRQ(ierr);
    ierr = MatLUFactorSymbolic(iAii,Aii,row,col,&info);CHKERRQ(ierr);
    ierr = ISDestroy(row);CHKERRQ(ierr);
    ierr = ISDestroy(col);CHKERRQ(ierr);
    ierr = MatLUFactorNumeric(iAii,Aii,&info);CHKERRQ(ierr);
    ierr = MatMatSolve(iAii,Xint_tmp,Xint);CHKERRQ(ierr);
    ierr = MatDestroy(iAii);CHKERRQ(ierr);
  } else {
    Vec         b,x;
    PetscScalar *xint_tmp;

    /* iterative solve, one KSPSolve() per column, aliasing the dense
       column storage via VecPlaceArray() to avoid copies */
    ierr = MatGetArray(Xint,&xint);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,Nint,0,&x);CHKERRQ(ierr);
    ierr = MatGetArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,Nint,0,&b);CHKERRQ(ierr);
    ierr = KSPSetOperators(exotic->ksp,Aii,Aii,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    for (i=0; i<6; i++) {
      ierr = VecPlaceArray(x,xint+i*Nint);CHKERRQ(ierr);
      ierr = VecPlaceArray(b,xint_tmp+i*Nint);CHKERRQ(ierr);
      ierr = KSPSolve(exotic->ksp,b,x);CHKERRQ(ierr);
      ierr = VecResetArray(x);CHKERRQ(ierr);
      ierr = VecResetArray(b);CHKERRQ(ierr);
    }
    ierr = MatRestoreArray(Xint,&xint);CHKERRQ(ierr);
    ierr = MatRestoreArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
    ierr = VecDestroy(x);CHKERRQ(ierr);
    ierr = VecDestroy(b);CHKERRQ(ierr);
  }
  ierr = MatDestroy(Xint_tmp);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG_foo)
  ierr = MatGetArray(Xint,&xint);CHKERRQ(ierr);
  for (i=0; i<Nint; i++) {
    tmp = 0.0;
    for (j=0; j<6; j++) {
      tmp += xint[i+j*Nint];
    }
    if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_ERR_PLIB,"Wrong Xint interpolation at i %D value %G",i,PetscAbsScalar(tmp));
  }
  ierr = MatRestoreArray(Xint,&xint);CHKERRQ(ierr);
  /* ierr =MatView(Xint,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); */
#endif


  /* total faces */
  Ntotal = mp*np*(pp+1) + mp*pp*(np+1) + np*pp*(mp+1);

  /*
      For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
  */
  cnt = 0;
  { gl[cnt++] = mwidth+1;}
  {
    { gl[cnt++] = mwidth*nwidth+1;}
    { gl[cnt++] = mwidth*nwidth + mwidth; /* these are the interior nodes */ gl[cnt++] = mwidth*nwidth + mwidth+m-istart-1;}
    { gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1)+1;}
  }
  { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth+1;}

  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
  /* convert that to global numbering and get them on all processes */
  ierr = ISLocalToGlobalMappingApply(ltg,6,gl,gl);CHKERRQ(ierr);
  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
  ierr = PetscMalloc(6*mp*np*pp*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  ierr = MPI_Allgather(gl,6,MPIU_INT,globals,6,MPIU_INT,((PetscObject)da)->comm);CHKERRQ(ierr);

  /* Number the coarse grid points from 0 to Ntotal */
  ierr = PetscTableCreate(Ntotal/3,&ht);CHKERRQ(ierr);
  for (i=0; i<6*mp*np*pp; i++){
    /* +1 because PetscTable cannot store the key 0 */
    ierr = PetscTableAddCount(ht,globals[i]+1);CHKERRQ(ierr);
  }
  ierr = PetscTableGetCount(ht,&cnt);CHKERRQ(ierr);
  if (cnt != Ntotal) SETERRQ2(PETSC_ERR_PLIB,"Hash table size %D not equal to total number coarse grid points %D",cnt,Ntotal);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  for (i=0; i<6; i++) {
    ierr = PetscTableFind(ht,gl[i]+1,&gl[i]);CHKERRQ(ierr);
    gl[i]--;
  }
  ierr = PetscTableDestroy(ht);CHKERRQ(ierr);
  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */

  /* construct global interpolation matrix */
  ierr = MatGetLocalSize(Aglobal,&Ng,PETSC_NULL);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = MatCreateMPIAIJ(((PetscObject)da)->comm,Ng,PETSC_DECIDE,PETSC_DECIDE,Ntotal,Nint+Nsurf,PETSC_NULL,Nint,PETSC_NULL,P);CHKERRQ(ierr);
  } else {
    ierr = MatZeroEntries(*P);CHKERRQ(ierr);
  }
  /* the dense Xint/Xsurf arrays are column oriented */
  ierr = MatSetOption(*P,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
  ierr = MatGetArray(Xint,&xint);CHKERRQ(ierr);
  ierr = MatSetValues(*P,Nint,IIint,6,gl,xint,INSERT_VALUES);CHKERRQ(ierr);
  ierr = MatRestoreArray(Xint,&xint);CHKERRQ(ierr);
  ierr = MatGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
  ierr = MatSetValues(*P,Nsurf,IIsurf,6,gl,xsurf,INSERT_VALUES);CHKERRQ(ierr);
  ierr = MatRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = PetscFree2(IIint,IIsurf);CHKERRQ(ierr);


#if defined(PETSC_USE_DEBUG_foo)
  {
    Vec         x,y;
    PetscScalar *yy;
    ierr = VecCreateMPI(((PetscObject)da)->comm,Ng,PETSC_DETERMINE,&y);CHKERRQ(ierr);
    ierr = VecCreateMPI(((PetscObject)da)->comm,PETSC_DETERMINE,Ntotal,&x);CHKERRQ(ierr);
    ierr = VecSet(x,1.0);CHKERRQ(ierr);
    ierr = MatMult(*P,x,y);CHKERRQ(ierr);
    ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
    for (i=0; i<Ng; i++) {
      if (PetscAbsScalar(yy[i]-1.0) > 1.e-10) SETERRQ2(PETSC_ERR_PLIB,"Wrong p interpolation at i %D value %G",i,PetscAbsScalar(yy[i]));
    }
    ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
    ierr = VecDestroy(x);CHKERRQ(ierr);
    ierr = VecDestroy(y);CHKERRQ(ierr);
  }
#endif

  ierr = MatDestroy(Aii);CHKERRQ(ierr);
  ierr = MatDestroy(Ais);CHKERRQ(ierr);
  ierr = MatDestroy(Asi);CHKERRQ(ierr);
  ierr = MatDestroy(A);CHKERRQ(ierr);
  ierr = ISDestroy(is);CHKERRQ(ierr);
  ierr = ISDestroy(isint);CHKERRQ(ierr);
  ierr = ISDestroy(issurf);CHKERRQ(ierr);
  ierr = MatDestroy(Xint);CHKERRQ(ierr);
  ierr = MatDestroy(Xsurf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "PCExoticSetType"
/*@
   PCExoticSetType - Sets the type of coarse grid interpolation to use

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  type - either PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET (defaults to face)

   Notes: The face based interpolation has 1 degree of freedom per face and ignores the
     edge and vertex values completely in the coarse problem. For any seven point
     stencil the interpolation of a constant on all faces into the interior is that constant.

     The wirebasket interpolation has 1 degree of freedom per vertex, per edge and
     per face. A constant on the subdomain boundary is interpolated as that constant
     in the interior of the domain.

     The coarse grid matrix is obtained via the Galerkin computation A_c = R A R^T, hence
     if A is nonsingular A_c is also nonsingular.

     Both interpolations are suitable for only scalar problems.
   Level: intermediate


.seealso: PCEXOTIC, PCExoticType()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCExoticSetType(PC pc,PCExoticType type)
{
  PetscErrorCode ierr,(*f)(PC,PCExoticType);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  /* dispatch to the implementation-specific routine, if this PC provides one */
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCExoticSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,type);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCExoticSetType_Exotic"
/* Implementation of PCExoticSetType() for the exotic PC: records the coarse
   space type in the PC_Exotic context stored inside the PCMG data */
PetscErrorCode PETSCKSP_DLLEXPORT PCExoticSetType_Exotic(PC pc,PCExoticType type)
{
  PC_MG     **mg  = (PC_MG**)pc->data;
  PC_Exotic *ctx  = (PC_Exotic*) mg[0]->innerctx;

  PetscFunctionBegin;
  ctx->type = type;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Exotic"
/* Builds the face or wirebasket interpolation for the attached DA, hands it to
   the two-level PCMG infrastructure and completes the multigrid setup */
PetscErrorCode PCSetUp_Exotic(PC pc)
{
  PetscErrorCode ierr;
  Mat            A;
  PC_MG          **mg = (PC_MG**)pc->data;
  PC_Exotic      *ex  = (PC_Exotic*) mg[0]->innerctx;
  /* reuse the previously constructed interpolation matrix on later setups */
  MatReuse       reuse = (ex->P) ?
MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX; 635 636 PetscFunctionBegin; 637 ierr = PCGetOperators(pc,PETSC_NULL,&A,PETSC_NULL);CHKERRQ(ierr); 638 if (ex->type == PC_EXOTIC_FACE) { 639 ierr = DAGetFaceInterpolation(ex,A,reuse,&ex->P);CHKERRQ(ierr); 640 } else if (ex->type == PC_EXOTIC_WIREBASKET) { 641 ierr = DAGetWireBasketInterpolation(ex,A,reuse,&ex->P);CHKERRQ(ierr); 642 } else SETERRQ1(PETSC_ERR_PLIB,"Unknown exotic coarse space %d",ex->type); 643 ierr = PCMGSetInterpolation(pc,1,ex->P);CHKERRQ(ierr); 644 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 645 PetscFunctionReturn(0); 646 } 647 648 #undef __FUNCT__ 649 #define __FUNCT__ "PCDestroy_Exotic" 650 PetscErrorCode PCDestroy_Exotic(PC pc) 651 { 652 PetscErrorCode ierr; 653 PC_MG **mg = (PC_MG**)pc->data; 654 PC_Exotic *ctx = (PC_Exotic*) mg[0]->innerctx; 655 656 PetscFunctionBegin; 657 if (ctx->da) {ierr = DADestroy(ctx->da);CHKERRQ(ierr);} 658 if (ctx->P) {ierr = MatDestroy(ctx->P);CHKERRQ(ierr);} 659 if (ctx->ksp) {ierr = KSPDestroy(ctx->ksp);CHKERRQ(ierr);} 660 ierr = PetscFree(ctx);CHKERRQ(ierr); 661 ierr = PCDestroy_MG(pc);CHKERRQ(ierr); 662 PetscFunctionReturn(0); 663 } 664 665 #undef __FUNCT__ 666 #define __FUNCT__ "PCSetUp_Exotic_Error" 667 PetscErrorCode PCSetUp_Exotic_Error(PC pc) 668 { 669 PetscFunctionBegin; 670 SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You are using the Exotic preconditioner but never called PCSetDA()"); 671 PetscFunctionReturn(0); 672 } 673 674 #undef __FUNCT__ 675 #define __FUNCT__ "PCSetDA" 676 /*@ 677 PCSetDA - Sets the DA that is to be used by the PCEXOTIC or certain other preconditioners 678 679 Collective on PC 680 681 Input Parameters: 682 + pc - the preconditioner context 683 - da - the da 684 685 Level: intermediate 686 687 688 .seealso: PCEXOTIC, PCExoticType() 689 @*/ 690 PetscErrorCode PETSCKSP_DLLEXPORT PCSetDA(PC pc,DA da) 691 { 692 PetscErrorCode ierr,(*f)(PC,DA); 693 694 PetscFunctionBegin; 695 PetscValidHeaderSpecific(pc,PC_COOKIE,1); 696 PetscValidHeaderSpecific(da,DM_COOKIE,1); 697 
ierr = PetscObjectQueryFunction((PetscObject)pc,"PCSetDA_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,da);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "PCSetDA_Exotic"
/* Implementation of PCSetDA() for the exotic PC: stores the (referenced) DA in
   the PC_Exotic context and installs the real setup routine in place of
   PCSetUp_Exotic_Error() */
PetscErrorCode PETSCKSP_DLLEXPORT PCSetDA_Exotic(PC pc,DA da)
{
  PetscErrorCode ierr;
  PC_MG          **mg  = (PC_MG**)pc->data;
  PC_Exotic      *ctx  = (PC_Exotic*) mg[0]->innerctx;

  PetscFunctionBegin;
  /* NOTE(review): a previously stored da is not dereferenced here, so calling
     PCSetDA() twice would leak a reference -- confirm callers set it only once */
  ctx->da        = da;
  pc->ops->setup = PCSetUp_Exotic;
  ierr = PetscObjectReference((PetscObject)da);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCView_Exotic"
/* Prints the exotic coarse space type and, when an iterative solver constructs
   the interpolation, the inner KSP (on rank 0 only); then defers to PCView_MG() */
PetscErrorCode PCView_Exotic(PC pc,PetscViewer viewer)
{
  PC_MG          **mg = (PC_MG**)pc->data;
  PetscErrorCode ierr;
  PetscTruth     iascii;
  PC_Exotic      *ctx = (PC_Exotic*) mg[0]->innerctx;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer," Exotic type = %s\n",PCExoticTypes[ctx->type]);CHKERRQ(ierr);
    if (ctx->directSolve) {
      ierr = PetscViewerASCIIPrintf(viewer," Using direct solver to construct interpolation\n");CHKERRQ(ierr);
    } else {
      PetscViewer sviewer;
      PetscMPIInt rank;

      ierr = PetscViewerASCIIPrintf(viewer," Using iterative solver to construct interpolation\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);  /* should not need to push this twice? */
      ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
      if (!rank) {
        ierr = KSPView(ctx->ksp,sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
    }
  }
  ierr = PCView_MG(pc,viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Exotic"
/* Processes -pc_exotic_type and -pc_exotic_direct_solver; when an iterative
   solver is requested, lazily creates the inner KSP with the "exotic_" options
   prefix and lets it process its own options */
PetscErrorCode PCSetFromOptions_Exotic(PC pc)
{
  PetscErrorCode ierr;
  PetscTruth     flg;
  PC_MG          **mg = (PC_MG**)pc->data;
  PCExoticType   mgctype;
  PC_Exotic      *ctx = (PC_Exotic*) mg[0]->innerctx;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Exotic coarse space options");CHKERRQ(ierr);
  ierr = PetscOptionsEnum("-pc_exotic_type","face or wirebasket","PCExoticSetType",PCExoticTypes,(PetscEnum)ctx->type,(PetscEnum*)&mgctype,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCExoticSetType(pc,mgctype);CHKERRQ(ierr);
  }
  ierr = PetscOptionsTruth("-pc_exotic_direct_solver","use direct solver to construct interpolation","None",ctx->directSolve,&ctx->directSolve,PETSC_NULL);CHKERRQ(ierr);
  if (!ctx->directSolve) {
    if (!ctx->ksp) {
      const char *prefix;
      ierr = KSPCreate(PETSC_COMM_SELF,&ctx->ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ctx->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ctx->ksp);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ctx->ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ctx->ksp,"exotic_");CHKERRQ(ierr);
    }
    ierr = KSPSetFromOptions(ctx->ksp);CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


/*MC
     PCEXOTIC - Two level overlapping Schwarz preconditioner
with exotic (non-standard) coarse grid spaces 792 793 This uses the PCMG infrastructure restricted to two levels and the face and wirebasket based coarse 794 grid spaces. These coarse grid spaces originate in the work of Bramble, Pasciak and Schatz, "The Construction 795 of Preconditioners for Elliptic Problems by Substructing IV", Mathematics of Computation, volume 53 pages 1--24, 1989. 796 They were generalized slightly in "Domain Decomposition Method for Linear Elasticity", Ph. D. thesis, Barry Smith, 797 New York University, 1990. They were then explored in great detail in Dryja, Smith, Widlund, "Schwarz Analysis 798 of Iterative Substructuring Methods for Elliptic Problems in Three Dimensions, SIAM Journal on Numerical 799 Analysis, volume 31. pages 1662-1694, 1994. These were developed in the context of iterative substructuring preconditioners. 800 They were then ingeniously applied as coarse grid spaces for overlapping Schwarz methods by Dohrmann and Widlund. 801 They refer to them as GDSW (generalized Dryja, Smith, Widlund preconditioners). See, for example, 802 Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Extending theory for domain decomposition algorithms to irregular subdomains. In Ulrich Langer, Marco 803 Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings 804 of the 17th International Conference on Domain Decomposition Methods in 805 Science and Engineering, held in Strobl, Austria, July 3-7, 2006, number 60 in 806 Springer-Verlag, Lecture Notes in Computational Science and Engineering, pages 255-261, 2007. 807 Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. A family of energy min- 808 imizing coarse spaces for overlapping Schwarz preconditioners. 
In Ulrich Langer, 809 Marco Discacciati, David Keyes, OlofWidlund, andWalter Zulehner, editors, Proceedings 810 of the 17th International Conference on Domain Decomposition Methods 811 in Science and Engineering, held in Strobl, Austria, July 3-7, 2006, number 60 in 812 Springer-Verlag, Lecture Notes in Computational Science and Engineering, pages 247-254, 2007 813 Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Domain decomposition 814 for less regular subdomains: Overlapping Schwarz in two dimensions. SIAM J. 815 Numer. Anal., 46(4):2153-2168, 2008. 816 Clark R. Dohrmann and Olof B. Widlund. An overlapping Schwarz 817 algorithm for almost incompressible elasticity. Technical Report 818 TR2008-912, Department of Computer Science, Courant Institute 819 of Mathematical Sciences, New York University, May 2008. URL: 820 http://cs.nyu.edu/csweb/Research/TechReports/TR2008-912/TR2008-912.pdf 821 822 Options Database: The usual PCMG options are supported, such as -mg_levels_pc_type <type> -mg_coarse_pc_type <type> 823 -pc_mg_type <type> 824 825 Level: advanced 826 827 .seealso: PCMG, PCSetDA(), PCExoticType, PCExoticSetType() 828 M*/ 829 830 EXTERN_C_BEGIN 831 #undef __FUNCT__ 832 #define __FUNCT__ "PCCreate_Exotic" 833 PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_Exotic(PC pc) 834 { 835 PetscErrorCode ierr; 836 PC_Exotic *ex; 837 PC_MG **mg; 838 839 PetscFunctionBegin; 840 /* if type was previously mg; must manually destroy it because call to PCSetType(pc,PCMG) will not destroy it */ 841 if (pc->ops->destroy) { ierr = (*pc->ops->destroy)(pc);CHKERRQ(ierr); pc->data = 0;} 842 ierr = PetscStrfree(((PetscObject)pc)->type_name);CHKERRQ(ierr); 843 ((PetscObject)pc)->type_name = 0; 844 845 ierr = PCSetType(pc,PCMG);CHKERRQ(ierr); 846 ierr = PCMGSetLevels(pc,2,PETSC_NULL);CHKERRQ(ierr); 847 ierr = PCMGSetGalerkin(pc);CHKERRQ(ierr); 848 ierr = PetscNew(PC_Exotic,&ex);CHKERRQ(ierr);\ 849 ex->type = PC_EXOTIC_FACE; 850 mg = (PC_MG**) pc->data; 851 mg[0]->innerctx = ex; 852 
853 854 pc->ops->setfromoptions = PCSetFromOptions_Exotic; 855 pc->ops->view = PCView_Exotic; 856 pc->ops->destroy = PCDestroy_Exotic; 857 pc->ops->setup = PCSetUp_Exotic_Error; 858 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCExoticSetType_C","PCExoticSetType_Exotic",PCExoticSetType_Exotic);CHKERRQ(ierr); 859 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCSetDA_C","PCSetDA_Exotic",PCSetDA_Exotic);CHKERRQ(ierr); 860 PetscFunctionReturn(0); 861 } 862 EXTERN_C_END 863