1 /* 2 Creates a matrix class for using the Neumann-Neumann type preconditioners. 3 This stores the matrices in globally unassembled form. Each processor 4 assembles only its local Neumann problem and the parallel matrix vector 5 product is handled "implicitly". 6 7 Currently this allows for only one subdomain per processor. 8 */ 9 10 #include <../src/mat/impls/is/matis.h> /*I "petscmat.h" I*/ 11 12 #define MATIS_MAX_ENTRIES_INSERTION 2048 13 14 static PetscErrorCode MatISComputeSF_Private(Mat); 15 16 #undef __FUNCT__ 17 #define __FUNCT__ "MatTranspose_IS" 18 PetscErrorCode MatTranspose_IS(Mat A,MatReuse reuse,Mat *B) 19 { 20 Mat C,lC,lA; 21 ISLocalToGlobalMapping rl2g,cl2g; 22 PetscBool notset = PETSC_FALSE,cong = PETSC_TRUE; 23 PetscErrorCode ierr; 24 25 PetscFunctionBegin; 26 if (reuse == MAT_REUSE_MATRIX) { 27 PetscBool rcong,ccong; 28 29 ierr = PetscLayoutCompare((*B)->cmap,A->rmap,&rcong);CHKERRQ(ierr); 30 ierr = PetscLayoutCompare((*B)->rmap,A->cmap,&ccong);CHKERRQ(ierr); 31 cong = (PetscBool)(rcong || ccong); 32 } 33 if (reuse == MAT_INITIAL_MATRIX || *B == A || !cong) { 34 ierr = MatCreate(PetscObjectComm((PetscObject)A),&C);CHKERRQ(ierr); 35 } else { 36 ierr = PetscObjectTypeCompare((PetscObject)*B,MATIS,¬set);CHKERRQ(ierr); 37 C = *B; 38 } 39 40 if (!notset) { 41 ierr = MatSetSizes(C,A->cmap->n,A->rmap->n,A->cmap->N,A->rmap->N);CHKERRQ(ierr); 42 ierr = MatSetBlockSizes(C,PetscAbs(A->cmap->bs),PetscAbs(A->rmap->bs));CHKERRQ(ierr); 43 ierr = MatSetType(C,MATIS);CHKERRQ(ierr); 44 ierr = MatGetLocalToGlobalMapping(A,&rl2g,&cl2g);CHKERRQ(ierr); 45 ierr = MatSetLocalToGlobalMapping(C,cl2g,rl2g);CHKERRQ(ierr); 46 } 47 48 /* perform local transposition */ 49 ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr); 50 ierr = MatTranspose(lA,MAT_INITIAL_MATRIX,&lC);CHKERRQ(ierr); 51 ierr = MatISSetLocalMat(C,lC);CHKERRQ(ierr); 52 ierr = MatDestroy(&lC);CHKERRQ(ierr); 53 54 ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 55 ierr = 
MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 56 57 if (reuse == MAT_INITIAL_MATRIX || *B != A) { 58 if (!cong) { 59 ierr = MatDestroy(B);CHKERRQ(ierr); 60 } 61 *B = C; 62 } else { 63 ierr = MatHeaderMerge(A,&C);CHKERRQ(ierr); 64 } 65 PetscFunctionReturn(0); 66 } 67 68 #undef __FUNCT__ 69 #define __FUNCT__ "MatDiagonalSet_IS" 70 PetscErrorCode MatDiagonalSet_IS(Mat A,Vec D,InsertMode insmode) 71 { 72 Mat_IS *is = (Mat_IS*)A->data; 73 PetscErrorCode ierr; 74 75 PetscFunctionBegin; 76 if (!D) { /* this code branch is used by MatShift_IS */ 77 ierr = VecSet(is->y,1.);CHKERRQ(ierr); 78 } else { 79 ierr = VecScatterBegin(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 80 ierr = VecScatterEnd(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 81 } 82 ierr = VecPointwiseDivide(is->y,is->y,is->counter);CHKERRQ(ierr); 83 ierr = MatDiagonalSet(is->A,is->y,insmode);CHKERRQ(ierr); 84 PetscFunctionReturn(0); 85 } 86 87 #undef __FUNCT__ 88 #define __FUNCT__ "MatShift_IS" 89 PetscErrorCode MatShift_IS(Mat A,PetscScalar a) 90 { 91 PetscErrorCode ierr; 92 93 PetscFunctionBegin; 94 ierr = MatDiagonalSet_IS(A,NULL,ADD_VALUES);CHKERRQ(ierr); 95 PetscFunctionReturn(0); 96 } 97 98 #undef __FUNCT__ 99 #define __FUNCT__ "MatSetValuesLocal_SubMat_IS" 100 static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 101 { 102 PetscErrorCode ierr; 103 Mat_IS *is = (Mat_IS*)A->data; 104 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION]; 105 106 PetscFunctionBegin; 107 #if defined(PETSC_USE_DEBUG) 108 if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n); 109 #endif 110 ierr = ISLocalToGlobalMappingApply(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr); 111 ierr = 
ISLocalToGlobalMappingApply(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr); 112 ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr); 113 PetscFunctionReturn(0); 114 } 115 116 #undef __FUNCT__ 117 #define __FUNCT__ "MatSetValuesBlockedLocal_SubMat_IS" 118 static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 119 { 120 PetscErrorCode ierr; 121 Mat_IS *is = (Mat_IS*)A->data; 122 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION]; 123 124 PetscFunctionBegin; 125 #if defined(PETSC_USE_DEBUG) 126 if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n); 127 #endif 128 ierr = ISLocalToGlobalMappingApplyBlock(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr); 129 ierr = ISLocalToGlobalMappingApplyBlock(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr); 130 ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr); 131 PetscFunctionReturn(0); 132 } 133 134 #undef __FUNCT__ 135 #define __FUNCT__ "PetscLayoutMapLocal_Private" 136 static PetscErrorCode PetscLayoutMapLocal_Private(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs) 137 { 138 PetscInt *owners = map->range; 139 PetscInt n = map->n; 140 PetscSF sf; 141 PetscInt *lidxs,*work = NULL; 142 PetscSFNode *ridxs; 143 PetscMPIInt rank; 144 PetscInt r, p = 0, len = 0; 145 PetscErrorCode ierr; 146 147 PetscFunctionBegin; 148 if (on) *on = 0; /* squelch -Wmaybe-uninitialized */ 149 /* Create SF where leaves are input idxs and roots are owned idxs (code adapted from MatZeroRowsMapLocal_Private) */ 150 ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr); 151 ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr); 152 for (r = 0; r < n; ++r) lidxs[r] = -1; 153 ierr = 
PetscMalloc1(N,&ridxs);CHKERRQ(ierr); 154 for (r = 0; r < N; ++r) { 155 const PetscInt idx = idxs[r]; 156 if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N); 157 if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */ 158 ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr); 159 } 160 ridxs[r].rank = p; 161 ridxs[r].index = idxs[r] - owners[p]; 162 } 163 ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr); 164 ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr); 165 ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr); 166 ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr); 167 if (ogidxs) { /* communicate global idxs */ 168 PetscInt cum = 0,start,*work2; 169 170 ierr = PetscMalloc1(n,&work);CHKERRQ(ierr); 171 ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr); 172 for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++; 173 ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr); 174 start -= cum; 175 cum = 0; 176 for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++; 177 ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr); 178 ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr); 179 ierr = PetscFree(work2);CHKERRQ(ierr); 180 } 181 ierr = PetscSFDestroy(&sf);CHKERRQ(ierr); 182 /* Compress and put in indices */ 183 for (r = 0; r < n; ++r) 184 if (lidxs[r] >= 0) { 185 if (work) work[len] = work[r]; 186 lidxs[len++] = r; 187 } 188 if (on) *on = len; 189 if (oidxs) *oidxs = lidxs; 190 if (ogidxs) *ogidxs = work; 191 PetscFunctionReturn(0); 192 } 193 194 #undef __FUNCT__ 195 #define __FUNCT__ "MatGetSubMatrix_IS" 196 static PetscErrorCode MatGetSubMatrix_IS(Mat mat,IS irow,IS icol,MatReuse scall,Mat *newmat) 197 { 198 Mat locmat,newlocmat; 199 Mat_IS *newmatis; 200 #if defined(PETSC_USE_DEBUG) 
201 Vec rtest,ltest; 202 const PetscScalar *array; 203 #endif 204 const PetscInt *idxs; 205 PetscInt i,m,n; 206 PetscErrorCode ierr; 207 208 PetscFunctionBegin; 209 if (scall == MAT_REUSE_MATRIX) { 210 PetscBool ismatis; 211 212 ierr = PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);CHKERRQ(ierr); 213 if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Not of MATIS type"); 214 newmatis = (Mat_IS*)(*newmat)->data; 215 if (!newmatis->getsub_ris) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local row IS"); 216 if (!newmatis->getsub_cis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local col IS"); 217 } 218 /* irow and icol may not have duplicate entries */ 219 #if defined(PETSC_USE_DEBUG) 220 ierr = MatCreateVecs(mat,<est,&rtest);CHKERRQ(ierr); 221 ierr = ISGetLocalSize(irow,&n);CHKERRQ(ierr); 222 ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr); 223 for (i=0;i<n;i++) { 224 ierr = VecSetValue(rtest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr); 225 } 226 ierr = VecAssemblyBegin(rtest);CHKERRQ(ierr); 227 ierr = VecAssemblyEnd(rtest);CHKERRQ(ierr); 228 ierr = VecGetLocalSize(rtest,&n);CHKERRQ(ierr); 229 ierr = VecGetOwnershipRange(rtest,&m,NULL);CHKERRQ(ierr); 230 ierr = VecGetArrayRead(rtest,&array);CHKERRQ(ierr); 231 for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! 
Irow may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i])); 232 ierr = VecRestoreArrayRead(rtest,&array);CHKERRQ(ierr); 233 ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr); 234 ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr); 235 ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr); 236 for (i=0;i<n;i++) { 237 ierr = VecSetValue(ltest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr); 238 } 239 ierr = VecAssemblyBegin(ltest);CHKERRQ(ierr); 240 ierr = VecAssemblyEnd(ltest);CHKERRQ(ierr); 241 ierr = VecGetLocalSize(ltest,&n);CHKERRQ(ierr); 242 ierr = VecGetOwnershipRange(ltest,&m,NULL);CHKERRQ(ierr); 243 ierr = VecGetArrayRead(ltest,&array);CHKERRQ(ierr); 244 for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Icol may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i])); 245 ierr = VecRestoreArrayRead(ltest,&array);CHKERRQ(ierr); 246 ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr); 247 ierr = VecDestroy(&rtest);CHKERRQ(ierr); 248 ierr = VecDestroy(<est);CHKERRQ(ierr); 249 #endif 250 if (scall == MAT_INITIAL_MATRIX) { 251 Mat_IS *matis = (Mat_IS*)mat->data; 252 ISLocalToGlobalMapping rl2g; 253 IS is; 254 PetscInt *lidxs,*lgidxs,*newgidxs; 255 PetscInt ll,newloc; 256 MPI_Comm comm; 257 258 ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr); 259 ierr = ISGetLocalSize(irow,&m);CHKERRQ(ierr); 260 ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr); 261 ierr = MatCreate(comm,newmat);CHKERRQ(ierr); 262 ierr = MatSetType(*newmat,MATIS);CHKERRQ(ierr); 263 ierr = MatSetSizes(*newmat,m,n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr); 264 /* communicate irow to their owners in the layout */ 265 ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr); 266 ierr = PetscLayoutMapLocal_Private(mat->rmap,m,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr); 267 ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr); 268 if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */ 269 ierr = 
MatISComputeSF_Private(mat);CHKERRQ(ierr); 270 } 271 ierr = PetscMemzero(matis->sf_rootdata,matis->sf_nroots*sizeof(PetscInt));CHKERRQ(ierr); 272 for (i=0;i<ll;i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i]+1; 273 ierr = PetscFree(lidxs);CHKERRQ(ierr); 274 ierr = PetscFree(lgidxs);CHKERRQ(ierr); 275 ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 276 ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 277 for (i=0,newloc=0;i<matis->sf_nleaves;i++) if (matis->sf_leafdata[i]) newloc++; 278 ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr); 279 ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr); 280 for (i=0,newloc=0;i<matis->sf_nleaves;i++) 281 if (matis->sf_leafdata[i]) { 282 lidxs[newloc] = i; 283 newgidxs[newloc++] = matis->sf_leafdata[i]-1; 284 } 285 ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 286 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr); 287 ierr = ISDestroy(&is);CHKERRQ(ierr); 288 /* local is to extract local submatrix */ 289 newmatis = (Mat_IS*)(*newmat)->data; 290 ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_ris);CHKERRQ(ierr); 291 if (mat->congruentlayouts == -1) { /* first time we compare rows and cols layouts */ 292 PetscBool cong; 293 ierr = PetscLayoutCompare(mat->rmap,mat->cmap,&cong);CHKERRQ(ierr); 294 if (cong) mat->congruentlayouts = 1; 295 else mat->congruentlayouts = 0; 296 } 297 if (mat->congruentlayouts && irow == icol) { 298 ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,rl2g);CHKERRQ(ierr); 299 ierr = PetscObjectReference((PetscObject)newmatis->getsub_ris);CHKERRQ(ierr); 300 newmatis->getsub_cis = newmatis->getsub_ris; 301 } else { 302 ISLocalToGlobalMapping cl2g; 303 304 /* communicate icol to their owners in the layout */ 305 ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr); 306 ierr = PetscLayoutMapLocal_Private(mat->cmap,n,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr); 
307 ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr); 308 ierr = PetscMemzero(matis->csf_rootdata,matis->csf_nroots*sizeof(PetscInt));CHKERRQ(ierr); 309 for (i=0;i<ll;i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i]+1; 310 ierr = PetscFree(lidxs);CHKERRQ(ierr); 311 ierr = PetscFree(lgidxs);CHKERRQ(ierr); 312 ierr = PetscSFBcastBegin(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr); 313 ierr = PetscSFBcastEnd(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr); 314 for (i=0,newloc=0;i<matis->csf_nleaves;i++) if (matis->csf_leafdata[i]) newloc++; 315 ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr); 316 ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr); 317 for (i=0,newloc=0;i<matis->csf_nleaves;i++) 318 if (matis->csf_leafdata[i]) { 319 lidxs[newloc] = i; 320 newgidxs[newloc++] = matis->csf_leafdata[i]-1; 321 } 322 ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 323 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr); 324 ierr = ISDestroy(&is);CHKERRQ(ierr); 325 /* local is to extract local submatrix */ 326 ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_cis);CHKERRQ(ierr); 327 ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);CHKERRQ(ierr); 328 ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr); 329 } 330 ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr); 331 } else { 332 ierr = MatISGetLocalMat(*newmat,&newlocmat);CHKERRQ(ierr); 333 } 334 ierr = MatISGetLocalMat(mat,&locmat);CHKERRQ(ierr); 335 newmatis = (Mat_IS*)(*newmat)->data; 336 ierr = MatGetSubMatrix(locmat,newmatis->getsub_ris,newmatis->getsub_cis,scall,&newlocmat);CHKERRQ(ierr); 337 if (scall == MAT_INITIAL_MATRIX) { 338 ierr = MatISSetLocalMat(*newmat,newlocmat);CHKERRQ(ierr); 339 ierr = MatDestroy(&newlocmat);CHKERRQ(ierr); 340 } 341 ierr = MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 342 ierr = 
MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 343 PetscFunctionReturn(0); 344 } 345 346 #undef __FUNCT__ 347 #define __FUNCT__ "MatCopy_IS" 348 static PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str) 349 { 350 Mat_IS *a = (Mat_IS*)A->data,*b; 351 PetscBool ismatis; 352 PetscErrorCode ierr; 353 354 PetscFunctionBegin; 355 ierr = PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);CHKERRQ(ierr); 356 if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Need to be implemented"); 357 b = (Mat_IS*)B->data; 358 ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr); 359 PetscFunctionReturn(0); 360 } 361 362 #undef __FUNCT__ 363 #define __FUNCT__ "MatMissingDiagonal_IS" 364 static PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool *missing,PetscInt *d) 365 { 366 Vec v; 367 const PetscScalar *array; 368 PetscInt i,n; 369 PetscErrorCode ierr; 370 371 PetscFunctionBegin; 372 *missing = PETSC_FALSE; 373 ierr = MatCreateVecs(A,NULL,&v);CHKERRQ(ierr); 374 ierr = MatGetDiagonal(A,v);CHKERRQ(ierr); 375 ierr = VecGetLocalSize(v,&n);CHKERRQ(ierr); 376 ierr = VecGetArrayRead(v,&array);CHKERRQ(ierr); 377 for (i=0;i<n;i++) if (array[i] == 0.) 
break; 378 ierr = VecRestoreArrayRead(v,&array);CHKERRQ(ierr); 379 ierr = VecDestroy(&v);CHKERRQ(ierr); 380 if (i != n) *missing = PETSC_TRUE; 381 if (d) { 382 *d = -1; 383 if (*missing) { 384 PetscInt rstart; 385 ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr); 386 *d = i+rstart; 387 } 388 } 389 PetscFunctionReturn(0); 390 } 391 392 #undef __FUNCT__ 393 #define __FUNCT__ "MatISComputeSF_Private" 394 static PetscErrorCode MatISComputeSF_Private(Mat B) 395 { 396 Mat_IS *matis = (Mat_IS*)(B->data); 397 const PetscInt *gidxs; 398 PetscErrorCode ierr; 399 400 PetscFunctionBegin; 401 ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr); 402 ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr); 403 ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr); 404 ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr); 405 ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr); 406 ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr); 407 ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr); 408 if (B->rmap->mapping != B->cmap->mapping) { /* setup SF for columns */ 409 ierr = MatGetSize(matis->A,NULL,&matis->csf_nleaves);CHKERRQ(ierr); 410 ierr = MatGetLocalSize(B,NULL,&matis->csf_nroots);CHKERRQ(ierr); 411 ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->csf);CHKERRQ(ierr); 412 ierr = ISLocalToGlobalMappingGetIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr); 413 ierr = PetscSFSetGraphLayout(matis->csf,B->cmap,matis->csf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr); 414 ierr = ISLocalToGlobalMappingRestoreIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr); 415 ierr = PetscMalloc2(matis->csf_nroots,&matis->csf_rootdata,matis->csf_nleaves,&matis->csf_leafdata);CHKERRQ(ierr); 416 } else { 417 matis->csf = matis->sf; 418 matis->csf_nleaves = 
matis->sf_nleaves; 419 matis->csf_nroots = matis->sf_nroots; 420 matis->csf_leafdata = matis->sf_leafdata; 421 matis->csf_rootdata = matis->sf_rootdata; 422 } 423 PetscFunctionReturn(0); 424 } 425 426 #undef __FUNCT__ 427 #define __FUNCT__ "MatISSetPreallocation" 428 /*@ 429 MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix. 430 431 Collective on MPI_Comm 432 433 Input Parameters: 434 + B - the matrix 435 . d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix 436 (same value is used for all local rows) 437 . d_nnz - array containing the number of nonzeros in the various rows of the 438 DIAGONAL portion of the local submatrix (possibly different for each row) 439 or NULL, if d_nz is used to specify the nonzero structure. 440 The size of this array is equal to the number of local rows, i.e 'm'. 441 For matrices that will be factored, you must leave room for (and set) 442 the diagonal entry even if it is zero. 443 . o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local 444 submatrix (same value is used for all local rows). 445 - o_nnz - array containing the number of nonzeros in the various rows of the 446 OFF-DIAGONAL portion of the local submatrix (possibly different for 447 each row) or NULL, if o_nz is used to specify the nonzero 448 structure. The size of this array is equal to the number 449 of local rows, i.e 'm'. 450 451 If the *_nnz parameter is given then the *_nz parameter is ignored 452 453 Level: intermediate 454 455 Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition 456 from the asssembled format to the unassembled one. It overestimates the preallocation of MATIS local 457 matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects. 
458 459 .keywords: matrix 460 461 .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS 462 @*/ 463 PetscErrorCode MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 464 { 465 PetscErrorCode ierr; 466 467 PetscFunctionBegin; 468 PetscValidHeaderSpecific(B,MAT_CLASSID,1); 469 PetscValidType(B,1); 470 ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr); 471 PetscFunctionReturn(0); 472 } 473 474 #undef __FUNCT__ 475 #define __FUNCT__ "MatISSetPreallocation_IS" 476 static PetscErrorCode MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 477 { 478 Mat_IS *matis = (Mat_IS*)(B->data); 479 PetscInt bs,i,nlocalcols; 480 PetscErrorCode ierr; 481 482 PetscFunctionBegin; 483 if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping"); 484 if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */ 485 ierr = MatISComputeSF_Private(B);CHKERRQ(ierr); 486 } 487 if (!d_nnz) { 488 for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz; 489 } else { 490 for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i]; 491 } 492 if (!o_nnz) { 493 for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz; 494 } else { 495 for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i]; 496 } 497 ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 498 ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr); 499 ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr); 500 ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 501 for (i=0;i<matis->sf_nleaves;i++) { 502 matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols); 503 } 504 ierr = 
MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr); 505 for (i=0;i<matis->sf_nleaves/bs;i++) { 506 matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs; 507 } 508 ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr); 509 for (i=0;i<matis->sf_nleaves/bs;i++) { 510 matis->sf_leafdata[i] = matis->sf_leafdata[i]-i; 511 } 512 ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr); 513 PetscFunctionReturn(0); 514 } 515 516 #undef __FUNCT__ 517 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private" 518 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce) 519 { 520 Mat_IS *matis = (Mat_IS*)(A->data); 521 PetscInt *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership; 522 const PetscInt *global_indices_r,*global_indices_c; 523 PetscInt i,j,bs,rows,cols; 524 PetscInt lrows,lcols; 525 PetscInt local_rows,local_cols; 526 PetscMPIInt nsubdomains; 527 PetscBool isdense,issbaij; 528 PetscErrorCode ierr; 529 530 PetscFunctionBegin; 531 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr); 532 ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr); 533 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 534 ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr); 535 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr); 536 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr); 537 ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr); 538 if (A->rmap->mapping != A->cmap->mapping) { 539 ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr); 540 } else { 541 global_indices_c = global_indices_r; 542 } 543 544 if (issbaij) { 545 ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr); 546 } 547 /* 548 An SF reduce is needed to sum up properly on shared rows. 
549 Note that generally preallocation is not exact, since it overestimates nonzeros 550 */ 551 if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */ 552 ierr = MatISComputeSF_Private(A);CHKERRQ(ierr); 553 } 554 ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr); 555 ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr); 556 /* All processes need to compute entire row ownership */ 557 ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr); 558 ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr); 559 for (i=0;i<nsubdomains;i++) { 560 for (j=mat_ranges[i];j<mat_ranges[i+1];j++) { 561 row_ownership[j] = i; 562 } 563 } 564 ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr); 565 566 /* 567 my_dnz and my_onz contains exact contribution to preallocation from each local mat 568 then, they will be summed up properly. This way, preallocation is always sufficient 569 */ 570 ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr); 571 /* preallocation as a MATAIJ */ 572 if (isdense) { /* special case for dense local matrices */ 573 for (i=0;i<local_rows;i++) { 574 PetscInt index_row = global_indices_r[i]; 575 for (j=i;j<local_rows;j++) { 576 PetscInt owner = row_ownership[index_row]; 577 PetscInt index_col = global_indices_c[j]; 578 if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */ 579 my_dnz[i] += 1; 580 } else { /* offdiag block */ 581 my_onz[i] += 1; 582 } 583 /* same as before, interchanging rows and cols */ 584 if (i != j) { 585 owner = row_ownership[index_col]; 586 if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) { 587 my_dnz[j] += 1; 588 } else { 589 my_onz[j] += 1; 590 } 591 } 592 } 593 } 594 } else if (matis->A->ops->getrowij) { 595 const PetscInt *ii,*jj,*jptr; 596 PetscBool done; 597 ierr = 
MatGetRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr); 598 if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__); 599 jptr = jj; 600 for (i=0;i<local_rows;i++) { 601 PetscInt index_row = global_indices_r[i]; 602 for (j=0;j<ii[i+1]-ii[i];j++,jptr++) { 603 PetscInt owner = row_ownership[index_row]; 604 PetscInt index_col = global_indices_c[*jptr]; 605 if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */ 606 my_dnz[i] += 1; 607 } else { /* offdiag block */ 608 my_onz[i] += 1; 609 } 610 /* same as before, interchanging rows and cols */ 611 if (issbaij && index_col != index_row) { 612 owner = row_ownership[index_col]; 613 if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) { 614 my_dnz[*jptr] += 1; 615 } else { 616 my_onz[*jptr] += 1; 617 } 618 } 619 } 620 } 621 ierr = MatRestoreRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr); 622 if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__); 623 } else { /* loop over rows and use MatGetRow */ 624 for (i=0;i<local_rows;i++) { 625 const PetscInt *cols; 626 PetscInt ncols,index_row = global_indices_r[i]; 627 ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr); 628 for (j=0;j<ncols;j++) { 629 PetscInt owner = row_ownership[index_row]; 630 PetscInt index_col = global_indices_c[cols[j]]; 631 if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */ 632 my_dnz[i] += 1; 633 } else { /* offdiag block */ 634 my_onz[i] += 1; 635 } 636 /* same as before, interchanging rows and cols */ 637 if (issbaij && index_col != index_row) { 638 owner = row_ownership[index_col]; 639 if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) { 640 my_dnz[cols[j]] += 1; 641 } else { 642 my_onz[cols[j]] += 1; 643 } 644 } 645 
} 646 ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr); 647 } 648 } 649 ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr); 650 if (global_indices_c != global_indices_r) { 651 ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr); 652 } 653 ierr = PetscFree(row_ownership);CHKERRQ(ierr); 654 655 /* Reduce my_dnz and my_onz */ 656 if (maxreduce) { 657 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr); 658 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr); 659 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr); 660 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr); 661 } else { 662 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr); 663 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr); 664 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr); 665 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr); 666 } 667 ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr); 668 669 /* Resize preallocation if overestimated */ 670 for (i=0;i<lrows;i++) { 671 dnz[i] = PetscMin(dnz[i],lcols); 672 onz[i] = PetscMin(onz[i],cols-lcols); 673 } 674 /* set preallocation */ 675 ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr); 676 for (i=0;i<lrows/bs;i++) { 677 dnz[i] = dnz[i*bs]/bs; 678 onz[i] = onz[i*bs]/bs; 679 } 680 ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr); 681 ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr); 682 ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 683 if (issbaij) { 684 ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr); 685 } 686 PetscFunctionReturn(0); 687 } 688 689 #undef __FUNCT__ 690 #define __FUNCT__ "MatISGetMPIXAIJ_IS" 691 static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat 
*M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  /* single subdomain: the unassembled operator coincides with the local matrix,
     so a plain duplicate/copy is all that is needed */
  if (nsubdomains == 1) {
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType   new_mat_type;
    PetscBool issbaij_red;

    /* determining new matrix type: SBAIJ is chosen only if every process
       holds a SEQSBAIJ local matrix (collective logical AND) */
    ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks: the reused matrix must match in global/local sizes and block size */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  /* SBAIJ local matrices are converted to BAIJ before the row-wise insertion below */
  if (issbaij) {
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values: every process adds its local entries via the l2g mapping */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices */
    PetscInt i,*dummy_rows,*dummy_cols;

    /* identity index arrays [0,local_rows) x [0,local_cols); shared when square */
    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    /* dense storage is column oriented: switch insertion orientation */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) { /* fast path using the CSR arrays directly */
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  /* overlap communication with destruction of the (possibly converted) local matrix */
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) {
    /* restore default orientation changed in the dense branch above */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameter:
.  mat - the matrix (should be of type MATIS)
.  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    /* in-place conversion is not supported */
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_IS"
/* Duplicates a MATIS matrix: creates a new MATIS with the same layout,
   block size and mappings, then duplicates the local matrix (values
   copied or not according to op) */
PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  PetscInt       bs,m,n,M,N;
  Mat            B,localmat;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
  ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
  ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
  ierr = MatDestroy(&localmat);CHKERRQ(ierr);
  ierr =
MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *newmat = B;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsHermitian_IS"
/* Reports Hermitian only if every process' local matrix is Hermitian
   (collective logical AND of the local answers) */
static PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsSymmetric_IS"
/* Reports symmetric only if every process' local matrix is symmetric
   (collective logical AND of the local answers) */
static PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/* Frees everything owned by the MATIS: local matrix, scatters, work
   vectors, index sets, star forests and their root/leaf buffers */
static PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = VecDestroy(&b->counter);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_ris);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_cis);CHKERRQ(ierr);
  /* the column SF may alias the row SF: destroy it only when distinct */
  if (b->sf != b->csf) {
    ierr = PetscSFDestroy(&b->csf);CHKERRQ(ierr);
    ierr = PetscFree2(b->csf_rootdata,b->csf_leafdata);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  /* detach the composed methods installed for this type */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_IS"
/* y = A*x for a MATIS: gather x into the local layout, apply the local
   matrix, then sum the per-subdomain products back into the global y */
static PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscScalar    zero = 0.0;

  PetscFunctionBegin;
  /* scatter the global vector x into the local work vector */
  ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);

  /* scatter product back into global memory; y must be zeroed first
     since shared entries are accumulated with ADD_VALUES */
  ierr = VecSet(y,zero);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_IS"
/* v3 = v2 + A*v1; a temporary is needed when v3 aliases v2 because
   MatMult overwrites its output vector */
static PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A * v1.*/
  if (v3 != v2) {
    ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr =
VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_IS"
/* x = A'*y: gather y with the row scatter, apply the local transpose,
   then sum the contributions back into x with the column scatter */
static PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* scatter the global vector x into the local work vector */
  ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);

  /* scatter product back into global vector; x is zeroed first since
     shared entries are accumulated with ADD_VALUES */
  ierr = VecSet(x,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_IS"
/* v3 = v2 + A'*v1; a temporary is needed when v3 aliases v2 because
   MatMultTranspose overwrites its output vector */
static PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A' * v1.*/
  if (v3 != v2) {
    ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_IS"
/* Views each process' local matrix through a per-process sub-viewer */
static PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  ierr =
MatView(a->A,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* Main setup routine of the type: installs the row/column local-to-global
   mappings, (re)creates the sequential local matrix sized by the mappings,
   and builds the global<->local scatters and the sharing-counter vector
   used by MatMult/MatDiagonalSet. Any previously created data is destroyed
   first so the routine can be called again with new mappings. */
static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecDestroy(&is->counter);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A; its sizes are the sizes of the mappings */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  /* local matrix is customizable from the options database under the "is_" prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  if (!is->islocalref) { /* setup scatters and local vectors for MatMult */
    /* Create the local work vectors */
    ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

    /* setup the global to local scatters */
    ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
    if (rmapping != cmapping) {
      ierr = ISDestroy(&to);CHKERRQ(ierr);
      ierr = ISDestroy(&from);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
      ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
    } else {
      /* same mapping for rows and columns: share the row scatter */
      ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
      is->cctx = is->rctx;
    }

    /* interface counter vector (local): each entry counts how many
       subdomains contribute to that local degree of freedom */
    ierr = VecDuplicate(is->y,&is->counter);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    /* free workspace */
    ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
    ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
  }
  ierr =
MatSetUp(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_IS"
/* Translates global row/column indices to local ones on stack buffers
   and inserts into the local matrix. Limited to at most
   MATIS_MAX_ENTRIES_INSERTION indices per call. */
static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       i,zm,zn;
#endif
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
  /* count negative indices */
  for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
  for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
#endif
  /* IS_GTOLM_MASK keeps the array length and marks unmappable entries
     with negative local indices */
  ierr = ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* count negative indices (should be the same as before): any new
     negative index means a global index outside this subdomain's map */
  for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
  for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
  if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
  if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
#endif
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_IS"
/* Blocked variant of MatSetValues_IS: same global-to-local translation,
   performed on block indices */
static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       i,zm,zn;
#endif
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
  /* count negative indices */
  for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
  for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
#endif
  ierr = ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* count negative indices (should be the same as before) */
  for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
  for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
  if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
  if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
#endif
  ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesLocal_IS"
/* Local indices need no translation: forward directly to the local matrix */
static PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlockedLocal_IS"
/* Blocked variant: local block indices are forwarded to the local matrix */
static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISZeroRowsColumnsLocal_Private"
/* Zeroes the given local rows (and columns if requested) of the local
   matrix; n==0 flags the subdomain as a pure Neumann problem */
static PetscErrorCode MatISZeroRowsColumnsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,PetscBool columns)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    PetscInt i;
    is->pure_neumann = PETSC_FALSE;

    if (columns) {
      ierr = MatZeroRowsColumns(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    } else {
      ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    }
    if (diag != 0.)
{
      const PetscScalar *array;
      /* divide the requested diagonal by the number of subdomains sharing
         each dof (is->counter) so the globally assembled operator carries
         exactly diag on these rows */
      ierr = VecGetArrayRead(is->counter,&array);CHKERRQ(ierr);
      for (i=0; i<n; i++) {
        ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(is->counter,&array);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsColumns_Private_IS"
/* Common implementation of MatZeroRows_IS and MatZeroRowsColumns_IS:
   maps the globally numbered rows to the owning processes, optionally
   fixes the right hand side, broadcasts the to-be-zeroed marks through
   the star forest, and zeroes the corresponding local rows/columns */
static PetscErrorCode MatZeroRowsColumns_Private_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b,PetscBool columns)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscInt       nr,nl,len,i;
  PetscInt       *lrows;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  /* the features below require congruent row and column layouts */
  if (columns || diag != 0. || (x && b)) {
    PetscBool cong;
    ierr = PetscLayoutCompare(A->rmap,A->cmap,&cong);CHKERRQ(ierr);
    if (!cong && columns) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Columns can be zeroed if and only if A->rmap and A->cmap are congruent for MATIS");
    if (!cong && diag != 0.) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Nonzero diagonal value supported if and only if A->rmap and A->cmap are congruent for MATIS");
    if (!cong && x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"A->rmap and A->cmap need to be congruent");
  }
#endif
  /* get locally owned rows */
  ierr = PetscLayoutMapLocal_Private(A->rmap,n,rows,&len,&lrows,NULL);CHKERRQ(ierr);
  /* fix right hand side if needed: b = diag*x on the zeroed rows */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    ierr = VecGetArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecGetArray(b, &bb);CHKERRQ(ierr);
    for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
    ierr = VecRestoreArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(b, &bb);CHKERRQ(ierr);
  }
  /* get rows associated to the local matrices */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  /* mark owned rows to be zeroed and broadcast the marks to every
     subdomain sharing them */
  for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* collect the marked local row indices and zero them in the local matrix */
  ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr);
  for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
  ierr = MatISZeroRowsColumnsLocal_Private(A,nr,lrows,diag,columns);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_IS"
static PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec
x,Vec b)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* rows only: columns flag is PETSC_FALSE */
  ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsColumns_IS"
/* Zeroes both rows and columns: columns flag is PETSC_TRUE */
static PetscErrorCode MatZeroRowsColumns_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_IS"
/* Assembly is delegated to the local matrix */
static PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_IS"
/* Assembly is delegated to the local matrix */
static PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat_IS"
/* Returns a borrowed reference to the local matrix (no reference taken) */
static PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
{
  Mat_IS *is = (Mat_IS*)mat->data;

  PetscFunctionBegin;
  *local = is->A;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat_IS"
/* Replaces the local matrix, checking size compatibility when one is
   already present; takes a reference on the new local matrix */
static PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       nrows,ncols,orows,ocols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (is->A) {
    ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
    ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
    if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %Dx%D (you passed a %Dx%D matrix)",orows,ocols,nrows,ncols);
  }
  /* reference before destroy so that local == is->A is safe */
  ierr = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  is->A = local;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameter:
.  mat - the matrix
.  local - the local matrix

  Output Parameter:

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_IS"
/* Zeroing is delegated to the local matrix */
static PetscErrorCode MatZeroEntries_IS(Mat A)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatScale_IS"
/* Scaling is delegated to the local matrix */
static PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatScale(is->A,a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_IS"
/* Diagonal of the assembled operator: sum the local diagonals of the
   shared degrees of freedom into the global vector */
static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get diagonal of the local matrix */
  ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);

  /* scatter diagonal back into global vector */
  ierr = VecSet(v,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_IS"
/* Options are forwarded to the local matrix */
static PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAXPY_IS"
/* Y += a*X for two MATIS matrices: performed on the local matrices.
   Both operands must be MATIS (checked in debug builds only). */
static PetscErrorCode MatAXPY_IS(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  Mat_IS         *y = (Mat_IS*)Y->data;
  Mat_IS         *x;
#if defined(PETSC_USE_DEBUG)
  PetscBool      ismatis;
#endif
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  ierr = PetscObjectTypeCompare((PetscObject)X,MATIS,&ismatis);CHKERRQ(ierr);
  if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)Y),PETSC_ERR_SUP,"Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS");
#endif
  x = (Mat_IS*)X->data;
  ierr = MatAXPY(y->A,a,x->A,str);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetLocalSubMatrix_IS"
/* Builds a MATIS view of a local submatrix: constructs restricted
   local-to-global mappings from the given row/column index sets and
   shares the local matrix with A (islocalref marks the reference) */
static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat)
{
  Mat                    lA;
  Mat_IS                 *matis;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  const PetscInt         *rg,*rl;
  PetscInt               nrg;
  PetscInt               N,M,nrl,i,*idxs;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  ierr = ISGetLocalSize(row,&nrl);CHKERRQ(ierr);
  ierr = ISGetIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %D -> %D greater than maximum possible %D",i,rl[i],nrg);
#endif
  ierr = PetscMalloc1(nrg,&idxs);CHKERRQ(ierr);
  /* map from [0,nrl) to row */
  for (i=0;i<nrl;i++) idxs[i] = rl[i];
  /* pad the remaining entries with an out-of-range marker: nrg in debug
     (so misuse trips the checks), -1 otherwise */
#if defined(PETSC_USE_DEBUG)
  for (i=nrl;i<nrg;i++) idxs[i] = nrg;
#else
  for (i=nrl;i<nrg;i++) idxs[i] = -1;
#endif
  ierr = ISRestoreIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),nrg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  /* compute new l2g map for columns */
  if (col != row || A->rmap->mapping != A->cmap->mapping) {
    const PetscInt *cg,*cl;
    PetscInt       ncg;
    PetscInt       ncl;

    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISGetLocalSize(col,&ncl);CHKERRQ(ierr);
    ierr = ISGetIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %D -> %D greater than maximum possible %D",i,cl[i],ncg);
#endif
    ierr = PetscMalloc1(ncg,&idxs);CHKERRQ(ierr);
    /* map from [0,ncl) to col */
    for (i=0;i<ncl;i++) idxs[i] = cl[i];
#if defined(PETSC_USE_DEBUG)
    for (i=ncl;i<ncg;i++) idxs[i] = ncg;
#else
    for (i=ncl;i<ncg;i++) idxs[i] = -1;
#endif
    ierr = ISRestoreIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),ncg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
  } else {
    /* identical row/column restriction: share the row map */
    ierr = PetscObjectReference((PetscObject)rl2g);CHKERRQ(ierr);
    cl2g = rl2g;
  }
  /* create the MATIS submatrix */
  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)A),submat);CHKERRQ(ierr);
  ierr = MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*submat,MATIS);CHKERRQ(ierr);
  matis = (Mat_IS*)((*submat)->data);
  matis->islocalref = PETSC_TRUE;
  ierr =
MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);CHKERRQ(ierr); 1577 ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr); 1578 ierr = MatISSetLocalMat(*submat,lA);CHKERRQ(ierr); 1579 ierr = MatSetUp(*submat);CHKERRQ(ierr); 1580 ierr = MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1581 ierr = MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1582 ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr); 1583 ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr); 1584 /* remove unsupported ops */ 1585 ierr = PetscMemzero((*submat)->ops,sizeof(struct _MatOps));CHKERRQ(ierr); 1586 (*submat)->ops->destroy = MatDestroy_IS; 1587 (*submat)->ops->setvalueslocal = MatSetValuesLocal_SubMat_IS; 1588 (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS; 1589 (*submat)->ops->assemblybegin = MatAssemblyBegin_IS; 1590 (*submat)->ops->assemblyend = MatAssemblyEnd_IS; 1591 PetscFunctionReturn(0); 1592 } 1593 1594 #undef __FUNCT__ 1595 #define __FUNCT__ "MatCreateIS" 1596 /*@ 1597 MatCreateIS - Creates a "process" unassembled matrix, assembled on each 1598 process but not across processes. 1599 1600 Input Parameters: 1601 + comm - MPI communicator that will share the matrix 1602 . bs - block size of the matrix 1603 . m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products 1604 . rmap - local to global map for rows 1605 - cmap - local to global map for cols 1606 1607 Output Parameter: 1608 . A - the resulting matrix 1609 1610 Level: advanced 1611 1612 Notes: See MATIS for more details. 1613 m and n are NOT related to the size of the map; they represent the size of the local parts of the vectors 1614 used in MatMult operations. The sizes of rmap and cmap define the size of the local matrices. 1615 If either rmap or cmap are NULL, then the matrix is assumed to be square. 
1616 1617 .seealso: MATIS, MatSetLocalToGlobalMapping() 1618 @*/ 1619 PetscErrorCode MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A) 1620 { 1621 PetscErrorCode ierr; 1622 1623 PetscFunctionBegin; 1624 if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mappings"); 1625 ierr = MatCreate(comm,A);CHKERRQ(ierr); 1626 if (bs > 0) { 1627 ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr); 1628 } 1629 ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr); 1630 ierr = MatSetType(*A,MATIS);CHKERRQ(ierr); 1631 if (rmap && cmap) { 1632 ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr); 1633 } else if (!rmap) { 1634 ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr); 1635 } else { 1636 ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr); 1637 } 1638 PetscFunctionReturn(0); 1639 } 1640 1641 /*MC 1642 MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition methods (e.g. PCBDDC or KSPFETIDP). 1643 This stores the matrices in globally unassembled form. Each processor 1644 assembles only its local Neumann problem and the parallel matrix vector 1645 product is handled "implicitly". 1646 1647 Operations Provided: 1648 + MatMult() 1649 . MatMultAdd() 1650 . MatMultTranspose() 1651 . MatMultTransposeAdd() 1652 . MatZeroEntries() 1653 . MatSetOption() 1654 . MatZeroRows() 1655 . MatSetValues() 1656 . MatSetValuesBlocked() 1657 . MatSetValuesLocal() 1658 . MatSetValuesBlockedLocal() 1659 . MatScale() 1660 . MatGetDiagonal() 1661 . MatMissingDiagonal() 1662 . MatDuplicate() 1663 . MatCopy() 1664 . MatAXPY() 1665 . MatGetSubMatrix() 1666 . MatGetLocalSubMatrix() 1667 . MatTranspose() 1668 - MatSetLocalToGlobalMapping() 1669 1670 Options Database Keys: 1671 . 
-mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions() 1672 1673 Notes: Options prefix for the inner matrix are given by -is_mat_xxx 1674 1675 You must call MatSetLocalToGlobalMapping() before using this matrix type. 1676 1677 You can do matrix preallocation on the local matrix after you obtain it with 1678 MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation() 1679 1680 Level: advanced 1681 1682 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), MatCreateIS(), PCBDDC, KSPFETIDP 1683 1684 M*/ 1685 1686 #undef __FUNCT__ 1687 #define __FUNCT__ "MatCreate_IS" 1688 PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A) 1689 { 1690 PetscErrorCode ierr; 1691 Mat_IS *b; 1692 1693 PetscFunctionBegin; 1694 ierr = PetscNewLog(A,&b);CHKERRQ(ierr); 1695 A->data = (void*)b; 1696 1697 /* matrix ops */ 1698 ierr = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr); 1699 A->ops->mult = MatMult_IS; 1700 A->ops->multadd = MatMultAdd_IS; 1701 A->ops->multtranspose = MatMultTranspose_IS; 1702 A->ops->multtransposeadd = MatMultTransposeAdd_IS; 1703 A->ops->destroy = MatDestroy_IS; 1704 A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS; 1705 A->ops->setvalues = MatSetValues_IS; 1706 A->ops->setvaluesblocked = MatSetValuesBlocked_IS; 1707 A->ops->setvalueslocal = MatSetValuesLocal_IS; 1708 A->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_IS; 1709 A->ops->zerorows = MatZeroRows_IS; 1710 A->ops->zerorowscolumns = MatZeroRowsColumns_IS; 1711 A->ops->assemblybegin = MatAssemblyBegin_IS; 1712 A->ops->assemblyend = MatAssemblyEnd_IS; 1713 A->ops->view = MatView_IS; 1714 A->ops->zeroentries = MatZeroEntries_IS; 1715 A->ops->scale = MatScale_IS; 1716 A->ops->getdiagonal = MatGetDiagonal_IS; 1717 A->ops->setoption = MatSetOption_IS; 1718 A->ops->ishermitian = MatIsHermitian_IS; 1719 A->ops->issymmetric = MatIsSymmetric_IS; 1720 A->ops->duplicate = MatDuplicate_IS; 1721 A->ops->missingdiagonal = 
MatMissingDiagonal_IS; 1722 A->ops->copy = MatCopy_IS; 1723 A->ops->getlocalsubmatrix = MatGetLocalSubMatrix_IS; 1724 A->ops->getsubmatrix = MatGetSubMatrix_IS; 1725 A->ops->axpy = MatAXPY_IS; 1726 A->ops->diagonalset = MatDiagonalSet_IS; 1727 A->ops->shift = MatShift_IS; 1728 A->ops->transpose = MatTranspose_IS; 1729 1730 /* special MATIS functions */ 1731 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr); 1732 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr); 1733 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr); 1734 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr); 1735 ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr); 1736 PetscFunctionReturn(0); 1737 } 1738