/*
   Creates a matrix class for using the Neumann-Neumann type preconditioners.
   This stores the matrices in globally unassembled form. Each processor
   assembles only its local Neumann problem and the parallel matrix vector
   product is handled "implicitly".

   Currently this allows for only one subdomain per processor.
*/

#include <../src/mat/impls/is/matis.h> /*I "petscmat.h" I*/

/* Maximum number of row (or column) indices accepted by the stack-buffer
   based MatSetValuesLocal_SubMat_IS/MatSetValuesBlockedLocal_SubMat_IS
   wrappers defined below */
#define MATIS_MAX_ENTRIES_INSERTION 2048

static PetscErrorCode MatISComputeSF_Private(Mat);

#undef __FUNCT__
#define __FUNCT__ "MatGetInfo_IS"
/*
   MatGetInfo_IS - Reports matrix information (nonzeros, memory, mallocs, ...)
   gathered from the local (unassembled) matrices. For MAT_GLOBAL_MAX and
   MAT_GLOBAL_SUM the local values are reduced over the communicator of A.
*/
static PetscErrorCode MatGetInfo_IS(Mat A,MatInfoType flag,MatInfo *ginfo)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  MatInfo        info;
  PetscReal      isend[6],irecv[6];
  PetscInt       bs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  if (matis->A->ops->getinfo) {
    ierr = MatGetInfo(matis->A,MAT_LOCAL,&info);CHKERRQ(ierr);
    isend[0] = info.nz_used;
    isend[1] = info.nz_allocated;
    isend[2] = info.nz_unneeded;
    isend[3] = info.memory;
    isend[4] = info.mallocs;
  } else {
    /* local matrix type does not implement MatGetInfo: report zeros */
    isend[0] = 0.;
    isend[1] = 0.;
    isend[2] = 0.;
    isend[3] = 0.;
    isend[4] = 0.;
  }
  isend[5] = matis->A->num_ass;
  if (flag == MAT_LOCAL) {
    ginfo->nz_used      = isend[0];
    ginfo->nz_allocated = isend[1];
    ginfo->nz_unneeded  = isend[2];
    ginfo->memory       = isend[3];
    ginfo->mallocs      = isend[4];
    ginfo->assemblies   = isend[5];
  } else if (flag == MAT_GLOBAL_MAX) {
    ierr = MPIU_Allreduce(isend,irecv,6,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);

    ginfo->nz_used      = irecv[0];
    ginfo->nz_allocated = irecv[1];
    ginfo->nz_unneeded  = irecv[2];
    ginfo->memory       = irecv[3];
    ginfo->mallocs      = irecv[4];
    ginfo->assemblies   = irecv[5];
  } else if (flag == MAT_GLOBAL_SUM) {
    /* only the first 5 entries are summed: summing per-process assembly
       counters is not meaningful, the global counter of A is used instead */
    ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);

    ginfo->nz_used      = irecv[0];
    ginfo->nz_allocated = irecv[1];
    ginfo->nz_unneeded  = irecv[2];
    ginfo->memory       = irecv[3];
    ginfo->mallocs      = irecv[4];
    ginfo->assemblies   = A->num_ass;
  }
  ginfo->block_size        = bs;
  ginfo->fill_ratio_given  = 0;
  ginfo->fill_ratio_needed = 0;
  ginfo->factor_mallocs    = 0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvert_Nest_IS"
/*
   MatConvert_Nest_IS - Converts a MATNEST matrix, all of whose (non-null)
   blocks must be of type MATIS, into a MATIS matrix whose local matrix is a
   sequential MATNEST built from the local matrices of the blocks.
*/
PETSC_INTERN PetscErrorCode MatConvert_Nest_IS(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat                    **nest,*snest,**rnest,lA,B;
  IS                     *iscol,*isrow,*islrow,*islcol;
  ISLocalToGlobalMapping rl2g,cl2g;
  MPI_Comm               comm;
  PetscInt               *lr,*lc,*lrb,*lcb,*l2gidxs;
  PetscInt               sti,stl,i,j,nr,nc,rbs,cbs;
  PetscBool              convert,lreuse;
  PetscErrorCode         ierr;

  PetscFunctionBeginUser;
  ierr   = MatNestGetSubMats(A,&nr,&nc,&nest);CHKERRQ(ierr);
  lreuse = PETSC_FALSE;
  rnest  = NULL;
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismatis,isnest;

    ierr = PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);CHKERRQ(ierr);
    if (!ismatis) SETERRQ1(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_USER,"Cannot reuse matrix of type %s",((PetscObject)(*newmat))->type);
    ierr = MatISGetLocalMat(*newmat,&lA);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject)lA,MATNEST,&isnest);CHKERRQ(ierr);
    if (isnest) {
      /* the local MATNEST can only be reused if it has the same block layout */
      ierr   = MatNestGetSubMats(lA,&i,&j,&rnest);CHKERRQ(ierr);
      lreuse = (PetscBool)(i == nr && j == nc);
      if (!lreuse) rnest = NULL;
    }
  }
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = PetscCalloc4(nr,&lr,nc,&lc,nr,&lrb,nc,&lcb);CHKERRQ(ierr);
  ierr = PetscCalloc5(nr,&isrow,nc,&iscol,
                      nr,&islrow,nc,&islcol,
                      nr*nc,&snest);CHKERRQ(ierr);
  ierr = MatNestGetISs(A,isrow,iscol);CHKERRQ(ierr);
  for (i=0;i<nr;i++) {
    for (j=0;j<nc;j++) {
      PetscBool ismatis;
      PetscInt  l1,l2,ij=i*nc+j;

      /* Null matrix pointers are allowed in MATNEST */
      if (!nest[i][j]) continue;

      /* Nested matrices should be of type MATIS */
      ierr
= PetscObjectTypeCompare((PetscObject)nest[i][j],MATIS,&ismatis);CHKERRQ(ierr); 122 if (!ismatis) SETERRQ2(comm,PETSC_ERR_SUP,"Cannot convert from MATNEST to MATIS! Matrix block (%D,%D) is not of type MATIS",i,j); 123 124 /* Check compatibility of local sizes */ 125 ierr = MatISGetLocalMat(nest[i][j],&snest[ij]);CHKERRQ(ierr); 126 ierr = MatGetSize(snest[ij],&l1,&l2);CHKERRQ(ierr); 127 if (!l1 || !l2) continue; 128 if (lr[i] && l1 != lr[i]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot convert from MATNEST to MATIS! Matrix block (%D,%D) has invalid local size %D != %D",i,j,lr[i],l1); 129 if (lc[j] && l2 != lc[j]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot convert from MATNEST to MATIS! Matrix block (%D,%D) has invalid local size %D != %D",i,j,lc[j],l2); 130 lr[i] = l1; 131 lc[j] = l2; 132 ierr = MatGetBlockSizes(snest[ij],&l1,&l2);CHKERRQ(ierr); 133 if (lrb[i] && l1 != lrb[i]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot convert from MATNEST to MATIS! Matrix block (%D,%D) has invalid block size %D != %D",i,j,lrb[i],l1); 134 if (lcb[j] && l2 != lcb[j]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot convert from MATNEST to MATIS! 
Matrix block (%D,%D) has invalid block size %D != %D",i,j,lcb[j],l2); 135 lrb[i] = l1; 136 lcb[j] = l2; 137 138 /* check compatibilty for local matrix reusage */ 139 if (rnest && !rnest[i][j] != !snest[ij]) lreuse = PETSC_FALSE; 140 } 141 } 142 143 #if defined (PETSC_USE_DEBUG) 144 /* Check compatibility of l2g maps for rows */ 145 for (i=0;i<nr;i++) { 146 rl2g = NULL; 147 for (j=0;j<nc;j++) { 148 PetscInt n1,n2; 149 150 if (!nest[i][j]) continue; 151 ierr = MatGetLocalToGlobalMapping(nest[i][j],&cl2g,NULL);CHKERRQ(ierr); 152 ierr = ISLocalToGlobalMappingGetSize(cl2g,&n1);CHKERRQ(ierr); 153 if (!n1) continue; 154 if (!rl2g) { 155 rl2g = cl2g; 156 } else { 157 const PetscInt *idxs1,*idxs2; 158 PetscBool same; 159 160 ierr = ISLocalToGlobalMappingGetSize(rl2g,&n2);CHKERRQ(ierr); 161 if (n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot convert from MATNEST to MATIS! Matrix block (%D,%D) has invalid row l2gmap size %D != %D",i,j,n1,n2); 162 ierr = ISLocalToGlobalMappingGetIndices(cl2g,&idxs1);CHKERRQ(ierr); 163 ierr = ISLocalToGlobalMappingGetIndices(rl2g,&idxs2);CHKERRQ(ierr); 164 ierr = PetscMemcmp(idxs1,idxs2,n1*sizeof(PetscInt),&same);CHKERRQ(ierr); 165 ierr = ISLocalToGlobalMappingRestoreIndices(cl2g,&idxs1);CHKERRQ(ierr); 166 ierr = ISLocalToGlobalMappingRestoreIndices(rl2g,&idxs2);CHKERRQ(ierr); 167 if (!same) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot convert from MATNEST to MATIS! 
Matrix block (%D,%D) has invalid row l2gmap",i,j); 168 } 169 } 170 } 171 /* Check compatibility of l2g maps for columns */ 172 for (i=0;i<nc;i++) { 173 rl2g = NULL; 174 for (j=0;j<nr;j++) { 175 PetscInt n1,n2; 176 177 if (!nest[j][i]) continue; 178 ierr = MatGetLocalToGlobalMapping(nest[j][i],NULL,&cl2g);CHKERRQ(ierr); 179 ierr = ISLocalToGlobalMappingGetSize(cl2g,&n1);CHKERRQ(ierr); 180 if (!n1) continue; 181 if (!rl2g) { 182 rl2g = cl2g; 183 } else { 184 const PetscInt *idxs1,*idxs2; 185 PetscBool same; 186 187 ierr = ISLocalToGlobalMappingGetSize(rl2g,&n2);CHKERRQ(ierr); 188 if (n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot convert from MATNEST to MATIS! Matrix block (%D,%D) has invalid column l2gmap size %D != %D",j,i,n1,n2); 189 ierr = ISLocalToGlobalMappingGetIndices(cl2g,&idxs1);CHKERRQ(ierr); 190 ierr = ISLocalToGlobalMappingGetIndices(rl2g,&idxs2);CHKERRQ(ierr); 191 ierr = PetscMemcmp(idxs1,idxs2,n1*sizeof(PetscInt),&same);CHKERRQ(ierr); 192 ierr = ISLocalToGlobalMappingRestoreIndices(cl2g,&idxs1);CHKERRQ(ierr); 193 ierr = ISLocalToGlobalMappingRestoreIndices(rl2g,&idxs2);CHKERRQ(ierr); 194 if (!same) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot convert from MATNEST to MATIS! 
Matrix block (%D,%D) has invalid column l2gmap",j,i); 195 } 196 } 197 } 198 #endif 199 200 B = NULL; 201 if (reuse != MAT_REUSE_MATRIX) { 202 /* Create l2g map for the rows of the new matrix and index sets for the local MATNEST */ 203 for (i=0,stl=0;i<nr;i++) stl += lr[i]; 204 ierr = PetscMalloc1(stl,&l2gidxs);CHKERRQ(ierr); 205 for (i=0,sti=0,stl=0;i<nr;i++) { 206 Mat usedmat; 207 Mat_IS *matis; 208 const PetscInt *idxs; 209 PetscInt n = lr[i]/lrb[i]; 210 211 /* local IS for local NEST */ 212 ierr = ISCreateStride(PETSC_COMM_SELF,n,sti,lrb[i],&islrow[i]);CHKERRQ(ierr); 213 sti += n; 214 215 /* l2gmap */ 216 j = 0; 217 usedmat = nest[i][j]; 218 while (!usedmat && j < nc) usedmat = nest[i][j++]; 219 matis = (Mat_IS*)(usedmat->data); 220 if (!matis->sf) { 221 ierr = MatISComputeSF_Private(usedmat);CHKERRQ(ierr); 222 } 223 ierr = ISGetIndices(isrow[i],&idxs);CHKERRQ(ierr); 224 ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,idxs,l2gidxs+stl);CHKERRQ(ierr); 225 ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,idxs,l2gidxs+stl);CHKERRQ(ierr); 226 ierr = ISRestoreIndices(isrow[i],&idxs);CHKERRQ(ierr); 227 stl += lr[i]; 228 } 229 ierr = ISLocalToGlobalMappingCreate(comm,1,stl,l2gidxs,PETSC_OWN_POINTER,&rl2g);CHKERRQ(ierr); 230 231 /* Create l2g map for columns of the new matrix and index sets for the local MATNEST */ 232 for (i=0,stl=0;i<nc;i++) stl += lc[i]; 233 ierr = PetscMalloc1(stl,&l2gidxs);CHKERRQ(ierr); 234 for (i=0,sti=0,stl=0;i<nc;i++) { 235 Mat usedmat; 236 Mat_IS *matis; 237 const PetscInt *idxs; 238 PetscInt n = lc[i]/lcb[i]; 239 240 /* local IS for local NEST */ 241 ierr = ISCreateStride(PETSC_COMM_SELF,n,sti,lcb[i],&islcol[i]);CHKERRQ(ierr); 242 sti += n; 243 244 /* l2gmap */ 245 j = 0; 246 usedmat = nest[j][i]; 247 while (!usedmat && j < nr) usedmat = nest[j++][i]; 248 matis = (Mat_IS*)(usedmat->data); 249 if (!matis->sf) { 250 ierr = MatISComputeSF_Private(usedmat);CHKERRQ(ierr); 251 } 252 ierr = ISGetIndices(iscol[i],&idxs);CHKERRQ(ierr); 253 ierr = 
PetscSFBcastBegin(matis->csf,MPIU_INT,idxs,l2gidxs+stl);CHKERRQ(ierr); 254 ierr = PetscSFBcastEnd(matis->csf,MPIU_INT,idxs,l2gidxs+stl);CHKERRQ(ierr); 255 ierr = ISRestoreIndices(iscol[i],&idxs);CHKERRQ(ierr); 256 stl += lc[i]; 257 } 258 ierr = ISLocalToGlobalMappingCreate(comm,1,stl,l2gidxs,PETSC_OWN_POINTER,&cl2g);CHKERRQ(ierr); 259 260 /* Create MATIS */ 261 ierr = MatCreate(comm,&B);CHKERRQ(ierr); 262 ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr); 263 ierr = MatGetBlockSizes(A,&rbs,&cbs);CHKERRQ(ierr); 264 ierr = MatSetBlockSizes(B,rbs,cbs);CHKERRQ(ierr); 265 ierr = MatSetType(B,MATIS);CHKERRQ(ierr); 266 ierr = MatSetLocalToGlobalMapping(B,rl2g,cl2g);CHKERRQ(ierr); 267 ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr); 268 ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr); 269 ierr = MatCreateNest(PETSC_COMM_SELF,nr,islrow,nc,islcol,snest,&lA);CHKERRQ(ierr); 270 ierr = MatISSetLocalMat(B,lA);CHKERRQ(ierr); 271 ierr = MatDestroy(&lA);CHKERRQ(ierr); 272 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 273 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 274 if (reuse == MAT_INPLACE_MATRIX) { 275 ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr); 276 } else { 277 *newmat = B; 278 } 279 } else { 280 if (lreuse) { 281 ierr = MatISGetLocalMat(*newmat,&lA);CHKERRQ(ierr); 282 for (i=0;i<nr;i++) { 283 for (j=0;j<nc;j++) { 284 if (snest[i*nc+j]) { 285 ierr = MatNestSetSubMat(lA,i,j,snest[i*nc+j]);CHKERRQ(ierr); 286 } 287 } 288 } 289 } else { 290 for (i=0,sti=0;i<nr;i++) { 291 PetscInt n = lr[i]/lrb[i]; 292 293 ierr = ISCreateStride(PETSC_COMM_SELF,n,sti,lrb[i],&islrow[i]);CHKERRQ(ierr); 294 sti += n; 295 } 296 for (i=0,sti=0;i<nc;i++) { 297 PetscInt n = lc[i]/lcb[i]; 298 299 ierr = ISCreateStride(PETSC_COMM_SELF,n,sti,lcb[i],&islcol[i]);CHKERRQ(ierr); 300 sti += n; 301 } 302 ierr = MatCreateNest(PETSC_COMM_SELF,nr,islrow,nc,islcol,snest,&lA);CHKERRQ(ierr); 303 ierr = 
MatISSetLocalMat(*newmat,lA);CHKERRQ(ierr); 304 ierr = MatDestroy(&lA);CHKERRQ(ierr); 305 } 306 ierr = MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 307 ierr = MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 308 } 309 310 /* Free workspace */ 311 for (i=0;i<nr;i++) { 312 ierr = ISDestroy(&islrow[i]);CHKERRQ(ierr); 313 } 314 for (i=0;i<nc;i++) { 315 ierr = ISDestroy(&islcol[i]);CHKERRQ(ierr); 316 } 317 ierr = PetscFree4(lr,lc,lrb,lcb);CHKERRQ(ierr); 318 ierr = PetscFree5(isrow,iscol,islrow,islcol,snest);CHKERRQ(ierr); 319 320 /* Create local matrix in MATNEST format */ 321 convert = PETSC_FALSE; 322 ierr = PetscOptionsGetBool(NULL,((PetscObject)A)->prefix,"-matis_convert_local_nest",&convert,NULL);CHKERRQ(ierr); 323 if (convert) { 324 Mat M; 325 326 ierr = MatISGetLocalMat(*newmat,&lA);CHKERRQ(ierr); 327 ierr = MatConvert(lA,MATAIJ,MAT_INITIAL_MATRIX,&M);CHKERRQ(ierr); 328 ierr = MatISSetLocalMat(*newmat,M);CHKERRQ(ierr); 329 ierr = MatDestroy(&M);CHKERRQ(ierr); 330 } 331 PetscFunctionReturn(0); 332 } 333 334 #undef __FUNCT__ 335 #define __FUNCT__ "MatTranspose_IS" 336 PetscErrorCode MatTranspose_IS(Mat A,MatReuse reuse,Mat *B) 337 { 338 Mat C,lC,lA; 339 ISLocalToGlobalMapping rl2g,cl2g; 340 PetscBool notset = PETSC_FALSE,cong = PETSC_TRUE; 341 PetscErrorCode ierr; 342 343 PetscFunctionBegin; 344 if (reuse == MAT_REUSE_MATRIX) { 345 PetscBool rcong,ccong; 346 347 ierr = PetscLayoutCompare((*B)->cmap,A->rmap,&rcong);CHKERRQ(ierr); 348 ierr = PetscLayoutCompare((*B)->rmap,A->cmap,&ccong);CHKERRQ(ierr); 349 cong = (PetscBool)(rcong || ccong); 350 } 351 if (reuse == MAT_INITIAL_MATRIX || *B == A || !cong) { 352 ierr = MatCreate(PetscObjectComm((PetscObject)A),&C);CHKERRQ(ierr); 353 } else { 354 ierr = PetscObjectTypeCompare((PetscObject)*B,MATIS,¬set);CHKERRQ(ierr); 355 C = *B; 356 } 357 358 if (!notset) { 359 ierr = MatSetSizes(C,A->cmap->n,A->rmap->n,A->cmap->N,A->rmap->N);CHKERRQ(ierr); 360 ierr = 
MatSetBlockSizes(C,PetscAbs(A->cmap->bs),PetscAbs(A->rmap->bs));CHKERRQ(ierr); 361 ierr = MatSetType(C,MATIS);CHKERRQ(ierr); 362 ierr = MatGetLocalToGlobalMapping(A,&rl2g,&cl2g);CHKERRQ(ierr); 363 ierr = MatSetLocalToGlobalMapping(C,cl2g,rl2g);CHKERRQ(ierr); 364 } 365 366 /* perform local transposition */ 367 ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr); 368 ierr = MatTranspose(lA,MAT_INITIAL_MATRIX,&lC);CHKERRQ(ierr); 369 ierr = MatISSetLocalMat(C,lC);CHKERRQ(ierr); 370 ierr = MatDestroy(&lC);CHKERRQ(ierr); 371 372 ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 373 ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 374 375 if (reuse == MAT_INITIAL_MATRIX || *B != A) { 376 if (!cong) { 377 ierr = MatDestroy(B);CHKERRQ(ierr); 378 } 379 *B = C; 380 } else { 381 ierr = MatHeaderMerge(A,&C);CHKERRQ(ierr); 382 } 383 PetscFunctionReturn(0); 384 } 385 386 #undef __FUNCT__ 387 #define __FUNCT__ "MatDiagonalSet_IS" 388 PetscErrorCode MatDiagonalSet_IS(Mat A,Vec D,InsertMode insmode) 389 { 390 Mat_IS *is = (Mat_IS*)A->data; 391 PetscErrorCode ierr; 392 393 PetscFunctionBegin; 394 if (!D) { /* this code branch is used by MatShift_IS */ 395 ierr = VecSet(is->y,1.);CHKERRQ(ierr); 396 } else { 397 ierr = VecScatterBegin(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 398 ierr = VecScatterEnd(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 399 } 400 ierr = VecPointwiseDivide(is->y,is->y,is->counter);CHKERRQ(ierr); 401 ierr = MatDiagonalSet(is->A,is->y,insmode);CHKERRQ(ierr); 402 PetscFunctionReturn(0); 403 } 404 405 #undef __FUNCT__ 406 #define __FUNCT__ "MatShift_IS" 407 PetscErrorCode MatShift_IS(Mat A,PetscScalar a) 408 { 409 PetscErrorCode ierr; 410 411 PetscFunctionBegin; 412 ierr = MatDiagonalSet_IS(A,NULL,ADD_VALUES);CHKERRQ(ierr); 413 PetscFunctionReturn(0); 414 } 415 416 #undef __FUNCT__ 417 #define __FUNCT__ "MatSetValuesLocal_SubMat_IS" 418 static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A,PetscInt m,const 
PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 419 { 420 PetscErrorCode ierr; 421 Mat_IS *is = (Mat_IS*)A->data; 422 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION]; 423 424 PetscFunctionBegin; 425 #if defined(PETSC_USE_DEBUG) 426 if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n); 427 #endif 428 ierr = ISLocalToGlobalMappingApply(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr); 429 ierr = ISLocalToGlobalMappingApply(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr); 430 ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr); 431 PetscFunctionReturn(0); 432 } 433 434 #undef __FUNCT__ 435 #define __FUNCT__ "MatSetValuesBlockedLocal_SubMat_IS" 436 static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 437 { 438 PetscErrorCode ierr; 439 Mat_IS *is = (Mat_IS*)A->data; 440 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION]; 441 442 PetscFunctionBegin; 443 #if defined(PETSC_USE_DEBUG) 444 if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n); 445 #endif 446 ierr = ISLocalToGlobalMappingApplyBlock(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr); 447 ierr = ISLocalToGlobalMappingApplyBlock(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr); 448 ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr); 449 PetscFunctionReturn(0); 450 } 451 452 #undef __FUNCT__ 453 #define __FUNCT__ "PetscLayoutMapLocal_Private" 454 static PetscErrorCode PetscLayoutMapLocal_Private(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt 
*on,PetscInt **oidxs,PetscInt **ogidxs) 455 { 456 PetscInt *owners = map->range; 457 PetscInt n = map->n; 458 PetscSF sf; 459 PetscInt *lidxs,*work = NULL; 460 PetscSFNode *ridxs; 461 PetscMPIInt rank; 462 PetscInt r, p = 0, len = 0; 463 PetscErrorCode ierr; 464 465 PetscFunctionBegin; 466 if (on) *on = 0; /* squelch -Wmaybe-uninitialized */ 467 /* Create SF where leaves are input idxs and roots are owned idxs (code adapted from MatZeroRowsMapLocal_Private) */ 468 ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr); 469 ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr); 470 for (r = 0; r < n; ++r) lidxs[r] = -1; 471 ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr); 472 for (r = 0; r < N; ++r) { 473 const PetscInt idx = idxs[r]; 474 if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N); 475 if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */ 476 ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr); 477 } 478 ridxs[r].rank = p; 479 ridxs[r].index = idxs[r] - owners[p]; 480 } 481 ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr); 482 ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr); 483 ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr); 484 ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr); 485 if (ogidxs) { /* communicate global idxs */ 486 PetscInt cum = 0,start,*work2; 487 488 ierr = PetscMalloc1(n,&work);CHKERRQ(ierr); 489 ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr); 490 for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++; 491 ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr); 492 start -= cum; 493 cum = 0; 494 for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++; 495 ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr); 496 ierr = 
PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr); 497 ierr = PetscFree(work2);CHKERRQ(ierr); 498 } 499 ierr = PetscSFDestroy(&sf);CHKERRQ(ierr); 500 /* Compress and put in indices */ 501 for (r = 0; r < n; ++r) 502 if (lidxs[r] >= 0) { 503 if (work) work[len] = work[r]; 504 lidxs[len++] = r; 505 } 506 if (on) *on = len; 507 if (oidxs) *oidxs = lidxs; 508 if (ogidxs) *ogidxs = work; 509 PetscFunctionReturn(0); 510 } 511 512 #undef __FUNCT__ 513 #define __FUNCT__ "MatGetSubMatrix_IS" 514 static PetscErrorCode MatGetSubMatrix_IS(Mat mat,IS irow,IS icol,MatReuse scall,Mat *newmat) 515 { 516 Mat locmat,newlocmat; 517 Mat_IS *newmatis; 518 #if defined(PETSC_USE_DEBUG) 519 Vec rtest,ltest; 520 const PetscScalar *array; 521 #endif 522 const PetscInt *idxs; 523 PetscInt i,m,n; 524 PetscErrorCode ierr; 525 526 PetscFunctionBegin; 527 if (scall == MAT_REUSE_MATRIX) { 528 PetscBool ismatis; 529 530 ierr = PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);CHKERRQ(ierr); 531 if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Not of MATIS type"); 532 newmatis = (Mat_IS*)(*newmat)->data; 533 if (!newmatis->getsub_ris) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local row IS"); 534 if (!newmatis->getsub_cis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! 
Misses local col IS"); 535 } 536 /* irow and icol may not have duplicate entries */ 537 #if defined(PETSC_USE_DEBUG) 538 ierr = MatCreateVecs(mat,<est,&rtest);CHKERRQ(ierr); 539 ierr = ISGetLocalSize(irow,&n);CHKERRQ(ierr); 540 ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr); 541 for (i=0;i<n;i++) { 542 ierr = VecSetValue(rtest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr); 543 } 544 ierr = VecAssemblyBegin(rtest);CHKERRQ(ierr); 545 ierr = VecAssemblyEnd(rtest);CHKERRQ(ierr); 546 ierr = VecGetLocalSize(rtest,&n);CHKERRQ(ierr); 547 ierr = VecGetOwnershipRange(rtest,&m,NULL);CHKERRQ(ierr); 548 ierr = VecGetArrayRead(rtest,&array);CHKERRQ(ierr); 549 for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Irow may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i])); 550 ierr = VecRestoreArrayRead(rtest,&array);CHKERRQ(ierr); 551 ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr); 552 ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr); 553 ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr); 554 for (i=0;i<n;i++) { 555 ierr = VecSetValue(ltest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr); 556 } 557 ierr = VecAssemblyBegin(ltest);CHKERRQ(ierr); 558 ierr = VecAssemblyEnd(ltest);CHKERRQ(ierr); 559 ierr = VecGetLocalSize(ltest,&n);CHKERRQ(ierr); 560 ierr = VecGetOwnershipRange(ltest,&m,NULL);CHKERRQ(ierr); 561 ierr = VecGetArrayRead(ltest,&array);CHKERRQ(ierr); 562 for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! 
Icol may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i])); 563 ierr = VecRestoreArrayRead(ltest,&array);CHKERRQ(ierr); 564 ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr); 565 ierr = VecDestroy(&rtest);CHKERRQ(ierr); 566 ierr = VecDestroy(<est);CHKERRQ(ierr); 567 #endif 568 if (scall == MAT_INITIAL_MATRIX) { 569 Mat_IS *matis = (Mat_IS*)mat->data; 570 ISLocalToGlobalMapping rl2g; 571 IS is; 572 PetscInt *lidxs,*lgidxs,*newgidxs; 573 PetscInt ll,newloc; 574 MPI_Comm comm; 575 576 ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr); 577 ierr = ISGetLocalSize(irow,&m);CHKERRQ(ierr); 578 ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr); 579 ierr = MatCreate(comm,newmat);CHKERRQ(ierr); 580 ierr = MatSetType(*newmat,MATIS);CHKERRQ(ierr); 581 ierr = MatSetSizes(*newmat,m,n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr); 582 /* communicate irow to their owners in the layout */ 583 ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr); 584 ierr = PetscLayoutMapLocal_Private(mat->rmap,m,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr); 585 ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr); 586 if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */ 587 ierr = MatISComputeSF_Private(mat);CHKERRQ(ierr); 588 } 589 ierr = PetscMemzero(matis->sf_rootdata,matis->sf_nroots*sizeof(PetscInt));CHKERRQ(ierr); 590 for (i=0;i<ll;i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i]+1; 591 ierr = PetscFree(lidxs);CHKERRQ(ierr); 592 ierr = PetscFree(lgidxs);CHKERRQ(ierr); 593 ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 594 ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 595 for (i=0,newloc=0;i<matis->sf_nleaves;i++) if (matis->sf_leafdata[i]) newloc++; 596 ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr); 597 ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr); 598 for (i=0,newloc=0;i<matis->sf_nleaves;i++) 599 if (matis->sf_leafdata[i]) { 600 lidxs[newloc] = i; 
601 newgidxs[newloc++] = matis->sf_leafdata[i]-1; 602 } 603 ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 604 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr); 605 ierr = ISDestroy(&is);CHKERRQ(ierr); 606 /* local is to extract local submatrix */ 607 newmatis = (Mat_IS*)(*newmat)->data; 608 ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_ris);CHKERRQ(ierr); 609 if (mat->congruentlayouts == -1) { /* first time we compare rows and cols layouts */ 610 PetscBool cong; 611 ierr = PetscLayoutCompare(mat->rmap,mat->cmap,&cong);CHKERRQ(ierr); 612 if (cong) mat->congruentlayouts = 1; 613 else mat->congruentlayouts = 0; 614 } 615 if (mat->congruentlayouts && irow == icol) { 616 ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,rl2g);CHKERRQ(ierr); 617 ierr = PetscObjectReference((PetscObject)newmatis->getsub_ris);CHKERRQ(ierr); 618 newmatis->getsub_cis = newmatis->getsub_ris; 619 } else { 620 ISLocalToGlobalMapping cl2g; 621 622 /* communicate icol to their owners in the layout */ 623 ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr); 624 ierr = PetscLayoutMapLocal_Private(mat->cmap,n,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr); 625 ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr); 626 ierr = PetscMemzero(matis->csf_rootdata,matis->csf_nroots*sizeof(PetscInt));CHKERRQ(ierr); 627 for (i=0;i<ll;i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i]+1; 628 ierr = PetscFree(lidxs);CHKERRQ(ierr); 629 ierr = PetscFree(lgidxs);CHKERRQ(ierr); 630 ierr = PetscSFBcastBegin(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr); 631 ierr = PetscSFBcastEnd(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr); 632 for (i=0,newloc=0;i<matis->csf_nleaves;i++) if (matis->csf_leafdata[i]) newloc++; 633 ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr); 634 ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr); 635 for (i=0,newloc=0;i<matis->csf_nleaves;i++) 636 if (matis->csf_leafdata[i]) { 637 
lidxs[newloc] = i; 638 newgidxs[newloc++] = matis->csf_leafdata[i]-1; 639 } 640 ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 641 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr); 642 ierr = ISDestroy(&is);CHKERRQ(ierr); 643 /* local is to extract local submatrix */ 644 ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_cis);CHKERRQ(ierr); 645 ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);CHKERRQ(ierr); 646 ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr); 647 } 648 ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr); 649 } else { 650 ierr = MatISGetLocalMat(*newmat,&newlocmat);CHKERRQ(ierr); 651 } 652 ierr = MatISGetLocalMat(mat,&locmat);CHKERRQ(ierr); 653 newmatis = (Mat_IS*)(*newmat)->data; 654 ierr = MatGetSubMatrix(locmat,newmatis->getsub_ris,newmatis->getsub_cis,scall,&newlocmat);CHKERRQ(ierr); 655 if (scall == MAT_INITIAL_MATRIX) { 656 ierr = MatISSetLocalMat(*newmat,newlocmat);CHKERRQ(ierr); 657 ierr = MatDestroy(&newlocmat);CHKERRQ(ierr); 658 } 659 ierr = MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 660 ierr = MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 661 PetscFunctionReturn(0); 662 } 663 664 #undef __FUNCT__ 665 #define __FUNCT__ "MatCopy_IS" 666 static PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str) 667 { 668 Mat_IS *a = (Mat_IS*)A->data,*b; 669 PetscBool ismatis; 670 PetscErrorCode ierr; 671 672 PetscFunctionBegin; 673 ierr = PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);CHKERRQ(ierr); 674 if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Need to be implemented"); 675 b = (Mat_IS*)B->data; 676 ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr); 677 PetscFunctionReturn(0); 678 } 679 680 #undef __FUNCT__ 681 #define __FUNCT__ "MatMissingDiagonal_IS" 682 static PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool *missing,PetscInt *d) 683 { 684 Vec v; 685 const PetscScalar *array; 686 PetscInt i,n; 687 
PetscErrorCode ierr; 688 689 PetscFunctionBegin; 690 *missing = PETSC_FALSE; 691 ierr = MatCreateVecs(A,NULL,&v);CHKERRQ(ierr); 692 ierr = MatGetDiagonal(A,v);CHKERRQ(ierr); 693 ierr = VecGetLocalSize(v,&n);CHKERRQ(ierr); 694 ierr = VecGetArrayRead(v,&array);CHKERRQ(ierr); 695 for (i=0;i<n;i++) if (array[i] == 0.) break; 696 ierr = VecRestoreArrayRead(v,&array);CHKERRQ(ierr); 697 ierr = VecDestroy(&v);CHKERRQ(ierr); 698 if (i != n) *missing = PETSC_TRUE; 699 if (d) { 700 *d = -1; 701 if (*missing) { 702 PetscInt rstart; 703 ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr); 704 *d = i+rstart; 705 } 706 } 707 PetscFunctionReturn(0); 708 } 709 710 #undef __FUNCT__ 711 #define __FUNCT__ "MatISComputeSF_Private" 712 static PetscErrorCode MatISComputeSF_Private(Mat B) 713 { 714 Mat_IS *matis = (Mat_IS*)(B->data); 715 const PetscInt *gidxs; 716 PetscErrorCode ierr; 717 718 PetscFunctionBegin; 719 ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr); 720 ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr); 721 ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr); 722 ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr); 723 ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr); 724 ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr); 725 ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr); 726 if (B->rmap->mapping != B->cmap->mapping) { /* setup SF for columns */ 727 ierr = MatGetSize(matis->A,NULL,&matis->csf_nleaves);CHKERRQ(ierr); 728 ierr = MatGetLocalSize(B,NULL,&matis->csf_nroots);CHKERRQ(ierr); 729 ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->csf);CHKERRQ(ierr); 730 ierr = ISLocalToGlobalMappingGetIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr); 731 ierr = 
PetscSFSetGraphLayout(matis->csf,B->cmap,matis->csf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr); 732 ierr = ISLocalToGlobalMappingRestoreIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr); 733 ierr = PetscMalloc2(matis->csf_nroots,&matis->csf_rootdata,matis->csf_nleaves,&matis->csf_leafdata);CHKERRQ(ierr); 734 } else { 735 matis->csf = matis->sf; 736 matis->csf_nleaves = matis->sf_nleaves; 737 matis->csf_nroots = matis->sf_nroots; 738 matis->csf_leafdata = matis->sf_leafdata; 739 matis->csf_rootdata = matis->sf_rootdata; 740 } 741 PetscFunctionReturn(0); 742 } 743 744 #undef __FUNCT__ 745 #define __FUNCT__ "MatISSetPreallocation" 746 /*@ 747 MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix. 748 749 Collective on MPI_Comm 750 751 Input Parameters: 752 + B - the matrix 753 . d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix 754 (same value is used for all local rows) 755 . d_nnz - array containing the number of nonzeros in the various rows of the 756 DIAGONAL portion of the local submatrix (possibly different for each row) 757 or NULL, if d_nz is used to specify the nonzero structure. 758 The size of this array is equal to the number of local rows, i.e 'm'. 759 For matrices that will be factored, you must leave room for (and set) 760 the diagonal entry even if it is zero. 761 . o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local 762 submatrix (same value is used for all local rows). 763 - o_nnz - array containing the number of nonzeros in the various rows of the 764 OFF-DIAGONAL portion of the local submatrix (possibly different for 765 each row) or NULL, if o_nz is used to specify the nonzero 766 structure. The size of this array is equal to the number 767 of local rows, i.e 'm'. 
   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS
@*/
PetscErrorCode MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation (no-op if B is not a MATIS) */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/*
   MATIS implementation of MatISSetPreallocation().
   Sums d and o nonzero counts on the owned (root) side, broadcasts them to the
   local (leaf) side through the row SF, and uses the result to preallocate the
   local matrix in its AIJ, BAIJ and SBAIJ incarnations. The count is an
   overestimate: diagonal and off-diagonal contributions are merged and capped
   only by the number of local columns.
*/
static PetscErrorCode MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* rootdata[i] = d count + o count for each locally owned row */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* overlap the SF broadcast with local queries on matis->A */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a row cannot have more nonzeros than local columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress scalar row counts into block-row counts for BAIJ */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* SBAIJ stores the upper triangle only: drop i entries from block row i */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
/*
   Computes and sets a (generally overestimated) preallocation for the assembled
   matrix B obtained from the MATIS matrix A. Per-local-row diagonal/off-diagonal
   counts from each subdomain matrix are reduced onto the owning processes through
   the row SF, either summed or (if maxreduce) max-ed.
*/
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS         *matis = (Mat_IS*)(A->data);
  PetscInt       *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt *global_indices_r,*global_indices_c;
  PetscInt       i,j,bs,rows,cols;
  PetscInt       lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscMPIInt    nsubdomains;
  PetscBool      isdense,issbaij;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
  ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  ierr =
ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (A->rmap->mapping != A->cmap->mapping) {
    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  } else {
    global_indices_c = global_indices_r;
  }

  if (issbaij) {
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  /*
     An SF reduce is needed to sum up properly on shared rows.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
  /* All processes need to compute entire row ownership */
  ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j] = i; /* global row j is owned by process i */
    }
  }
  /* mat_ranges is reused below to hold the COLUMN ownership ranges */
  ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);

  /*
     my_dnz and my_onz contain the exact contribution to preallocation from each local mat,
     then they will be summed up properly. This way, preallocation is always sufficient
  */
  ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    /* only the upper triangle j >= i is visited; the (j,i) contribution is added symmetrically */
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=i;j<local_rows;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (i != j) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[j] += 1;
          } else {
            my_onz[j] += 1;
          }
        }
      }
    }
  } else if (matis->A->ops->getrowij) { /* fast path: walk the compressed row structure directly */
    const PetscInt *ii,*jj,*jptr;
    PetscBool      done;
    ierr = MatGetRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
    jptr = jj;
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=0;j<ii[i+1]-ii[i];j++,jptr++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[*jptr];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) { /* SBAIJ stores upper triangle: mirror the entry */
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[*jptr] += 1;
          } else {
            my_onz[*jptr] += 1;
          }
        }
      }
    }
    ierr = MatRestoreRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
  } else { /* loop over rows and use MatGetRow */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices_r[i];
      ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (global_indices_c != global_indices_r) {
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  }
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);

  /* Reduce my_dnz and my_onz */
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }

  /* Set preallocation */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  /* compress scalar counts into block-row counts for the blocked formats */
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/*
   MATIS implementation of MatISGetMPIXAIJ(): assembles the globally unassembled
   MATIS matrix into a parallel AIJ/BAIJ/SBAIJ matrix by adding each subdomain
   contribution through MatSetValuesLocal().
*/
static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) { /* uniprocessor case: convert the local matrix and extract the global-ordering submatrix */
    Mat            B;
    IS             rows,cols;
    const PetscInt *ridxs,*cidxs;

    ierr = ISLocalToGlobalMappingGetIndices(mat->rmap->mapping,&ridxs);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(mat->cmap->mapping,&cidxs);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF,mat->rmap->n,ridxs,PETSC_USE_POINTER,&rows);CHKERRQ(ierr);
ierr = ISCreateGeneral(PETSC_COMM_SELF,mat->cmap->n,cidxs,PETSC_USE_POINTER,&cols);CHKERRQ(ierr);
    ierr = MatConvert(matis->A,MATAIJ,MAT_INITIAL_MATRIX,&B);CHKERRQ(ierr);
    ierr = MatGetSubMatrix(B,rows,cols,reuse,M);CHKERRQ(ierr);
    ierr = MatDestroy(&B);CHKERRQ(ierr);
    ierr = ISDestroy(&cols);CHKERRQ(ierr);
    ierr = ISDestroy(&rows);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(mat->rmap->mapping,&ridxs);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(mat->cmap->mapping,&cidxs);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType   new_mat_type;
    PetscBool issbaij_red;

    /* determining new matrix type: SBAIJ only if every subdomain matrix is SBAIJ */
    ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks: the reused matrix must match mat's layout exactly */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  if (issbaij) {
    /* SBAIJ stores only the upper triangle: convert to BAIJ to insert full rows */
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices: insert the whole array at once */
    PetscInt i,*dummy_rows,*dummy_cols;

    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    /* dense storage is column major, so insert column oriented */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) { /* fast path: insert row by row straight from the CSR arrays */
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) { /* restore default row-oriented insertion */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameter:
.  mat - the matrix (should be of type MATIS)
.  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_IS"
/* Duplicates a MATIS matrix by duplicating its local matrix and sharing the mappings */
PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  PetscInt       bs,m,n,M,N;
  Mat            B,localmat;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
  ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
  ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
  ierr =
MatDestroy(&localmat);CHKERRQ(ierr); /* B keeps its own reference from MatISSetLocalMat */
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *newmat = B;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsHermitian_IS"
/* A MATIS matrix is Hermitian iff every subdomain matrix is (checked via an allreduce) */
static PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsSymmetric_IS"
/* A MATIS matrix is symmetric iff every subdomain matrix is (checked via an allreduce) */
static PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/* Frees all MATIS-private data: local matrix, scatters, work vectors, SFs and composed methods */
static PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = VecDestroy(&b->counter);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_ris);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_cis);CHKERRQ(ierr);
  if (b->sf != b->csf) { /* column SF is distinct only when row/column mappings differ */
    ierr = PetscSFDestroy(&b->csf);CHKERRQ(ierr);
    ierr = PetscFree2(b->csf_rootdata,b->csf_leafdata);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_IS"
/* y = A*x, "implicitly" assembled: gather x locally, apply the local matrix, sum back */
static PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscScalar    zero = 0.0;

  PetscFunctionBegin;
  /* scatter the global vector x into the local work vector */
  ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);

  /* scatter product back into global memory, adding the subdomain contributions */
  ierr = VecSet(y,zero);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_IS"
static PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A * v1.*/
  if (v3 != v2) {
    ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else { /* aliased output: compute into a temporary to avoid clobbering v2 */
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_IS"
/* x = A'*y, using the row scatter forward and the column scatter in reverse */
static PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* scatter the global vector x into the local work vector */
  ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);

  /* scatter product back into global vector */
  ierr = VecSet(x,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_IS"
static PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A' * v1.*/
  if (v3 != v2) {
    ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else { /* aliased output: compute into a temporary to avoid clobbering v2 */
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_IS"
/* Views each subdomain (local) matrix through a per-process subviewer */
static PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
{
  Mat_IS         *a = (Mat_IS*)A->data;
PetscErrorCode ierr; 1362 PetscViewer sviewer; 1363 1364 PetscFunctionBegin; 1365 ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 1366 ierr = MatView(a->A,sviewer);CHKERRQ(ierr); 1367 ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 1368 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1369 PetscFunctionReturn(0); 1370 } 1371 1372 #undef __FUNCT__ 1373 #define __FUNCT__ "MatSetLocalToGlobalMapping_IS" 1374 static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping) 1375 { 1376 PetscErrorCode ierr; 1377 PetscInt nr,rbs,nc,cbs; 1378 Mat_IS *is = (Mat_IS*)A->data; 1379 IS from,to; 1380 Vec cglobal,rglobal; 1381 1382 PetscFunctionBegin; 1383 PetscCheckSameComm(A,1,rmapping,2); 1384 PetscCheckSameComm(A,1,cmapping,3); 1385 /* Destroy any previous data */ 1386 ierr = VecDestroy(&is->x);CHKERRQ(ierr); 1387 ierr = VecDestroy(&is->y);CHKERRQ(ierr); 1388 ierr = VecDestroy(&is->counter);CHKERRQ(ierr); 1389 ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr); 1390 ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr); 1391 ierr = MatDestroy(&is->A);CHKERRQ(ierr); 1392 ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr); 1393 ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr); 1394 1395 /* Setup Layout and set local to global maps */ 1396 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 1397 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 1398 ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr); 1399 ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr); 1400 1401 /* Create the local matrix A */ 1402 ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr); 1403 ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr); 1404 ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr); 1405 ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr); 1406 ierr = 
MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr); 1407 ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr); 1408 ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr); 1409 ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr); 1410 ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr); 1411 ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr); 1412 ierr = MatSetFromOptions(is->A);CHKERRQ(ierr); 1413 1414 if (!is->islocalref) { /* setup scatters and local vectors for MatMult */ 1415 /* Create the local work vectors */ 1416 ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr); 1417 1418 /* setup the global to local scatters */ 1419 ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr); 1420 ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr); 1421 ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr); 1422 ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr); 1423 if (rmapping != cmapping) { 1424 ierr = ISDestroy(&to);CHKERRQ(ierr); 1425 ierr = ISDestroy(&from);CHKERRQ(ierr); 1426 ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr); 1427 ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr); 1428 ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr); 1429 } else { 1430 ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr); 1431 is->cctx = is->rctx; 1432 } 1433 1434 /* interface counter vector (local) */ 1435 ierr = VecDuplicate(is->y,&is->counter);CHKERRQ(ierr); 1436 ierr = VecSet(is->y,1.);CHKERRQ(ierr); 1437 ierr = VecScatterBegin(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1438 ierr = VecScatterEnd(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1439 ierr = VecScatterBegin(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1440 ierr = VecScatterEnd(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1441 1442 /* free workspace */ 1443 ierr = 
VecDestroy(&rglobal);CHKERRQ(ierr); 1444 ierr = VecDestroy(&cglobal);CHKERRQ(ierr); 1445 ierr = ISDestroy(&to);CHKERRQ(ierr); 1446 ierr = ISDestroy(&from);CHKERRQ(ierr); 1447 } 1448 ierr = MatSetUp(A);CHKERRQ(ierr); 1449 PetscFunctionReturn(0); 1450 } 1451 1452 #undef __FUNCT__ 1453 #define __FUNCT__ "MatSetValues_IS" 1454 static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv) 1455 { 1456 Mat_IS *is = (Mat_IS*)mat->data; 1457 PetscErrorCode ierr; 1458 #if defined(PETSC_USE_DEBUG) 1459 PetscInt i,zm,zn; 1460 #endif 1461 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION]; 1462 1463 PetscFunctionBegin; 1464 #if defined(PETSC_USE_DEBUG) 1465 if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n); 1466 /* count negative indices */ 1467 for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++; 1468 for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++; 1469 #endif 1470 ierr = ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr); 1471 ierr = ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr); 1472 #if defined(PETSC_USE_DEBUG) 1473 /* count negative indices (should be the same as before) */ 1474 for (i=0;i<m;i++) if (rows_l[i] < 0) zm--; 1475 for (i=0;i<n;i++) if (cols_l[i] < 0) zn--; 1476 if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS"); 1477 if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! 
Maybe you should not use MATIS"); 1478 #endif 1479 ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr); 1480 PetscFunctionReturn(0); 1481 } 1482 1483 #undef __FUNCT__ 1484 #define __FUNCT__ "MatSetValuesBlocked_IS" 1485 static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv) 1486 { 1487 Mat_IS *is = (Mat_IS*)mat->data; 1488 PetscErrorCode ierr; 1489 #if defined(PETSC_USE_DEBUG) 1490 PetscInt i,zm,zn; 1491 #endif 1492 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION]; 1493 1494 PetscFunctionBegin; 1495 #if defined(PETSC_USE_DEBUG) 1496 if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n); 1497 /* count negative indices */ 1498 for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++; 1499 for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++; 1500 #endif 1501 ierr = ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr); 1502 ierr = ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr); 1503 #if defined(PETSC_USE_DEBUG) 1504 /* count negative indices (should be the same as before) */ 1505 for (i=0;i<m;i++) if (rows_l[i] < 0) zm--; 1506 for (i=0;i<n;i++) if (cols_l[i] < 0) zn--; 1507 if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS"); 1508 if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! 
Maybe you should not use MATIS"); 1509 #endif 1510 ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr); 1511 PetscFunctionReturn(0); 1512 } 1513 1514 #undef __FUNCT__ 1515 #define __FUNCT__ "MatSetValuesLocal_IS" 1516 static PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 1517 { 1518 PetscErrorCode ierr; 1519 Mat_IS *is = (Mat_IS*)A->data; 1520 1521 PetscFunctionBegin; 1522 ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr); 1523 PetscFunctionReturn(0); 1524 } 1525 1526 #undef __FUNCT__ 1527 #define __FUNCT__ "MatSetValuesBlockedLocal_IS" 1528 static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 1529 { 1530 PetscErrorCode ierr; 1531 Mat_IS *is = (Mat_IS*)A->data; 1532 1533 PetscFunctionBegin; 1534 ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr); 1535 PetscFunctionReturn(0); 1536 } 1537 1538 #undef __FUNCT__ 1539 #define __FUNCT__ "MatISZeroRowsColumnsLocal_Private" 1540 static PetscErrorCode MatISZeroRowsColumnsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,PetscBool columns) 1541 { 1542 Mat_IS *is = (Mat_IS*)A->data; 1543 PetscErrorCode ierr; 1544 1545 PetscFunctionBegin; 1546 if (!n) { 1547 is->pure_neumann = PETSC_TRUE; 1548 } else { 1549 PetscInt i; 1550 is->pure_neumann = PETSC_FALSE; 1551 1552 if (columns) { 1553 ierr = MatZeroRowsColumns(is->A,n,rows,diag,0,0);CHKERRQ(ierr); 1554 } else { 1555 ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr); 1556 } 1557 if (diag != 0.) 
    {
      /* Scale the diagonal entry by the multiplicity of the dof: the entries
         of the local matrices are summed when assembling the global operator,
         so each local diagonal must contribute diag/count. NOTE(review):
         assumes is->counter holds the per-dof sharing count — confirm against
         where is->counter is built. */
      const PetscScalar *array;
      ierr = VecGetArrayRead(is->counter,&array);CHKERRQ(ierr);
      for (i=0; i<n; i++) {
        ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(is->counter,&array);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsColumns_Private_IS"
/*
   Shared implementation of MatZeroRows_IS and MatZeroRowsColumns_IS.
   Translates the GLOBAL row indices to the rows of the local matrices via a
   PetscSF broadcast, optionally fixes the right-hand side b from x, then
   delegates to MatISZeroRowsColumnsLocal_Private on each process.
*/
static PetscErrorCode MatZeroRowsColumns_Private_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b,PetscBool columns)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscInt       nr,nl,len,i;
  PetscInt       *lrows;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  /* the features below only make sense when row and column layouts coincide */
  if (columns || diag != 0. || (x && b)) {
    PetscBool cong;
    ierr = PetscLayoutCompare(A->rmap,A->cmap,&cong);CHKERRQ(ierr);
    if (!cong && columns) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Columns can be zeroed if and only if A->rmap and A->cmap are congruent for MATIS");
    if (!cong && diag != 0.) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Nonzero diagonal value supported if and only if A->rmap and A->cmap are congruent for MATIS");
    if (!cong && x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"A->rmap and A->cmap need to be congruent");
  }
#endif
  /* get locally owned rows */
  ierr = PetscLayoutMapLocal_Private(A->rmap,n,rows,&len,&lrows,NULL);CHKERRQ(ierr);
  /* fix right hand side if needed: b[i] = diag * x[i] on the zeroed rows */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    ierr = VecGetArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecGetArray(b, &bb);CHKERRQ(ierr);
    for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
    ierr = VecRestoreArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(b, &bb);CHKERRQ(ierr);
  }
  /* get rows associated to the local matrices */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  /* mark owned rows to be zeroed, then broadcast the marks to every local
     matrix that shares those dofs */
  for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress the marked leaf indices into a local row list */
  ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr);
  for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
  ierr = MatISZeroRowsColumnsLocal_Private(A,nr,lrows,diag,columns);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_IS"
static PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec
x,Vec b)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* rows-only variant: columns = PETSC_FALSE */
  ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsColumns_IS"
static PetscErrorCode MatZeroRowsColumns_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* rows-and-columns variant: columns = PETSC_TRUE */
  ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_IS"
/* Assembly is purely local: just assemble the local matrix */
static PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_IS"
static PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat_IS"
/* Returns a borrowed reference to the local matrix (no reference taken) */
static PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
{
  Mat_IS *is = (Mat_IS*)mat->data;

  PetscFunctionBegin;
  *local = is->A;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat_IS"
/* Replaces the local matrix, validating that the sizes match the old one */
static PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       nrows,ncols,orows,ocols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (is->A) {
    ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
    ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
    if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %Dx%D (you passed a %Dx%D matrix)",orows,ocols,nrows,ncols);
  }
  /* reference the new matrix before destroying the old one (safe if equal) */
  ierr = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  is->A = local;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_IS"
/* Zeroing all entries is purely local */
static PetscErrorCode MatZeroEntries_IS(Mat A)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatScale_IS"
/* Scaling is purely local */
static PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatScale(is->A,a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_IS"
/* The global diagonal is the sum of the local diagonals on shared dofs */
static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get diagonal of the local matrix */
  ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);

  /* scatter diagonal back into global vector */
  ierr = VecSet(v,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_IS"
/* Options are simply forwarded to the local matrix */
static PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define
__FUNCT__ "MatAXPY_IS" 1815 static PetscErrorCode MatAXPY_IS(Mat Y,PetscScalar a,Mat X,MatStructure str) 1816 { 1817 Mat_IS *y = (Mat_IS*)Y->data; 1818 Mat_IS *x; 1819 #if defined(PETSC_USE_DEBUG) 1820 PetscBool ismatis; 1821 #endif 1822 PetscErrorCode ierr; 1823 1824 PetscFunctionBegin; 1825 #if defined(PETSC_USE_DEBUG) 1826 ierr = PetscObjectTypeCompare((PetscObject)X,MATIS,&ismatis);CHKERRQ(ierr); 1827 if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)Y),PETSC_ERR_SUP,"Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS"); 1828 #endif 1829 x = (Mat_IS*)X->data; 1830 ierr = MatAXPY(y->A,a,x->A,str);CHKERRQ(ierr); 1831 PetscFunctionReturn(0); 1832 } 1833 1834 #undef __FUNCT__ 1835 #define __FUNCT__ "MatGetLocalSubMatrix_IS" 1836 static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat) 1837 { 1838 Mat lA; 1839 Mat_IS *matis; 1840 ISLocalToGlobalMapping rl2g,cl2g; 1841 IS is; 1842 const PetscInt *rg,*rl; 1843 PetscInt nrg; 1844 PetscInt N,M,nrl,i,*idxs; 1845 PetscErrorCode ierr; 1846 1847 PetscFunctionBegin; 1848 ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);CHKERRQ(ierr); 1849 ierr = ISGetLocalSize(row,&nrl);CHKERRQ(ierr); 1850 ierr = ISGetIndices(row,&rl);CHKERRQ(ierr); 1851 ierr = ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);CHKERRQ(ierr); 1852 #if defined(PETSC_USE_DEBUG) 1853 for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %D -> %D greater than maximum possible %D",i,rl[i],nrg); 1854 #endif 1855 ierr = PetscMalloc1(nrg,&idxs);CHKERRQ(ierr); 1856 /* map from [0,nrl) to row */ 1857 for (i=0;i<nrl;i++) idxs[i] = rl[i]; 1858 #if defined(PETSC_USE_DEBUG) 1859 for (i=nrl;i<nrg;i++) idxs[i] = nrg; 1860 #else 1861 for (i=nrl;i<nrg;i++) idxs[i] = -1; 1862 #endif 1863 ierr = ISRestoreIndices(row,&rl);CHKERRQ(ierr); 1864 ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);CHKERRQ(ierr); 1865 ierr = 
ISCreateGeneral(PetscObjectComm((PetscObject)A),nrg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 1866 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr); 1867 ierr = ISDestroy(&is);CHKERRQ(ierr); 1868 /* compute new l2g map for columns */ 1869 if (col != row || A->rmap->mapping != A->cmap->mapping) { 1870 const PetscInt *cg,*cl; 1871 PetscInt ncg; 1872 PetscInt ncl; 1873 1874 ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);CHKERRQ(ierr); 1875 ierr = ISGetLocalSize(col,&ncl);CHKERRQ(ierr); 1876 ierr = ISGetIndices(col,&cl);CHKERRQ(ierr); 1877 ierr = ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);CHKERRQ(ierr); 1878 #if defined(PETSC_USE_DEBUG) 1879 for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %D -> %D greater than maximum possible %D",i,cl[i],ncg); 1880 #endif 1881 ierr = PetscMalloc1(ncg,&idxs);CHKERRQ(ierr); 1882 /* map from [0,ncl) to col */ 1883 for (i=0;i<ncl;i++) idxs[i] = cl[i]; 1884 #if defined(PETSC_USE_DEBUG) 1885 for (i=ncl;i<ncg;i++) idxs[i] = ncg; 1886 #else 1887 for (i=ncl;i<ncg;i++) idxs[i] = -1; 1888 #endif 1889 ierr = ISRestoreIndices(col,&cl);CHKERRQ(ierr); 1890 ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);CHKERRQ(ierr); 1891 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),ncg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 1892 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr); 1893 ierr = ISDestroy(&is);CHKERRQ(ierr); 1894 } else { 1895 ierr = PetscObjectReference((PetscObject)rl2g);CHKERRQ(ierr); 1896 cl2g = rl2g; 1897 } 1898 /* create the MATIS submatrix */ 1899 ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr); 1900 ierr = MatCreate(PetscObjectComm((PetscObject)A),submat);CHKERRQ(ierr); 1901 ierr = MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr); 1902 ierr = MatSetType(*submat,MATIS);CHKERRQ(ierr); 1903 matis = (Mat_IS*)((*submat)->data); 1904 matis->islocalref = PETSC_TRUE; 1905 ierr = 
MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);CHKERRQ(ierr); 1906 ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr); 1907 ierr = MatISSetLocalMat(*submat,lA);CHKERRQ(ierr); 1908 ierr = MatSetUp(*submat);CHKERRQ(ierr); 1909 ierr = MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1910 ierr = MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1911 ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr); 1912 ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr); 1913 /* remove unsupported ops */ 1914 ierr = PetscMemzero((*submat)->ops,sizeof(struct _MatOps));CHKERRQ(ierr); 1915 (*submat)->ops->destroy = MatDestroy_IS; 1916 (*submat)->ops->setvalueslocal = MatSetValuesLocal_SubMat_IS; 1917 (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS; 1918 (*submat)->ops->assemblybegin = MatAssemblyBegin_IS; 1919 (*submat)->ops->assemblyend = MatAssemblyEnd_IS; 1920 PetscFunctionReturn(0); 1921 } 1922 1923 #undef __FUNCT__ 1924 #define __FUNCT__ "MatCreateIS" 1925 /*@ 1926 MatCreateIS - Creates a "process" unassembled matrix, assembled on each 1927 process but not across processes. 1928 1929 Input Parameters: 1930 + comm - MPI communicator that will share the matrix 1931 . bs - block size of the matrix 1932 . m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products 1933 . rmap - local to global map for rows 1934 - cmap - local to global map for cols 1935 1936 Output Parameter: 1937 . A - the resulting matrix 1938 1939 Level: advanced 1940 1941 Notes: See MATIS for more details. 1942 m and n are NOT related to the size of the map; they represent the size of the local parts of the vectors 1943 used in MatMult operations. The sizes of rmap and cmap define the size of the local matrices. 1944 If either rmap or cmap are NULL, then the matrix is assumed to be square. 
1945 1946 .seealso: MATIS, MatSetLocalToGlobalMapping() 1947 @*/ 1948 PetscErrorCode MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A) 1949 { 1950 PetscErrorCode ierr; 1951 1952 PetscFunctionBegin; 1953 if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mappings"); 1954 ierr = MatCreate(comm,A);CHKERRQ(ierr); 1955 if (bs > 0) { 1956 ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr); 1957 } 1958 ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr); 1959 ierr = MatSetType(*A,MATIS);CHKERRQ(ierr); 1960 if (rmap && cmap) { 1961 ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr); 1962 } else if (!rmap) { 1963 ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr); 1964 } else { 1965 ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr); 1966 } 1967 PetscFunctionReturn(0); 1968 } 1969 1970 /*MC 1971 MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition methods (e.g. PCBDDC or KSPFETIDP). 1972 This stores the matrices in globally unassembled form. Each processor 1973 assembles only its local Neumann problem and the parallel matrix vector 1974 product is handled "implicitly". 1975 1976 Operations Provided: 1977 + MatMult() 1978 . MatMultAdd() 1979 . MatMultTranspose() 1980 . MatMultTransposeAdd() 1981 . MatZeroEntries() 1982 . MatSetOption() 1983 . MatZeroRows() 1984 . MatSetValues() 1985 . MatSetValuesBlocked() 1986 . MatSetValuesLocal() 1987 . MatSetValuesBlockedLocal() 1988 . MatScale() 1989 . MatGetDiagonal() 1990 . MatMissingDiagonal() 1991 . MatDuplicate() 1992 . MatCopy() 1993 . MatAXPY() 1994 . MatGetSubMatrix() 1995 . MatGetLocalSubMatrix() 1996 . MatTranspose() 1997 - MatSetLocalToGlobalMapping() 1998 1999 Options Database Keys: 2000 . 
-mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions() 2001 2002 Notes: Options prefix for the inner matrix are given by -is_mat_xxx 2003 2004 You must call MatSetLocalToGlobalMapping() before using this matrix type. 2005 2006 You can do matrix preallocation on the local matrix after you obtain it with 2007 MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation() 2008 2009 Level: advanced 2010 2011 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), MatCreateIS(), PCBDDC, KSPFETIDP 2012 2013 M*/ 2014 2015 #undef __FUNCT__ 2016 #define __FUNCT__ "MatCreate_IS" 2017 PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A) 2018 { 2019 PetscErrorCode ierr; 2020 Mat_IS *b; 2021 2022 PetscFunctionBegin; 2023 ierr = PetscNewLog(A,&b);CHKERRQ(ierr); 2024 A->data = (void*)b; 2025 2026 /* matrix ops */ 2027 ierr = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr); 2028 A->ops->mult = MatMult_IS; 2029 A->ops->multadd = MatMultAdd_IS; 2030 A->ops->multtranspose = MatMultTranspose_IS; 2031 A->ops->multtransposeadd = MatMultTransposeAdd_IS; 2032 A->ops->destroy = MatDestroy_IS; 2033 A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS; 2034 A->ops->setvalues = MatSetValues_IS; 2035 A->ops->setvaluesblocked = MatSetValuesBlocked_IS; 2036 A->ops->setvalueslocal = MatSetValuesLocal_IS; 2037 A->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_IS; 2038 A->ops->zerorows = MatZeroRows_IS; 2039 A->ops->zerorowscolumns = MatZeroRowsColumns_IS; 2040 A->ops->assemblybegin = MatAssemblyBegin_IS; 2041 A->ops->assemblyend = MatAssemblyEnd_IS; 2042 A->ops->view = MatView_IS; 2043 A->ops->zeroentries = MatZeroEntries_IS; 2044 A->ops->scale = MatScale_IS; 2045 A->ops->getdiagonal = MatGetDiagonal_IS; 2046 A->ops->setoption = MatSetOption_IS; 2047 A->ops->ishermitian = MatIsHermitian_IS; 2048 A->ops->issymmetric = MatIsSymmetric_IS; 2049 A->ops->duplicate = MatDuplicate_IS; 2050 A->ops->missingdiagonal = 
MatMissingDiagonal_IS; 2051 A->ops->copy = MatCopy_IS; 2052 A->ops->getlocalsubmatrix = MatGetLocalSubMatrix_IS; 2053 A->ops->getsubmatrix = MatGetSubMatrix_IS; 2054 A->ops->axpy = MatAXPY_IS; 2055 A->ops->diagonalset = MatDiagonalSet_IS; 2056 A->ops->shift = MatShift_IS; 2057 A->ops->transpose = MatTranspose_IS; 2058 A->ops->getinfo = MatGetInfo_IS; 2059 2060 /* special MATIS functions */ 2061 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr); 2062 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr); 2063 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr); 2064 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr); 2065 ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr); 2066 PetscFunctionReturn(0); 2067 } 2068