1 /* 2 Creates a matrix class for using the Neumann-Neumann type preconditioners. 3 This stores the matrices in globally unassembled form. Each processor 4 assembles only its local Neumann problem and the parallel matrix vector 5 product is handled "implicitly". 6 7 Currently this allows for only one subdomain per processor. 8 */ 9 10 #include <../src/mat/impls/is/matis.h> /*I "petscmat.h" I*/ 11 #include <../src/mat/impls/aij/mpi/mpiaij.h> 12 #include <petsc/private/sfimpl.h> 13 14 #define MATIS_MAX_ENTRIES_INSERTION 2048 15 16 #undef __FUNCT__ 17 #define __FUNCT__ "MatISContainerDestroy_Private" 18 static PetscErrorCode MatISContainerDestroy_Private(void *ptr) 19 { 20 PetscErrorCode ierr; 21 22 PetscFunctionBeginUser; 23 ierr = PetscFree(ptr); CHKERRQ(ierr); 24 PetscFunctionReturn(0); 25 } 26 27 #undef __FUNCT__ 28 #define __FUNCT__ "MatConvert_MPIAIJ_IS" 29 PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_IS(Mat A,MatType type,MatReuse reuse,Mat *newmat) 30 { 31 Mat_MPIAIJ *aij = (Mat_MPIAIJ*)A->data; 32 Mat_SeqAIJ *diag = (Mat_SeqAIJ*)(aij->A->data); 33 Mat_SeqAIJ *offd = (Mat_SeqAIJ*)(aij->B->data); 34 Mat lA; 35 ISLocalToGlobalMapping rl2g,cl2g; 36 IS is; 37 MPI_Comm comm; 38 void *ptrs[2]; 39 const char *names[2] = {"_convert_csr_aux","_convert_csr_data"}; 40 PetscScalar *dd,*od,*aa,*data; 41 PetscInt *di,*dj,*oi,*oj; 42 PetscInt *aux,*ii,*jj; 43 PetscInt lc,dr,dc,oc,str,stc,nnz,i,jd,jo,cum; 44 PetscErrorCode ierr; 45 46 PetscFunctionBeginUser; 47 ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr); 48 if (!aij->garray) SETERRQ(comm,PETSC_ERR_SUP,"garray not present"); 49 50 /* access relevant information from MPIAIJ */ 51 ierr = MatGetOwnershipRange(A,&str,NULL);CHKERRQ(ierr); 52 ierr = MatGetOwnershipRangeColumn(A,&stc,NULL);CHKERRQ(ierr); 53 ierr = MatGetLocalSize(A,&dr,&dc);CHKERRQ(ierr); 54 di = diag->i; 55 dj = diag->j; 56 dd = diag->a; 57 oc = aij->B->cmap->n; 58 oi = offd->i; 59 oj = offd->j; 60 od = offd->a; 61 nnz = diag->i[dr] + 
offd->i[dr]; 62 63 /* generate l2g maps for rows and cols */ 64 ierr = ISCreateStride(comm,dr,str,1,&is);CHKERRQ(ierr); 65 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr); 66 ierr = ISDestroy(&is);CHKERRQ(ierr); 67 if (dr) { 68 ierr = PetscMalloc1(dc+oc,&aux);CHKERRQ(ierr); 69 for (i=0; i<dc; i++) aux[i] = i+stc; 70 for (i=0; i<oc; i++) aux[i+dc] = aij->garray[i]; 71 ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 72 lc = dc+oc; 73 } else { 74 ierr = ISCreateGeneral(comm,0,NULL,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 75 lc = 0; 76 } 77 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr); 78 ierr = ISDestroy(&is);CHKERRQ(ierr); 79 80 /* create MATIS object */ 81 ierr = MatCreate(comm,newmat);CHKERRQ(ierr); 82 ierr = MatSetSizes(*newmat,dr,dc,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr); 83 ierr = MatSetType(*newmat,MATIS);CHKERRQ(ierr); 84 ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);CHKERRQ(ierr); 85 ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr); 86 ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr); 87 88 /* merge local matrices */ 89 ierr = PetscMalloc1(nnz+dr+1,&aux);CHKERRQ(ierr); 90 ierr = PetscMalloc1(nnz,&data);CHKERRQ(ierr); 91 ii = aux; 92 jj = aux+dr+1; 93 aa = data; 94 *ii = *(di++) + *(oi++); 95 for (jd=0,jo=0,cum=0;*ii<nnz;cum++) 96 { 97 for (;jd<*di;jd++) { *jj++ = *dj++; *aa++ = *dd++; } 98 for (;jo<*oi;jo++) { *jj++ = *oj++ + dc; *aa++ = *od++; } 99 *(++ii) = *(di++) + *(oi++); 100 } 101 for (;cum<dr;cum++) *(++ii) = nnz; 102 ii = aux; 103 jj = aux+dr+1; 104 aa = data; 105 ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,lc,ii,jj,aa,&lA);CHKERRQ(ierr); 106 107 /* create containers to destroy the data */ 108 ptrs[0] = aux; 109 ptrs[1] = data; 110 for (i=0; i<2; i++) { 111 PetscContainer c; 112 113 ierr = PetscContainerCreate(PETSC_COMM_SELF,&c);CHKERRQ(ierr); 114 ierr = PetscContainerSetPointer(c,ptrs[i]);CHKERRQ(ierr); 115 ierr = 
PetscContainerSetUserDestroy(c,MatISContainerDestroy_Private);CHKERRQ(ierr); 116 ierr = PetscObjectCompose((PetscObject)lA,names[i],(PetscObject)c);CHKERRQ(ierr); 117 ierr = PetscContainerDestroy(&c);CHKERRQ(ierr); 118 } 119 120 /* finalize matrix */ 121 ierr = MatISSetLocalMat(*newmat,lA);CHKERRQ(ierr); 122 ierr = MatDestroy(&lA);CHKERRQ(ierr); 123 ierr = MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 124 ierr = MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 125 PetscFunctionReturn(0); 126 } 127 128 #undef __FUNCT__ 129 #define __FUNCT__ "MatISSetUpSF" 130 /*@ 131 MatISSetUpSF - Setup star forest objects used by MatIS. 132 133 Collective on MPI_Comm 134 135 Input Parameters: 136 + A - the matrix 137 138 Level: advanced 139 140 Notes: This function does not need to be called by the user. 141 142 .keywords: matrix 143 144 .seealso: MatCreate(), MatCreateIS(), MatISSetPreallocation(), MatISGetLocalMat() 145 @*/ 146 PetscErrorCode MatISSetUpSF(Mat A) 147 { 148 PetscErrorCode ierr; 149 150 PetscFunctionBegin; 151 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 152 PetscValidType(A,1); 153 ierr = PetscTryMethod(A,"MatISSetUpSF_C",(Mat),(A));CHKERRQ(ierr); 154 PetscFunctionReturn(0); 155 } 156 157 #undef __FUNCT__ 158 #define __FUNCT__ "MatDiagonalSet_IS" 159 PetscErrorCode MatDiagonalSet_IS(Mat A,Vec D,InsertMode insmode) 160 { 161 Mat_IS *is = (Mat_IS*)A->data; 162 PetscErrorCode ierr; 163 164 PetscFunctionBegin; 165 if (!D) { /* this code branch is used by MatShift_IS */ 166 ierr = VecSet(is->y,1.);CHKERRQ(ierr); 167 } else { 168 ierr = VecScatterBegin(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 169 ierr = VecScatterEnd(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 170 } 171 ierr = VecPointwiseDivide(is->y,is->y,is->counter);CHKERRQ(ierr); 172 ierr = MatDiagonalSet(is->A,is->y,insmode);CHKERRQ(ierr); 173 PetscFunctionReturn(0); 174 } 175 176 #undef __FUNCT__ 177 #define __FUNCT__ "MatShift_IS" 178 PetscErrorCode 
MatShift_IS(Mat A,PetscScalar a) 179 { 180 PetscErrorCode ierr; 181 182 PetscFunctionBegin; 183 ierr = MatDiagonalSet_IS(A,NULL,ADD_VALUES);CHKERRQ(ierr); 184 PetscFunctionReturn(0); 185 } 186 187 #undef __FUNCT__ 188 #define __FUNCT__ "MatSetValuesLocal_SubMat_IS" 189 static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 190 { 191 PetscErrorCode ierr; 192 Mat_IS *is = (Mat_IS*)A->data; 193 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION]; 194 195 PetscFunctionBegin; 196 #if defined(PETSC_USE_DEBUG) 197 if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n); 198 #endif 199 ierr = ISLocalToGlobalMappingApply(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr); 200 ierr = ISLocalToGlobalMappingApply(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr); 201 ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr); 202 PetscFunctionReturn(0); 203 } 204 205 #undef __FUNCT__ 206 #define __FUNCT__ "MatSetValuesBlockedLocal_SubMat_IS" 207 static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 208 { 209 PetscErrorCode ierr; 210 Mat_IS *is = (Mat_IS*)A->data; 211 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION]; 212 213 PetscFunctionBegin; 214 #if defined(PETSC_USE_DEBUG) 215 if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n); 216 #endif 217 ierr = ISLocalToGlobalMappingApplyBlock(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr); 218 ierr = 
ISLocalToGlobalMappingApplyBlock(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
  ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscLayoutMapLocal_Private"
/*
   Given N global indices idxs[] listed on this process, determines which of the
   locally-owned entries of the layout are referenced by any process.
   On exit (all outputs optional):
     on     - number of owned entries that were selected
     oidxs  - local positions (in the layout) of the selected entries (malloc'ed, caller frees)
     ogidxs - for each selected entry, a globally contiguous new index built from
              an MPI_Scan over the per-process selection counts (malloc'ed, caller frees)
*/
static PetscErrorCode PetscLayoutMapLocal_Private(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
{
  PetscInt      *owners = map->range;
  PetscInt      n       = map->n;
  PetscSF       sf;
  PetscInt      *lidxs,*work = NULL;
  PetscSFNode   *ridxs;
  PetscMPIInt   rank;
  PetscInt      r, p = 0, len = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Create SF where leaves are input idxs and roots are owned idxs (code adapted from MatZeroRowsMapLocal_Private) */
  ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr);
  ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr);
  for (r = 0; r < n; ++r) lidxs[r] = -1; /* -1 marks "not referenced" at the roots */
  ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr);
  for (r = 0; r < N; ++r) {
    const PetscInt idx = idxs[r];
    if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
    if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
      ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr);
    }
    ridxs[r].rank = p;
    ridxs[r].index = idxs[r] - owners[p];
  }
  ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr);
  /* LOR with the (nonnegative) leaf values turns the -1 roots into "selected" marks */
  ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  if (ogidxs) { /* communicate global idxs */
    PetscInt cum = 0,start,*work2;

    ierr = PetscMalloc1(n,&work);CHKERRQ(ierr);
    ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr);
    /* number the selected leaves contiguously across processes via an exclusive scan */
    for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
    ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    start -= cum;
    cum = 0;
    for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
    ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscFree(work2);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* Compress and put in indices */
  for (r = 0; r < n; ++r)
    if (lidxs[r] >= 0) {
      if (work) work[len] = work[r];
      lidxs[len++] = r;
    }
  if (on) *on = len;
  if (oidxs) *oidxs = lidxs;
  if (ogidxs) *ogidxs = work;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_IS"
/* Extracts a parallel submatrix of a MATIS matrix; irow/icol must not contain duplicates */
static PetscErrorCode MatGetSubMatrix_IS(Mat mat,IS irow,IS icol,MatReuse scall,Mat *newmat)
{
  Mat               locmat,newlocmat;
  Mat_IS            *newmatis;
#if defined(PETSC_USE_DEBUG)
  Vec               rtest,ltest;
  const PetscScalar *array;
#endif
  const PetscInt    *idxs;
  PetscInt          i,m,n;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  if (scall == MAT_REUSE_MATRIX) {
    PetscBool ismatis;

    ierr = PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);CHKERRQ(ierr);
    if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Not of MATIS type");
    newmatis = (Mat_IS*)(*newmat)->data;
    if (!newmatis->getsub_ris) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local row IS");
    if (!newmatis->getsub_cis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix!
Misses local col IS"); 305 } 306 /* irow and icol may not have duplicate entries */ 307 #if defined(PETSC_USE_DEBUG) 308 ierr = MatCreateVecs(mat,<est,&rtest);CHKERRQ(ierr); 309 ierr = ISGetLocalSize(irow,&n);CHKERRQ(ierr); 310 ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr); 311 for (i=0;i<n;i++) { 312 ierr = VecSetValue(rtest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr); 313 } 314 ierr = VecAssemblyBegin(rtest);CHKERRQ(ierr); 315 ierr = VecAssemblyEnd(rtest);CHKERRQ(ierr); 316 ierr = VecGetLocalSize(rtest,&n);CHKERRQ(ierr); 317 ierr = VecGetOwnershipRange(rtest,&m,NULL);CHKERRQ(ierr); 318 ierr = VecGetArrayRead(rtest,&array);CHKERRQ(ierr); 319 for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Irow may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i])); 320 ierr = VecRestoreArrayRead(rtest,&array);CHKERRQ(ierr); 321 ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr); 322 ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr); 323 ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr); 324 for (i=0;i<n;i++) { 325 ierr = VecSetValue(ltest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr); 326 } 327 ierr = VecAssemblyBegin(ltest);CHKERRQ(ierr); 328 ierr = VecAssemblyEnd(ltest);CHKERRQ(ierr); 329 ierr = VecGetLocalSize(ltest,&n);CHKERRQ(ierr); 330 ierr = VecGetOwnershipRange(ltest,&m,NULL);CHKERRQ(ierr); 331 ierr = VecGetArrayRead(ltest,&array);CHKERRQ(ierr); 332 for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! 
Icol may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i])); 333 ierr = VecRestoreArrayRead(ltest,&array);CHKERRQ(ierr); 334 ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr); 335 ierr = VecDestroy(&rtest);CHKERRQ(ierr); 336 ierr = VecDestroy(<est);CHKERRQ(ierr); 337 #endif 338 if (scall == MAT_INITIAL_MATRIX) { 339 Mat_IS *matis = (Mat_IS*)mat->data; 340 ISLocalToGlobalMapping rl2g; 341 IS is; 342 PetscInt *lidxs,*lgidxs,*newgidxs; 343 PetscInt ll,newloc; 344 MPI_Comm comm; 345 346 ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr); 347 ierr = ISGetLocalSize(irow,&m);CHKERRQ(ierr); 348 ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr); 349 ierr = MatCreate(comm,newmat);CHKERRQ(ierr); 350 ierr = MatSetType(*newmat,MATIS);CHKERRQ(ierr); 351 ierr = MatSetSizes(*newmat,m,n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr); 352 /* communicate irow to their owners in the layout */ 353 ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr); 354 ierr = PetscLayoutMapLocal_Private(mat->rmap,m,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr); 355 ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr); 356 ierr = MatISSetUpSF(mat);CHKERRQ(ierr); 357 ierr = PetscMemzero(matis->sf_rootdata,matis->sf->nroots*sizeof(PetscInt));CHKERRQ(ierr); 358 for (i=0;i<ll;i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i]+1; 359 ierr = PetscFree(lidxs);CHKERRQ(ierr); 360 ierr = PetscFree(lgidxs);CHKERRQ(ierr); 361 ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 362 ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 363 for (i=0,newloc=0;i<matis->sf->nleaves;i++) if (matis->sf_leafdata[i]) newloc++; 364 ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr); 365 ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr); 366 for (i=0,newloc=0;i<matis->sf->nleaves;i++) 367 if (matis->sf_leafdata[i]) { 368 lidxs[newloc] = i; 369 newgidxs[newloc++] = matis->sf_leafdata[i]-1; 370 } 371 ierr = 
ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 372 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr); 373 ierr = ISDestroy(&is);CHKERRQ(ierr); 374 /* local is to extract local submatrix */ 375 newmatis = (Mat_IS*)(*newmat)->data; 376 ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_ris);CHKERRQ(ierr); 377 if (mat->congruentlayouts == -1) { /* first time we compare rows and cols layouts */ 378 PetscBool cong; 379 ierr = PetscLayoutCompare(mat->rmap,mat->cmap,&cong);CHKERRQ(ierr); 380 if (cong) mat->congruentlayouts = 1; 381 else mat->congruentlayouts = 0; 382 } 383 if (mat->congruentlayouts && irow == icol) { 384 ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,rl2g);CHKERRQ(ierr); 385 ierr = PetscObjectReference((PetscObject)newmatis->getsub_ris);CHKERRQ(ierr); 386 newmatis->getsub_cis = newmatis->getsub_ris; 387 } else { 388 ISLocalToGlobalMapping cl2g; 389 390 /* communicate icol to their owners in the layout */ 391 ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr); 392 ierr = PetscLayoutMapLocal_Private(mat->cmap,n,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr); 393 ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr); 394 ierr = PetscMemzero(matis->csf_rootdata,matis->csf->nroots*sizeof(PetscInt));CHKERRQ(ierr); 395 for (i=0;i<ll;i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i]+1; 396 ierr = PetscFree(lidxs);CHKERRQ(ierr); 397 ierr = PetscFree(lgidxs);CHKERRQ(ierr); 398 ierr = PetscSFBcastBegin(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr); 399 ierr = PetscSFBcastEnd(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr); 400 for (i=0,newloc=0;i<matis->csf->nleaves;i++) if (matis->csf_leafdata[i]) newloc++; 401 ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr); 402 ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr); 403 for (i=0,newloc=0;i<matis->csf->nleaves;i++) 404 if (matis->csf_leafdata[i]) { 405 lidxs[newloc] = i; 406 newgidxs[newloc++] = 
matis->csf_leafdata[i]-1; 407 } 408 ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 409 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr); 410 ierr = ISDestroy(&is);CHKERRQ(ierr); 411 /* local is to extract local submatrix */ 412 ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_cis);CHKERRQ(ierr); 413 ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);CHKERRQ(ierr); 414 ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr); 415 } 416 ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr); 417 } else { 418 ierr = MatISGetLocalMat(*newmat,&newlocmat);CHKERRQ(ierr); 419 } 420 ierr = MatISGetLocalMat(mat,&locmat);CHKERRQ(ierr); 421 newmatis = (Mat_IS*)(*newmat)->data; 422 ierr = MatGetSubMatrix(locmat,newmatis->getsub_ris,newmatis->getsub_cis,scall,&newlocmat);CHKERRQ(ierr); 423 if (scall == MAT_INITIAL_MATRIX) { 424 ierr = MatISSetLocalMat(*newmat,newlocmat);CHKERRQ(ierr); 425 ierr = MatDestroy(&newlocmat);CHKERRQ(ierr); 426 } 427 ierr = MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 428 ierr = MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 429 PetscFunctionReturn(0); 430 } 431 432 #undef __FUNCT__ 433 #define __FUNCT__ "MatCopy_IS" 434 static PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str) 435 { 436 Mat_IS *a = (Mat_IS*)A->data,*b; 437 PetscBool ismatis; 438 PetscErrorCode ierr; 439 440 PetscFunctionBegin; 441 ierr = PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);CHKERRQ(ierr); 442 if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Need to be implemented"); 443 b = (Mat_IS*)B->data; 444 ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr); 445 PetscFunctionReturn(0); 446 } 447 448 #undef __FUNCT__ 449 #define __FUNCT__ "MatMissingDiagonal_IS" 450 static PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool *missing,PetscInt *d) 451 { 452 Vec v; 453 const PetscScalar *array; 454 PetscInt i,n; 455 PetscErrorCode ierr; 456 457 
PetscFunctionBegin;
  *missing = PETSC_FALSE;
  /* extract the diagonal and look for an exactly-zero entry */
  ierr = MatCreateVecs(A,NULL,&v);CHKERRQ(ierr);
  ierr = MatGetDiagonal(A,v);CHKERRQ(ierr);
  ierr = VecGetLocalSize(v,&n);CHKERRQ(ierr);
  ierr = VecGetArrayRead(v,&array);CHKERRQ(ierr);
  for (i=0;i<n;i++) if (array[i] == 0.) break;
  ierr = VecRestoreArrayRead(v,&array);CHKERRQ(ierr);
  ierr = VecDestroy(&v);CHKERRQ(ierr);
  if (i != n) *missing = PETSC_TRUE; /* loop exited early: a zero diagonal entry was found */
  if (d) {
    *d = -1;
    if (*missing) {
      PetscInt rstart;
      ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
      *d = i+rstart; /* global row of the first zero diagonal entry */
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetUpSF_IS"
/* Builds the star forests relating local (subdomain) dofs to the global row/column
   layouts, plus the root/leaf work arrays used by preallocation and assembly.
   Idempotent: returns immediately if the row SF already exists. */
static PetscErrorCode MatISSetUpSF_IS(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscInt       nleaves;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (matis->sf) PetscFunctionReturn(0);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(B->rmap->mapping,&nleaves);CHKERRQ(ierr);
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscMalloc2(matis->sf->nroots,&matis->sf_rootdata,matis->sf->nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  if (B->rmap->mapping != B->cmap->mapping) { /* setup SF for columns */
    ierr = ISLocalToGlobalMappingGetSize(B->cmap->mapping,&nleaves);CHKERRQ(ierr);
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->csf);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscSFSetGraphLayout(matis->csf,B->cmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscMalloc2(matis->csf->nroots,&matis->csf_rootdata,matis->csf->nleaves,&matis->csf_leafdata);CHKERRQ(ierr);
  } else { /* same row/col mapping: the column SF aliases the row SF (do not double-free) */
    matis->csf          = matis->sf;
    matis->csf_leafdata = matis->sf_leafdata;
    matis->csf_rootdata = matis->sf_rootdata;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
          DIAGONAL portion of the local submatrix (possibly different for each row)
          or NULL, if d_nz is used to specify the nonzero structure.
          The size of this array is equal to the number of local rows, i.e 'm'.
          For matrices that will be factored, you must leave room for (and set)
          the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
          OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or NULL, if o_nz is used to specify the nonzero
          structure. The size of this array is equal to the number
          of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS
@*/
PetscErrorCode MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/* Implements MatISSetPreallocation(): converts global (MPIAIJ-style) row estimates
   to estimates for the local matrix of each subdomain via the star forest. */
static PetscErrorCode MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  ierr = MatISSetUpSF(B);CHKERRQ(ierr);

  /* total nonzero estimate per owned row = diagonal + off-diagonal contributions */
  if (!d_nnz) for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] = d_nz;
  else for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] = d_nnz[i];

  if (!o_nnz) for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] += o_nz;
  else for (i=0;i<matis->sf->nroots;i++) matis->sf_rootdata[i] += o_nnz[i];

  /* broadcast the row estimates to every subdomain sharing each row;
     overlap the communication with the local queries */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);

  /* AIJ case: cap each row estimate at the local number of columns */
  for (i=0;i<matis->sf->nleaves;i++) matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);

  /* BAIJ case: compress point-row estimates to block-row estimates */
  for (i=0;i<matis->sf->nleaves/bs;i++) matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);

  /* SBAIJ case: subtract i from each block row — presumably accounting for the
     symmetric format storing only the upper triangle (NOTE(review): confirm) */
  for (i=0;i<matis->sf->nleaves/bs;i++) matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
/* Computes and sets the preallocation of the assembled matrix B from the unassembled
   MATIS matrix A; maxreduce selects MPI_MAX (cheaper, looser) vs MPI_SUM reduction
   of the per-subdomain nonzero counts on shared rows. */
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS          *matis = (Mat_IS*)(A->data);
  PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt  *global_indices_r,*global_indices_c;
  PetscInt        i,j,bs,rows,cols;
  PetscInt        lrows,lcols;
  PetscInt        local_rows,local_cols;
  PetscMPIInt     nsubdomains;
  PetscBool       isdense,issbaij;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
  ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (A->rmap->mapping != A->cmap->mapping) {
    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  } else {
    global_indices_c = global_indices_r;
  }

  if (issbaij) { /* need full rows to count nonzeros */
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  /*
     An SF reduce is needed to sum up properly on shared rows.
Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  ierr = MatISSetUpSF(A);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
  /* All processes need to compute entire row ownership */
  ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j] = i;
    }
  }
  /* from here on, mat_ranges holds the COLUMN ownership ranges */
  ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);

  /*
     my_dnz and my_onz contains exact contribution to preallocation from each local mat
     then, they will be summed up properly. This way, preallocation is always sufficient
  */
  ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=i;j<local_rows;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (i != j) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[j] += 1;
          } else {
            my_onz[j] += 1;
          }
        }
      }
    }
  } else if (matis->A->ops->getrowij) { /* fast path: walk the local CSR structure directly */
    const PetscInt *ii,*jj,*jptr;
    PetscBool done;
    ierr = MatGetRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
    jptr = jj;
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=0;j<ii[i+1]-ii[i];j++,jptr++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[*jptr];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[*jptr] += 1;
          } else {
            my_onz[*jptr] += 1;
          }
        }
      }
    }
    ierr = MatRestoreRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
  } else { /* loop over rows and use MatGetRow */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt ncols,index_row = global_indices_r[i];
      ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (global_indices_c != global_indices_r) {
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  }
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);

  /* Reduce my_dnz and my_onz */
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  /* compress point-row counts to block-row counts for the blocked formats */
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
770 PetscInt bs,rows,cols,lrows,lcols; 771 PetscInt local_rows,local_cols; 772 PetscBool isseqdense,isseqsbaij,isseqaij,isseqbaij; 773 #if defined (PETSC_USE_DEBUG) 774 PetscBool lb[4],bb[4]; 775 #endif 776 PetscMPIInt nsubdomains; 777 /* values insertion */ 778 PetscScalar *array; 779 /* work */ 780 PetscErrorCode ierr; 781 782 PetscFunctionBegin; 783 /* get info from mat */ 784 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr); 785 if (nsubdomains == 1) { 786 if (reuse == MAT_INITIAL_MATRIX) { 787 ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr); 788 } else { 789 ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 790 } 791 PetscFunctionReturn(0); 792 } 793 ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr); 794 ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr); 795 ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr); 796 ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr); 797 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isseqdense);CHKERRQ(ierr); 798 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr); 799 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQBAIJ,&isseqbaij);CHKERRQ(ierr); 800 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&isseqsbaij);CHKERRQ(ierr); 801 if (!isseqdense && !isseqaij && !isseqbaij && !isseqsbaij) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not for matrix type %s",((PetscObject)(matis->A))->type_name); 802 #if defined (PETSC_USE_DEBUG) 803 lb[0] = isseqdense; 804 lb[1] = isseqaij; 805 lb[2] = isseqbaij; 806 lb[3] = isseqsbaij; 807 ierr = MPIU_Allreduce(lb,bb,4,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 808 if (!bb[0] && !bb[1] && !bb[2] && !bb[3]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Local matrices must have the same type"); 809 #endif 810 811 if (reuse == MAT_INITIAL_MATRIX) { 812 ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr); 813 ierr 
= MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr); 814 ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr); 815 if (!isseqsbaij) { 816 ierr = MatSetType(*M,MATAIJ);CHKERRQ(ierr); 817 } else { 818 ierr = MatSetType(*M,MATSBAIJ);CHKERRQ(ierr); 819 } 820 ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr); 821 } else { 822 PetscInt mbs,mrows,mcols,mlrows,mlcols; 823 /* some checks */ 824 ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr); 825 ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr); 826 ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr); 827 if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows); 828 if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols); 829 if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows); 830 if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols); 831 if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. 
Wrong block size (%d != %d)",bs,mbs); 832 ierr = MatZeroEntries(*M);CHKERRQ(ierr); 833 } 834 835 if (isseqsbaij) { 836 ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr); 837 } else { 838 ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr); 839 local_mat = matis->A; 840 } 841 842 /* Set values */ 843 ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr); 844 if (isseqdense) { /* special case for dense local matrices */ 845 PetscInt i,*dummy_rows,*dummy_cols; 846 847 if (local_rows != local_cols) { 848 ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr); 849 for (i=0;i<local_rows;i++) dummy_rows[i] = i; 850 for (i=0;i<local_cols;i++) dummy_cols[i] = i; 851 } else { 852 ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr); 853 for (i=0;i<local_rows;i++) dummy_rows[i] = i; 854 dummy_cols = dummy_rows; 855 } 856 ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr); 857 ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr); 858 ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr); 859 ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr); 860 if (dummy_rows != dummy_cols) { 861 ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr); 862 } else { 863 ierr = PetscFree(dummy_rows);CHKERRQ(ierr); 864 } 865 } else if (isseqaij) { 866 PetscInt i,nvtxs,*xadj,*adjncy; 867 PetscBool done; 868 869 ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr); 870 if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__); 871 ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr); 872 for (i=0;i<nvtxs;i++) { 873 ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr); 874 } 875 ierr = 
MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr); 876 if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__); 877 ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr); 878 } else { /* very basic values insertion for all other matrix types */ 879 PetscInt i; 880 881 for (i=0;i<local_rows;i++) { 882 PetscInt j; 883 const PetscInt *local_indices_cols; 884 885 ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr); 886 ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr); 887 ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr); 888 } 889 } 890 ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 891 ierr = MatDestroy(&local_mat);CHKERRQ(ierr); 892 ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 893 if (isseqdense) { 894 ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr); 895 } 896 PetscFunctionReturn(0); 897 } 898 899 #undef __FUNCT__ 900 #define __FUNCT__ "MatISGetMPIXAIJ" 901 /*@ 902 MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format 903 904 Input Parameter: 905 . mat - the matrix (should be of type MATIS) 906 . reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 907 908 Output Parameter: 909 . newmat - the matrix in AIJ format 910 911 Level: developer 912 913 Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested. 
914 915 .seealso: MATIS 916 @*/ 917 PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat) 918 { 919 PetscErrorCode ierr; 920 921 PetscFunctionBegin; 922 PetscValidHeaderSpecific(mat,MAT_CLASSID,1); 923 PetscValidLogicalCollectiveEnum(mat,reuse,2); 924 PetscValidPointer(newmat,3); 925 if (reuse != MAT_INITIAL_MATRIX) { 926 PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3); 927 PetscCheckSameComm(mat,1,*newmat,3); 928 if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix"); 929 } 930 ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr); 931 PetscFunctionReturn(0); 932 } 933 934 #undef __FUNCT__ 935 #define __FUNCT__ "MatDuplicate_IS" 936 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat) 937 { 938 PetscErrorCode ierr; 939 Mat_IS *matis = (Mat_IS*)(mat->data); 940 PetscInt bs,m,n,M,N; 941 Mat B,localmat; 942 943 PetscFunctionBegin; 944 ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr); 945 ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr); 946 ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr); 947 ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr); 948 ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr); 949 ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr); 950 ierr = MatDestroy(&localmat);CHKERRQ(ierr); 951 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 952 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 953 *newmat = B; 954 PetscFunctionReturn(0); 955 } 956 957 #undef __FUNCT__ 958 #define __FUNCT__ "MatIsHermitian_IS" 959 static PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool *flg) 960 { 961 PetscErrorCode ierr; 962 Mat_IS *matis = (Mat_IS*)A->data; 963 PetscBool local_sym; 964 965 PetscFunctionBegin; 966 ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr); 967 ierr = 
MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr); 968 PetscFunctionReturn(0); 969 } 970 971 #undef __FUNCT__ 972 #define __FUNCT__ "MatIsSymmetric_IS" 973 static PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool *flg) 974 { 975 PetscErrorCode ierr; 976 Mat_IS *matis = (Mat_IS*)A->data; 977 PetscBool local_sym; 978 979 PetscFunctionBegin; 980 ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr); 981 ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr); 982 PetscFunctionReturn(0); 983 } 984 985 #undef __FUNCT__ 986 #define __FUNCT__ "MatIsStructurallySymmetric_IS" 987 static PetscErrorCode MatIsStructurallySymmetric_IS(Mat A,PetscBool *flg) 988 { 989 PetscErrorCode ierr; 990 Mat_IS *matis = (Mat_IS*)A->data; 991 PetscBool local_sym; 992 993 PetscFunctionBegin; 994 if (A->rmap->mapping != A->cmap->mapping) { 995 *flg = PETSC_FALSE; 996 PetscFunctionReturn(0); 997 } 998 ierr = MatIsStructurallySymmetric(matis->A,&local_sym);CHKERRQ(ierr); 999 ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr); 1000 PetscFunctionReturn(0); 1001 } 1002 1003 #undef __FUNCT__ 1004 #define __FUNCT__ "MatDestroy_IS" 1005 static PetscErrorCode MatDestroy_IS(Mat A) 1006 { 1007 PetscErrorCode ierr; 1008 Mat_IS *b = (Mat_IS*)A->data; 1009 1010 PetscFunctionBegin; 1011 ierr = MatDestroy(&b->A);CHKERRQ(ierr); 1012 ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr); 1013 ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr); 1014 ierr = VecDestroy(&b->x);CHKERRQ(ierr); 1015 ierr = VecDestroy(&b->y);CHKERRQ(ierr); 1016 ierr = VecDestroy(&b->counter);CHKERRQ(ierr); 1017 ierr = ISDestroy(&b->getsub_ris);CHKERRQ(ierr); 1018 ierr = ISDestroy(&b->getsub_cis);CHKERRQ(ierr); 1019 if (b->sf != b->csf) { 1020 ierr = PetscSFDestroy(&b->csf);CHKERRQ(ierr); 1021 ierr = PetscFree2(b->csf_rootdata,b->csf_leafdata);CHKERRQ(ierr); 1022 } 1023 ierr = 
PetscSFDestroy(&b->sf);CHKERRQ(ierr); 1024 ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr); 1025 ierr = PetscFree(A->data);CHKERRQ(ierr); 1026 ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr); 1027 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr); 1028 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr); 1029 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr); 1030 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr); 1031 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetUpSF_C",NULL);CHKERRQ(ierr); 1032 PetscFunctionReturn(0); 1033 } 1034 1035 #undef __FUNCT__ 1036 #define __FUNCT__ "MatMult_IS" 1037 static PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y) 1038 { 1039 PetscErrorCode ierr; 1040 Mat_IS *is = (Mat_IS*)A->data; 1041 PetscScalar zero = 0.0; 1042 1043 PetscFunctionBegin; 1044 /* scatter the global vector x into the local work vector */ 1045 ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1046 ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1047 1048 /* multiply the local matrix */ 1049 ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr); 1050 1051 /* scatter product back into global memory */ 1052 ierr = VecSet(y,zero);CHKERRQ(ierr); 1053 ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1054 ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1055 PetscFunctionReturn(0); 1056 } 1057 1058 #undef __FUNCT__ 1059 #define __FUNCT__ "MatMultAdd_IS" 1060 static PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3) 1061 { 1062 Vec temp_vec; 1063 PetscErrorCode ierr; 1064 1065 PetscFunctionBegin; /* v3 = v2 + A * v1.*/ 1066 if (v3 != v2) { 1067 ierr = MatMult(A,v1,v3);CHKERRQ(ierr); 1068 ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr); 1069 } 
else { 1070 ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr); 1071 ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr); 1072 ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr); 1073 ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr); 1074 ierr = VecDestroy(&temp_vec);CHKERRQ(ierr); 1075 } 1076 PetscFunctionReturn(0); 1077 } 1078 1079 #undef __FUNCT__ 1080 #define __FUNCT__ "MatMultTranspose_IS" 1081 static PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x) 1082 { 1083 Mat_IS *is = (Mat_IS*)A->data; 1084 PetscErrorCode ierr; 1085 1086 PetscFunctionBegin; 1087 /* scatter the global vector x into the local work vector */ 1088 ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1089 ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1090 1091 /* multiply the local matrix */ 1092 ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr); 1093 1094 /* scatter product back into global vector */ 1095 ierr = VecSet(x,0);CHKERRQ(ierr); 1096 ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1097 ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1098 PetscFunctionReturn(0); 1099 } 1100 1101 #undef __FUNCT__ 1102 #define __FUNCT__ "MatMultTransposeAdd_IS" 1103 static PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3) 1104 { 1105 Vec temp_vec; 1106 PetscErrorCode ierr; 1107 1108 PetscFunctionBegin; /* v3 = v2 + A' * v1.*/ 1109 if (v3 != v2) { 1110 ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr); 1111 ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr); 1112 } else { 1113 ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr); 1114 ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr); 1115 ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr); 1116 ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr); 1117 ierr = VecDestroy(&temp_vec);CHKERRQ(ierr); 1118 } 1119 PetscFunctionReturn(0); 1120 } 1121 1122 #undef __FUNCT__ 1123 #define __FUNCT__ "MatView_IS" 1124 static PetscErrorCode MatView_IS(Mat 
A,PetscViewer viewer) 1125 { 1126 Mat_IS *a = (Mat_IS*)A->data; 1127 PetscErrorCode ierr; 1128 PetscViewer sviewer; 1129 PetscBool isascii,view = PETSC_TRUE; 1130 1131 PetscFunctionBegin; 1132 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr); 1133 if (isascii) { 1134 PetscViewerFormat format; 1135 1136 ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 1137 if (format == PETSC_VIEWER_ASCII_INFO) view = PETSC_FALSE; 1138 } 1139 if (!view) PetscFunctionReturn(0); 1140 ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 1141 ierr = MatView(a->A,sviewer);CHKERRQ(ierr); 1142 ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 1143 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 1144 PetscFunctionReturn(0); 1145 } 1146 1147 #undef __FUNCT__ 1148 #define __FUNCT__ "MatSetLocalToGlobalMapping_IS" 1149 static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping) 1150 { 1151 PetscErrorCode ierr; 1152 PetscInt nr,rbs,nc,cbs; 1153 Mat_IS *is = (Mat_IS*)A->data; 1154 IS from,to; 1155 Vec cglobal,rglobal; 1156 1157 PetscFunctionBegin; 1158 PetscCheckSameComm(A,1,rmapping,2); 1159 PetscCheckSameComm(A,1,cmapping,3); 1160 /* Destroy any previous data */ 1161 ierr = VecDestroy(&is->x);CHKERRQ(ierr); 1162 ierr = VecDestroy(&is->y);CHKERRQ(ierr); 1163 ierr = VecDestroy(&is->counter);CHKERRQ(ierr); 1164 ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr); 1165 ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr); 1166 ierr = MatDestroy(&is->A);CHKERRQ(ierr); 1167 ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr); 1168 ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr); 1169 1170 /* Setup Layout and set local to global maps */ 1171 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 1172 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 1173 ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr); 1174 
ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr); 1175 1176 /* Create the local matrix A */ 1177 ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr); 1178 ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr); 1179 ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr); 1180 ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr); 1181 ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr); 1182 ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr); 1183 ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr); 1184 ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr); 1185 ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr); 1186 ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr); 1187 ierr = MatSetFromOptions(is->A);CHKERRQ(ierr); 1188 1189 if (!is->islocalref) { /* setup scatters and local vectors for MatMult */ 1190 /* Create the local work vectors */ 1191 ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr); 1192 1193 /* setup the global to local scatters */ 1194 ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr); 1195 ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr); 1196 ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr); 1197 ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr); 1198 if (rmapping != cmapping) { 1199 ierr = ISDestroy(&to);CHKERRQ(ierr); 1200 ierr = ISDestroy(&from);CHKERRQ(ierr); 1201 ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr); 1202 ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr); 1203 ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr); 1204 } else { 1205 ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr); 1206 is->cctx = is->rctx; 1207 } 1208 1209 /* interface counter vector (local) */ 1210 ierr = VecDuplicate(is->y,&is->counter);CHKERRQ(ierr); 1211 ierr = VecSet(is->y,1.);CHKERRQ(ierr); 1212 ierr = 
VecScatterBegin(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1213 ierr = VecScatterEnd(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1214 ierr = VecScatterBegin(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1215 ierr = VecScatterEnd(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1216 1217 /* free workspace */ 1218 ierr = VecDestroy(&rglobal);CHKERRQ(ierr); 1219 ierr = VecDestroy(&cglobal);CHKERRQ(ierr); 1220 ierr = ISDestroy(&to);CHKERRQ(ierr); 1221 ierr = ISDestroy(&from);CHKERRQ(ierr); 1222 } 1223 ierr = MatSetUp(A);CHKERRQ(ierr); 1224 PetscFunctionReturn(0); 1225 } 1226 1227 #undef __FUNCT__ 1228 #define __FUNCT__ "MatSetValues_IS" 1229 static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv) 1230 { 1231 Mat_IS *is = (Mat_IS*)mat->data; 1232 PetscErrorCode ierr; 1233 #if defined(PETSC_USE_DEBUG) 1234 PetscInt i,zm,zn; 1235 #endif 1236 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION]; 1237 1238 PetscFunctionBegin; 1239 #if defined(PETSC_USE_DEBUG) 1240 if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n); 1241 /* count negative indices */ 1242 for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++; 1243 for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++; 1244 #endif 1245 ierr = ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr); 1246 ierr = ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr); 1247 #if defined(PETSC_USE_DEBUG) 1248 /* count negative indices (should be the same as before) */ 1249 for (i=0;i<m;i++) if (rows_l[i] < 0) zm--; 1250 for (i=0;i<n;i++) if (cols_l[i] < 0) zn--; 1251 if (zm) 
SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS"); 1252 if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS"); 1253 #endif 1254 ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr); 1255 PetscFunctionReturn(0); 1256 } 1257 1258 #undef __FUNCT__ 1259 #define __FUNCT__ "MatSetValuesBlocked_IS" 1260 static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv) 1261 { 1262 Mat_IS *is = (Mat_IS*)mat->data; 1263 PetscErrorCode ierr; 1264 #if defined(PETSC_USE_DEBUG) 1265 PetscInt i,zm,zn; 1266 #endif 1267 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION]; 1268 1269 PetscFunctionBegin; 1270 #if defined(PETSC_USE_DEBUG) 1271 if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n); 1272 /* count negative indices */ 1273 for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++; 1274 for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++; 1275 #endif 1276 ierr = ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr); 1277 ierr = ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr); 1278 #if defined(PETSC_USE_DEBUG) 1279 /* count negative indices (should be the same as before) */ 1280 for (i=0;i<m;i++) if (rows_l[i] < 0) zm--; 1281 for (i=0;i<n;i++) if (cols_l[i] < 0) zn--; 1282 if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS"); 1283 if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! 
Maybe you should not use MATIS"); 1284 #endif 1285 ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr); 1286 PetscFunctionReturn(0); 1287 } 1288 1289 #undef __FUNCT__ 1290 #define __FUNCT__ "MatSetValuesLocal_IS" 1291 static PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 1292 { 1293 PetscErrorCode ierr; 1294 Mat_IS *is = (Mat_IS*)A->data; 1295 1296 PetscFunctionBegin; 1297 ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr); 1298 PetscFunctionReturn(0); 1299 } 1300 1301 #undef __FUNCT__ 1302 #define __FUNCT__ "MatSetValuesBlockedLocal_IS" 1303 static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 1304 { 1305 PetscErrorCode ierr; 1306 Mat_IS *is = (Mat_IS*)A->data; 1307 1308 PetscFunctionBegin; 1309 ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr); 1310 PetscFunctionReturn(0); 1311 } 1312 1313 #undef __FUNCT__ 1314 #define __FUNCT__ "MatISZeroRowsColumnsLocal_Private" 1315 static PetscErrorCode MatISZeroRowsColumnsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,PetscBool columns) 1316 { 1317 Mat_IS *is = (Mat_IS*)A->data; 1318 PetscErrorCode ierr; 1319 1320 PetscFunctionBegin; 1321 if (!n) { 1322 is->pure_neumann = PETSC_TRUE; 1323 } else { 1324 PetscInt i; 1325 is->pure_neumann = PETSC_FALSE; 1326 1327 if (columns) { 1328 ierr = MatZeroRowsColumns(is->A,n,rows,diag,0,0);CHKERRQ(ierr); 1329 } else { 1330 ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr); 1331 } 1332 if (diag != 0.) 
{ 1333 const PetscScalar *array; 1334 ierr = VecGetArrayRead(is->counter,&array);CHKERRQ(ierr); 1335 for (i=0; i<n; i++) { 1336 ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr); 1337 } 1338 ierr = VecRestoreArrayRead(is->counter,&array);CHKERRQ(ierr); 1339 } 1340 ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1341 ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1342 } 1343 PetscFunctionReturn(0); 1344 } 1345 1346 #undef __FUNCT__ 1347 #define __FUNCT__ "MatZeroRowsColumns_Private_IS" 1348 static PetscErrorCode MatZeroRowsColumns_Private_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b,PetscBool columns) 1349 { 1350 Mat_IS *matis = (Mat_IS*)A->data; 1351 PetscInt nr,nl,len,i; 1352 PetscInt *lrows; 1353 PetscErrorCode ierr; 1354 1355 PetscFunctionBegin; 1356 #if defined(PETSC_USE_DEBUG) 1357 if (columns || diag != 0. || (x && b)) { 1358 PetscBool cong; 1359 ierr = PetscLayoutCompare(A->rmap,A->cmap,&cong);CHKERRQ(ierr); 1360 if (!cong && columns) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Columns can be zeroed if and only if A->rmap and A->cmap are congruent for MATIS"); 1361 if (!cong && diag != 0.) 
SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Nonzero diagonal value supported if and only if A->rmap and A->cmap are congruent for MATIS"); 1362 if (!cong && x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"A->rmap and A->cmap need to be congruent"); 1363 } 1364 #endif 1365 /* get locally owned rows */ 1366 ierr = PetscLayoutMapLocal_Private(A->rmap,n,rows,&len,&lrows,NULL);CHKERRQ(ierr); 1367 /* fix right hand side if needed */ 1368 if (x && b) { 1369 const PetscScalar *xx; 1370 PetscScalar *bb; 1371 1372 ierr = VecGetArrayRead(x, &xx);CHKERRQ(ierr); 1373 ierr = VecGetArray(b, &bb);CHKERRQ(ierr); 1374 for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]]; 1375 ierr = VecRestoreArrayRead(x, &xx);CHKERRQ(ierr); 1376 ierr = VecRestoreArray(b, &bb);CHKERRQ(ierr); 1377 } 1378 /* get rows associated to the local matrices */ 1379 ierr = MatISSetUpSF(A);CHKERRQ(ierr); 1380 ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr); 1381 ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr); 1382 ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr); 1383 for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1; 1384 ierr = PetscFree(lrows);CHKERRQ(ierr); 1385 ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 1386 ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 1387 ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr); 1388 for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i; 1389 ierr = MatISZeroRowsColumnsLocal_Private(A,nr,lrows,diag,columns);CHKERRQ(ierr); 1390 ierr = PetscFree(lrows);CHKERRQ(ierr); 1391 PetscFunctionReturn(0); 1392 } 1393 1394 #undef __FUNCT__ 1395 #define __FUNCT__ "MatZeroRows_IS" 1396 static PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b) 1397 { 1398 PetscErrorCode ierr; 1399 1400 PetscFunctionBegin; 1401 ierr = 
MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_FALSE);CHKERRQ(ierr); 1402 PetscFunctionReturn(0); 1403 } 1404 1405 #undef __FUNCT__ 1406 #define __FUNCT__ "MatZeroRowsColumns_IS" 1407 static PetscErrorCode MatZeroRowsColumns_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b) 1408 { 1409 PetscErrorCode ierr; 1410 1411 PetscFunctionBegin; 1412 ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_TRUE);CHKERRQ(ierr); 1413 PetscFunctionReturn(0); 1414 } 1415 1416 #undef __FUNCT__ 1417 #define __FUNCT__ "MatAssemblyBegin_IS" 1418 static PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type) 1419 { 1420 Mat_IS *is = (Mat_IS*)A->data; 1421 PetscErrorCode ierr; 1422 1423 PetscFunctionBegin; 1424 ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr); 1425 PetscFunctionReturn(0); 1426 } 1427 1428 #undef __FUNCT__ 1429 #define __FUNCT__ "MatAssemblyEnd_IS" 1430 static PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type) 1431 { 1432 Mat_IS *is = (Mat_IS*)A->data; 1433 PetscErrorCode ierr; 1434 1435 PetscFunctionBegin; 1436 ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr); 1437 PetscFunctionReturn(0); 1438 } 1439 1440 #undef __FUNCT__ 1441 #define __FUNCT__ "MatISGetLocalMat_IS" 1442 static PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local) 1443 { 1444 Mat_IS *is = (Mat_IS*)mat->data; 1445 1446 PetscFunctionBegin; 1447 *local = is->A; 1448 PetscFunctionReturn(0); 1449 } 1450 1451 #undef __FUNCT__ 1452 #define __FUNCT__ "MatISGetLocalMat" 1453 /*@ 1454 MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix. 1455 1456 Input Parameter: 1457 . mat - the matrix 1458 1459 Output Parameter: 1460 . local - the local matrix 1461 1462 Level: advanced 1463 1464 Notes: 1465 This can be called if you have precomputed the nonzero structure of the 1466 matrix and want to provide it to the inner matrix object to improve the performance 1467 of the MatSetValues() operation. 
1468 1469 .seealso: MATIS 1470 @*/ 1471 PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local) 1472 { 1473 PetscErrorCode ierr; 1474 1475 PetscFunctionBegin; 1476 PetscValidHeaderSpecific(mat,MAT_CLASSID,1); 1477 PetscValidPointer(local,2); 1478 ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr); 1479 PetscFunctionReturn(0); 1480 } 1481 1482 #undef __FUNCT__ 1483 #define __FUNCT__ "MatISSetLocalMat_IS" 1484 static PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local) 1485 { 1486 Mat_IS *is = (Mat_IS*)mat->data; 1487 PetscInt nrows,ncols,orows,ocols; 1488 PetscErrorCode ierr; 1489 1490 PetscFunctionBegin; 1491 if (is->A) { 1492 ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr); 1493 ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr); 1494 if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %Dx%D (you passed a %Dx%D matrix)",orows,ocols,nrows,ncols); 1495 } 1496 ierr = PetscObjectReference((PetscObject)local);CHKERRQ(ierr); 1497 ierr = MatDestroy(&is->A);CHKERRQ(ierr); 1498 is->A = local; 1499 PetscFunctionReturn(0); 1500 } 1501 1502 #undef __FUNCT__ 1503 #define __FUNCT__ "MatISSetLocalMat" 1504 /*@ 1505 MatISSetLocalMat - Replace the local matrix stored inside a MATIS object. 1506 1507 Input Parameter: 1508 . mat - the matrix 1509 . local - the local matrix 1510 1511 Output Parameter: 1512 1513 Level: advanced 1514 1515 Notes: 1516 This can be called if you have precomputed the local matrix and 1517 want to provide it to the matrix object MATIS. 
   .seealso: MATIS

@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  /* dispatch to the composed implementation (MatISSetLocalMat_IS) */
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_IS"
/* Zero all entries: since the matrix is stored unassembled, zeroing the
   local (Neumann) matrix on each process zeroes the global operator. */
static PetscErrorCode MatZeroEntries_IS(Mat A)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatScale_IS"
/* Scale the global operator by scaling each process's local matrix. */
static PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatScale(is->A,a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_IS"
/* Assemble the global diagonal: take the diagonal of each local matrix and
   ADD the contributions across processes via the reverse scatter, since a
   global diagonal entry may be shared by (split among) several subdomains. */
static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get diagonal of the local matrix */
  ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);

  /* scatter diagonal back into global vector */
  ierr = VecSet(v,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_IS"
/* Forward the option to the local matrix. */
static PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAXPY_IS"
/* Y += a*X performed on the local matrices; valid because both operands are
   unassembled with (assumed) matching local-to-global maps.
   NOTE(review): the maps are not checked here, only the type of X (and only
   in debug builds) -- presumably callers guarantee compatible layouts. */
static PetscErrorCode MatAXPY_IS(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  Mat_IS         *y = (Mat_IS*)Y->data;
  Mat_IS         *x;
#if defined(PETSC_USE_DEBUG)
  PetscBool      ismatis;
#endif
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  ierr = PetscObjectTypeCompare((PetscObject)X,MATIS,&ismatis);CHKERRQ(ierr);
  if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)Y),PETSC_ERR_SUP,"Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS");
#endif
  x = (Mat_IS*)X->data;
  ierr = MatAXPY(y->A,a,x->A,str);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetLocalSubMatrix_IS"
/* Build a MATIS "local reference" submatrix: a lightweight MATIS sharing the
   parent's local matrix, whose l2g maps compose the parent's maps with the
   given row/col index sets.  Only local set-values/assembly ops are kept. */
static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat)
{
  Mat                    lA;
  Mat_IS                 *matis;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  const PetscInt         *rg,*rl;
  PetscInt               nrg;
  PetscInt               N,M,nrl,i,*idxs;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  /* NOTE(review): rg is fetched and restored but its entries are never read
     in this visible code -- confirm whether the Get/Restore pair is needed */
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  ierr = ISGetLocalSize(row,&nrl);CHKERRQ(ierr);
  ierr = ISGetIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %D -> %D greater than maximum possible %D",i,rl[i],nrg);
#endif
  ierr = PetscMalloc1(nrg,&idxs);CHKERRQ(ierr);
  /* map from [0,nrl) to row */
  for (i=0;i<nrl;i++) idxs[i] = rl[i];
  /* pad the unmapped tail: nrg (an invalid index) in debug so stray use is
     caught, -1 otherwise */
#if defined(PETSC_USE_DEBUG)
  for (i=nrl;i<nrg;i++) idxs[i] = nrg;
#else
  for (i=nrl;i<nrg;i++) idxs[i] = -1;
#endif
  ierr = ISRestoreIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),nrg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  /* compute new l2g map for columns */
  if (col != row || A->rmap->mapping != A->cmap->mapping) {
    const PetscInt *cg,*cl;
    PetscInt       ncg;
    PetscInt       ncl;

    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISGetLocalSize(col,&ncl);CHKERRQ(ierr);
    ierr = ISGetIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %D -> %D greater than maximum possible %D",i,cl[i],ncg);
#endif
    ierr = PetscMalloc1(ncg,&idxs);CHKERRQ(ierr);
    /* map from [0,ncl) to col */
    for (i=0;i<ncl;i++) idxs[i] = cl[i];
    /* same padding convention as the row map above */
#if defined(PETSC_USE_DEBUG)
    for (i=ncl;i<ncg;i++) idxs[i] = ncg;
#else
    for (i=ncl;i<ncg;i++) idxs[i] = -1;
#endif
    ierr = ISRestoreIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),ncg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
  } else {
    /* square case with identical maps: reuse the row map */
    ierr = PetscObjectReference((PetscObject)rl2g);CHKERRQ(ierr);
    cl2g = rl2g;
  }
  /* create the MATIS submatrix */
  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)A),submat);CHKERRQ(ierr);
  ierr = MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*submat,MATIS);CHKERRQ(ierr);
  matis = (Mat_IS*)((*submat)->data);
  matis->islocalref = PETSC_TRUE;
  ierr = MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);CHKERRQ(ierr);
  /* share the parent's local matrix (reference taken by MatISSetLocalMat) */
  ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(*submat,lA);CHKERRQ(ierr);
  ierr = MatSetUp(*submat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
  /* remove unsupported ops: the local reference only supports local
     insertion and assembly */
  ierr = PetscMemzero((*submat)->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  (*submat)->ops->destroy               = MatDestroy_IS;
  (*submat)->ops->setvalueslocal        = MatSetValuesLocal_SubMat_IS;
  (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS;
  (*submat)->ops->assemblybegin         = MatAssemblyBegin_IS;
  (*submat)->ops->assemblyend           = MatAssemblyEnd_IS;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateIS"
/*@
    MatCreateIS - Creates a "process" unassembled matrix, assembled on each
       process but not across processes.

   Input Parameters:
+     comm    - MPI communicator that will share the matrix
.     bs      - block size of the matrix
.     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
.     rmap    - local to global map for rows
-     cmap    - local to global map for cols

   Output Parameter:
.    A - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details.
          m and n are NOT related to the size of the map; they are the size of the part of the vector owned
          by that process. The sizes of rmap and cmap define the size of the local matrices.
          If either rmap or cmap are NULL, then the matrix is assumed to be square.
.seealso: MATIS, MatSetLocalToGlobalMapping()
@*/
PetscErrorCode MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
  /* when only one mapping is supplied, the matrix is treated as square:
     the same map is used for both rows and columns */
  if (rmap && cmap) {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
  } else if (!rmap) {
    ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
  } else {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*MC
   MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition methods (e.g. PCBDDC or KSPFETIDP).
   This stores the matrices in globally unassembled form. Each processor
   assembles only its local Neumann problem and the parallel matrix vector
   product is handled "implicitly".

   Operations Provided:
+  MatMult()
.  MatMultAdd()
.  MatMultTranspose()
.  MatMultTransposeAdd()
.  MatZeroEntries()
.  MatSetOption()
.  MatZeroRows()
.  MatSetValues()
.  MatSetValuesBlocked()
.  MatSetValuesLocal()
.  MatSetValuesBlockedLocal()
.  MatScale()
.  MatGetDiagonal()
.  MatMissingDiagonal()
.  MatDuplicate()
.  MatCopy()
.  MatAXPY()
.  MatGetSubMatrix()
.  MatGetLocalSubMatrix()
-  MatSetLocalToGlobalMapping()

   Options Database Keys:
.
 -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()

   Notes: Options prefix for the inner matrix are given by -is_mat_xxx

          You must call MatSetLocalToGlobalMapping() before using this matrix type.

          You can do matrix preallocation on the local matrix after you obtain it with
          MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()

  Level: advanced

.seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), MatCreateIS(), PCBDDC, KSPFETIDP

M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/* Constructor for the MATIS type: allocates the Mat_IS data structure,
   installs the function table, and composes the MATIS-specific methods. */
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops: start from a zeroed table so unsupported operations fail
     cleanly, then install the MATIS implementations */
  ierr = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowscolumns         = MatZeroRowsColumns_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->isstructurallysymmetric = MatIsStructurallySymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;
  A->ops->missingdiagonal         = MatMissingDiagonal_IS;
  A->ops->copy                    = MatCopy_IS;
  A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;
  A->ops->getsubmatrix            = MatGetSubMatrix_IS;
  A->ops->axpy                    = MatAXPY_IS;
  A->ops->diagonalset             = MatDiagonalSet_IS;
  A->ops->shift                   = MatShift_IS;

  /* special MATIS functions: composed so the public MatISXXX() entry points
     can dispatch to them via PetscUseMethod/PetscTryMethod */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetUpSF_C",MatISSetUpSF_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}