1 2 /* 3 Creates a matrix class for using the Neumann-Neumann type preconditioners. 4 This stores the matrices in globally unassembled form. Each processor 5 assembles only its local Neumann problem and the parallel matrix vector 6 product is handled "implicitly". 7 8 Currently this allows for only one subdomain per processor. 9 */ 10 11 #include <../src/mat/impls/is/matis.h> /*I "petscmat.h" I*/ 12 13 #define MATIS_MAX_ENTRIES_INSERTION 2048 14 15 static PetscErrorCode MatISComputeSF_Private(Mat); 16 static PetscErrorCode MatISZeroRowsLocal_Private(Mat,PetscInt,const PetscInt[],PetscScalar); 17 18 #undef __FUNCT__ 19 #define __FUNCT__ "MatSetValuesLocal_SubMat_IS" 20 static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 21 { 22 PetscErrorCode ierr; 23 Mat_IS *is = (Mat_IS*)A->data; 24 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION]; 25 26 PetscFunctionBegin; 27 #if defined(PETSC_USE_DEBUG) 28 if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n); 29 #endif 30 ierr = ISLocalToGlobalMappingApply(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr); 31 ierr = ISLocalToGlobalMappingApply(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr); 32 ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr); 33 PetscFunctionReturn(0); 34 } 35 36 #undef __FUNCT__ 37 #define __FUNCT__ "MatSetValuesBlockedLocal_SubMat_IS" 38 static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv) 39 { 40 PetscErrorCode ierr; 41 Mat_IS *is = (Mat_IS*)A->data; 42 PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION]; 43 44 PetscFunctionBegin; 45 #if 
defined(PETSC_USE_DEBUG) 46 if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n); 47 #endif 48 ierr = ISLocalToGlobalMappingApplyBlock(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr); 49 ierr = ISLocalToGlobalMappingApplyBlock(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr); 50 ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr); 51 PetscFunctionReturn(0); 52 } 53 54 #undef __FUNCT__ 55 #define __FUNCT__ "PetscLayoutMapLocal_Private" 56 static PetscErrorCode PetscLayoutMapLocal_Private(PetscLayout map,PetscInt N,const PetscInt idxs[],const PetscInt gidxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs) 57 { 58 PetscInt *owners = map->range; 59 PetscInt n = map->n; 60 PetscSF sf; 61 PetscInt *lidxs,*work = NULL; 62 PetscSFNode *ridxs; 63 PetscMPIInt rank; 64 PetscInt r, p = 0, len = 0; 65 PetscErrorCode ierr; 66 67 PetscFunctionBegin; 68 /* Create SF where leaves are input idxs and roots are owned idxs (code adapted from MatZeroRowsMapLocal_Private) */ 69 ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr); 70 ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr); 71 for (r = 0; r < n; ++r) lidxs[r] = -1; 72 ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr); 73 for (r = 0; r < N; ++r) { 74 const PetscInt idx = idxs[r]; 75 if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N); 76 if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */ 77 ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr); 78 } 79 ridxs[r].rank = p; 80 ridxs[r].index = idxs[r] - owners[p]; 81 } 82 ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr); 83 ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr); 84 ierr = 
PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr); 85 ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr); 86 if ((gidxs && ogidxs) || ogidxs) { 87 ierr = PetscMalloc1(n,&work);CHKERRQ(ierr); 88 if (!gidxs) { /* if gidxs is not provided, global indices are inferred */ 89 PetscInt cum = 0,start,*work2; 90 ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr); 91 for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++; 92 ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr); 93 start -= cum; 94 cum = 0; 95 for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++; 96 ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr); 97 ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr); 98 ierr = PetscFree(work2);CHKERRQ(ierr); 99 } else { 100 ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)gidxs,work,MPIU_REPLACE);CHKERRQ(ierr); 101 ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)gidxs,work,MPIU_REPLACE);CHKERRQ(ierr); 102 } 103 } 104 ierr = PetscSFDestroy(&sf);CHKERRQ(ierr); 105 /* Compress and put in indices */ 106 for (r = 0; r < n; ++r) 107 if (lidxs[r] >= 0) { 108 if (work) work[len] = work[r]; 109 lidxs[len++] = r; 110 } 111 if (on) *on = len; 112 if (oidxs) *oidxs = lidxs; 113 if (ogidxs) *ogidxs = work; 114 PetscFunctionReturn(0); 115 } 116 117 #undef __FUNCT__ 118 #define __FUNCT__ "MatGetSubMatrix_IS" 119 static PetscErrorCode MatGetSubMatrix_IS(Mat mat,IS irow,IS icol,MatReuse scall,Mat *newmat) 120 { 121 Mat locmat,newlocmat; 122 Mat_IS *newmatis; 123 #if defined(PETSC_USE_DEBUG) 124 Vec rtest,ltest; 125 const PetscScalar *array; 126 #endif 127 const PetscInt *idxs; 128 PetscInt i,m,n; 129 PetscErrorCode ierr; 130 131 PetscFunctionBegin; 132 if (scall == MAT_REUSE_MATRIX) { 133 PetscBool ismatis; 134 135 ierr = PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);CHKERRQ(ierr); 136 if (!ismatis) 
SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Not of MATIS type"); 137 newmatis = (Mat_IS*)(*newmat)->data; 138 if (!newmatis->getsub_ris) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local row IS"); 139 if (!newmatis->getsub_cis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local col IS"); 140 } 141 /* irow and icol may not have duplicate entries */ 142 #if defined(PETSC_USE_DEBUG) 143 ierr = MatCreateVecs(mat,<est,&rtest);CHKERRQ(ierr); 144 ierr = ISGetLocalSize(irow,&n);CHKERRQ(ierr); 145 ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr); 146 for (i=0;i<n;i++) { 147 ierr = VecSetValue(rtest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr); 148 } 149 ierr = VecAssemblyBegin(rtest);CHKERRQ(ierr); 150 ierr = VecAssemblyEnd(rtest);CHKERRQ(ierr); 151 ierr = VecGetLocalSize(rtest,&n);CHKERRQ(ierr); 152 ierr = VecGetOwnershipRange(rtest,&m,NULL);CHKERRQ(ierr); 153 ierr = VecGetArrayRead(rtest,&array);CHKERRQ(ierr); 154 for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Irow may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i])); 155 ierr = VecRestoreArrayRead(rtest,&array);CHKERRQ(ierr); 156 ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr); 157 ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr); 158 ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr); 159 for (i=0;i<n;i++) { 160 ierr = VecSetValue(ltest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr); 161 } 162 ierr = VecAssemblyBegin(ltest);CHKERRQ(ierr); 163 ierr = VecAssemblyEnd(ltest);CHKERRQ(ierr); 164 ierr = VecGetLocalSize(ltest,&n);CHKERRQ(ierr); 165 ierr = VecGetOwnershipRange(ltest,&m,NULL);CHKERRQ(ierr); 166 ierr = VecGetArrayRead(ltest,&array);CHKERRQ(ierr); 167 for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! 
Icol may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i])); 168 ierr = VecRestoreArrayRead(ltest,&array);CHKERRQ(ierr); 169 ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr); 170 ierr = VecDestroy(&rtest);CHKERRQ(ierr); 171 ierr = VecDestroy(<est);CHKERRQ(ierr); 172 #endif 173 if (scall == MAT_INITIAL_MATRIX) { 174 Mat_IS *matis = (Mat_IS*)mat->data; 175 ISLocalToGlobalMapping rl2g; 176 IS is; 177 PetscInt *lidxs,*lgidxs,*newgidxs; 178 PetscInt ll,newloc; 179 MPI_Comm comm; 180 181 ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr); 182 ierr = ISGetLocalSize(irow,&m);CHKERRQ(ierr); 183 ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr); 184 ierr = MatCreate(comm,newmat);CHKERRQ(ierr); 185 ierr = MatSetType(*newmat,MATIS);CHKERRQ(ierr); 186 ierr = MatSetSizes(*newmat,m,n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr); 187 /* communicate irow to their owners in the layout */ 188 ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr); 189 ierr = PetscLayoutMapLocal_Private(mat->rmap,m,idxs,NULL,&ll,&lidxs,&lgidxs);CHKERRQ(ierr); 190 ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr); 191 if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */ 192 ierr = MatISComputeSF_Private(mat);CHKERRQ(ierr); 193 } 194 ierr = PetscMemzero(matis->sf_rootdata,matis->sf_nroots*sizeof(PetscInt));CHKERRQ(ierr); 195 for (i=0;i<ll;i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i]+1; 196 ierr = PetscFree(lidxs);CHKERRQ(ierr); 197 ierr = PetscFree(lgidxs);CHKERRQ(ierr); 198 ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 199 ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 200 for (i=0,newloc=0;i<matis->sf_nleaves;i++) if (matis->sf_leafdata[i]) newloc++; 201 ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr); 202 ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr); 203 for (i=0,newloc=0;i<matis->sf_nleaves;i++) 204 if (matis->sf_leafdata[i]) { 205 lidxs[newloc] 
= i; 206 newgidxs[newloc++] = matis->sf_leafdata[i]-1; 207 } 208 ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 209 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr); 210 ierr = ISDestroy(&is);CHKERRQ(ierr); 211 /* local is to extract local submatrix */ 212 newmatis = (Mat_IS*)(*newmat)->data; 213 ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_ris);CHKERRQ(ierr); 214 if (mat->congruentlayouts == -1) { /* first time we compare rows and cols layouts */ 215 PetscBool cong; 216 ierr = PetscLayoutCompare(mat->rmap,mat->cmap,&cong);CHKERRQ(ierr); 217 if (cong) mat->congruentlayouts = 1; 218 else mat->congruentlayouts = 0; 219 } 220 if (mat->congruentlayouts && irow == icol) { 221 ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,rl2g);CHKERRQ(ierr); 222 ierr = PetscObjectReference((PetscObject)newmatis->getsub_ris);CHKERRQ(ierr); 223 newmatis->getsub_cis = newmatis->getsub_ris; 224 } else { 225 ISLocalToGlobalMapping cl2g; 226 227 /* communicate icol to their owners in the layout */ 228 ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr); 229 ierr = PetscLayoutMapLocal_Private(mat->cmap,n,idxs,NULL,&ll,&lidxs,&lgidxs);CHKERRQ(ierr); 230 ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr); 231 ierr = PetscMemzero(matis->csf_rootdata,matis->csf_nroots*sizeof(PetscInt));CHKERRQ(ierr); 232 for (i=0;i<ll;i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i]+1; 233 ierr = PetscFree(lidxs);CHKERRQ(ierr); 234 ierr = PetscFree(lgidxs);CHKERRQ(ierr); 235 ierr = PetscSFBcastBegin(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr); 236 ierr = PetscSFBcastEnd(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr); 237 for (i=0,newloc=0;i<matis->csf_nleaves;i++) if (matis->csf_leafdata[i]) newloc++; 238 ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr); 239 ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr); 240 for (i=0,newloc=0;i<matis->csf_nleaves;i++) 241 if 
(matis->csf_leafdata[i]) { 242 lidxs[newloc] = i; 243 newgidxs[newloc++] = matis->csf_leafdata[i]-1; 244 } 245 ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr); 246 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr); 247 ierr = ISDestroy(&is);CHKERRQ(ierr); 248 /* local is to extract local submatrix */ 249 ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_cis);CHKERRQ(ierr); 250 ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);CHKERRQ(ierr); 251 ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr); 252 } 253 ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr); 254 } else { 255 ierr = MatISGetLocalMat(*newmat,&newlocmat);CHKERRQ(ierr); 256 } 257 ierr = MatISGetLocalMat(mat,&locmat);CHKERRQ(ierr); 258 newmatis = (Mat_IS*)(*newmat)->data; 259 ierr = MatGetSubMatrix(locmat,newmatis->getsub_ris,newmatis->getsub_cis,scall,&newlocmat);CHKERRQ(ierr); 260 if (scall == MAT_INITIAL_MATRIX) { 261 ierr = MatISSetLocalMat(*newmat,newlocmat);CHKERRQ(ierr); 262 ierr = MatDestroy(&newlocmat);CHKERRQ(ierr); 263 } 264 ierr = MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 265 ierr = MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 266 PetscFunctionReturn(0); 267 } 268 269 #undef __FUNCT__ 270 #define __FUNCT__ "MatCopy_IS" 271 static PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str) 272 { 273 Mat_IS *a = (Mat_IS*)A->data,*b; 274 PetscBool ismatis; 275 PetscErrorCode ierr; 276 277 PetscFunctionBegin; 278 ierr = PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);CHKERRQ(ierr); 279 if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Need to be implemented"); 280 b = (Mat_IS*)B->data; 281 ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr); 282 PetscFunctionReturn(0); 283 } 284 285 #undef __FUNCT__ 286 #define __FUNCT__ "MatMissingDiagonal_IS" 287 static PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool *missing,PetscInt *d) 288 { 289 Vec v; 290 const PetscScalar 
*array; 291 PetscInt i,n; 292 PetscErrorCode ierr; 293 294 PetscFunctionBegin; 295 *missing = PETSC_FALSE; 296 ierr = MatCreateVecs(A,NULL,&v);CHKERRQ(ierr); 297 ierr = MatGetDiagonal(A,v);CHKERRQ(ierr); 298 ierr = VecGetLocalSize(v,&n);CHKERRQ(ierr); 299 ierr = VecGetArrayRead(v,&array);CHKERRQ(ierr); 300 for (i=0;i<n;i++) if (array[i] == 0.) break; 301 ierr = VecRestoreArrayRead(v,&array);CHKERRQ(ierr); 302 ierr = VecDestroy(&v);CHKERRQ(ierr); 303 if (i != n) *missing = PETSC_TRUE; 304 if (d) { 305 *d = -1; 306 if (*missing) { 307 PetscInt rstart; 308 ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr); 309 *d = i+rstart; 310 } 311 } 312 PetscFunctionReturn(0); 313 } 314 315 #undef __FUNCT__ 316 #define __FUNCT__ "MatISComputeSF_Private" 317 static PetscErrorCode MatISComputeSF_Private(Mat B) 318 { 319 Mat_IS *matis = (Mat_IS*)(B->data); 320 const PetscInt *gidxs; 321 PetscErrorCode ierr; 322 323 PetscFunctionBegin; 324 ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr); 325 ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr); 326 ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr); 327 ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr); 328 ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr); 329 ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr); 330 ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr); 331 if (B->rmap->mapping != B->cmap->mapping) { /* setup SF for columns */ 332 ierr = MatGetSize(matis->A,NULL,&matis->csf_nleaves);CHKERRQ(ierr); 333 ierr = MatGetLocalSize(B,NULL,&matis->csf_nroots);CHKERRQ(ierr); 334 ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->csf);CHKERRQ(ierr); 335 ierr = ISLocalToGlobalMappingGetIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr); 336 ierr = 
PetscSFSetGraphLayout(matis->csf,B->cmap,matis->csf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr); 337 ierr = ISLocalToGlobalMappingRestoreIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr); 338 ierr = PetscMalloc2(matis->csf_nroots,&matis->csf_rootdata,matis->csf_nleaves,&matis->csf_leafdata);CHKERRQ(ierr); 339 } else { 340 matis->csf = matis->sf; 341 matis->csf_nleaves = matis->sf_nleaves; 342 matis->csf_nroots = matis->sf_nroots; 343 matis->csf_leafdata = matis->sf_leafdata; 344 matis->csf_rootdata = matis->sf_rootdata; 345 } 346 PetscFunctionReturn(0); 347 } 348 349 #undef __FUNCT__ 350 #define __FUNCT__ "MatISSetPreallocation" 351 /*@ 352 MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix. 353 354 Collective on MPI_Comm 355 356 Input Parameters: 357 + B - the matrix 358 . d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix 359 (same value is used for all local rows) 360 . d_nnz - array containing the number of nonzeros in the various rows of the 361 DIAGONAL portion of the local submatrix (possibly different for each row) 362 or NULL, if d_nz is used to specify the nonzero structure. 363 The size of this array is equal to the number of local rows, i.e 'm'. 364 For matrices that will be factored, you must leave room for (and set) 365 the diagonal entry even if it is zero. 366 . o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local 367 submatrix (same value is used for all local rows). 368 - o_nnz - array containing the number of nonzeros in the various rows of the 369 OFF-DIAGONAL portion of the local submatrix (possibly different for 370 each row) or NULL, if o_nz is used to specify the nonzero 371 structure. The size of this array is equal to the number 372 of local rows, i.e 'm'. 
373 374 If the *_nnz parameter is given then the *_nz parameter is ignored 375 376 Level: intermediate 377 378 Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition 379 from the asssembled format to the unassembled one. It overestimates the preallocation of MATIS local 380 matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects. 381 382 .keywords: matrix 383 384 .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS 385 @*/ 386 PetscErrorCode MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 387 { 388 PetscErrorCode ierr; 389 390 PetscFunctionBegin; 391 PetscValidHeaderSpecific(B,MAT_CLASSID,1); 392 PetscValidType(B,1); 393 ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr); 394 PetscFunctionReturn(0); 395 } 396 397 #undef __FUNCT__ 398 #define __FUNCT__ "MatISSetPreallocation_IS" 399 static PetscErrorCode MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 400 { 401 Mat_IS *matis = (Mat_IS*)(B->data); 402 PetscInt bs,i,nlocalcols; 403 PetscErrorCode ierr; 404 405 PetscFunctionBegin; 406 if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping"); 407 if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */ 408 ierr = MatISComputeSF_Private(B);CHKERRQ(ierr); 409 } 410 if (!d_nnz) { 411 for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz; 412 } else { 413 for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i]; 414 } 415 if (!o_nnz) { 416 for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz; 417 } else { 418 for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i]; 419 } 420 ierr = 
PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 421 ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr); 422 ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr); 423 ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr); 424 for (i=0;i<matis->sf_nleaves;i++) { 425 matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols); 426 } 427 ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr); 428 for (i=0;i<matis->sf_nleaves/bs;i++) { 429 matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs; 430 } 431 ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr); 432 for (i=0;i<matis->sf_nleaves/bs;i++) { 433 matis->sf_leafdata[i] = matis->sf_leafdata[i]-i; 434 } 435 ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr); 436 PetscFunctionReturn(0); 437 } 438 439 #undef __FUNCT__ 440 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private" 441 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce) 442 { 443 Mat_IS *matis = (Mat_IS*)(A->data); 444 PetscInt *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership; 445 const PetscInt *global_indices_r,*global_indices_c; 446 PetscInt i,j,bs,rows,cols; 447 PetscInt lrows,lcols; 448 PetscInt local_rows,local_cols; 449 PetscMPIInt nsubdomains; 450 PetscBool isdense,issbaij; 451 PetscErrorCode ierr; 452 453 PetscFunctionBegin; 454 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr); 455 ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr); 456 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 457 ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr); 458 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr); 459 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr); 460 ierr = 
ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr); 461 if (A->rmap->mapping != A->cmap->mapping) { 462 ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr); 463 } else { 464 global_indices_c = global_indices_r; 465 } 466 467 if (issbaij) { 468 ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr); 469 } 470 /* 471 An SF reduce is needed to sum up properly on shared rows. 472 Note that generally preallocation is not exact, since it overestimates nonzeros 473 */ 474 if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */ 475 ierr = MatISComputeSF_Private(A);CHKERRQ(ierr); 476 } 477 ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr); 478 ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr); 479 /* All processes need to compute entire row ownership */ 480 ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr); 481 ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr); 482 for (i=0;i<nsubdomains;i++) { 483 for (j=mat_ranges[i];j<mat_ranges[i+1];j++) { 484 row_ownership[j] = i; 485 } 486 } 487 ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr); 488 489 /* 490 my_dnz and my_onz contains exact contribution to preallocation from each local mat 491 then, they will be summed up properly. 
This way, preallocation is always sufficient 492 */ 493 ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr); 494 /* preallocation as a MATAIJ */ 495 if (isdense) { /* special case for dense local matrices */ 496 for (i=0;i<local_rows;i++) { 497 PetscInt index_row = global_indices_r[i]; 498 for (j=i;j<local_rows;j++) { 499 PetscInt owner = row_ownership[index_row]; 500 PetscInt index_col = global_indices_c[j]; 501 if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */ 502 my_dnz[i] += 1; 503 } else { /* offdiag block */ 504 my_onz[i] += 1; 505 } 506 /* same as before, interchanging rows and cols */ 507 if (i != j) { 508 owner = row_ownership[index_col]; 509 if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) { 510 my_dnz[j] += 1; 511 } else { 512 my_onz[j] += 1; 513 } 514 } 515 } 516 } 517 } else { /* TODO: this could be optimized using MatGetRowIJ */ 518 for (i=0;i<local_rows;i++) { 519 const PetscInt *cols; 520 PetscInt ncols,index_row = global_indices_r[i]; 521 ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr); 522 for (j=0;j<ncols;j++) { 523 PetscInt owner = row_ownership[index_row]; 524 PetscInt index_col = global_indices_c[cols[j]]; 525 if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */ 526 my_dnz[i] += 1; 527 } else { /* offdiag block */ 528 my_onz[i] += 1; 529 } 530 /* same as before, interchanging rows and cols */ 531 if (issbaij && index_col != index_row) { 532 owner = row_ownership[index_col]; 533 if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) { 534 my_dnz[cols[j]] += 1; 535 } else { 536 my_onz[cols[j]] += 1; 537 } 538 } 539 } 540 ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr); 541 } 542 } 543 ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr); 544 if (global_indices_c != global_indices_r) { 545 ierr = 
ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr); 546 } 547 ierr = PetscFree(row_ownership);CHKERRQ(ierr); 548 549 /* Reduce my_dnz and my_onz */ 550 if (maxreduce) { 551 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr); 552 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr); 553 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr); 554 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr); 555 } else { 556 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr); 557 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr); 558 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr); 559 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr); 560 } 561 ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr); 562 563 /* Resize preallocation if overestimated */ 564 for (i=0;i<lrows;i++) { 565 dnz[i] = PetscMin(dnz[i],lcols); 566 onz[i] = PetscMin(onz[i],cols-lcols); 567 } 568 /* set preallocation */ 569 ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr); 570 for (i=0;i<lrows/bs;i++) { 571 dnz[i] = dnz[i*bs]/bs; 572 onz[i] = onz[i*bs]/bs; 573 } 574 ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr); 575 ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr); 576 ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 577 if (issbaij) { 578 ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr); 579 } 580 PetscFunctionReturn(0); 581 } 582 583 #undef __FUNCT__ 584 #define __FUNCT__ "MatISGetMPIXAIJ_IS" 585 static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M) 586 { 587 Mat_IS *matis = (Mat_IS*)(mat->data); 588 Mat local_mat; 589 /* info on mat */ 590 PetscInt bs,rows,cols,lrows,lcols; 591 PetscInt local_rows,local_cols; 592 PetscBool isdense,issbaij,isseqaij; 593 PetscMPIInt nsubdomains; 594 
/* values insertion */ 595 PetscScalar *array; 596 /* work */ 597 PetscErrorCode ierr; 598 599 PetscFunctionBegin; 600 /* get info from mat */ 601 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr); 602 if (nsubdomains == 1) { 603 if (reuse == MAT_INITIAL_MATRIX) { 604 ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr); 605 } else { 606 ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 607 } 608 PetscFunctionReturn(0); 609 } 610 ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr); 611 ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr); 612 ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr); 613 ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr); 614 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr); 615 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr); 616 ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr); 617 618 if (reuse == MAT_INITIAL_MATRIX) { 619 MatType new_mat_type; 620 PetscBool issbaij_red; 621 622 /* determining new matrix type */ 623 ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 624 if (issbaij_red) { 625 new_mat_type = MATSBAIJ; 626 } else { 627 if (bs>1) { 628 new_mat_type = MATBAIJ; 629 } else { 630 new_mat_type = MATAIJ; 631 } 632 } 633 634 ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr); 635 ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr); 636 ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr); 637 ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr); 638 ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr); 639 } else { 640 PetscInt mbs,mrows,mcols,mlrows,mlcols; 641 /* some checks */ 642 ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr); 643 ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr); 644 ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr); 
645 if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows); 646 if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols); 647 if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows); 648 if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols); 649 if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs); 650 ierr = MatZeroEntries(*M);CHKERRQ(ierr); 651 } 652 653 if (issbaij) { 654 ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr); 655 } else { 656 ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr); 657 local_mat = matis->A; 658 } 659 660 /* Set values */ 661 ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr); 662 if (isdense) { /* special case for dense local matrices */ 663 PetscInt i,*dummy_rows,*dummy_cols; 664 665 if (local_rows != local_cols) { 666 ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr); 667 for (i=0;i<local_rows;i++) dummy_rows[i] = i; 668 for (i=0;i<local_cols;i++) dummy_cols[i] = i; 669 } else { 670 ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr); 671 for (i=0;i<local_rows;i++) dummy_rows[i] = i; 672 dummy_cols = dummy_rows; 673 } 674 ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr); 675 ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr); 676 ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr); 677 ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr); 678 if (dummy_rows != dummy_cols) { 679 ierr = 
PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr); 680 } else { 681 ierr = PetscFree(dummy_rows);CHKERRQ(ierr); 682 } 683 } else if (isseqaij) { 684 PetscInt i,nvtxs,*xadj,*adjncy; 685 PetscBool done; 686 687 ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr); 688 if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__); 689 ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr); 690 for (i=0;i<nvtxs;i++) { 691 ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr); 692 } 693 ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr); 694 if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__); 695 ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr); 696 } else { /* very basic values insertion for all other matrix types */ 697 PetscInt i; 698 699 for (i=0;i<local_rows;i++) { 700 PetscInt j; 701 const PetscInt *local_indices_cols; 702 703 ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr); 704 ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr); 705 ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr); 706 } 707 } 708 ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 709 ierr = MatDestroy(&local_mat);CHKERRQ(ierr); 710 ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 711 if (isdense) { 712 ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr); 713 } 714 PetscFunctionReturn(0); 715 } 716 717 #undef __FUNCT__ 718 #define __FUNCT__ "MatISGetMPIXAIJ" 719 /*@ 720 MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format 721 722 Input Parameter: 723 . 
mat - the matrix (should be of type MATIS)
.  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    /* in-place reuse of the very same Mat object is not supported */
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  /* dispatch to the type-specific implementation composed as "MatISGetMPIXAIJ_C" */
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_IS"
/* Duplicate a MATIS: create a new MATIS sharing the same layouts and l2g mappings,
   holding a duplicate (per MatDuplicateOption op) of the local matrix */
PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  PetscInt       bs,m,n,M,N;
  Mat            B,localmat;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
  ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
  ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
  ierr = MatDestroy(&localmat);CHKERRQ(ierr); /* B holds its own reference now */
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *newmat = B;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsHermitian_IS"
/* The unassembled matrix is reported Hermitian when every local matrix is Hermitian
   (logical AND reduction over the communicator) */
static PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsSymmetric_IS"
/* Same scheme as MatIsHermitian_IS: symmetric iff all local matrices are symmetric */
static PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/* Free all MATIS internals (local matrix, scatters, work vectors, SFs) and
   reset the composed methods installed by MatCreate_IS */
static PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_ris);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_cis);CHKERRQ(ierr);
  /* when csf aliases sf its data is destroyed together with sf below;
     destroy the column SF separately only when it is a distinct object */
  if (b->sf != b->csf) {
    ierr = PetscSFDestroy(&b->csf);CHKERRQ(ierr);
    ierr = PetscFree2(b->csf_rootdata,b->csf_leafdata);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_IS"
/* y = A*x, computed as: gather x locally, apply the local matrix, scatter-add the
   local results back (this is the "implicit" parallel product of MATIS) */
static PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscScalar    zero = 0.0;

  PetscFunctionBegin;
  /* scatter the global vector x into the local work vector */
  ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);

  /* scatter product back into global memory; interface contributions are summed */
  ierr = VecSet(y,zero);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_IS"
/* v3 = v2 + A*v1; a temporary is needed only when v3 aliases v2 */
static PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A * v1.*/
  if (v3 != v2) {
    ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_IS"
/* x = A^T*y: mirror of MatMult_IS with the roles of the row/column scatters swapped */
static PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* scatter the global vector y into the local work vector */
  ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);

  /* scatter product back into global vector */
  ierr = VecSet(x,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_IS"
/* v3 = v2 + A'*v1; a temporary is needed only when v3 aliases v2 */
static PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A' * v1.*/
  if (v3 != v2) {
    ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_IS"
/* View each process's local matrix through a sub-viewer of the collective viewer */
static PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* (Re)build the MATIS internals for a pair of l2g mappings: destroys previous data,
   creates the sequential local matrix sized by the mappings, and (unless this is a
   local reference submatrix) the work vectors and global<->local scatters */
static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A: its size is the size of the l2g maps */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  /* local matrix options are namespaced under <prefix>is_ */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  if (!is->islocalref) { /* setup scatters and local vectors for MatMult */
    /* Create the local work vectors */
    ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

    /* setup the global to local scatters */
    ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
    if (rmapping != cmapping) {
      ierr = ISDestroy(&to);CHKERRQ(ierr);
      ierr = ISDestroy(&from);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
      ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
    } else {
      /* square case: the column scatter aliases the row scatter */
      ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
      is->cctx = is->rctx;
    }
    ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
    ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
  }
  ierr = MatSetUp(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_IS"
/* Translate global row/column indices to local ones and insert into the local matrix.
   IS_GTOLM_MASK maps indices not present in the l2g map to -1, which MatSetValues ignores */
static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       i,zm,zn;
#endif
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
  /* count negative indices */
  for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
  for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
#endif
  ierr = ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* count negative indices (should be the same as before): extra negatives mean
     the caller passed global indices this process cannot map */
  for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
  for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
  if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
  if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
#endif
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_IS"
/* Blocked variant of MatSetValues_IS: same global-to-local translation using block indices */
static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       i,zm,zn;
#endif
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
  /* count negative indices */
  for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
  for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
#endif
  ierr = ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* count negative indices (should be the same as before) */
  for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
  for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
  if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
  if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
#endif
  ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesLocal_IS"
/* Local indices are already indices into the local matrix: forward directly */
static PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlockedLocal_IS"
/* Blocked variant: local block indices forwarded directly to the local matrix */
static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_IS"
/* Zero rows of the globally unassembled matrix: determine the owned rows to zero,
   optionally fix the rhs, then broadcast the to-be-zeroed flags through the SF so that
   every process zeroes the corresponding rows of its local matrix */
static PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscInt       nr,nl,len,i;
  PetscInt       *lrows;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get locally owned rows */
  ierr = MatZeroRowsMapLocal_Private(A,n,rows,&len,&lrows);CHKERRQ(ierr);
  /* fix right hand side if needed: b[row] = diag * x[row] for each zeroed row */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    ierr = VecGetArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecGetArray(b, &bb);CHKERRQ(ierr);
    for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
    ierr = VecRestoreArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(b, &bb);CHKERRQ(ierr);
  }
  /* get rows associated to the local matrices */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  /* mark owned rows to be zeroed and broadcast the marks to all sharing processes */
  for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* collect the marked local rows and zero them in the local matrix */
  ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr);
  for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
  ierr = MatISZeroRowsLocal_Private(A,nr,lrows,diag);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISZeroRowsLocal_Private"
/* Zero the given local rows. When diag is nonzero, the inserted diagonal value is
   scaled by the multiplicity of each row (how many subdomains share it), so that the
   implicitly assembled global matrix gets exactly diag on those rows */
static PetscErrorCode MatISZeroRowsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (diag != 0.) {
    /*
       Set up is->y as a "counting vector". This is in order to MatMult_IS
       work properly in the interface nodes: after the scatter round-trip,
       is->y[i] holds the number of processes sharing local row i.
    */
    Vec counter;
    ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
    ierr = VecSet(counter,0.);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }

  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    PetscInt i;
    is->pure_neumann = PETSC_FALSE;

    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    if (diag != 0.) {
      const PetscScalar *array;
      ierr = VecGetArrayRead(is->y,&array);CHKERRQ(ierr);
      /* divide by the multiplicity so the summed contributions equal diag */
      for (i=0; i<n; i++) {
        ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(is->y,&array);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_IS"
/* Assembly only involves the local matrix */
static PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_IS"
/* Assembly only involves the local matrix */
static PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat_IS"
/* Return a borrowed reference to the local matrix (no reference count increase) */
static PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
{
  Mat_IS *is = (Mat_IS*)mat->data;

  PetscFunctionBegin;
  *local = is->A;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat_IS"
/* Replace the local matrix, taking a reference on the new one; when a local matrix
   already exists the replacement must have the same (local) sizes */
static PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       nrows,ncols,orows,ocols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (is->A) {
    ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
    ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
    /* NOTE(review): %d with PetscInt arguments — PETSc convention is %D for PetscInt; confirm on 64-bit-indices builds */
    if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
  }
  ierr = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  is->A = local;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the
local matrix stored inside a MATIS object.

  Input Parameters:
.  mat - the matrix
.  local - the local matrix

  Output Parameter:

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_IS"
/* Zeroing the unassembled matrix amounts to zeroing every local matrix */
static PetscErrorCode MatZeroEntries_IS(Mat A)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatScale_IS"
/* Scaling the unassembled matrix amounts to scaling every local matrix */
static PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatScale(is->A,a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_IS"
/* Global diagonal: local diagonals scatter-added back (interface entries are summed) */
static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get diagonal of the local matrix */
  ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);

  /* scatter diagonal back into global vector */
  ierr = VecSet(v,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_IS"
/* Options are forwarded to the local matrix */
static PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAXPY_IS"
/* Y += a*X computed on the local matrices; X must also be of type MATIS */
static PetscErrorCode MatAXPY_IS(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  Mat_IS         *y = (Mat_IS*)Y->data;
  Mat_IS         *x;
#if defined(PETSC_USE_DEBUG)
  PetscBool      ismatis;
#endif
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  ierr = PetscObjectTypeCompare((PetscObject)X,MATIS,&ismatis);CHKERRQ(ierr);
  if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)Y),PETSC_ERR_SUP,"Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS");
#endif
  x = (Mat_IS*)X->data;
  ierr = MatAXPY(y->A,a,x->A,str);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetLocalSubMatrix_IS"
/* Build a lightweight MATIS "local reference" submatrix: it shares the local matrix of A
   and remaps the requested local rows/columns through new l2g maps, so that
   MatSetValuesLocal() on the submatrix inserts into A's local matrix */
static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat)
{
  Mat                    lA;
  Mat_IS                 *matis;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  const PetscInt         *rg,*rl;
  PetscInt               nrg;
  PetscInt               N,M,nrl,i,*idxs;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  ierr = ISGetLocalSize(row,&nrl);CHKERRQ(ierr);
  ierr = ISGetIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %d -> %d greater than maximum possible %d\n",i,rl[i],nrg);
#endif
  ierr = PetscMalloc1(nrg,&idxs);CHKERRQ(ierr);
  /* map from [0,nrl) to row; pad the unused tail (nrg in debug to trip range checks, -1 otherwise) */
  for (i=0;i<nrl;i++) idxs[i] = rl[i];
#if defined(PETSC_USE_DEBUG)
  for (i=nrl;i<nrg;i++) idxs[i] = nrg;
#else
  for (i=nrl;i<nrg;i++) idxs[i] = -1;
#endif
  ierr = ISRestoreIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),nrg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  /* compute new l2g map for columns */
  if (col != row || A->rmap->mapping != A->cmap->mapping) {
    const PetscInt *cg,*cl;
    PetscInt       ncg;
    PetscInt       ncl;

    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISGetLocalSize(col,&ncl);CHKERRQ(ierr);
    ierr = ISGetIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %d -> %d greater than maximum possible %d\n",i,cl[i],ncg);
#endif
    ierr = PetscMalloc1(ncg,&idxs);CHKERRQ(ierr);
    /* map from [0,ncl) to col */
    for (i=0;i<ncl;i++) idxs[i] = cl[i];
#if defined(PETSC_USE_DEBUG)
    for (i=ncl;i<ncg;i++) idxs[i] = ncg;
#else
    for (i=ncl;i<ncg;i++) idxs[i] = -1;
#endif
    ierr = ISRestoreIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),ncg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
  } else {
    /* square case: the column map aliases the row map */
    ierr = PetscObjectReference((PetscObject)rl2g);CHKERRQ(ierr);
    cl2g = rl2g;
  }
  /* create the MATIS submatrix */
  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)A),submat);CHKERRQ(ierr);
  ierr = MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*submat,MATIS);CHKERRQ(ierr);
  matis = (Mat_IS*)((*submat)->data);
  matis->islocalref = PETSC_TRUE; /* skips creation of scatters/work vectors in MatSetLocalToGlobalMapping_IS */
  ierr = MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);CHKERRQ(ierr);
  ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(*submat,lA);CHKERRQ(ierr);
  ierr = MatSetUp(*submat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
  /* remove unsupported ops: only destroy/local insertion/assembly are valid on the reference */
  ierr = PetscMemzero((*submat)->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  (*submat)->ops->destroy               = MatDestroy_IS;
  (*submat)->ops->setvalueslocal        = MatSetValuesLocal_SubMat_IS;
  (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS;
  (*submat)->ops->assemblybegin         = MatAssemblyBegin_IS;
  (*submat)->ops->assemblyend           = MatAssemblyEnd_IS;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateIS"
/*@
    MatCreateIS - Creates a "process" unassembled matrix, assembled on each
       process but not across processes.

   Input Parameters:
+     comm    - MPI communicator that will share the matrix
.     bs      - block size of the matrix
.     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
.     rmap    - local to global map for rows
-     cmap    - local to global map for cols

   Output Parameter:
.    A - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details.
          m and n are NOT related to the size of the map; they are the size of the part of the vector owned by that process.
The sizes of rmap and cmap define the size of the local matrices.
          If either rmap or cmap are NULL, then the matrix is assumed to be square.

.seealso: MATIS, MatSetLocalToGlobalMapping()
@*/
PetscErrorCode MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
  /* a single mapping is reused for both rows and columns (square matrix assumption) */
  if (rmap && cmap) {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
  } else if (!rmap) {
    ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
  } else {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*MC
   MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition methods (e.g. PCBDDC or KSPFETIDP).
   This stores the matrices in globally unassembled form. Each processor
   assembles only its local Neumann problem and the parallel matrix vector
   product is handled "implicitly".

   Operations Provided:
+  MatMult()
.  MatMultAdd()
.  MatMultTranspose()
.  MatMultTransposeAdd()
.  MatZeroEntries()
.  MatSetOption()
.  MatZeroRows()
.  MatSetValues()
.  MatSetValuesBlocked()
.  MatSetValuesLocal()
.  MatSetValuesBlockedLocal()
.  MatScale()
.  MatGetDiagonal()
.  MatMissingDiagonal()
.  MatDuplicate()
.  MatCopy()
.  MatAXPY()
.  MatGetSubMatrix()
.  MatGetLocalSubMatrix()
-  MatSetLocalToGlobalMapping()

   Options Database Keys:
. -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()

   Notes: Options prefix for the inner matrix are given by -is_mat_xxx

          You must call MatSetLocalToGlobalMapping() before using this matrix type.

          You can do matrix preallocation on the local matrix after you obtain it with
          MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()

  Level: advanced

.seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), MatCreateIS(), PCBDDC, KSPFETIDP

M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/* Type constructor for MATIS: allocates the implementation data, installs the
   function table, and composes the MATIS-specific methods */
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops */
  ierr = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;
  A->ops->missingdiagonal         = MatMissingDiagonal_IS;
  A->ops->copy                    = MatCopy_IS;
  A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;
  A->ops->getsubmatrix            = MatGetSubMatrix_IS;
  A->ops->axpy                    = MatAXPY_IS;

  /* special MATIS functions, reachable via PetscUseMethod() on the public API */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}