/*
    Creates a matrix class for using the Neumann-Neumann type preconditioners.
   This stores the matrices in globally unassembled form. Each processor
   assembles only its local Neumann problem and the parallel matrix vector
   product is handled "implicitly".

     We provide:
         MatMult()

    Currently this allows for only one subdomain per processor.

*/

#include <../src/mat/impls/is/matis.h> /*I "petscmat.h" I*/
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
@*/
PetscErrorCode MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation if composed; silently no-op otherwise */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/*
   Type-specific preallocation for MATIS: accumulates the d_nz/d_nnz and
   o_nz/o_nnz estimates per owned global row, broadcasts the totals to the
   local (subdomain) rows through a PetscSF, then uses the (over)estimates to
   preallocate the local matrix in its AIJ, BAIJ and SBAIJ incarnations.
*/
PetscErrorCode MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscSF        sf;
  PetscInt       bs,i,nroots,*rootdata,nleaves,*leafdata,nlocalcols;
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) {
    SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  }
  ierr = MatGetLocalSize(B,&nroots,NULL);CHKERRQ(ierr);           /* roots: owned global rows */
  ierr = MatGetSize(matis->A,&nleaves,&nlocalcols);CHKERRQ(ierr); /* leaves: local (subdomain) rows */
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscCalloc2(nroots,&rootdata,nleaves,&leafdata);CHKERRQ(ierr);
  /* SF with one leaf per local row, rooted at the owning process of its global index */
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&sf);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(matis->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscSFSetGraphLayout(sf,B->rmap,nleaves,NULL,PETSC_COPY_VALUES,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(matis->mapping,&gidxs);CHKERRQ(ierr);
  /* total estimate per owned row = diagonal part + off-diagonal part */
  if (!d_nnz) {
    for (i=0;i<nroots;i++) rootdata[i] += d_nz;
  } else {
    for (i=0;i<nroots;i++) rootdata[i] += d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<nroots;i++) rootdata[i] += o_nz;
  } else {
    for (i=0;i<nroots;i++) rootdata[i] += o_nnz[i];
  }
  ierr = PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
  /* a local row cannot hold more nonzeros than the local matrix has columns */
  for (i=0;i<nleaves;i++) {
    leafdata[i] = PetscMin(leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,leafdata);CHKERRQ(ierr);
  /* compress in place to per-block-row counts for BAIJ
     (assumes rows within a block carry similar estimates -- TODO confirm) */
  for (i=0;i<nleaves/bs;i++) {
    leafdata[i] = leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,leafdata);CHKERRQ(ierr);
  /* SBAIJ stores the upper triangle only; subtracting the block-row index
     discounts (approximately) the strictly-lower part -- still an estimate */
  for (i=0;i<nleaves/bs;i++) {
    leafdata[i] = leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,leafdata);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  ierr = PetscFree2(rootdata,leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/* Assembles the globally-unassembled MATIS into a parallel AIJ/BAIJ/SBAIJ matrix */
PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  /* info on mat */
  /* ISLocalToGlobalMapping rmapping,cmapping; */
  PetscInt       bs,rows,cols;
  PetscInt       lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,issbaij_red;
  /* values insertion */
  PetscScalar    *array;
  PetscInt       *local_indices,*global_indices;
  /* work */
  PetscInt       i,j,index_row;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* MISSING CHECKS
     - rectangular case not covered (it is not allowed by MATIS)
  */
  /* get info from mat */
  /* ierr = MatGetLocalToGlobalMapping(mat,&rmapping,&cmapping);CHKERRQ(ierr); */
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr =
PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr); 147 148 /* work */ 149 ierr = PetscMalloc1(local_rows,&local_indices);CHKERRQ(ierr); 150 for (i=0;i<local_rows;i++) local_indices[i]=i; 151 /* map indices of local mat to global values */ 152 ierr = PetscMalloc(PetscMax(local_cols,local_rows)*sizeof(*global_indices),&global_indices);CHKERRQ(ierr); 153 /* ierr = ISLocalToGlobalMappingApply(rmapping,local_rows,local_indices,global_indices);CHKERRQ(ierr); */ 154 ierr = ISLocalToGlobalMappingApply(matis->mapping,local_rows,local_indices,global_indices);CHKERRQ(ierr); 155 156 if (issbaij) { 157 ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr); 158 } 159 160 if (reuse == MAT_INITIAL_MATRIX) { 161 Mat new_mat; 162 MatType new_mat_type; 163 Vec vec_dnz,vec_onz; 164 PetscScalar *my_dnz,*my_onz; 165 PetscInt *dnz,*onz,*mat_ranges,*row_ownership; 166 PetscInt index_col,owner; 167 PetscMPIInt nsubdomains; 168 169 /* determining new matrix type */ 170 ierr = MPI_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 171 if (issbaij_red) { 172 new_mat_type = MATSBAIJ; 173 } else { 174 if (bs>1) { 175 new_mat_type = MATBAIJ; 176 } else { 177 new_mat_type = MATAIJ; 178 } 179 } 180 181 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr); 182 ierr = MatCreate(PetscObjectComm((PetscObject)mat),&new_mat);CHKERRQ(ierr); 183 ierr = MatSetSizes(new_mat,PETSC_DECIDE,PETSC_DECIDE,rows,cols);CHKERRQ(ierr); 184 ierr = MatSetBlockSize(new_mat,bs);CHKERRQ(ierr); 185 ierr = MatSetType(new_mat,new_mat_type);CHKERRQ(ierr); 186 ierr = MatSetUp(new_mat);CHKERRQ(ierr); 187 ierr = MatGetLocalSize(new_mat,&lrows,&lcols);CHKERRQ(ierr); 188 189 /* 190 preallocation 191 */ 192 193 ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)new_mat),lrows,lcols,dnz,onz);CHKERRQ(ierr); 194 /* 195 Some vectors are needed to sum up properly on shared interface dofs. 
196 Preallocation macros cannot do the job. 197 Note that preallocation is not exact, since it overestimates nonzeros 198 */ 199 ierr = MatCreateVecs(new_mat,NULL,&vec_dnz);CHKERRQ(ierr); 200 /* ierr = VecSetLocalToGlobalMapping(vec_dnz,rmapping);CHKERRQ(ierr); */ 201 ierr = VecSetLocalToGlobalMapping(vec_dnz,matis->mapping);CHKERRQ(ierr); 202 ierr = VecDuplicate(vec_dnz,&vec_onz);CHKERRQ(ierr); 203 /* All processes need to compute entire row ownership */ 204 ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr); 205 ierr = MatGetOwnershipRanges(new_mat,(const PetscInt**)&mat_ranges);CHKERRQ(ierr); 206 for (i=0;i<nsubdomains;i++) { 207 for (j=mat_ranges[i];j<mat_ranges[i+1];j++) { 208 row_ownership[j]=i; 209 } 210 } 211 212 /* 213 my_dnz and my_onz contains exact contribution to preallocation from each local mat 214 then, they will be summed up properly. This way, preallocation is always sufficient 215 */ 216 ierr = PetscMalloc1(local_rows,&my_dnz);CHKERRQ(ierr); 217 ierr = PetscMalloc1(local_rows,&my_onz);CHKERRQ(ierr); 218 ierr = PetscMemzero(my_dnz,local_rows*sizeof(*my_dnz));CHKERRQ(ierr); 219 ierr = PetscMemzero(my_onz,local_rows*sizeof(*my_onz));CHKERRQ(ierr); 220 /* preallocation as a MATAIJ */ 221 if (isdense) { /* special case for dense local matrices */ 222 for (i=0;i<local_rows;i++) { 223 index_row = global_indices[i]; 224 for (j=i;j<local_rows;j++) { 225 owner = row_ownership[index_row]; 226 index_col = global_indices[j]; 227 if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */ 228 my_dnz[i] += 1.0; 229 } else { /* offdiag block */ 230 my_onz[i] += 1.0; 231 } 232 /* same as before, interchanging rows and cols */ 233 if (i != j) { 234 owner = row_ownership[index_col]; 235 if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) { 236 my_dnz[j] += 1.0; 237 } else { 238 my_onz[j] += 1.0; 239 } 240 } 241 } 242 } 243 } else { 244 for (i=0;i<local_rows;i++) { 245 PetscInt ncols; 246 const PetscInt *cols; 
247 index_row = global_indices[i]; 248 ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr); 249 for (j=0;j<ncols;j++) { 250 owner = row_ownership[index_row]; 251 index_col = global_indices[cols[j]]; 252 if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */ 253 my_dnz[i] += 1.0; 254 } else { /* offdiag block */ 255 my_onz[i] += 1.0; 256 } 257 /* same as before, interchanging rows and cols */ 258 if (issbaij) { 259 owner = row_ownership[index_col]; 260 if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) { 261 my_dnz[j] += 1.0; 262 } else { 263 my_onz[j] += 1.0; 264 } 265 } 266 } 267 ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr); 268 } 269 } 270 ierr = VecSet(vec_dnz,0.0);CHKERRQ(ierr); 271 ierr = VecSet(vec_onz,0.0);CHKERRQ(ierr); 272 if (local_rows) { /* multilevel guard */ 273 ierr = VecSetValuesLocal(vec_dnz,local_rows,local_indices,my_dnz,ADD_VALUES);CHKERRQ(ierr); 274 ierr = VecSetValuesLocal(vec_onz,local_rows,local_indices,my_onz,ADD_VALUES);CHKERRQ(ierr); 275 } 276 ierr = VecAssemblyBegin(vec_dnz);CHKERRQ(ierr); 277 ierr = VecAssemblyBegin(vec_onz);CHKERRQ(ierr); 278 ierr = VecAssemblyEnd(vec_dnz);CHKERRQ(ierr); 279 ierr = VecAssemblyEnd(vec_onz);CHKERRQ(ierr); 280 ierr = PetscFree(my_dnz);CHKERRQ(ierr); 281 ierr = PetscFree(my_onz);CHKERRQ(ierr); 282 ierr = PetscFree(row_ownership);CHKERRQ(ierr); 283 284 /* set computed preallocation in dnz and onz */ 285 ierr = VecGetArray(vec_dnz,&array);CHKERRQ(ierr); 286 for (i=0; i<lrows; i++) dnz[i] = (PetscInt)PetscRealPart(array[i]); 287 ierr = VecRestoreArray(vec_dnz,&array);CHKERRQ(ierr); 288 ierr = VecGetArray(vec_onz,&array);CHKERRQ(ierr); 289 for (i=0;i<lrows;i++) onz[i] = (PetscInt)PetscRealPart(array[i]); 290 ierr = VecRestoreArray(vec_onz,&array);CHKERRQ(ierr); 291 ierr = VecDestroy(&vec_dnz);CHKERRQ(ierr); 292 ierr = VecDestroy(&vec_onz);CHKERRQ(ierr); 293 294 /* Resize preallocation if overestimated */ 295 for 
(i=0;i<lrows;i++) { 296 dnz[i] = PetscMin(dnz[i],lcols); 297 onz[i] = PetscMin(onz[i],cols-lcols); 298 } 299 /* set preallocation */ 300 ierr = MatMPIAIJSetPreallocation(new_mat,0,dnz,0,onz);CHKERRQ(ierr); 301 for (i=0;i<lrows/bs;i++) { 302 dnz[i] = dnz[i*bs]/bs; 303 onz[i] = onz[i*bs]/bs; 304 } 305 ierr = MatMPIBAIJSetPreallocation(new_mat,bs,0,dnz,0,onz);CHKERRQ(ierr); 306 for (i=0;i<lrows/bs;i++) { 307 dnz[i] = dnz[i]-i; 308 } 309 ierr = MatMPISBAIJSetPreallocation(new_mat,bs,0,dnz,0,onz);CHKERRQ(ierr); 310 ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 311 *M = new_mat; 312 } else { 313 PetscInt mbs,mrows,mcols; 314 /* some checks */ 315 ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr); 316 ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr); 317 if (mrows != rows) { 318 SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows); 319 } 320 if (mrows != rows) { 321 SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols); 322 } 323 if (mbs != bs) { 324 SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. 
Wrong block size (%d != %d)",bs,mbs); 325 } 326 ierr = MatZeroEntries(*M);CHKERRQ(ierr); 327 } 328 /* set local to global mappings */ 329 /* ierr = MatSetLocalToGlobalMapping(*M,rmapping,cmapping);CHKERRQ(ierr); */ 330 /* Set values */ 331 if (isdense) { /* special case for dense local matrices */ 332 ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr); 333 ierr = MatDenseGetArray(matis->A,&array);CHKERRQ(ierr); 334 ierr = MatSetValues(*M,local_rows,global_indices,local_cols,global_indices,array,ADD_VALUES);CHKERRQ(ierr); 335 ierr = MatDenseRestoreArray(matis->A,&array);CHKERRQ(ierr); 336 ierr = PetscFree(local_indices);CHKERRQ(ierr); 337 ierr = PetscFree(global_indices);CHKERRQ(ierr); 338 } else { /* very basic values insertion for all other matrix types */ 339 ierr = PetscFree(local_indices);CHKERRQ(ierr); 340 for (i=0;i<local_rows;i++) { 341 ierr = MatGetRow(matis->A,i,&j,(const PetscInt**)&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr); 342 /* ierr = MatSetValuesLocal(*M,1,&i,j,local_indices,array,ADD_VALUES);CHKERRQ(ierr); */ 343 ierr = ISLocalToGlobalMappingApply(matis->mapping,j,local_indices,global_indices);CHKERRQ(ierr); 344 ierr = ISLocalToGlobalMappingApply(matis->mapping,1,&i,&index_row);CHKERRQ(ierr); 345 ierr = MatSetValues(*M,1,&index_row,j,global_indices,array,ADD_VALUES);CHKERRQ(ierr); 346 ierr = MatRestoreRow(matis->A,i,&j,(const PetscInt**)&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr); 347 } 348 ierr = PetscFree(global_indices);CHKERRQ(ierr); 349 } 350 ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 351 ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 352 if (isdense) { 353 ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr); 354 } 355 if (issbaij) { 356 ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr); 357 } 358 PetscFunctionReturn(0); 359 } 360 361 #undef __FUNCT__ 362 #define __FUNCT__ "MatISGetMPIXAIJ" 363 /*@ 364 MatISGetMPIXAIJ - Converts MATIS matrix 
into a parallel AIJ format

   Input Parameter:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

   Output Parameter:
.  newmat - the matrix in AIJ format

   Level: developer

   Notes:

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    /* when reusing, the output must already be a valid Mat on the same communicator */
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
  }
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_IS"
/* Duplicates a MATIS by duplicating its local matrix and wrapping it in a new
   MATIS that shares the same local-to-global mapping */
PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  PetscInt       bs,m,n,M,N;
  Mat            B,localmat;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
  ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,matis->mapping,&B);CHKERRQ(ierr);
  ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
  /* MatISSetLocalMat takes its own reference, so drop ours right after */
  ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
  ierr = MatDestroy(&localmat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *newmat = B;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsHermitian_IS"
/* A MATIS is reported Hermitian iff every process' local matrix is Hermitian */
PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsSymmetric_IS"
/* A MATIS is reported symmetric iff every process' local matrix is symmetric */
PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/* Releases the local matrix, scatter context, work vectors and mapping, and
   clears the composed type-specific methods */
PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->ctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&b->mapping);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_IS"
/* y = A*x, computed as scatter(x) -> local multiply -> add-scatter back:
   the parallel product is handled implicitly through the unassembled form */
PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscScalar    zero = 0.0;

  PetscFunctionBegin;
  /* scatter the global vector x into the local work vector */
  ierr = VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);

  /* scatter product back into global memory; ADD_VALUES sums contributions on shared dofs */
  ierr = VecSet(y,zero);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_IS"
/* NOTE(review): only v3 == v2 aliasing is special-cased below; v3 == v1 would
   hand MatMult an aliased input/output pair -- confirm callers never do that */
PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A * v1.*/
  if (v3 != v2) {
    ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    /* v3 aliases v2: go through a temporary so v2 is still intact for the AXPY */
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_IS"
/* Same scatter/multiply/scatter-back scheme as MatMult_IS, with the local transpose */
PetscErrorCode MatMultTranspose_IS(Mat A,Vec x,Vec y)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* y = A' * x */
  /* scatter the global vector x into the local work vector */
  ierr = VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMultTranspose(is->A,is->x,is->y);CHKERRQ(ierr);

  /* scatter product back into global vector */
  ierr = VecSet(y,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_IS"
/* NOTE(review): as in MatMultAdd_IS, v3 == v1 aliasing is not special-cased */
PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A' * v1.*/
  if (v3 != v2) {
    ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    /* v3 aliases v2: use a temporary to preserve v2 for the AXPY */
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_IS"
/* Views each process' local matrix through the singleton sub-viewer */
PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
  ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
  ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* Installs the (identical row/column) local-to-global mapping and builds the
   local matrix, the local work vectors and the global-to-local scatter */
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       n,bs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            global;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  if (rmapping != cmapping) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MATIS requires the row and column mappings to be identical");
  if (is->mapping) { /* Currently destroys the objects that will be created by this routine. Is there anything else that should be checked?
*/
    ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
    ierr = VecDestroy(&is->x);CHKERRQ(ierr);
    ierr = VecDestroy(&is->y);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&is->ctx);CHKERRQ(ierr);
    ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  }
  /* take a reference before storing the caller's mapping */
  ierr = PetscObjectReference((PetscObject)rmapping);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
  is->mapping = rmapping;
  /*
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);
  */

  /* Create the local matrix A: sequential, square, sized by the mapping */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&bs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  if (bs > 1) {
    ierr = MatSetType(is->A,MATSEQBAIJ);CHKERRQ(ierr);
  } else {
    ierr = MatSetType(is->A,MATSEQAIJ);CHKERRQ(ierr);
  }
  ierr = MatSetSizes(is->A,n,n,n,n);CHKERRQ(ierr);
  ierr = MatSetBlockSize(is->A,bs);CHKERRQ(ierr);
  /* local objects inherit the parent prefix with "is_" appended */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = VecCreate(PETSC_COMM_SELF,&is->x);CHKERRQ(ierr);
  ierr = VecSetBlockSize(is->x,bs);CHKERRQ(ierr);
  ierr = VecSetSizes(is->x,n,n);CHKERRQ(ierr);
  ierr = VecSetOptionsPrefix(is->x,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = VecAppendOptionsPrefix(is->x,"is_");CHKERRQ(ierr);
  ierr = VecSetFromOptions(is->x);CHKERRQ(ierr);
  ierr = VecDuplicate(is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatter: local stride -> mapped global indices */
  ierr = ISCreateStride(PETSC_COMM_SELF,n,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = MatCreateVecs(A,&global,NULL);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,is->x,to,&is->ctx);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_IS"
/* Maps the global row/column indices to local ones and forwards the insertion
   to the local matrix. Uses fixed-size stack buffers (2048), checked in debug
   builds only.
   NOTE(review): ISG2LMapApply is #undef'd below but its definition is not
   visible in this file -- verify the defining macro block was not lost. */
PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       rows_l[2048],cols_l[2048];
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
#endif
  ierr = ISG2LMapApply(is->mapping,m,rows,rows_l);CHKERRQ(ierr);
  ierr = ISG2LMapApply(is->mapping,n,cols,cols_l);CHKERRQ(ierr);
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef ISG2LMapSetUp
#undef ISG2LMapApply

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesLocal_IS"
/* Local indices need no translation: insert straight into the local matrix */
PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlockedLocal_IS"
/* Blocked variant: forward directly to the local matrix */
PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_IS"
/* Translates the global rows to local ones (dropping rows not on this
   process) and delegates to MatZeroRowsLocal */
PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscInt       n_l = 0, *rows_l = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  if (n) {
    ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
    ierr = ISGlobalToLocalMappingApply(is->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
  }
  ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
  ierr = PetscFree(rows_l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsLocal_IS"
/* Zeroes local rows and sets the diagonal entry scaled by the multiplicity of
   the dof (how many subdomains share it), so the assembled global diagonal
   sums to diag */
PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscInt       i;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->x as a "counting vector". This is in order to MatMult_IS
       work properly in the interface nodes.
    */
    Vec         counter;
    PetscScalar one=1.0, zero=0.0;
    ierr = MatCreateVecs(A,&counter,NULL);CHKERRQ(ierr);
    ierr = VecSet(counter,zero);CHKERRQ(ierr);
    ierr = VecSet(is->x,one);CHKERRQ(ierr);
    /* reverse ADD then forward INSERT: each local entry ends up holding the
       number of subdomains sharing that dof */
    ierr = VecScatterBegin(is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd  (is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd  (is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    ierr = VecGetArray(is->x,&array);CHKERRQ(ierr);
    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      /* divide by the dof multiplicity so the summed global diagonal equals diag */
      ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd  (is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = VecRestoreArray(is->x,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_IS"
/* Assembly is purely local: forward to the local matrix */
PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_IS"
/* Assembly is purely local: forward to the local matrix */
PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat_IS"
/* Hands out a borrowed reference to the local matrix (no refcount change) */
PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
{
  Mat_IS *is =
(Mat_IS*)mat->data;

  PetscFunctionBegin;
  *local = is->A;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix usually MATSEQAIJ

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat_IS"
/* Replaces the stored local matrix (sizes must match any existing one);
   takes its own reference on the new matrix before releasing the old.
   NOTE(review): the SETERRQ4 below uses %d for PetscInt; elsewhere this file
   uses %D -- confirm against the PETSc format-specifier convention. */
PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       nrows,ncols,orows,ocols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (is->A) {
    ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
    ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
    if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
  }
  /* reference first, then destroy: safe even if local == is->A */
  ierr = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  is->A = local;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Set the local matrix stored inside a MATIS matrix.

  Input Parameter:
+  mat - the matrix
-  local - the local matrix usually MATSEQAIJ

  Output Parameter:

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_IS"
/* Zeroing is purely local: forward to the local matrix */
PetscErrorCode MatZeroEntries_IS(Mat A)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatScale_IS"
/* Scaling is purely local: forward to the local matrix */
PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatScale(is->A,a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_IS"
/* Global diagonal = sum of the local diagonals over the sharing subdomains */
PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get diagonal of the local matrix */
  ierr = MatGetDiagonal(is->A,is->x);CHKERRQ(ierr);

  /* scatter diagonal back into global vector; ADD_VALUES sums shared dofs */
  ierr = VecSet(v,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_IS"
PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
908 ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 909 PetscFunctionReturn(0); 910 } 911 912 #undef __FUNCT__ 913 #define __FUNCT__ "MatCreateIS" 914 /*@ 915 MatCreateIS - Creates a "process" unassmembled matrix, it is assembled on each 916 process but not across processes. 917 918 Input Parameters: 919 + comm - MPI communicator that will share the matrix 920 . bs - local and global block size of the matrix 921 . m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products 922 - map - mapping that defines the global number for each local number 923 924 Output Parameter: 925 . A - the resulting matrix 926 927 Level: advanced 928 929 Notes: See MATIS for more details 930 m and n are NOT related to the size of the map, they are the size of the part of the vector owned 931 by that process. m + nghosts (or n + nghosts) is the length of map since map maps all local points 932 plus the ghost points to global indices. 933 934 .seealso: MATIS, MatSetLocalToGlobalMapping() 935 @*/ 936 PetscErrorCode MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping map,Mat *A) 937 { 938 PetscErrorCode ierr; 939 940 PetscFunctionBegin; 941 ierr = MatCreate(comm,A);CHKERRQ(ierr); 942 ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr); 943 ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr); 944 ierr = MatSetType(*A,MATIS);CHKERRQ(ierr); 945 ierr = MatSetUp(*A);CHKERRQ(ierr); 946 ierr = MatSetLocalToGlobalMapping(*A,map,map);CHKERRQ(ierr); 947 PetscFunctionReturn(0); 948 } 949 950 /*MC 951 MATIS - MATIS = "is" - A matrix type to be used for using the Neumann-Neumann type preconditioners, see PCBDDC. 952 This stores the matrices in globally unassembled form. Each processor 953 assembles only its local Neumann problem and the parallel matrix vector 954 product is handled "implicitly". 955 956 Operations Provided: 957 + MatMult() 958 . MatMultAdd() 959 . MatMultTranspose() 960 . MatMultTransposeAdd() 961 . 
MatZeroEntries() 962 . MatSetOption() 963 . MatZeroRows() 964 . MatZeroRowsLocal() 965 . MatSetValues() 966 . MatSetValuesLocal() 967 . MatScale() 968 . MatGetDiagonal() 969 - MatSetLocalToGlobalMapping() 970 971 Options Database Keys: 972 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions() 973 974 Notes: Options prefix for the inner matrix are given by -is_mat_xxx 975 976 You must call MatSetLocalToGlobalMapping() before using this matrix type. 977 978 You can do matrix preallocation on the local matrix after you obtain it with 979 MatISGetLocalMat() 980 981 Level: advanced 982 983 .seealso: PC, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), PCBDDC 984 985 M*/ 986 987 #undef __FUNCT__ 988 #define __FUNCT__ "MatCreate_IS" 989 PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A) 990 { 991 PetscErrorCode ierr; 992 Mat_IS *b; 993 994 PetscFunctionBegin; 995 ierr = PetscNewLog(A,&b);CHKERRQ(ierr); 996 A->data = (void*)b; 997 ierr = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr); 998 999 A->ops->mult = MatMult_IS; 1000 A->ops->multadd = MatMultAdd_IS; 1001 A->ops->multtranspose = MatMultTranspose_IS; 1002 A->ops->multtransposeadd = MatMultTransposeAdd_IS; 1003 A->ops->destroy = MatDestroy_IS; 1004 A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS; 1005 A->ops->setvalues = MatSetValues_IS; 1006 A->ops->setvalueslocal = MatSetValuesLocal_IS; 1007 A->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_IS; 1008 A->ops->zerorows = MatZeroRows_IS; 1009 A->ops->zerorowslocal = MatZeroRowsLocal_IS; 1010 A->ops->assemblybegin = MatAssemblyBegin_IS; 1011 A->ops->assemblyend = MatAssemblyEnd_IS; 1012 A->ops->view = MatView_IS; 1013 A->ops->zeroentries = MatZeroEntries_IS; 1014 A->ops->scale = MatScale_IS; 1015 A->ops->getdiagonal = MatGetDiagonal_IS; 1016 A->ops->setoption = MatSetOption_IS; 1017 A->ops->ishermitian = MatIsHermitian_IS; 1018 A->ops->issymmetric = MatIsSymmetric_IS; 1019 A->ops->duplicate = MatDuplicate_IS; 
1020 1021 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 1022 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 1023 1024 /* zeros pointer */ 1025 b->A = 0; 1026 b->ctx = 0; 1027 b->x = 0; 1028 b->y = 0; 1029 b->mapping = 0; 1030 1031 /* special MATIS functions */ 1032 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr); 1033 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr); 1034 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr); 1035 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr); 1036 ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr); 1037 PetscFunctionReturn(0); 1038 } 1039