/*
   Creates a matrix class for using the Neumann-Neumann type preconditioners.
   This stores the matrices in globally unassembled form. Each processor
   assembles only its local Neumann problem and the parallel matrix vector
   product is handled "implicitly".

   We provide:
       MatMult()

   Currently this allows for only one subdomain per processor.

*/

#include <../src/mat/impls/is/matis.h> /*I "petscmat.h" I*/

static PetscErrorCode MatISComputeSF_Private(Mat);

#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/*
   Builds the star forest connecting the locally owned (root) rows of B with
   the rows of the unassembled local matrix (leaves), and allocates the
   root/leaf integer work arrays used to exchange per-row counts.
*/
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* leaves: every row of the local (subdomain) matrix; roots: owned global rows of B */
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  /* the local-to-global map gives, for each leaf, the global row it refers to */
  ierr = ISLocalToGlobalMappingGetIndices(matis->mapping,&gidxs);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER refers to ilocal which is NULL */
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(matis->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  
d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e. 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e. 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.
.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
@*/
PetscErrorCode MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation (no-op for non-MATIS matrices) */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/*
   MATIS implementation of MatISSetPreallocation(): translates the
   MPIAIJ-style (diagonal/off-diagonal) row counts, given for the OWNED
   rows, into per-row counts for the unassembled LOCAL matrix via the
   star forest, then preallocates the local matrix for all supported
   sequential formats.
*/
PetscErrorCode MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) {
    SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  }
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* total (diagonal + off-diagonal) estimate for each owned row */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* broadcast owned-row counts to the local rows referencing them;
     local size/blocksize queries are overlapped with the communication */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a local row cannot have more nonzeros than the local number of columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr =
MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress to per-block-row counts for BAIJ (only the first nleaves/bs entries are reused) */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* SBAIJ stores the upper triangle only: subtract an estimate of the strictly lower part */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/*
   MATIS implementation of MatISGetMPIXAIJ(): assembles the globally
   unassembled operator into a parallel AIJ/BAIJ/SBAIJ matrix M, either
   creating it (MAT_INITIAL_MATRIX) or reusing a previous result
   (MAT_REUSE_MATRIX).
*/
PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  /* info on mat */
  /* ISLocalToGlobalMapping rmapping,cmapping; */
  PetscInt       bs,rows,cols;
  PetscInt       lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,issbaij_red;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  PetscInt       *local_indices,*global_indices;
  /* work */
  PetscInt       i,j,index_row;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* MISSING CHECKS
     - rectangular case not covered (it is not allowed by MATIS)
  */
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) {
    /* uniprocessor case: the local matrix already is the assembled operator */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  /* ierr = MatGetLocalToGlobalMapping(mat,&rmapping,&cmapping);CHKERRQ(ierr); */
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr =
PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  /* work */
  ierr = PetscMalloc1(local_rows,&local_indices);CHKERRQ(ierr);
  for (i=0;i<local_rows;i++) local_indices[i]=i;
  /* map indices of local mat to global values */
  ierr = PetscMalloc1(PetscMax(local_cols,local_rows),&global_indices);CHKERRQ(ierr);
  /* ierr = ISLocalToGlobalMappingApply(rmapping,local_rows,local_indices,global_indices);CHKERRQ(ierr); */
  ierr = ISLocalToGlobalMappingApply(matis->mapping,local_rows,local_indices,global_indices);CHKERRQ(ierr);

  if (issbaij) {
    /* let MatGetRow() traverse the stored upper triangular part */
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }

  if (reuse == MAT_INITIAL_MATRIX) {
    Mat       new_mat;
    MatType   new_mat_type;
    PetscInt  *my_dnz,*my_onz;
    PetscInt  *dnz,*onz,*mat_ranges,*row_ownership;
    PetscInt  index_col,owner;
    PetscBool maxreduce = PETSC_FALSE;

    /* determining new matrix type: SBAIJ only if every subdomain matrix is SBAIJ */
    ierr = MPI_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),&new_mat);CHKERRQ(ierr);
    ierr = MatSetSizes(new_mat,PETSC_DECIDE,PETSC_DECIDE,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(new_mat,bs);CHKERRQ(ierr);
    ierr = MatSetType(new_mat,new_mat_type);CHKERRQ(ierr);
    ierr = MatSetUp(new_mat);CHKERRQ(ierr);
    ierr = MatGetLocalSize(new_mat,&lrows,&lcols);CHKERRQ(ierr);
    if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
      ierr = MatISComputeSF_Private(mat);CHKERRQ(ierr);
    }

    /*
      preallocation
    */

    ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)new_mat),lrows,lcols,dnz,onz);CHKERRQ(ierr);
    /*
       An SF reduce is needed to sum up properly on shared interface dofs.
       Note that preallocation is not exact, since it overestimates nonzeros
    */
    /* All processes need to compute entire row ownership */
    ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
    ierr = MatGetOwnershipRanges(new_mat,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
    for (i=0;i<nsubdomains;i++) {
      for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
        row_ownership[j]=i;
      }
    }

    /*
       my_dnz and my_onz contains exact contribution to preallocation from each local mat
       then, they will be summed up properly. This way, preallocation is always sufficient
    */
    ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
    /* preallocation as a MATAIJ */
    if (isdense) { /* special case for dense local matrices */
      /* only the upper triangle (j >= i) is visited; the (j,i) entry is
         accounted for symmetrically in the inner "interchanging" branch */
      for (i=0;i<local_rows;i++) {
        index_row = global_indices[i];
        for (j=i;j<local_rows;j++) {
          owner = row_ownership[index_row];
          index_col = global_indices[j];
          if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
            my_dnz[i] += 1;
          } else { /* offdiag block */
            my_onz[i] += 1;
          }
          /* same as before, interchanging rows and cols */
          if (i != j) {
            owner = row_ownership[index_col];
            if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
              my_dnz[j] += 1;
            } else {
              my_onz[j] += 1;
            }
          }
        }
      }
    } else {
      /* sparse local matrix: walk the stored rows (upper triangle only for SBAIJ) */
      for (i=0;i<local_rows;i++) {
        PetscInt ncols;
        const PetscInt *cols;
        index_row = global_indices[i];
        ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
        for (j=0;j<ncols;j++) {
          owner = row_ownership[index_row];
          index_col = global_indices[cols[j]];
          if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
            my_dnz[i] += 1;
          } else { /* offdiag block */
            my_onz[i] += 1;
          }
          /* same as before, interchanging rows and cols */
          if (issbaij) {
            owner = row_ownership[index_col];
            if (index_row >
mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) { 277 my_dnz[j] += 1; 278 } else { 279 my_onz[j] += 1; 280 } 281 } 282 } 283 ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr); 284 } 285 } 286 ierr = PetscFree(row_ownership);CHKERRQ(ierr); 287 if (maxreduce) { 288 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr); 289 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr); 290 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr); 291 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr); 292 } else { 293 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr); 294 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr); 295 ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr); 296 ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr); 297 } 298 ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr); 299 300 /* Resize preallocation if overestimated */ 301 for (i=0;i<lrows;i++) { 302 dnz[i] = PetscMin(dnz[i],lcols); 303 onz[i] = PetscMin(onz[i],cols-lcols); 304 } 305 /* set preallocation */ 306 ierr = MatMPIAIJSetPreallocation(new_mat,0,dnz,0,onz);CHKERRQ(ierr); 307 for (i=0;i<lrows/bs;i++) { 308 dnz[i] = dnz[i*bs]/bs; 309 onz[i] = onz[i*bs]/bs; 310 } 311 ierr = MatMPIBAIJSetPreallocation(new_mat,bs,0,dnz,0,onz);CHKERRQ(ierr); 312 for (i=0;i<lrows/bs;i++) { 313 dnz[i] = dnz[i]-i; 314 } 315 ierr = MatMPISBAIJSetPreallocation(new_mat,bs,0,dnz,0,onz);CHKERRQ(ierr); 316 ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 317 *M = new_mat; 318 } else { 319 PetscInt mbs,mrows,mcols; 320 /* some checks */ 321 ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr); 322 ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr); 323 if (mrows != rows) { 324 SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. 
Wrong number of rows (%d != %d)",rows,mrows); 325 } 326 if (mrows != rows) { 327 SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols); 328 } 329 if (mbs != bs) { 330 SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs); 331 } 332 ierr = MatZeroEntries(*M);CHKERRQ(ierr); 333 } 334 /* set local to global mappings */ 335 /* ierr = MatSetLocalToGlobalMapping(*M,rmapping,cmapping);CHKERRQ(ierr); */ 336 /* Set values */ 337 if (isdense) { /* special case for dense local matrices */ 338 ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr); 339 ierr = MatDenseGetArray(matis->A,&array);CHKERRQ(ierr); 340 ierr = MatSetValues(*M,local_rows,global_indices,local_cols,global_indices,array,ADD_VALUES);CHKERRQ(ierr); 341 ierr = MatDenseRestoreArray(matis->A,&array);CHKERRQ(ierr); 342 ierr = PetscFree(local_indices);CHKERRQ(ierr); 343 ierr = PetscFree(global_indices);CHKERRQ(ierr); 344 } else { /* very basic values insertion for all other matrix types */ 345 ierr = PetscFree(local_indices);CHKERRQ(ierr); 346 for (i=0;i<local_rows;i++) { 347 ierr = MatGetRow(matis->A,i,&j,(const PetscInt**)&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr); 348 /* ierr = MatSetValuesLocal(*M,1,&i,j,local_indices,array,ADD_VALUES);CHKERRQ(ierr); */ 349 ierr = ISLocalToGlobalMappingApply(matis->mapping,j,local_indices,global_indices);CHKERRQ(ierr); 350 ierr = ISLocalToGlobalMappingApply(matis->mapping,1,&i,&index_row);CHKERRQ(ierr); 351 ierr = MatSetValues(*M,1,&index_row,j,global_indices,array,ADD_VALUES);CHKERRQ(ierr); 352 ierr = MatRestoreRow(matis->A,i,&j,(const PetscInt**)&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr); 353 } 354 ierr = PetscFree(global_indices);CHKERRQ(ierr); 355 } 356 ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 357 ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 358 if (isdense) { 359 
ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameter:
.  mat - the matrix (should be of type MATIS)
.  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes:

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    /* a reused matrix must already exist on the same communicator */
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
  }
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_IS"
/* Duplicates a MATIS matrix by creating a new MATIS with the same layout and
   mapping, and duplicating the local matrix (with or without values, per op). */
PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  PetscInt       bs,m,n,M,N;
  Mat            B,localmat;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
  ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,matis->mapping,&B);CHKERRQ(ierr);
  ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
  /* MatISSetLocalMat takes a reference, so localmat can be destroyed here */
  ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
  ierr = MatDestroy(&localmat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
*newmat = B;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsHermitian_IS"
/* The unassembled operator is declared Hermitian iff every local matrix is. */
PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
  /* logical AND across all subdomains */
  ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsSymmetric_IS"
/* The unassembled operator is declared symmetric iff every local matrix is. */
PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
  /* logical AND across all subdomains */
  ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/* Frees all MATIS-owned objects and removes the composed methods. */
PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->ctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&b->mapping);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr =
PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_IS"
/* y = A*x for the unassembled operator: gather x locally, apply the local
   matrix, then sum the subdomain products back into the global vector. */
PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
{
  PetscErrorCode ierr;
  Mat_IS         *is   = (Mat_IS*)A->data;
  PetscScalar    zero  = 0.0;

  PetscFunctionBegin;
  /* scatter the global vector x into the local work vector */
  ierr = VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);

  /* scatter product back into global memory */
  ierr = VecSet(y,zero);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_IS"
PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A * v1.*/
  if (v3 != v2) {
    ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    /* aliased output: compute into a temporary so v2 is not clobbered by MatMult */
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_IS"
PetscErrorCode MatMultTranspose_IS(Mat A,Vec x,Vec y)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* y = A' * x */
  /* scatter the global vector x into the local work vector */
  ierr =
VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMultTranspose(is->A,is->x,is->y);CHKERRQ(ierr);

  /* scatter product back into global vector */
  ierr = VecSet(y,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_IS"
PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A' * v1.*/
  if (v3 != v2) {
    ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    /* aliased output: compute into a temporary so v2 is not clobbered */
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_IS"
/* Views each process' local matrix through a singleton sub-viewer. */
PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
  ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
  ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* Installs the (identical row/column) local-to-global mapping and (re)creates
   the local matrix, the local work vectors and the global-to-local scatter. */
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       n,bs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            global;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  if (rmapping != cmapping) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MATIS requires the row and column mappings to be identical");
  if (is->mapping) { /* Currently destroys the objects that will be created by this routine. Is there anything else that should be checked? */
    ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
    ierr = VecDestroy(&is->x);CHKERRQ(ierr);
    ierr = VecDestroy(&is->y);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&is->ctx);CHKERRQ(ierr);
    ierr = MatDestroy(&is->A);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
    ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);
  }
  /* take our own reference to the mapping */
  ierr = PetscObjectReference((PetscObject)rmapping);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
  is->mapping = rmapping;
  /*
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);
  */

  /* Create the local matrix A */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&bs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  /* default local type honors the mapping's block size; overridable via -is_ options */
  if (bs > 1) {
    ierr = MatSetType(is->A,MATSEQBAIJ);CHKERRQ(ierr);
  } else {
    ierr = MatSetType(is->A,MATSEQAIJ);CHKERRQ(ierr);
  }
  ierr = MatSetSizes(is->A,n,n,n,n);CHKERRQ(ierr);
  ierr = MatSetBlockSize(is->A,bs);CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = VecCreate(PETSC_COMM_SELF,&is->x);CHKERRQ(ierr);
  ierr =
VecSetBlockSize(is->x,bs);CHKERRQ(ierr);
  ierr = VecSetSizes(is->x,n,n);CHKERRQ(ierr);
  ierr = VecSetOptionsPrefix(is->x,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = VecAppendOptionsPrefix(is->x,"is_");CHKERRQ(ierr);
  ierr = VecSetFromOptions(is->x);CHKERRQ(ierr);
  ierr = VecDuplicate(is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatter */
  ierr = ISCreateStride(PETSC_COMM_SELF,n,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = MatCreateVecs(A,&global,NULL);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,is->x,to,&is->ctx);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_IS"
/* Translates global indices to local ones and inserts into the local matrix.
   NOTE(review): the 2048-entry stack buffers are only bounds-checked in debug
   builds; an optimized build with m or n > 2048 would overflow them — confirm
   whether callers can exceed this. */
PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       rows_l[2048],cols_l[2048];
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
#endif
  ierr = ISG2LMapApply(is->mapping,m,rows,rows_l);CHKERRQ(ierr);
  ierr = ISG2LMapApply(is->mapping,n,cols,cols_l);CHKERRQ(ierr);
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef ISG2LMapSetUp
#undef ISG2LMapApply

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesLocal_IS"
/* Indices are already local: forward directly to the local matrix. */
PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr =
MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlockedLocal_IS"
/* Block indices are already local: forward directly to the local matrix. */
PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_IS"
/* Maps the global rows to local indices (dropping rows not present in this
   subdomain) and delegates to MatZeroRowsLocal(). */
PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscInt       n_l = 0, *rows_l = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  if (n) {
    ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
    /* IS_GTOLM_DROP: rows owned by other subdomains are simply skipped here */
    ierr = ISGlobalToLocalMappingApply(is->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
  }
  ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
  ierr = PetscFree(rows_l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsLocal_IS"
/* Zeroes local rows and places diag on the diagonal, scaled so that the
   implicitly assembled operator carries exactly diag on shared rows. */
PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscInt       i;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->x as a "counting vector". This is in order to MatMult_IS
       work properly in the interface nodes.
    */
    Vec         counter;
    PetscScalar one=1.0, zero=0.0;
    /* after this block, is->x holds, per local row, the number of subdomains sharing it */
    ierr = MatCreateVecs(A,&counter,NULL);CHKERRQ(ierr);
    ierr = VecSet(counter,zero);CHKERRQ(ierr);
    ierr = VecSet(is->x,one);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    /* divide diag by the row multiplicity so the summed (assembled) diagonal equals diag */
    ierr = VecGetArray(is->x,&array);CHKERRQ(ierr);
    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = VecRestoreArray(is->x,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_IS"
/* Assembly of a MATIS reduces to assembling the local matrix. */
PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_IS"
PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat_IS"
/* Returns a borrowed reference to the local (subdomain) matrix. */
PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
{
  Mat_IS *is =
(Mat_IS*)mat->data; 781 782 PetscFunctionBegin; 783 *local = is->A; 784 PetscFunctionReturn(0); 785 } 786 787 #undef __FUNCT__ 788 #define __FUNCT__ "MatISGetLocalMat" 789 /*@ 790 MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix. 791 792 Input Parameter: 793 . mat - the matrix 794 795 Output Parameter: 796 . local - the local matrix usually MATSEQAIJ 797 798 Level: advanced 799 800 Notes: 801 This can be called if you have precomputed the nonzero structure of the 802 matrix and want to provide it to the inner matrix object to improve the performance 803 of the MatSetValues() operation. 804 805 .seealso: MATIS 806 @*/ 807 PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local) 808 { 809 PetscErrorCode ierr; 810 811 PetscFunctionBegin; 812 PetscValidHeaderSpecific(mat,MAT_CLASSID,1); 813 PetscValidPointer(local,2); 814 ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr); 815 PetscFunctionReturn(0); 816 } 817 818 #undef __FUNCT__ 819 #define __FUNCT__ "MatISSetLocalMat_IS" 820 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local) 821 { 822 Mat_IS *is = (Mat_IS*)mat->data; 823 PetscInt nrows,ncols,orows,ocols; 824 PetscErrorCode ierr; 825 826 PetscFunctionBegin; 827 if (is->A) { 828 ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr); 829 ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr); 830 if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols); 831 } 832 ierr = PetscObjectReference((PetscObject)local);CHKERRQ(ierr); 833 ierr = MatDestroy(&is->A);CHKERRQ(ierr); 834 is->A = local; 835 PetscFunctionReturn(0); 836 } 837 838 #undef __FUNCT__ 839 #define __FUNCT__ "MatISSetLocalMat" 840 /*@ 841 MatISSetLocalMat - Set the local matrix stored inside a MATIS matrix. 842 843 Input Parameter: 844 . mat - the matrix 845 . 
local - the local matrix usually MATSEQAIJ

  Output Parameter:

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_IS"
/* Zeroes the operator by zeroing the local (unassembled) matrix. */
PetscErrorCode MatZeroEntries_IS(Mat A)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatScale_IS"
/* Scales the operator by scaling the local (unassembled) matrix. */
PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatScale(is->A,a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_IS"
/* Diagonal of the implicitly assembled operator: the local diagonals are
   summed (reverse ADD scatter) over all processes sharing each global dof. */
PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get diagonal of the local matrix */
  ierr = MatGetDiagonal(is->A,is->x);CHKERRQ(ierr);

  /* scatter diagonal back into global vector, adding the local contributions */
  ierr = VecSet(v,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_IS"
/* Forwards the option to the local (unassembled) matrix. */
PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateIS"
/*@
    MatCreateIS - Creates a "process" unassembled matrix, it is assembled on each
       process but not across processes.

   Input Parameters:
+     comm    - MPI communicator that will share the matrix
.     bs      - local and global block size of the matrix
.     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
-     map     - mapping that defines the global number for each local number

   Output Parameter:
.    A - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details
          m and n are NOT related to the size of the map, they are the size of the part of the vector owned
          by that process. m + nghosts (or n + nghosts) is the length of map since map maps all local points
          plus the ghost points to global indices.

.seealso: MATIS, MatSetLocalToGlobalMapping()
@*/
PetscErrorCode MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping map,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
  ierr = MatSetUp(*A);CHKERRQ(ierr);
  /* same mapping is used for rows and columns */
  ierr = MatSetLocalToGlobalMapping(*A,map,map);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   MATIS - MATIS = "is" - A matrix type to be used for using the Neumann-Neumann type preconditioners, see PCBDDC.
   This stores the matrices in globally unassembled form. Each processor
   assembles only its local Neumann problem and the parallel matrix vector
   product is handled "implicitly".

   Operations Provided:
+  MatMult()
.  MatMultAdd()
.  MatMultTranspose()
.  MatMultTransposeAdd()
.
MatZeroEntries()
.  MatSetOption()
.  MatZeroRows()
.  MatZeroRowsLocal()
.  MatSetValues()
.  MatSetValuesLocal()
.  MatScale()
.  MatGetDiagonal()
-  MatSetLocalToGlobalMapping()

   Options Database Keys:
.  -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()

   Notes: Options prefix for the inner matrix are given by -is_mat_xxx

          You must call MatSetLocalToGlobalMapping() before using this matrix type.

          You can do matrix preallocation on the local matrix after you obtain it with
          MatISGetLocalMat()

  Level: advanced

.seealso: PC, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), PCBDDC

M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/* Constructor for the MATIS type: allocates the private data structure,
   installs the operations table and composes the MATIS-specific methods. */
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;
  /* start from an empty operations table; only the ops below are supported */
  ierr = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);

  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               =
MatDuplicate_IS;

  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);

  /* zeros pointer: fields are filled later (e.g. by MatSetLocalToGlobalMapping) */
  b->A           = 0;
  b->ctx         = 0;
  b->x           = 0;
  b->y           = 0;
  b->mapping     = 0;
  b->sf          = 0;
  b->sf_rootdata = 0;
  b->sf_leafdata = 0;

  /* special MATIS functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}