/*
    Creates a matrix class for using the Neumann-Neumann type preconditioners.
    This stores the matrices in globally unassembled form. Each processor
    assembles only its local Neumann problem and the parallel matrix vector
    product is handled "implicitly".

    We provide:
         MatMult()

    Currently this allows for only one subdomain per processor.
*/

#include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/

#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/*
   MatISComputeSF_Private - Builds the PetscSF that maps the rows owned by this
   process (roots, taken from B's row layout) to the rows of the local
   unassembled matrix (leaves), and allocates the root/leaf work arrays used to
   communicate per-row data (e.g. preallocation counts).
*/
PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* leaves: all rows of the local (subdomain) matrix; roots: locally owned global rows */
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(matis->mapping,&gidxs);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER refers to ilocal which is NULL */
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(matis->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
@*/
PetscErrorCode MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation (no-op for non-MATIS types) */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/*
   MatISSetPreallocation_IS - MATIS implementation of MatISSetPreallocation().
   Sums the MPIAIJ-style diagonal and off-diagonal counts on the owned rows,
   broadcasts them through the SF to the local (unassembled) rows, and uses the
   result to preallocate the local matrix in AIJ/BAIJ/SBAIJ form (overestimate).
*/
PetscErrorCode MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) {
    SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  }
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* rootdata[i] = total (diag + offdiag) requested nonzeros for owned row i */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* overlap the SF broadcast with the local size queries */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a local row cannot have more nonzeros than local columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* reuse leafdata in-place: convert scalar row counts into block-row counts */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* SBAIJ stores only the upper triangle: subtract the (at most i) lower entries */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/*
   MatISGetMPIXAIJ_IS - Assembles the globally unassembled MATIS matrix into a
   parallel AIJ/BAIJ/SBAIJ matrix, computing a sufficient (overestimated)
   preallocation when reuse == MAT_INITIAL_MATRIX.
*/
PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  /* info on mat */
  /* ISLocalToGlobalMapping rmapping,cmapping; */
  PetscInt       bs,rows,cols;
  PetscInt       lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,issbaij_red;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  PetscInt       *local_indices,*global_indices;
  /* work */
  PetscInt       i,j,index_row;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* MISSING CHECKS
     - rectangular case not covered (it is not allowed by MATIS)
  */
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) {
    /* uniprocessor case: the local matrix already is the assembled matrix */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  /* ierr = MatGetLocalToGlobalMapping(mat,&rmapping,&cmapping);CHKERRQ(ierr); */
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  /* work */
  ierr = PetscMalloc1(local_rows,&local_indices);CHKERRQ(ierr);
  for (i=0;i<local_rows;i++) local_indices[i]=i;
  /* map indices of local mat to global values */
  ierr = PetscMalloc1(PetscMax(local_cols,local_rows),&global_indices);CHKERRQ(ierr);
  /* ierr = ISLocalToGlobalMappingApply(rmapping,local_rows,local_indices,global_indices);CHKERRQ(ierr); */
  ierr = ISLocalToGlobalMappingApply(matis->mapping,local_rows,local_indices,global_indices);CHKERRQ(ierr);

  if (issbaij) {
    /* make MatGetRow return the (stored) upper-triangular part of each row */
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }

  if (reuse == MAT_INITIAL_MATRIX) {
    Mat         new_mat;
    MatType     new_mat_type;
    Vec         vec_dnz,vec_onz;
    PetscScalar *my_dnz,*my_onz;
    PetscInt    *dnz,*onz,*mat_ranges,*row_ownership;
    PetscInt    index_col,owner;

    /* determining new matrix type: SBAIJ only if every subdomain matrix is SBAIJ */
    ierr = MPI_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),&new_mat);CHKERRQ(ierr);
    ierr = MatSetSizes(new_mat,PETSC_DECIDE,PETSC_DECIDE,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(new_mat,bs);CHKERRQ(ierr);
    ierr = MatSetType(new_mat,new_mat_type);CHKERRQ(ierr);
    ierr = MatSetUp(new_mat);CHKERRQ(ierr);
    ierr = MatGetLocalSize(new_mat,&lrows,&lcols);CHKERRQ(ierr);

    /*
      preallocation
    */

    ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)new_mat),lrows,lcols,dnz,onz);CHKERRQ(ierr);
    /*
       Some vectors are needed to sum up properly on shared interface dofs.
       Preallocation macros cannot do the job.
    */
216 Note that preallocation is not exact, since it overestimates nonzeros 217 */ 218 ierr = MatCreateVecs(new_mat,NULL,&vec_dnz);CHKERRQ(ierr); 219 /* ierr = VecSetLocalToGlobalMapping(vec_dnz,rmapping);CHKERRQ(ierr); */ 220 ierr = VecSetLocalToGlobalMapping(vec_dnz,matis->mapping);CHKERRQ(ierr); 221 ierr = VecDuplicate(vec_dnz,&vec_onz);CHKERRQ(ierr); 222 /* All processes need to compute entire row ownership */ 223 ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr); 224 ierr = MatGetOwnershipRanges(new_mat,(const PetscInt**)&mat_ranges);CHKERRQ(ierr); 225 for (i=0;i<nsubdomains;i++) { 226 for (j=mat_ranges[i];j<mat_ranges[i+1];j++) { 227 row_ownership[j]=i; 228 } 229 } 230 231 /* 232 my_dnz and my_onz contains exact contribution to preallocation from each local mat 233 then, they will be summed up properly. This way, preallocation is always sufficient 234 */ 235 ierr = PetscMalloc1(local_rows,&my_dnz);CHKERRQ(ierr); 236 ierr = PetscMalloc1(local_rows,&my_onz);CHKERRQ(ierr); 237 ierr = PetscMemzero(my_dnz,local_rows*sizeof(*my_dnz));CHKERRQ(ierr); 238 ierr = PetscMemzero(my_onz,local_rows*sizeof(*my_onz));CHKERRQ(ierr); 239 /* preallocation as a MATAIJ */ 240 if (isdense) { /* special case for dense local matrices */ 241 for (i=0;i<local_rows;i++) { 242 index_row = global_indices[i]; 243 for (j=i;j<local_rows;j++) { 244 owner = row_ownership[index_row]; 245 index_col = global_indices[j]; 246 if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */ 247 my_dnz[i] += 1.0; 248 } else { /* offdiag block */ 249 my_onz[i] += 1.0; 250 } 251 /* same as before, interchanging rows and cols */ 252 if (i != j) { 253 owner = row_ownership[index_col]; 254 if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) { 255 my_dnz[j] += 1.0; 256 } else { 257 my_onz[j] += 1.0; 258 } 259 } 260 } 261 } 262 } else { 263 for (i=0;i<local_rows;i++) { 264 PetscInt ncols; 265 const PetscInt *cols; 266 index_row = global_indices[i]; 267 ierr 
= MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr); 268 for (j=0;j<ncols;j++) { 269 owner = row_ownership[index_row]; 270 index_col = global_indices[cols[j]]; 271 if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */ 272 my_dnz[i] += 1.0; 273 } else { /* offdiag block */ 274 my_onz[i] += 1.0; 275 } 276 /* same as before, interchanging rows and cols */ 277 if (issbaij) { 278 owner = row_ownership[index_col]; 279 if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) { 280 my_dnz[j] += 1.0; 281 } else { 282 my_onz[j] += 1.0; 283 } 284 } 285 } 286 ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr); 287 } 288 } 289 ierr = VecSet(vec_dnz,0.0);CHKERRQ(ierr); 290 ierr = VecSet(vec_onz,0.0);CHKERRQ(ierr); 291 if (local_rows) { /* multilevel guard */ 292 ierr = VecSetValuesLocal(vec_dnz,local_rows,local_indices,my_dnz,ADD_VALUES);CHKERRQ(ierr); 293 ierr = VecSetValuesLocal(vec_onz,local_rows,local_indices,my_onz,ADD_VALUES);CHKERRQ(ierr); 294 } 295 ierr = VecAssemblyBegin(vec_dnz);CHKERRQ(ierr); 296 ierr = VecAssemblyBegin(vec_onz);CHKERRQ(ierr); 297 ierr = VecAssemblyEnd(vec_dnz);CHKERRQ(ierr); 298 ierr = VecAssemblyEnd(vec_onz);CHKERRQ(ierr); 299 ierr = PetscFree(my_dnz);CHKERRQ(ierr); 300 ierr = PetscFree(my_onz);CHKERRQ(ierr); 301 ierr = PetscFree(row_ownership);CHKERRQ(ierr); 302 303 /* set computed preallocation in dnz and onz */ 304 ierr = VecGetArray(vec_dnz,&array);CHKERRQ(ierr); 305 for (i=0; i<lrows; i++) dnz[i] = (PetscInt)PetscRealPart(array[i]); 306 ierr = VecRestoreArray(vec_dnz,&array);CHKERRQ(ierr); 307 ierr = VecGetArray(vec_onz,&array);CHKERRQ(ierr); 308 for (i=0;i<lrows;i++) onz[i] = (PetscInt)PetscRealPart(array[i]); 309 ierr = VecRestoreArray(vec_onz,&array);CHKERRQ(ierr); 310 ierr = VecDestroy(&vec_dnz);CHKERRQ(ierr); 311 ierr = VecDestroy(&vec_onz);CHKERRQ(ierr); 312 313 /* Resize preallocation if overestimated */ 314 for (i=0;i<lrows;i++) { 315 dnz[i] = 
PetscMin(dnz[i],lcols); 316 onz[i] = PetscMin(onz[i],cols-lcols); 317 } 318 /* set preallocation */ 319 ierr = MatMPIAIJSetPreallocation(new_mat,0,dnz,0,onz);CHKERRQ(ierr); 320 for (i=0;i<lrows/bs;i++) { 321 dnz[i] = dnz[i*bs]/bs; 322 onz[i] = onz[i*bs]/bs; 323 } 324 ierr = MatMPIBAIJSetPreallocation(new_mat,bs,0,dnz,0,onz);CHKERRQ(ierr); 325 for (i=0;i<lrows/bs;i++) { 326 dnz[i] = dnz[i]-i; 327 } 328 ierr = MatMPISBAIJSetPreallocation(new_mat,bs,0,dnz,0,onz);CHKERRQ(ierr); 329 ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 330 *M = new_mat; 331 } else { 332 PetscInt mbs,mrows,mcols; 333 /* some checks */ 334 ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr); 335 ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr); 336 if (mrows != rows) { 337 SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows); 338 } 339 if (mrows != rows) { 340 SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols); 341 } 342 if (mbs != bs) { 343 SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. 
Wrong block size (%d != %d)",bs,mbs); 344 } 345 ierr = MatZeroEntries(*M);CHKERRQ(ierr); 346 } 347 /* set local to global mappings */ 348 /* ierr = MatSetLocalToGlobalMapping(*M,rmapping,cmapping);CHKERRQ(ierr); */ 349 /* Set values */ 350 if (isdense) { /* special case for dense local matrices */ 351 ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr); 352 ierr = MatDenseGetArray(matis->A,&array);CHKERRQ(ierr); 353 ierr = MatSetValues(*M,local_rows,global_indices,local_cols,global_indices,array,ADD_VALUES);CHKERRQ(ierr); 354 ierr = MatDenseRestoreArray(matis->A,&array);CHKERRQ(ierr); 355 ierr = PetscFree(local_indices);CHKERRQ(ierr); 356 ierr = PetscFree(global_indices);CHKERRQ(ierr); 357 } else { /* very basic values insertion for all other matrix types */ 358 ierr = PetscFree(local_indices);CHKERRQ(ierr); 359 for (i=0;i<local_rows;i++) { 360 ierr = MatGetRow(matis->A,i,&j,(const PetscInt**)&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr); 361 /* ierr = MatSetValuesLocal(*M,1,&i,j,local_indices,array,ADD_VALUES);CHKERRQ(ierr); */ 362 ierr = ISLocalToGlobalMappingApply(matis->mapping,j,local_indices,global_indices);CHKERRQ(ierr); 363 ierr = ISLocalToGlobalMappingApply(matis->mapping,1,&i,&index_row);CHKERRQ(ierr); 364 ierr = MatSetValues(*M,1,&index_row,j,global_indices,array,ADD_VALUES);CHKERRQ(ierr); 365 ierr = MatRestoreRow(matis->A,i,&j,(const PetscInt**)&local_indices,(const PetscScalar**)&array);CHKERRQ(ierr); 366 } 367 ierr = PetscFree(global_indices);CHKERRQ(ierr); 368 } 369 ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 370 ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 371 if (isdense) { 372 ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr); 373 } 374 if (issbaij) { 375 ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr); 376 } 377 PetscFunctionReturn(0); 378 } 379 380 #undef __FUNCT__ 381 #define __FUNCT__ "MatISGetMPIXAIJ" 382 /*@ 383 MatISGetMPIXAIJ - Converts MATIS matrix 
into a parallel AIJ format 384 385 Input Parameter: 386 . mat - the matrix (should be of type MATIS) 387 . reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 388 389 Output Parameter: 390 . newmat - the matrix in AIJ format 391 392 Level: developer 393 394 Notes: 395 396 .seealso: MATIS 397 @*/ 398 PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat) 399 { 400 PetscErrorCode ierr; 401 402 PetscFunctionBegin; 403 PetscValidHeaderSpecific(mat,MAT_CLASSID,1); 404 PetscValidLogicalCollectiveEnum(mat,reuse,2); 405 PetscValidPointer(newmat,3); 406 if (reuse != MAT_INITIAL_MATRIX) { 407 PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3); 408 PetscCheckSameComm(mat,1,*newmat,3); 409 } 410 ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr); 411 PetscFunctionReturn(0); 412 } 413 414 #undef __FUNCT__ 415 #define __FUNCT__ "MatDuplicate_IS" 416 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat) 417 { 418 PetscErrorCode ierr; 419 Mat_IS *matis = (Mat_IS*)(mat->data); 420 PetscInt bs,m,n,M,N; 421 Mat B,localmat; 422 423 PetscFunctionBegin; 424 ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr); 425 ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr); 426 ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr); 427 ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,matis->mapping,&B);CHKERRQ(ierr); 428 ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr); 429 ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr); 430 ierr = MatDestroy(&localmat);CHKERRQ(ierr); 431 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 432 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 433 *newmat = B; 434 PetscFunctionReturn(0); 435 } 436 437 #undef __FUNCT__ 438 #define __FUNCT__ "MatIsHermitian_IS" 439 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool *flg) 440 { 441 PetscErrorCode ierr; 442 Mat_IS *matis = (Mat_IS*)A->data; 443 PetscBool local_sym; 444 445 PetscFunctionBegin; 446 ierr 
= MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr); 447 ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr); 448 PetscFunctionReturn(0); 449 } 450 451 #undef __FUNCT__ 452 #define __FUNCT__ "MatIsSymmetric_IS" 453 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool *flg) 454 { 455 PetscErrorCode ierr; 456 Mat_IS *matis = (Mat_IS*)A->data; 457 PetscBool local_sym; 458 459 PetscFunctionBegin; 460 ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr); 461 ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr); 462 PetscFunctionReturn(0); 463 } 464 465 #undef __FUNCT__ 466 #define __FUNCT__ "MatDestroy_IS" 467 PetscErrorCode MatDestroy_IS(Mat A) 468 { 469 PetscErrorCode ierr; 470 Mat_IS *b = (Mat_IS*)A->data; 471 472 PetscFunctionBegin; 473 ierr = MatDestroy(&b->A);CHKERRQ(ierr); 474 ierr = VecScatterDestroy(&b->ctx);CHKERRQ(ierr); 475 ierr = VecDestroy(&b->x);CHKERRQ(ierr); 476 ierr = VecDestroy(&b->y);CHKERRQ(ierr); 477 ierr = ISLocalToGlobalMappingDestroy(&b->mapping);CHKERRQ(ierr); 478 ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr); 479 ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr); 480 ierr = PetscFree(A->data);CHKERRQ(ierr); 481 ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr); 482 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr); 483 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr); 484 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr); 485 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr); 486 PetscFunctionReturn(0); 487 } 488 489 #undef __FUNCT__ 490 #define __FUNCT__ "MatMult_IS" 491 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y) 492 { 493 PetscErrorCode ierr; 494 Mat_IS *is = (Mat_IS*)A->data; 495 PetscScalar zero = 0.0; 496 497 
  PetscFunctionBegin;
  /* scatter the global vector x into the local work vector */
  ierr = VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);

  /* scatter product back into global memory; the reverse ADD_VALUES scatter
     sums the subdomain contributions on shared interface dofs */
  ierr = VecSet(y,zero);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_IS"
/* v3 = v2 + A*v1, using a temporary only when v3 aliases v2. */
PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A * v1.*/
  if (v3 != v2) {
    ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    /* v3 aliases v2: compute into a temporary to avoid clobbering the addend */
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_IS"
/* y = A'*x, same implicit scheme as MatMult_IS with the transposed local product. */
PetscErrorCode MatMultTranspose_IS(Mat A,Vec x,Vec y)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* y = A' * x */
  /* scatter the global vector x into the local work vector */
  ierr = VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMultTranspose(is->A,is->x,is->y);CHKERRQ(ierr);

  /* scatter product back into global vector */
  ierr = VecSet(y,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_IS"
/* v3 = v2 + A'*v1, using a temporary only when v3 aliases v2. */
PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A' * v1.*/
  if (v3 != v2) {
    ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    /* v3 aliases v2: compute into a temporary to avoid clobbering the addend */
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_IS"
/* Views each process' local matrix through the singleton (per-process) viewer. */
PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
  ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
  ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/*
   MatSetLocalToGlobalMapping_IS - Stores the (identical row/column) mapping and
   (re)creates the local matrix, the local work vectors and the global-to-local
   scatter used by the implicit matrix-vector products.
*/
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       n,bs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            global;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  if (rmapping != cmapping) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MATIS requires the row and column mappings to be identical");
  if (is->mapping) { /* Currently this destroys the objects that will be recreated below. */
    /* Is there anything else that should be checked here? */
    ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
    ierr = VecDestroy(&is->x);CHKERRQ(ierr);
    ierr = VecDestroy(&is->y);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&is->ctx);CHKERRQ(ierr);
    ierr = MatDestroy(&is->A);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
    ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);
  }
  ierr = PetscObjectReference((PetscObject)rmapping);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
  is->mapping = rmapping;
/*
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);
*/

  /* Create the local matrix A */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&bs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  if (bs > 1) {
    ierr = MatSetType(is->A,MATSEQBAIJ);CHKERRQ(ierr);
  } else {
    ierr = MatSetType(is->A,MATSEQAIJ);CHKERRQ(ierr);
  }
  ierr = MatSetSizes(is->A,n,n,n,n);CHKERRQ(ierr);
  ierr = MatSetBlockSize(is->A,bs);CHKERRQ(ierr);
  /* the local matrix is customizable from the command line via the "is_" prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = VecCreate(PETSC_COMM_SELF,&is->x);CHKERRQ(ierr);
  ierr = VecSetBlockSize(is->x,bs);CHKERRQ(ierr);
  ierr = VecSetSizes(is->x,n,n);CHKERRQ(ierr);
  ierr = VecSetOptionsPrefix(is->x,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = VecAppendOptionsPrefix(is->x,"is_");CHKERRQ(ierr);
  ierr = VecSetFromOptions(is->x);CHKERRQ(ierr);
  ierr = VecDuplicate(is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatter */
  ierr = ISCreateStride(PETSC_COMM_SELF,n,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = MatCreateVecs(A,&global,NULL);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,is->x,to,&is->ctx);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_IS"
/*
   MatSetValues_IS - Translates global row/column indices to local ones and
   inserts into the local matrix.
   NOTE(review): ISG2LMapApply is a file-scope helper macro (#undef'd right
   below); its definition is not visible in this chunk — confirm it wraps
   ISGlobalToLocalMappingApply().
*/
PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       rows_l[2048],cols_l[2048];  /* stack buffers bound the index counts */
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
#endif
  ierr = ISG2LMapApply(is->mapping,m,rows,rows_l);CHKERRQ(ierr);
  ierr = ISG2LMapApply(is->mapping,n,cols,cols_l);CHKERRQ(ierr);
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef ISG2LMapSetUp
#undef ISG2LMapApply

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesLocal_IS"
/* Local indices can be forwarded directly to the local matrix. */
PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlockedLocal_IS"
/* Blocked local indices forwarded directly to the local matrix. */
PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_IS"
/*
   MatZeroRows_IS - Maps the global rows to local ones (dropping rows not in
   this subdomain) and delegates to MatZeroRowsLocal().
*/
PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscInt       n_l = 0, *rows_l = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  if (n) {
    ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
    /* IS_GTOLM_DROP: rows not present in this subdomain are discarded */
    ierr = ISGlobalToLocalMappingApply(is->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
  }
  ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
  ierr = PetscFree(rows_l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsLocal_IS"
/*
   MatZeroRowsLocal_IS - Zeroes the given local rows of the local matrix,
   splitting the diagonal value among the subdomains sharing each dof so that
   the implicit (summed) MatMult still sees diag on the assembled diagonal.
*/
PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscInt       i;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->x as a "counting vector": after the reverse-add/forward
       scatters below it holds, for each local dof, the number of subdomains
       sharing it, which is needed for MatMult_IS to work properly on the
       interface nodes.
    */
    Vec         counter;
    PetscScalar one=1.0, zero=0.0;
    ierr = MatCreateVecs(A,&counter,NULL);CHKERRQ(ierr);
    ierr = VecSet(counter,zero);CHKERRQ(ierr);
    ierr = VecSet(is->x,one);CHKERRQ(ierr);
    /* reverse ADD scatter counts the subdomains touching each global dof;
       forward scatter brings the multiplicities back to the local vector */
    ierr = VecScatterBegin(is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd  (is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd  (is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    ierr = VecGetArray(is->x,&array);CHKERRQ(ierr);
    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      /* split diag by the dof multiplicity so the summed diagonal equals diag;
         assumes array[rows[i]] >= 1 for every locally present dof — TODO confirm */
      ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd  (is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = VecRestoreArray(is->x,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_IS"
/* Assembly is delegated to the local matrix. */
PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_IS"
/* Assembly is delegated to the local matrix. */
PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat_IS"
/* Returns a borrowed reference to the local matrix (no reference taken). */
PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
{
  Mat_IS *is = (Mat_IS*)mat->data;

  PetscFunctionBegin;
  *local = is->A;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix usually MATSEQAIJ

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat_IS"
/* Replaces the local matrix, checking size compatibility and taking a reference. */
PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       nrows,ncols,orows,ocols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (is->A) {
    ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
    ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
    if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
  }
  /* reference before destroy so the call is safe when local == is->A */
  ierr = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  is->A = local;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Set the local matrix stored inside a MATIS matrix.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix usually MATSEQAIJ

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_IS"
/* Zeroing is delegated to the local matrix. */
PetscErrorCode MatZeroEntries_IS(Mat A)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatScale_IS"
/* Scaling is delegated to the local matrix. */
PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatScale(is->A,a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_IS"
/* Assembles the global diagonal by summing the local diagonals on shared dofs. */
PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get diagonal of the local matrix */
  ierr = MatGetDiagonal(is->A,is->x);CHKERRQ(ierr);

  /* scatter diagonal back into global vector */
  ierr = VecSet(v,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_IS"
/* Options are forwarded to the local matrix (continues past this chunk). */
PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
931 ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 932 PetscFunctionReturn(0); 933 } 934 935 #undef __FUNCT__ 936 #define __FUNCT__ "MatCreateIS" 937 /*@ 938 MatCreateIS - Creates a "process" unassmembled matrix, it is assembled on each 939 process but not across processes. 940 941 Input Parameters: 942 + comm - MPI communicator that will share the matrix 943 . bs - local and global block size of the matrix 944 . m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products 945 - map - mapping that defines the global number for each local number 946 947 Output Parameter: 948 . A - the resulting matrix 949 950 Level: advanced 951 952 Notes: See MATIS for more details 953 m and n are NOT related to the size of the map, they are the size of the part of the vector owned 954 by that process. m + nghosts (or n + nghosts) is the length of map since map maps all local points 955 plus the ghost points to global indices. 956 957 .seealso: MATIS, MatSetLocalToGlobalMapping() 958 @*/ 959 PetscErrorCode MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping map,Mat *A) 960 { 961 PetscErrorCode ierr; 962 963 PetscFunctionBegin; 964 ierr = MatCreate(comm,A);CHKERRQ(ierr); 965 ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr); 966 ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr); 967 ierr = MatSetType(*A,MATIS);CHKERRQ(ierr); 968 ierr = MatSetUp(*A);CHKERRQ(ierr); 969 ierr = MatSetLocalToGlobalMapping(*A,map,map);CHKERRQ(ierr); 970 PetscFunctionReturn(0); 971 } 972 973 /*MC 974 MATIS - MATIS = "is" - A matrix type to be used for using the Neumann-Neumann type preconditioners, see PCBDDC. 975 This stores the matrices in globally unassembled form. Each processor 976 assembles only its local Neumann problem and the parallel matrix vector 977 product is handled "implicitly". 978 979 Operations Provided: 980 + MatMult() 981 . MatMultAdd() 982 . MatMultTranspose() 983 . MatMultTransposeAdd() 984 . 
MatZeroEntries() 985 . MatSetOption() 986 . MatZeroRows() 987 . MatZeroRowsLocal() 988 . MatSetValues() 989 . MatSetValuesLocal() 990 . MatScale() 991 . MatGetDiagonal() 992 - MatSetLocalToGlobalMapping() 993 994 Options Database Keys: 995 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions() 996 997 Notes: Options prefix for the inner matrix are given by -is_mat_xxx 998 999 You must call MatSetLocalToGlobalMapping() before using this matrix type. 1000 1001 You can do matrix preallocation on the local matrix after you obtain it with 1002 MatISGetLocalMat() 1003 1004 Level: advanced 1005 1006 .seealso: PC, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), PCBDDC 1007 1008 M*/ 1009 1010 #undef __FUNCT__ 1011 #define __FUNCT__ "MatCreate_IS" 1012 PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A) 1013 { 1014 PetscErrorCode ierr; 1015 Mat_IS *b; 1016 1017 PetscFunctionBegin; 1018 ierr = PetscNewLog(A,&b);CHKERRQ(ierr); 1019 A->data = (void*)b; 1020 ierr = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr); 1021 1022 A->ops->mult = MatMult_IS; 1023 A->ops->multadd = MatMultAdd_IS; 1024 A->ops->multtranspose = MatMultTranspose_IS; 1025 A->ops->multtransposeadd = MatMultTransposeAdd_IS; 1026 A->ops->destroy = MatDestroy_IS; 1027 A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS; 1028 A->ops->setvalues = MatSetValues_IS; 1029 A->ops->setvalueslocal = MatSetValuesLocal_IS; 1030 A->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_IS; 1031 A->ops->zerorows = MatZeroRows_IS; 1032 A->ops->zerorowslocal = MatZeroRowsLocal_IS; 1033 A->ops->assemblybegin = MatAssemblyBegin_IS; 1034 A->ops->assemblyend = MatAssemblyEnd_IS; 1035 A->ops->view = MatView_IS; 1036 A->ops->zeroentries = MatZeroEntries_IS; 1037 A->ops->scale = MatScale_IS; 1038 A->ops->getdiagonal = MatGetDiagonal_IS; 1039 A->ops->setoption = MatSetOption_IS; 1040 A->ops->ishermitian = MatIsHermitian_IS; 1041 A->ops->issymmetric = MatIsSymmetric_IS; 1042 
A->ops->duplicate = MatDuplicate_IS; 1043 1044 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr); 1045 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr); 1046 1047 /* zeros pointer */ 1048 b->A = 0; 1049 b->ctx = 0; 1050 b->x = 0; 1051 b->y = 0; 1052 b->mapping = 0; 1053 b->sf = 0; 1054 b->sf_rootdata = 0; 1055 b->sf_leafdata = 0; 1056 1057 /* special MATIS functions */ 1058 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr); 1059 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr); 1060 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr); 1061 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr); 1062 ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr); 1063 PetscFunctionReturn(0); 1064 } 1065