/*
    Creates a matrix class for using the Neumann-Neumann type preconditioners.
    This stores the matrices in globally unassembled form. Each processor
    assembles only its local Neumann problem and the parallel matrix vector
    product is handled "implicitly".

    We provide:
         MatMult()

    Currently this allows for only one subdomain per processor.

*/

#include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/

static PetscErrorCode MatISComputeSF_Private(Mat);

#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER refers to ilocal which is NULL */
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.
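
   Example usage (a minimal sketch, assuming the MATIS matrix A and the local-to-global maps have
   already been created by the caller, and that nnz_bound is an application-provided upper bound
   for the nonzeros of each local row):
.vb
   Mat A;
   MatCreateIS(comm,1,m,n,M,N,rmap,cmap,&A);
   MatISSetPreallocation(A,nnz_bound,NULL,nnz_bound,NULL);
.ve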

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
@*/
PetscErrorCode MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
PetscErrorCode MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS         *matis = (Mat_IS*)(A->data);
  PetscInt       *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt *global_indices_r,*global_indices_c;
  PetscInt       i,j,bs,rows,cols;
  PetscInt       lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscMPIInt    nsubdomains;
  PetscBool      isdense,issbaij;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
  ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (A->rmap->mapping != A->cmap->mapping) {
    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  } else {
    global_indices_c = global_indices_r;
  }

  if (issbaij) {
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  /*
     An SF reduce is needed to sum up properly on shared rows.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
  /* All processes need to compute entire row ownership */
  ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j] = i;
    }
  }
  ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);

  /*
     my_dnz and my_onz contain the exact contribution to the preallocation from each local mat;
     they will then be summed up properly. This way, preallocation is always sufficient
  */
  ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=i;j<local_cols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (i != j) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[j] += 1;
          } else {
            my_onz[j] += 1;
          }
        }
      }
    }
  } else { /* TODO: this could be optimized using MatGetRowIJ */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices_r[i];
      ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  if (global_indices_c != global_indices_r) {
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);

  /* Reduce my_dnz and my_onz */
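  /*
     Leaves of matis->sf are the rows of the local (subdomain) matrix, roots are the locally
     owned rows of the parallel matrix: MPI_SUM adds the contributions of all the subdomains
     sharing a row (always a sufficient overestimate), while MPI_MAX keeps only the largest
     single contribution.
  */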
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) {
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType   new_mat_type;
    PetscBool issbaij_red;

    /* determining new matrix type */
    ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  if (issbaij) {
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices */
    PetscInt i,*dummy_rows,*dummy_cols;

    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) {
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ called in %s\n",__FUNCT__);
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) {
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts a MATIS matrix into a parallel AIJ format

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_IS"
PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  PetscInt       bs,m,n,M,N;
  Mat            B,localmat;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
  ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
  ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
  ierr = MatDestroy(&localmat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *newmat = B;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsHermitian_IS"
PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsSymmetric_IS"
PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
"MatDestroy_IS" 495 PetscErrorCode MatDestroy_IS(Mat A) 496 { 497 PetscErrorCode ierr; 498 Mat_IS *b = (Mat_IS*)A->data; 499 500 PetscFunctionBegin; 501 ierr = MatDestroy(&b->A);CHKERRQ(ierr); 502 ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr); 503 ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr); 504 ierr = VecDestroy(&b->x);CHKERRQ(ierr); 505 ierr = VecDestroy(&b->y);CHKERRQ(ierr); 506 ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr); 507 ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr); 508 ierr = PetscFree(A->data);CHKERRQ(ierr); 509 ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr); 510 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr); 511 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr); 512 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr); 513 ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr); 514 PetscFunctionReturn(0); 515 } 516 517 #undef __FUNCT__ 518 #define __FUNCT__ "MatMult_IS" 519 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y) 520 { 521 PetscErrorCode ierr; 522 Mat_IS *is = (Mat_IS*)A->data; 523 PetscScalar zero = 0.0; 524 525 PetscFunctionBegin; 526 /* scatter the global vector x into the local work vector */ 527 ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 528 ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 529 530 /* multiply the local matrix */ 531 ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr); 532 533 /* scatter product back into global memory */ 534 ierr = VecSet(y,zero);CHKERRQ(ierr); 535 ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 536 ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 537 PetscFunctionReturn(0); 538 } 539 540 #undef __FUNCT__ 541 #define __FUNCT__ "MatMultAdd_IS" 542 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3) 543 { 544 Vec temp_vec; 545 PetscErrorCode ierr; 546 547 PetscFunctionBegin; /* v3 = v2 + A * v1.*/ 548 if (v3 != v2) { 549 ierr = MatMult(A,v1,v3);CHKERRQ(ierr); 550 ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr); 551 } else { 552 ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr); 553 ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr); 554 ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr); 555 ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr); 556 ierr = VecDestroy(&temp_vec);CHKERRQ(ierr); 557 } 558 PetscFunctionReturn(0); 559 } 560 561 #undef __FUNCT__ 562 #define __FUNCT__ "MatMultTranspose_IS" 563 PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x) 564 { 565 Mat_IS *is = (Mat_IS*)A->data; 566 PetscErrorCode ierr; 567 568 PetscFunctionBegin; 569 /* scatter the global vector x into the local work vector */ 570 ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 571 ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 572 573 /* multiply the local matrix */ 574 ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr); 575 576 /* scatter product back into global vector */ 577 ierr = VecSet(x,0);CHKERRQ(ierr); 578 ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 579 ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 580 PetscFunctionReturn(0); 581 } 582 583 #undef __FUNCT__ 584 #define __FUNCT__ "MatMultTransposeAdd_IS" 585 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3) 586 { 
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A' * v1 */
  if (v3 != v2) {
    ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_IS"
PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatters */
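  /* is->rctx (resp. is->cctx) scatters a parallel row-space (resp. column-space) vector into the
     sequential work vector is->y (resp. is->x) according to the local-to-global map; the same
     scatters are applied in reverse with ADD_VALUES to sum the subdomain contributions back into
     a parallel vector, see MatMult_IS() and MatMultTranspose_IS() */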
  ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
  if (rmapping != cmapping) {
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
    is->cctx = is->rctx;
  }
  ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
  ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_IS"
PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       rows_l[2048],cols_l[2048];
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
#endif
  ierr = ISG2LMapApply(mat->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
  ierr = ISG2LMapApply(mat->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesLocal_IS"
PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlockedLocal_IS"
PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_IS"
PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  PetscInt       n_l = 0, *rows_l = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  if (n) {
    ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
    ierr = ISGlobalToLocalMappingApply(A->rmap->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
  }
  ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
  ierr = PetscFree(rows_l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsLocal_IS"
PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscInt       i;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->y as a "counting vector". This is needed for MatMult_IS
       to work properly on the interface nodes.
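       After the scatters below, is->y holds, for each local dof, the number of subdomains
       sharing it; these multiplicities are used to scale the new diagonal entries so that
       the globally assembled diagonal sums to diag.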
    */
    Vec counter;
    ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
    ierr = VecSet(counter,0.);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    ierr = VecGetArray(is->y,&array);CHKERRQ(ierr);
    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = VecRestoreArray(is->y,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_IS"
PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_IS"
PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat_IS"
PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
{
  Mat_IS *is = (Mat_IS*)mat->data;

  PetscFunctionBegin;
  *local = is->A;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

    This function does not increase the reference count for the local Mat. Do not destroy it and do not attempt to use
  your reference after destroying the parent mat.
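
    Example usage (a minimal sketch, assuming A is an already created MATIS matrix and nnz_est
  is a per-row nonzero estimate provided by the caller):
.vb
    Mat lA;
    MatISGetLocalMat(A,&lA);
    MatSeqAIJSetPreallocation(lA,nnz_est,NULL);
.ve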

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat_IS"
PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       nrows,ncols,orows,ocols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (is->A) {
    ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
    ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
    if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
  }
  ierr = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  is->A = local;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replaces the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_IS"
PetscErrorCode MatZeroEntries_IS(Mat A)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatScale_IS"
PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatScale(is->A,a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_IS"
PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get diagonal of the local matrix */
  ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);

  /* scatter diagonal back into global vector */
  ierr = VecSet(v,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_IS"
PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateIS"
/*@
    MatCreateIS - Creates a "process" unassembled matrix: it is assembled on each
  process but not across processes.

   Input Parameters:
+    comm - MPI communicator that will share the matrix
.    bs - block size of the matrix
.    m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
.    rmap - local to global map for rows
-    cmap - local to global map for cols

   Output Parameter:
.    A - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details.
          m and n are NOT related to the size of the map; they are the size of the part of the vector owned
          by that process. The sizes of rmap and cmap define the size of the local matrices.
          If either rmap or cmap is NULL, then the matrix is assumed to be square.

.seealso: MATIS, MatSetLocalToGlobalMapping()
@*/
PetscErrorCode MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the two mappings");
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
  ierr = MatSetUp(*A);CHKERRQ(ierr);
  if (rmap && cmap) {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
  } else if (!rmap) {
    ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
  } else {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*MC
   MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition type preconditioners (e.g. PCBDDC).
   This stores the matrices in globally unassembled form. Each processor
   assembles only its local Neumann problem and the parallel matrix vector
   product is handled "implicitly".

   Operations Provided:
+  MatMult()
.  MatMultAdd()
.  MatMultTranspose()
.  MatMultTransposeAdd()
.  MatZeroEntries()
.  MatSetOption()
.  MatZeroRows()
.  MatZeroRowsLocal()
.  MatSetValues()
.  MatSetValuesLocal()
.  MatScale()
.  MatGetDiagonal()
-  MatSetLocalToGlobalMapping()

   Options Database Keys:
.  -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()

   Notes: The options prefix for the inner matrix is given by -is_mat_xxx

          You must call MatSetLocalToGlobalMapping() before using this matrix type; a typical workflow is sketched below.
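
          Example (a minimal sketch; the block size, sizes, the array of global indices describing
          the local subdomain, and the per-row bound maxnnzrow are illustrative and assumed to be
          provided by the application):
.vb
          ISLocalToGlobalMapping map;
          Mat                    A;

          ISLocalToGlobalMappingCreate(comm,1,nlocaldofs,localtoglobal,PETSC_COPY_VALUES,&map);
          MatCreateIS(comm,1,PETSC_DECIDE,PETSC_DECIDE,M,N,map,map,&A);
          MatISSetPreallocation(A,maxnnzrow,NULL,maxnnzrow,NULL);
          /* insert the subdomain contribution using local (subdomain) numbering */
          MatSetValuesLocal(A,nidx,idxs,nidx,idxs,values,ADD_VALUES);
          MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
          MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
.ve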

          You can do matrix preallocation on the local matrix after you obtain it with
          MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation().

  Level: advanced

.seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC

M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops */
  ierr = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  /* special MATIS functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}