/*
  Creates a matrix class for use with Neumann-Neumann type preconditioners.
  This stores the matrices in globally unassembled form. Each processor
  assembles only its local Neumann problem and the parallel matrix-vector
  product is handled "implicitly".

  Currently this allows for only one subdomain per processor.
*/

#include <../src/mat/impls/is/matis.h> /*I "petscmat.h" I*/

static PetscErrorCode MatISComputeSF_Private(Mat);
static PetscErrorCode MatISZeroRowsLocal_Private(Mat,PetscInt,const PetscInt[],PetscScalar);

#undef __FUNCT__
#define __FUNCT__ "MatCopy_IS"
PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str)
{
  Mat_IS         *a = (Mat_IS*)A->data,*b;
  PetscBool      ismatis;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);CHKERRQ(ierr);
  if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Not yet implemented");
  b    = (Mat_IS*)B->data;
  ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMissingDiagonal_IS"
PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool *missing,PetscInt *d)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (d) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Not yet implemented");
  ierr = MatMissingDiagonal(a->A,missing,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER refers to ilocal, which is NULL */
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
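/*
   A minimal sketch (not part of the implementation) of how the star forest
   built above is meant to be used: roots are the locally owned rows of the
   global ordering, leaves are the rows of the local (subdomain) matrix.
   Assuming the root and leaf buffers allocated above, data attached to the
   owned rows can be broadcast to every subdomain sharing those rows:

     ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
     ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);

   and subdomain contributions can be summed back to the owners with
   PetscSFReduceBegin()/PetscSFReduceEnd() and MPI_SUM, as done in the
   preallocation routines below.
*/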
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e. 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e. 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored.

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on the local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS
@*/
PetscErrorCode MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
static PetscErrorCode MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  for (i=0;i<matis->sf_nleaves;i++) matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  for (i=0;i<matis->sf_nleaves/bs;i++) matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  for (i=0;i<matis->sf_nleaves/bs;i++) matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
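/*
   Usage sketch for the routine above (the nonzero counts 5 and 2 are purely
   illustrative): preallocate a MATIS matrix B the same way one would
   preallocate an MPIAIJ matrix,

     ierr = MatISSetPreallocation(B,5,NULL,2,NULL);CHKERRQ(ierr);

   For exact preallocation, preallocate the local matrix directly instead,
   e.g. with MatISGetLocalMat() followed by MatSeqAIJSetPreallocation().
*/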
#undef __FUNCT__
#define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS         *matis = (Mat_IS*)(A->data);
  PetscInt       *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt *global_indices_r,*global_indices_c;
  PetscInt       i,j,bs,rows,cols;
  PetscInt       lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscMPIInt    nsubdomains;
  PetscBool      isdense,issbaij;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
  ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (A->rmap->mapping != A->cmap->mapping) {
    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  } else {
    global_indices_c = global_indices_r;
  }

  if (issbaij) {
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  /*
     An SF reduce is needed to sum up properly on shared rows.
     Note that generally the preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
  /* All processes need to compute the entire row ownership */
  ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) row_ownership[j] = i;
  }
  ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);

  /*
     my_dnz and my_onz contain the exact contribution to the preallocation from each local mat.
     They will then be summed up properly; this way, the preallocation is always sufficient.
  */
  ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=i;j<local_rows;j++) {
        PetscInt owner     = row_ownership[index_row];
        PetscInt index_col = global_indices_c[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1]) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (i != j) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1]) {
            my_dnz[j] += 1;
          } else {
            my_onz[j] += 1;
          }
        }
      }
    }
  } else { /* TODO: this could be optimized using MatGetRowIJ */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices_r[i];
      ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
      for (j=0;j<ncols;j++) {
        PetscInt owner     = row_ownership[index_row];
        PetscInt index_col = global_indices_c[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1]) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1]) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (global_indices_c != global_indices_r) {
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  }
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);

  /* Reduce my_dnz and my_onz */
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) {
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,M);CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType   new_mat_type;
    PetscBool issbaij_red;

    /* determine the new matrix type */
    ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs > 1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  if (issbaij) {
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices */
    PetscInt i,*dummy_rows,*dummy_cols;

    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) {
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ called in %s\n",__FUNCT__);
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) {
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts a MATIS matrix into a parallel AIJ format

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
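/*
   Usage sketch for MatISGetMPIXAIJ() (identifiers are illustrative):
   assemble a MATIS matrix A into a parallel AIJ matrix, then reuse the
   assembled matrix after the values of A have changed:

     Mat Aassembled;
     ierr = MatISGetMPIXAIJ(A,MAT_INITIAL_MATRIX,&Aassembled);CHKERRQ(ierr);
     (modify the values of A, then reassemble)
     ierr = MatISGetMPIXAIJ(A,MAT_REUSE_MATRIX,&Aassembled);CHKERRQ(ierr);
     ierr = MatDestroy(&Aassembled);CHKERRQ(ierr);
*/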
#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_IS"
PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  PetscInt       bs,m,n,M,N;
  Mat            B,localmat;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
  ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
  ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
  ierr = MatDestroy(&localmat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *newmat = B;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsHermitian_IS"
PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatIsSymmetric_IS"
PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode ierr;
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  PetscFunctionBegin;
  ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatMult_IS"
PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscScalar    zero = 0.0;

  PetscFunctionBegin;
  /* scatter the global vector x into the local work vector */
  ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);

  /* scatter the product back into the global vector */
  ierr = VecSet(y,zero);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_IS"
PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A * v1 */
  if (v3 != v2) {
    ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_IS"
PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* scatter the global vector y into the local work vector */
  ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);

  /* scatter the product back into the global vector */
  ierr = VecSet(x,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_IS"
PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;
  PetscErrorCode ierr;

  PetscFunctionBegin; /* v3 = v2 + A' * v1 */
  if (v3 != v2) {
    ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
    ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
  } else {
    ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
    ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
    ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
    ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
    ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
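/*
   The four routines above implement the "implicit" parallel product.  With
   R_i the restriction from global to subdomain degrees of freedom and A_i
   the local matrix on subdomain i, MatMult_IS computes

     y = sum_i R_i^T A_i R_i x

   i.e. scatter forward (INSERT_VALUES), multiply locally, scatter back with
   ADD_VALUES; the transpose versions swap the row and column scatters.
*/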
#undef __FUNCT__
#define __FUNCT__ "MatView_IS"
PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatters */
  ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
  if (rmapping != cmapping) {
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
    is->cctx = is->rctx;
  }
  ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
  ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = MatSetUp(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_IS"
PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       i,zm,zn;
#endif
  PetscInt       rows_l[2048],cols_l[2048];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
  /* count negative indices */
  for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
  for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
#endif
  ierr = ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* count negative indices (should be the same as before) */
  for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
  for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
  if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices cannot be mapped! Maybe you should not use MATIS");
  if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices cannot be mapped! Maybe you should not use MATIS");
#endif
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_IS"
PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       i,zm,zn;
#endif
  PetscInt       rows_l[2048],cols_l[2048];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
  /* count negative indices */
  for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
  for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
#endif
  ierr = ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* count negative indices (should be the same as before) */
  for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
  for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
  if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices cannot be mapped! Maybe you should not use MATIS");
  if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices cannot be mapped! Maybe you should not use MATIS");
#endif
  ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesLocal_IS"
PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlockedLocal_IS"
PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_IS"
PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscInt       nr,nl,len,i;
  PetscInt       *lrows;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get locally owned rows */
  ierr = MatZeroRowsMapLocal_Private(A,n,rows,&len,&lrows);CHKERRQ(ierr);
  /* fix right-hand side if needed */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr);
    ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
    for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
    ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
  }
  /* get rows associated to the local matrices */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr);
  for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
  ierr = MatISZeroRowsLocal_Private(A,nr,lrows,diag);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
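/*
   Usage sketch for MatZeroRows() on a MATIS matrix (nrows, rows, x and b
   are illustrative): impose a Dirichlet condition on the global rows listed
   in rows, putting 1.0 on the diagonal and fixing the right-hand side b
   from the solution values stored in x:

     ierr = MatZeroRows(A,nrows,rows,1.0,x,b);CHKERRQ(ierr);

   Rows shared by several subdomains are zeroed in every local matrix, and
   the diagonal value is split among the sharing processes (see
   MatISZeroRowsLocal_Private below), so that the implicitly assembled
   diagonal entry still sums to 1.0.
*/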
#undef __FUNCT__
#define __FUNCT__ "MatISZeroRowsLocal_Private"
static PetscErrorCode MatISZeroRowsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (diag != 0.) {
    /*
       Set up is->y as a "counting vector" holding the number of subdomains
       sharing each local dof, so that MatMult_IS works properly on the
       interface nodes.
    */
    Vec counter;

    ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
    ierr = VecSet(counter,0.);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }

  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    PetscInt i;
    is->pure_neumann = PETSC_FALSE;

    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    if (diag != 0.) {
      const PetscScalar *array;
      ierr = VecGetArrayRead(is->y,&array);CHKERRQ(ierr);
      for (i=0; i<n; i++) {
        ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(is->y,&array);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_IS"
PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_IS"
PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat_IS"
PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
{
  Mat_IS *is = (Mat_IS*)mat->data;

  PetscFunctionBegin;
  *local = is->A;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
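/*
   Usage sketch for MatISGetLocalMat() (local_nnz is an illustrative array
   holding the exact nonzero count of each local row): preallocate the local
   matrix exactly instead of using MatISSetPreallocation():

     Mat Aloc;
     ierr = MatISGetLocalMat(A,&Aloc);CHKERRQ(ierr);
     ierr = MatSeqAIJSetPreallocation(Aloc,0,local_nnz);CHKERRQ(ierr);
*/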
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat_IS"
PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       nrows,ncols,orows,ocols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (is->A) {
    ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
    ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
    if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)",orows,ocols,nrows,ncols);
  }
  ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
  ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
  is->A = local;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replaces the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
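/*
   Usage sketch for MatISSetLocalMat() (myLocalMat is illustrative): since
   the MATIS object keeps its own reference, the caller can drop theirs
   right after the call:

     ierr = MatISSetLocalMat(A,myLocalMat);CHKERRQ(ierr);
     ierr = MatDestroy(&myLocalMat);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
*/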
#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_IS"
PetscErrorCode MatZeroEntries_IS(Mat A)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatScale_IS"
PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatScale(is->A,a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_IS"
PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get the diagonal of the local matrix */
  ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);

  /* scatter the diagonal back into the global vector */
  ierr = VecSet(v,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_IS"
PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateIS"
/*@
    MatCreateIS - Creates a "process" unassembled matrix, assembled on each
       process but not across processes.

   Input Parameters:
+     comm    - MPI communicator that will share the matrix
.     bs      - block size of the matrix
.     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
.     rmap    - local to global map for rows
-     cmap    - local to global map for cols

   Output Parameter:
.     A - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details.
          m and n are NOT related to the size of the map; they are the size of the part of the vector owned
          by that process. The sizes of rmap and cmap define the size of the local matrices.
          If either rmap or cmap is NULL, then the matrix is assumed to be square.

.seealso: MATIS, MatSetLocalToGlobalMapping()
@*/
PetscErrorCode MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the two mappings");
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
  if (rmap && cmap) {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
  } else if (!rmap) {
    ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
  } else {
    ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*MC
   MATIS - MATIS = "is" - A matrix type to be used with non-overlapping domain decomposition preconditioners (e.g. PCBDDC).
   This stores the matrices in globally unassembled form. Each processor
   assembles only its local Neumann problem and the parallel matrix-vector
   product is handled "implicitly".

   Operations Provided:
+  MatMult()
.  MatMultAdd()
.  MatMultTranspose()
.  MatMultTransposeAdd()
.  MatZeroEntries()
.  MatSetOption()
.  MatZeroRows()
.  MatSetValues()
.  MatSetValuesBlocked()
.  MatSetValuesLocal()
.  MatSetValuesBlockedLocal()
.  MatScale()
.  MatGetDiagonal()
.  MatMissingDiagonal()
.  MatDuplicate()
.  MatCopy()
-  MatSetLocalToGlobalMapping()

   Options Database Keys:
.  -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()

   Notes: The options prefix for the inner matrix is given by -is_mat_xxx

          You must call MatSetLocalToGlobalMapping() before using this matrix type.

          You can do matrix preallocation on the local matrix after you obtain it with
          MatISGetLocalMat(); otherwise, you can use MatISSetPreallocation().

   Level: advanced

.seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC, MatCreateIS()

M*/
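/*
   A minimal end-to-end sketch of the intended MATIS usage (identifiers such
   as n_local_dofs, local_to_global, N, d_nz and o_nz are illustrative):

     ISLocalToGlobalMapping map;
     Mat                    A;

     ierr = ISLocalToGlobalMappingCreate(comm,1,n_local_dofs,local_to_global,PETSC_COPY_VALUES,&map);CHKERRQ(ierr);
     ierr = MatCreateIS(comm,1,PETSC_DECIDE,PETSC_DECIDE,N,N,map,map,&A);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingDestroy(&map);CHKERRQ(ierr);
     ierr = MatISSetPreallocation(A,d_nz,NULL,o_nz,NULL);CHKERRQ(ierr);
     (assemble the subdomain Neumann contributions with MatSetValuesLocal())
     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
*/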
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops */
  ierr = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;
  A->ops->missingdiagonal         = MatMissingDiagonal_IS;
  A->ops->copy                    = MatCopy_IS;

  /* special MATIS functions */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}