1 #define PETSCMAT_DLL 2 3 #include "src/mat/impls/baij/mpi/mpibaij.h" /*I "petscmat.h" I*/ 4 #include "mpisbaij.h" 5 #include "src/mat/impls/sbaij/seq/sbaij.h" 6 7 EXTERN PetscErrorCode MatSetUpMultiply_MPISBAIJ(Mat); 8 EXTERN PetscErrorCode MatSetUpMultiply_MPISBAIJ_2comm(Mat); 9 EXTERN PetscErrorCode DisAssemble_MPISBAIJ(Mat); 10 EXTERN PetscErrorCode MatIncreaseOverlap_MPISBAIJ(Mat,PetscInt,IS[],PetscInt); 11 EXTERN PetscErrorCode MatGetValues_SeqSBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar []); 12 EXTERN PetscErrorCode MatGetValues_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar []); 13 EXTERN PetscErrorCode MatSetValues_SeqSBAIJ(Mat,PetscInt,const PetscInt [],PetscInt,const PetscInt [],const PetscScalar [],InsertMode); 14 EXTERN PetscErrorCode MatSetValuesBlocked_SeqSBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 15 EXTERN PetscErrorCode MatSetValuesBlocked_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 16 EXTERN PetscErrorCode MatGetRow_SeqSBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**); 17 EXTERN PetscErrorCode MatRestoreRow_SeqSBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**); 18 EXTERN PetscErrorCode MatZeroRows_SeqSBAIJ(Mat,IS,PetscScalar*); 19 EXTERN PetscErrorCode MatZeroRows_SeqBAIJ(Mat,IS,PetscScalar *); 20 EXTERN PetscErrorCode MatGetRowMaxAbs_MPISBAIJ(Mat,Vec,PetscInt[]); 21 EXTERN PetscErrorCode MatRelax_MPISBAIJ(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec); 22 23 EXTERN_C_BEGIN 24 #undef __FUNCT__ 25 #define __FUNCT__ "MatStoreValues_MPISBAIJ" 26 PetscErrorCode PETSCMAT_DLLEXPORT MatStoreValues_MPISBAIJ(Mat mat) 27 { 28 Mat_MPISBAIJ *aij = (Mat_MPISBAIJ *)mat->data; 29 PetscErrorCode ierr; 30 31 PetscFunctionBegin; 32 ierr = MatStoreValues(aij->A);CHKERRQ(ierr); 33 ierr = MatStoreValues(aij->B);CHKERRQ(ierr); 34 PetscFunctionReturn(0); 35 } 36 EXTERN_C_END 37 38 EXTERN_C_BEGIN 39 #undef __FUNCT__ 40 #define __FUNCT__ "MatRetrieveValues_MPISBAIJ" 41 PetscErrorCode PETSCMAT_DLLEXPORT MatRetrieveValues_MPISBAIJ(Mat mat) 42 { 43 Mat_MPISBAIJ *aij = (Mat_MPISBAIJ *)mat->data; 44 PetscErrorCode ierr; 45 46 PetscFunctionBegin; 47 ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr); 48 ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr); 49 PetscFunctionReturn(0); 50 } 51 EXTERN_C_END 52 53 54 #define CHUNKSIZE 10 55 56 #define MatSetValues_SeqSBAIJ_A_Private(row,col,value,addv) \ 57 { \ 58 \ 59 brow = row/bs; \ 60 rp = aj + ai[brow]; ap = aa + bs2*ai[brow]; \ 61 rmax = aimax[brow]; nrow = ailen[brow]; \ 62 bcol = col/bs; \ 63 ridx = row % bs; cidx = col % bs; \ 64 low = 0; high = nrow; \ 65 while (high-low > 3) { \ 66 t = (low+high)/2; \ 67 if (rp[t] > bcol) high = t; \ 68 else low = t; \ 69 } \ 70 for (_i=low; _i<high; _i++) { \ 71 if (rp[_i] > bcol) break; \ 72 if (rp[_i] == bcol) { \ 73 bap = ap + bs2*_i + bs*cidx + ridx; \ 74 if (addv == ADD_VALUES) *bap += value; \ 75 else *bap = value; \ 76 goto a_noinsert; \ 77 } \ 78 } \ 79 if (a->nonew == 1) goto a_noinsert; \ 80 if (a->nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \ 81 MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,aimax,a->nonew,MatScalar); \ 82 N = nrow++ - 1; \ 83 /* shift up all the later entries in this row */ \ 84 for (ii=N; ii>=_i; ii--) { \ 85 rp[ii+1] = rp[ii]; \ 86 ierr = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \ 87 } \ 88 if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr); } \ 89 rp[_i] = bcol; \ 90 ap[bs2*_i + bs*cidx + ridx] = value; \ 91 a_noinsert:; \ 92 ailen[brow] = nrow; \ 93 } 94 95 #define MatSetValues_SeqSBAIJ_B_Private(row,col,value,addv) \ 96 { \ 97 brow = row/bs; \ 98 rp = bj + bi[brow]; ap = ba + bs2*bi[brow]; \ 99 rmax = bimax[brow]; nrow = bilen[brow]; \ 100 bcol = col/bs; \ 101 ridx = row % bs; cidx = col % bs; \ 102 low = 0; high = nrow; \ 103 while (high-low > 3) { \ 104 t = (low+high)/2; \ 105 if (rp[t] > bcol) high = t; \ 106 else low = t; \ 107 } \ 108 for (_i=low; _i<high; _i++) { \ 109 if (rp[_i] > bcol) break; \ 110 if (rp[_i] == bcol) { \ 111 bap = ap + bs2*_i + bs*cidx + ridx; \ 112 if (addv == ADD_VALUES) *bap += value; \ 113 else *bap = value; \ 114 goto b_noinsert; \ 115 } \ 116 } \ 117 if (b->nonew == 1) goto b_noinsert; \ 118 if (b->nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \ 119 MatSeqXAIJReallocateAIJ(B,b->mbs,bs2,nrow,brow,bcol,rmax,ba,bi,bj,rp,ap,bimax,b->nonew,MatScalar); \ 120 N = nrow++ - 1; \ 121 /* shift up all the later entries in this row */ \ 122 for (ii=N; ii>=_i; ii--) { \ 123 rp[ii+1] = rp[ii]; \ 124 ierr = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \ 125 } \ 126 if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr);} \ 127 rp[_i] = bcol; \ 128 ap[bs2*_i + bs*cidx + ridx] = value; \ 129 b_noinsert:; \ 130 bilen[brow] = nrow; \ 131 } 132 133 /* Only add/insert a(i,j) with i<=j (blocks). 134 Any a(i,j) with i>j input by user is ingored. 135 */ 136 #undef __FUNCT__ 137 #define __FUNCT__ "MatSetValues_MPISBAIJ" 138 PetscErrorCode MatSetValues_MPISBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv) 139 { 140 Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data; 141 MatScalar value; 142 PetscTruth roworiented = baij->roworiented; 143 PetscErrorCode ierr; 144 PetscInt i,j,row,col; 145 PetscInt rstart_orig=mat->rmap->rstart; 146 PetscInt rend_orig=mat->rmap->rend,cstart_orig=mat->cmap->rstart; 147 PetscInt cend_orig=mat->cmap->rend,bs=mat->rmap->bs; 148 149 /* Some Variables required in the macro */ 150 Mat A = baij->A; 151 Mat_SeqSBAIJ *a = (Mat_SeqSBAIJ*)(A)->data; 152 PetscInt *aimax=a->imax,*ai=a->i,*ailen=a->ilen,*aj=a->j; 153 MatScalar *aa=a->a; 154 155 Mat B = baij->B; 156 Mat_SeqBAIJ *b = (Mat_SeqBAIJ*)(B)->data; 157 PetscInt *bimax=b->imax,*bi=b->i,*bilen=b->ilen,*bj=b->j; 158 MatScalar *ba=b->a; 159 160 PetscInt *rp,ii,nrow,_i,rmax,N,brow,bcol; 161 PetscInt low,high,t,ridx,cidx,bs2=a->bs2; 162 MatScalar *ap,*bap; 163 164 /* for stash */ 165 PetscInt n_loc, *in_loc = PETSC_NULL; 166 MatScalar *v_loc = PETSC_NULL; 167 168 PetscFunctionBegin; 169 170 if (!baij->donotstash){ 171 if (n > baij->n_loc) { 172 ierr = PetscFree(baij->in_loc);CHKERRQ(ierr); 173 ierr = PetscFree(baij->v_loc);CHKERRQ(ierr); 174 ierr = PetscMalloc(n*sizeof(PetscInt),&baij->in_loc);CHKERRQ(ierr); 175 ierr = PetscMalloc(n*sizeof(MatScalar),&baij->v_loc);CHKERRQ(ierr); 176 baij->n_loc = n; 177 } 178 in_loc = baij->in_loc; 179 v_loc = baij->v_loc; 180 } 181 182 for (i=0; i<m; i++) { 183 if (im[i] < 0) continue; 184 #if defined(PETSC_USE_DEBUG) 185 if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1); 186 #endif 187 if (im[i] >= rstart_orig && im[i] < rend_orig) { /* this processor entry */ 188 row = im[i] - rstart_orig; /* local row index */ 189 for (j=0; j<n; j++) { 190 if (im[i]/bs > in[j]/bs){ 191 if (a->ignore_ltriangular){ 192 continue; /* ignore lower triangular blocks */ 193 } else { 194 SETERRQ(PETSC_ERR_USER,"Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)"); 195 } 196 } 197 if (in[j] >= cstart_orig && in[j] < cend_orig){ /* diag entry (A) */ 198 col = in[j] - cstart_orig; /* local col index */ 199 brow = row/bs; bcol = col/bs; 200 if (brow > bcol) continue; /* ignore lower triangular blocks of A */ 201 if (roworiented) value = v[i*n+j]; else value = v[i+j*m]; 202 MatSetValues_SeqSBAIJ_A_Private(row,col,value,addv); 203 /* ierr = MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */ 204 } else if (in[j] < 0) continue; 205 #if defined(PETSC_USE_DEBUG) 206 else if (in[j] >= mat->cmap->N) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);} 207 #endif 208 else { /* off-diag entry (B) */ 209 if (mat->was_assembled) { 210 if (!baij->colmap) { 211 ierr = CreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr); 212 } 213 #if defined (PETSC_USE_CTABLE) 214 ierr = PetscTableFind(baij->colmap,in[j]/bs + 1,&col);CHKERRQ(ierr); 215 col = col - 1; 216 #else 217 col = baij->colmap[in[j]/bs] - 1; 218 #endif 219 if (col < 0 && !((Mat_SeqSBAIJ*)(baij->A->data))->nonew) { 220 ierr = DisAssemble_MPISBAIJ(mat);CHKERRQ(ierr); 221 col = in[j]; 222 /* Reinitialize the variables required by MatSetValues_SeqBAIJ_B_Private() */ 223 B = baij->B; 224 b = (Mat_SeqBAIJ*)(B)->data; 225 bimax=b->imax;bi=b->i;bilen=b->ilen;bj=b->j; 226 ba=b->a; 227 } else col += in[j]%bs; 228 } else col = in[j]; 229 if (roworiented) value = v[i*n+j]; else value = v[i+j*m]; 230 MatSetValues_SeqSBAIJ_B_Private(row,col,value,addv); 231 /* ierr = MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */ 232 } 233 } 234 } else { /* off processor entry */ 235 if (!baij->donotstash) { 236 n_loc = 0; 237 for (j=0; j<n; j++){ 238 if (im[i]/bs > in[j]/bs) continue; /* ignore lower triangular blocks */ 239 in_loc[n_loc] = in[j]; 240 if (roworiented) { 241 v_loc[n_loc] = v[i*n+j]; 242 } else { 243 v_loc[n_loc] = v[j*m+i]; 244 } 245 n_loc++; 246 } 247 ierr = MatStashValuesRow_Private(&mat->stash,im[i],n_loc,in_loc,v_loc);CHKERRQ(ierr); 248 } 249 } 250 } 251 PetscFunctionReturn(0); 252 } 253 254 #undef __FUNCT__ 255 #define __FUNCT__ "MatSetValuesBlocked_MPISBAIJ" 256 PetscErrorCode MatSetValuesBlocked_MPISBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const MatScalar v[],InsertMode addv) 257 { 258 Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data; 259 const MatScalar *value; 260 MatScalar *barray=baij->barray; 261 PetscTruth roworiented = baij->roworiented; 262 PetscErrorCode ierr; 263 PetscInt i,j,ii,jj,row,col,rstart=baij->rstartbs; 264 PetscInt rend=baij->rendbs,cstart=baij->rstartbs,stepval; 265 PetscInt cend=baij->rendbs,bs=mat->rmap->bs,bs2=baij->bs2; 266 267 PetscFunctionBegin; 268 if(!barray) { 269 ierr = PetscMalloc(bs2*sizeof(MatScalar),&barray);CHKERRQ(ierr); 270 baij->barray = barray; 271 } 272 273 if (roworiented) { 274 stepval = (n-1)*bs; 275 } else { 276 stepval = (m-1)*bs; 277 } 278 for (i=0; i<m; i++) { 279 if (im[i] < 0) continue; 280 #if defined(PETSC_USE_DEBUG) 281 if (im[i] >= baij->Mbs) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1); 282 #endif 283 if (im[i] >= rstart && im[i] < rend) { 284 row = im[i] - rstart; 285 for (j=0; j<n; j++) { 286 /* If NumCol = 1 then a copy is not required */ 287 if ((roworiented) && (n == 1)) { 288 barray = (MatScalar*) v + i*bs2; 289 } else if((!roworiented) && (m == 1)) { 290 barray = (MatScalar*) v + j*bs2; 291 } else { /* Here a copy is required */ 292 if (roworiented) { 293 value = v + i*(stepval+bs)*bs + j*bs; 294 } else { 295 value = v + j*(stepval+bs)*bs + i*bs; 296 } 297 for (ii=0; ii<bs; ii++,value+=stepval) { 298 for (jj=0; jj<bs; jj++) { 299 *barray++ = *value++; 300 } 301 } 302 barray -=bs2; 303 } 304 305 if (in[j] >= cstart && in[j] < cend){ 306 col = in[j] - cstart; 307 ierr = MatSetValuesBlocked_SeqSBAIJ(baij->A,1,&row,1,&col,barray,addv);CHKERRQ(ierr); 308 } 309 else if (in[j] < 0) continue; 310 #if defined(PETSC_USE_DEBUG) 311 else if (in[j] >= baij->Nbs) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);} 312 #endif 313 else { 314 if (mat->was_assembled) { 315 if (!baij->colmap) { 316 ierr = CreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr); 317 } 318 319 #if defined(PETSC_USE_DEBUG) 320 #if defined (PETSC_USE_CTABLE) 321 { PetscInt data; 322 ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr); 323 if ((data - 1) % bs) SETERRQ(PETSC_ERR_PLIB,"Incorrect colmap"); 324 } 325 #else 326 if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_ERR_PLIB,"Incorrect colmap"); 327 #endif 328 #endif 329 #if defined (PETSC_USE_CTABLE) 330 ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr); 331 col = (col - 1)/bs; 332 #else 333 col = (baij->colmap[in[j]] - 1)/bs; 334 #endif 335 if (col < 0 && !((Mat_SeqBAIJ*)(baij->A->data))->nonew) { 336 ierr = DisAssemble_MPISBAIJ(mat);CHKERRQ(ierr); 337 col = in[j]; 338 } 339 } 340 else col = in[j]; 341 ierr = MatSetValuesBlocked_SeqBAIJ(baij->B,1,&row,1,&col,barray,addv);CHKERRQ(ierr); 342 } 343 } 344 } else { 345 if (!baij->donotstash) { 346 if (roworiented) { 347 ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr); 348 } else { 349 ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr); 350 } 351 } 352 } 353 } 354 PetscFunctionReturn(0); 355 } 356 357 #undef __FUNCT__ 358 #define __FUNCT__ "MatGetValues_MPISBAIJ" 359 PetscErrorCode MatGetValues_MPISBAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[]) 360 { 361 Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data; 362 PetscErrorCode ierr; 363 PetscInt bs=mat->rmap->bs,i,j,bsrstart = mat->rmap->rstart,bsrend = mat->rmap->rend; 364 PetscInt bscstart = mat->cmap->rstart,bscend = mat->cmap->rend,row,col,data; 365 366 PetscFunctionBegin; 367 for (i=0; i<m; i++) { 368 if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]); */ 369 if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1); 370 if (idxm[i] >= bsrstart && idxm[i] < bsrend) { 371 row = idxm[i] - bsrstart; 372 for (j=0; j<n; j++) { 373 if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Negative column %D",idxn[j]); */ 374 if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1); 375 if (idxn[j] >= bscstart && idxn[j] < bscend){ 376 col = idxn[j] - bscstart; 377 ierr = MatGetValues_SeqSBAIJ(baij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr); 378 } else { 379 if (!baij->colmap) { 380 ierr = CreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr); 381 } 382 #if defined (PETSC_USE_CTABLE) 383 ierr = PetscTableFind(baij->colmap,idxn[j]/bs+1,&data);CHKERRQ(ierr); 384 data --; 385 #else 386 data = baij->colmap[idxn[j]/bs]-1; 387 #endif 388 if((data < 0) || (baij->garray[data/bs] != idxn[j]/bs)) *(v+i*n+j) = 0.0; 389 else { 390 col = data + idxn[j]%bs; 391 ierr = MatGetValues_SeqBAIJ(baij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr); 392 } 393 } 394 } 395 } else { 396 SETERRQ(PETSC_ERR_SUP,"Only local values currently supported"); 397 } 398 } 399 PetscFunctionReturn(0); 400 } 401 402 #undef __FUNCT__ 403 #define __FUNCT__ "MatNorm_MPISBAIJ" 404 PetscErrorCode MatNorm_MPISBAIJ(Mat mat,NormType type,PetscReal *norm) 405 { 406 Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data; 407 PetscErrorCode ierr; 408 PetscReal sum[2],*lnorm2; 409 410 PetscFunctionBegin; 411 if (baij->size == 1) { 412 ierr = MatNorm(baij->A,type,norm);CHKERRQ(ierr); 413 } else { 414 if (type == NORM_FROBENIUS) { 415 ierr = PetscMalloc(2*sizeof(PetscReal),&lnorm2);CHKERRQ(ierr); 416 ierr = MatNorm(baij->A,type,lnorm2);CHKERRQ(ierr); 417 *lnorm2 = (*lnorm2)*(*lnorm2); lnorm2++; /* squar power of norm(A) */ 418 ierr = MatNorm(baij->B,type,lnorm2);CHKERRQ(ierr); 419 *lnorm2 = (*lnorm2)*(*lnorm2); lnorm2--; /* squar power of norm(B) */ 420 ierr = MPI_Allreduce(lnorm2,&sum,2,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr); 421 *norm = sqrt(sum[0] + 2*sum[1]); 422 ierr = PetscFree(lnorm2);CHKERRQ(ierr); 423 } else if (type == NORM_INFINITY || type == NORM_1) { /* max row/column sum */ 424 Mat_SeqSBAIJ *amat=(Mat_SeqSBAIJ*)baij->A->data; 425 Mat_SeqBAIJ *bmat=(Mat_SeqBAIJ*)baij->B->data; 426 PetscReal *rsum,*rsum2,vabs; 427 PetscInt *jj,*garray=baij->garray,rstart=baij->rstartbs,nz; 428 PetscInt brow,bcol,col,bs=baij->A->rmap->bs,row,grow,gcol,mbs=amat->mbs; 429 MatScalar *v; 430 431 ierr = PetscMalloc((2*mat->cmap->N+1)*sizeof(PetscReal),&rsum);CHKERRQ(ierr); 432 rsum2 = rsum + mat->cmap->N; 433 ierr = PetscMemzero(rsum,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr); 434 /* Amat */ 435 v = amat->a; jj = amat->j; 436 for (brow=0; brow<mbs; brow++) { 437 grow = bs*(rstart + brow); 438 nz = amat->i[brow+1] - amat->i[brow]; 439 for (bcol=0; bcol<nz; bcol++){ 440 gcol = bs*(rstart + *jj); jj++; 441 for (col=0; col<bs; col++){ 442 for (row=0; row<bs; row++){ 443 vabs = PetscAbsScalar(*v); v++; 444 rsum[gcol+col] += vabs; 445 /* non-diagonal block */ 446 if (bcol > 0 && vabs > 0.0) rsum[grow+row] += vabs; 447 } 448 } 449 } 450 } 451 /* Bmat */ 452 v = bmat->a; jj = bmat->j; 453 for (brow=0; brow<mbs; brow++) { 454 grow = bs*(rstart + brow); 455 nz = bmat->i[brow+1] - bmat->i[brow]; 456 for (bcol=0; bcol<nz; bcol++){ 457 gcol = bs*garray[*jj]; jj++; 458 for (col=0; col<bs; col++){ 459 for (row=0; row<bs; row++){ 460 vabs = PetscAbsScalar(*v); v++; 461 rsum[gcol+col] += vabs; 462 rsum[grow+row] += vabs; 463 } 464 } 465 } 466 } 467 ierr = MPI_Allreduce(rsum,rsum2,mat->cmap->N,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr); 468 *norm = 0.0; 469 for (col=0; col<mat->cmap->N; col++) { 470 if (rsum2[col] > *norm) *norm = rsum2[col]; 471 } 472 ierr = PetscFree(rsum);CHKERRQ(ierr); 473 } else { 474 SETERRQ(PETSC_ERR_SUP,"No support for this norm yet"); 475 } 476 } 477 PetscFunctionReturn(0); 478 } 479 480 #undef __FUNCT__ 481 #define __FUNCT__ "MatAssemblyBegin_MPISBAIJ" 482 PetscErrorCode MatAssemblyBegin_MPISBAIJ(Mat mat,MatAssemblyType mode) 483 { 484 Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data; 485 PetscErrorCode ierr; 486 PetscInt nstash,reallocs; 487 InsertMode addv; 488 489 PetscFunctionBegin; 490 if (baij->donotstash) { 491 PetscFunctionReturn(0); 492 } 493 494 /* make sure all processors are either in INSERTMODE or ADDMODE */ 495 ierr = MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,((PetscObject)mat)->comm);CHKERRQ(ierr); 496 if (addv == (ADD_VALUES|INSERT_VALUES)) { 497 SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added"); 498 } 499 mat->insertmode = addv; /* in case this processor had no cache */ 500 501 ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr); 502 ierr = MatStashScatterBegin_Private(mat,&mat->bstash,baij->rangebs);CHKERRQ(ierr); 503 ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr); 504 ierr = PetscInfo2(mat,"Stash has %D entries,uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr); 505 ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr); 506 ierr = PetscInfo2(mat,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr); 507 PetscFunctionReturn(0); 508 } 509 510 #undef __FUNCT__ 511 #define __FUNCT__ "MatAssemblyEnd_MPISBAIJ" 512 PetscErrorCode MatAssemblyEnd_MPISBAIJ(Mat mat,MatAssemblyType mode) 513 { 514 Mat_MPISBAIJ *baij=(Mat_MPISBAIJ*)mat->data; 515 Mat_SeqSBAIJ *a=(Mat_SeqSBAIJ*)baij->A->data; 516 PetscErrorCode ierr; 517 PetscInt i,j,rstart,ncols,flg,bs2=baij->bs2; 518 PetscInt *row,*col; 519 PetscTruth other_disassembled; 520 PetscMPIInt n; 521 PetscTruth r1,r2,r3; 522 MatScalar *val; 523 InsertMode addv = mat->insertmode; 524 525 /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */ 526 PetscFunctionBegin; 527 528 if (!baij->donotstash) { 529 while (1) { 530 ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr); 531 if (!flg) break; 532 533 for (i=0; i<n;) { 534 /* Now identify the consecutive vals belonging to the same row */ 535 for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; } 536 if (j < n) ncols = j-i; 537 else ncols = n-i; 538 /* Now assemble all these values with a single function call */ 539 ierr = MatSetValues_MPISBAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr); 540 i = j; 541 } 542 } 543 ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr); 544 /* Now process the block-stash. Since the values are stashed column-oriented, 545 set the roworiented flag to column oriented, and after MatSetValues() 546 restore the original flags */ 547 r1 = baij->roworiented; 548 r2 = a->roworiented; 549 r3 = ((Mat_SeqBAIJ*)baij->B->data)->roworiented; 550 baij->roworiented = PETSC_FALSE; 551 a->roworiented = PETSC_FALSE; 552 ((Mat_SeqBAIJ*)baij->B->data)->roworiented = PETSC_FALSE; /* b->roworinted */ 553 while (1) { 554 ierr = MatStashScatterGetMesg_Private(&mat->bstash,&n,&row,&col,&val,&flg);CHKERRQ(ierr); 555 if (!flg) break; 556 557 for (i=0; i<n;) { 558 /* Now identify the consecutive vals belonging to the same row */ 559 for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; } 560 if (j < n) ncols = j-i; 561 else ncols = n-i; 562 ierr = MatSetValuesBlocked_MPISBAIJ(mat,1,row+i,ncols,col+i,val+i*bs2,addv);CHKERRQ(ierr); 563 i = j; 564 } 565 } 566 ierr = MatStashScatterEnd_Private(&mat->bstash);CHKERRQ(ierr); 567 baij->roworiented = r1; 568 a->roworiented = r2; 569 ((Mat_SeqBAIJ*)baij->B->data)->roworiented = r3; /* b->roworinted */ 570 } 571 572 ierr = MatAssemblyBegin(baij->A,mode);CHKERRQ(ierr); 573 ierr = MatAssemblyEnd(baij->A,mode);CHKERRQ(ierr); 574 575 /* determine if any processor has disassembled, if so we must 576 also disassemble ourselfs, in order that we may reassemble. */ 577 /* 578 if nonzero structure of submatrix B cannot change then we know that 579 no processor disassembled thus we can skip this stuff 580 */ 581 if (!((Mat_SeqBAIJ*)baij->B->data)->nonew) { 582 ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPI_INT,MPI_PROD,((PetscObject)mat)->comm);CHKERRQ(ierr); 583 if (mat->was_assembled && !other_disassembled) { 584 ierr = DisAssemble_MPISBAIJ(mat);CHKERRQ(ierr); 585 } 586 } 587 588 if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) { 589 ierr = MatSetUpMultiply_MPISBAIJ(mat);CHKERRQ(ierr); /* setup Mvctx and sMvctx */ 590 } 591 ((Mat_SeqBAIJ*)baij->B->data)->compressedrow.use = PETSC_TRUE; /* b->compressedrow.use */ 592 ierr = MatAssemblyBegin(baij->B,mode);CHKERRQ(ierr); 593 ierr = MatAssemblyEnd(baij->B,mode);CHKERRQ(ierr); 594 595 ierr = PetscFree(baij->rowvalues);CHKERRQ(ierr); 596 baij->rowvalues = 0; 597 598 PetscFunctionReturn(0); 599 } 600 601 extern PetscErrorCode MatSetValues_MPIBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 602 #undef __FUNCT__ 603 #define __FUNCT__ "MatView_MPISBAIJ_ASCIIorDraworSocket" 604 static PetscErrorCode MatView_MPISBAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer) 605 { 606 Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data; 607 PetscErrorCode ierr; 608 PetscInt bs = mat->rmap->bs; 609 PetscMPIInt size = baij->size,rank = baij->rank; 610 PetscTruth iascii,isdraw; 611 PetscViewer sviewer; 612 PetscViewerFormat format; 613 614 PetscFunctionBegin; 615 ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr); 616 ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr); 617 if (iascii) { 618 ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 619 if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) { 620 MatInfo info; 621 ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr); 622 ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr); 623 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D bs %D mem %D\n", 624 rank,mat->rmap->N,(PetscInt)info.nz_used*bs,(PetscInt)info.nz_allocated*bs, 625 mat->rmap->bs,(PetscInt)info.memory);CHKERRQ(ierr); 626 ierr = MatGetInfo(baij->A,MAT_LOCAL,&info);CHKERRQ(ierr); 627 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used*bs);CHKERRQ(ierr); 628 ierr = MatGetInfo(baij->B,MAT_LOCAL,&info);CHKERRQ(ierr); 629 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used*bs);CHKERRQ(ierr); 630 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 631 ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr); 632 ierr = VecScatterView(baij->Mvctx,viewer);CHKERRQ(ierr); 633 PetscFunctionReturn(0); 634 } else if (format == PETSC_VIEWER_ASCII_INFO) { 635 ierr = PetscViewerASCIIPrintf(viewer," block size is %D\n",bs);CHKERRQ(ierr); 636 PetscFunctionReturn(0); 637 } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) { 638 PetscFunctionReturn(0); 639 } 640 } 641 642 if (isdraw) { 643 PetscDraw draw; 644 PetscTruth isnull; 645 ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr); 646 ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0); 647 } 648 649 if (size == 1) { 650 ierr = PetscObjectSetName((PetscObject)baij->A,((PetscObject)mat)->name);CHKERRQ(ierr); 651 ierr = MatView(baij->A,viewer);CHKERRQ(ierr); 652 } else { 653 /* assemble the entire matrix onto first processor. */ 654 Mat A; 655 Mat_SeqSBAIJ *Aloc; 656 Mat_SeqBAIJ *Bloc; 657 PetscInt M = mat->rmap->N,N = mat->cmap->N,*ai,*aj,col,i,j,k,*rvals,mbs = baij->mbs; 658 MatScalar *a; 659 660 /* Should this be the same type as mat? */ 661 ierr = MatCreate(((PetscObject)mat)->comm,&A);CHKERRQ(ierr); 662 if (!rank) { 663 ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr); 664 } else { 665 ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr); 666 } 667 ierr = MatSetType(A,MATMPISBAIJ);CHKERRQ(ierr); 668 ierr = MatMPISBAIJSetPreallocation(A,mat->rmap->bs,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr); 669 ierr = PetscLogObjectParent(mat,A);CHKERRQ(ierr); 670 671 /* copy over the A part */ 672 Aloc = (Mat_SeqSBAIJ*)baij->A->data; 673 ai = Aloc->i; aj = Aloc->j; a = Aloc->a; 674 ierr = PetscMalloc(bs*sizeof(PetscInt),&rvals);CHKERRQ(ierr); 675 676 for (i=0; i<mbs; i++) { 677 rvals[0] = bs*(baij->rstartbs + i); 678 for (j=1; j<bs; j++) { rvals[j] = rvals[j-1] + 1; } 679 for (j=ai[i]; j<ai[i+1]; j++) { 680 col = (baij->cstartbs+aj[j])*bs; 681 for (k=0; k<bs; k++) { 682 ierr = MatSetValues_MPISBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr); 683 col++; a += bs; 684 } 685 } 686 } 687 /* copy over the B part */ 688 Bloc = (Mat_SeqBAIJ*)baij->B->data; 689 ai = Bloc->i; aj = Bloc->j; a = Bloc->a; 690 for (i=0; i<mbs; i++) { 691 692 rvals[0] = bs*(baij->rstartbs + i); 693 for (j=1; j<bs; j++) { rvals[j] = rvals[j-1] + 1; } 694 for (j=ai[i]; j<ai[i+1]; j++) { 695 col = baij->garray[aj[j]]*bs; 696 for (k=0; k<bs; k++) { 697 ierr = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr); 698 col++; a += bs; 699 } 700 } 701 } 702 ierr = PetscFree(rvals);CHKERRQ(ierr); 703 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 704 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 705 /* 706 Everyone has to call to draw the matrix since the graphics waits are 707 synchronized across all processors that share the PetscDraw object 708 */ 709 ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr); 710 if (!rank) { 711 ierr = PetscObjectSetName((PetscObject)((Mat_MPISBAIJ*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr); 712 ierr = MatView(((Mat_MPISBAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr); 713 } 714 ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr); 715 ierr = MatDestroy(A);CHKERRQ(ierr); 716 } 717 PetscFunctionReturn(0); 718 } 719 720 #undef __FUNCT__ 721 #define __FUNCT__ "MatView_MPISBAIJ" 722 PetscErrorCode MatView_MPISBAIJ(Mat mat,PetscViewer viewer) 723 { 724 PetscErrorCode ierr; 725 PetscTruth iascii,isdraw,issocket,isbinary; 726 727 PetscFunctionBegin; 728 ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr); 729 ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr); 730 ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);CHKERRQ(ierr); 731 ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr); 732 if (iascii || isdraw || issocket || isbinary) { 733 ierr = MatView_MPISBAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr); 734 } else { 735 SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPISBAIJ matrices",((PetscObject)viewer)->type_name); 736 } 737 PetscFunctionReturn(0); 738 } 739 740 #undef __FUNCT__ 741 #define __FUNCT__ "MatDestroy_MPISBAIJ" 742 PetscErrorCode MatDestroy_MPISBAIJ(Mat mat) 743 { 744 Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data; 745 PetscErrorCode ierr; 746 747 PetscFunctionBegin; 748 #if defined(PETSC_USE_LOG) 749 PetscLogObjectState((PetscObject)mat,"Rows=%D,Cols=%D",mat->rmap->N,mat->cmap->N); 750 #endif 751 ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr); 752 ierr = MatStashDestroy_Private(&mat->bstash);CHKERRQ(ierr); 753 ierr = MatDestroy(baij->A);CHKERRQ(ierr); 754 ierr = MatDestroy(baij->B);CHKERRQ(ierr); 755 #if defined (PETSC_USE_CTABLE) 756 if (baij->colmap) {ierr = PetscTableDestroy(baij->colmap);CHKERRQ(ierr);} 757 #else 758 ierr = PetscFree(baij->colmap);CHKERRQ(ierr); 759 #endif 760 ierr = PetscFree(baij->garray);CHKERRQ(ierr); 761 if (baij->lvec) {ierr = VecDestroy(baij->lvec);CHKERRQ(ierr);} 762 if (baij->Mvctx) {ierr = VecScatterDestroy(baij->Mvctx);CHKERRQ(ierr);} 763 if (baij->slvec0) { 764 ierr = VecDestroy(baij->slvec0);CHKERRQ(ierr); 765 ierr = VecDestroy(baij->slvec0b);CHKERRQ(ierr); 766 } 767 if (baij->slvec1) { 768 ierr = VecDestroy(baij->slvec1);CHKERRQ(ierr); 769 ierr = VecDestroy(baij->slvec1a);CHKERRQ(ierr); 770 ierr = VecDestroy(baij->slvec1b);CHKERRQ(ierr); 771 } 772 if (baij->sMvctx) {ierr = VecScatterDestroy(baij->sMvctx);CHKERRQ(ierr);} 773 ierr = PetscFree(baij->rowvalues);CHKERRQ(ierr); 774 ierr = PetscFree(baij->barray);CHKERRQ(ierr); 775 ierr = PetscFree(baij->hd);CHKERRQ(ierr); 776 #if defined(PETSC_USE_MAT_SINGLE) 777 ierr = PetscFree(baij->setvaluescopy);CHKERRQ(ierr); 778 #endif 779 ierr = PetscFree(baij->in_loc);CHKERRQ(ierr); 780 ierr = PetscFree(baij->v_loc);CHKERRQ(ierr); 781 ierr = PetscFree(baij->rangebs);CHKERRQ(ierr); 782 ierr = PetscFree(baij);CHKERRQ(ierr); 783 784 ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr); 785 ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C","",PETSC_NULL);CHKERRQ(ierr); 786 ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C","",PETSC_NULL);CHKERRQ(ierr); 787 ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C","",PETSC_NULL);CHKERRQ(ierr); 788 ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPISBAIJSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr); 789 PetscFunctionReturn(0); 790 } 791 792 #undef __FUNCT__ 793 #define __FUNCT__ "MatMult_MPISBAIJ" 794 PetscErrorCode MatMult_MPISBAIJ(Mat A,Vec xx,Vec yy) 795 { 796 Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)A->data; 797 PetscErrorCode ierr; 798 PetscInt nt,mbs=a->mbs,bs=A->rmap->bs; 799 PetscScalar *x,*from,zero=0.0; 800 801 PetscFunctionBegin; 802 ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr); 803 if (nt != A->cmap->n) { 804 SETERRQ(PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx"); 805 } 806 807 /* diagonal part */ 808 ierr = (*a->A->ops->mult)(a->A,xx,a->slvec1a);CHKERRQ(ierr); 809 ierr = VecSet(a->slvec1b,zero);CHKERRQ(ierr); 810 811 /* subdiagonal part */ 812 ierr = (*a->B->ops->multtranspose)(a->B,xx,a->slvec0b);CHKERRQ(ierr); 813 814 /* copy x into the vec slvec0 */ 815 ierr = VecGetArray(a->slvec0,&from);CHKERRQ(ierr); 816 ierr = VecGetArray(xx,&x);CHKERRQ(ierr); 817 818 ierr = PetscMemcpy(from,x,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr); 819 ierr = VecRestoreArray(a->slvec0,&from);CHKERRQ(ierr); 820 ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr); 821 822 ierr = VecScatterBegin(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 823 ierr = VecScatterEnd(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 824 /* supperdiagonal part */ 825 ierr = (*a->B->ops->multadd)(a->B,a->slvec1b,a->slvec1a,yy);CHKERRQ(ierr); 826 PetscFunctionReturn(0); 827 } 828 829 #undef __FUNCT__ 830 #define __FUNCT__ "MatMult_MPISBAIJ_2comm" 831 PetscErrorCode MatMult_MPISBAIJ_2comm(Mat A,Vec xx,Vec yy) 832 { 833 Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)A->data; 834 PetscErrorCode ierr; 835 PetscInt nt; 836 837 PetscFunctionBegin; 838 ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr); 839 if (nt != A->cmap->n) { 840 SETERRQ(PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx"); 841 } 842 ierr = VecGetLocalSize(yy,&nt);CHKERRQ(ierr); 843 if (nt != A->rmap->N) { 844 SETERRQ(PETSC_ERR_ARG_SIZ,"Incompatible parition of A and yy"); 845 } 846 847 ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 848 /* do diagonal part */ 849 ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr); 850 /* do supperdiagonal part */ 851 ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 852 ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr); 853 /* do subdiagonal part */ 854 ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr); 855 ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 856 ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 857 858 PetscFunctionReturn(0); 859 } 860 861 #undef __FUNCT__ 862 #define __FUNCT__ "MatMultAdd_MPISBAIJ" 863 PetscErrorCode MatMultAdd_MPISBAIJ(Mat A,Vec xx,Vec yy,Vec zz) 864 { 865 Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)A->data; 866 PetscErrorCode ierr; 867 PetscInt mbs=a->mbs,bs=A->rmap->bs; 868 PetscScalar *x,*from,zero=0.0; 869 870 PetscFunctionBegin; 871 /* 872 PetscSynchronizedPrintf(((PetscObject)A)->comm," MatMultAdd is called ...\n"); 873 PetscSynchronizedFlush(((PetscObject)A)->comm); 874 */ 875 /* diagonal part */ 876 ierr = (*a->A->ops->multadd)(a->A,xx,yy,a->slvec1a);CHKERRQ(ierr); 877 ierr = VecSet(a->slvec1b,zero);CHKERRQ(ierr); 878 879 /* subdiagonal part */ 880 ierr = (*a->B->ops->multtranspose)(a->B,xx,a->slvec0b);CHKERRQ(ierr); 881 882 /* copy x into the vec slvec0 */ 883 ierr = VecGetArray(a->slvec0,&from);CHKERRQ(ierr); 884 ierr = VecGetArray(xx,&x);CHKERRQ(ierr); 885 ierr = PetscMemcpy(from,x,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr); 886 ierr = VecRestoreArray(a->slvec0,&from);CHKERRQ(ierr); 887 888 ierr = VecScatterBegin(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 889 ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr); 890 ierr = VecScatterEnd(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 891 892 /* supperdiagonal part */ 893 ierr = (*a->B->ops->multadd)(a->B,a->slvec1b,a->slvec1a,zz);CHKERRQ(ierr); 894 895 PetscFunctionReturn(0); 896 } 897 898 #undef __FUNCT__ 899 #define __FUNCT__ "MatMultAdd_MPISBAIJ_2comm" 900 PetscErrorCode MatMultAdd_MPISBAIJ_2comm(Mat A,Vec xx,Vec yy,Vec zz) 901 { 902 Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)A->data; 903 PetscErrorCode ierr; 904 905 PetscFunctionBegin; 906 ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 907 /* do diagonal part */ 908 ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr); 909 /* do supperdiagonal part */ 910 ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 911 ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr); 912 913 /* do subdiagonal part */ 914 ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr); 915 ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 916 ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 917 918 PetscFunctionReturn(0); 919 } 920 921 /* 922 This only works correctly for square matrices where the subblock A->A is the 923 diagonal block 924 */ 925 #undef __FUNCT__ 926 #define __FUNCT__ "MatGetDiagonal_MPISBAIJ" 927 PetscErrorCode MatGetDiagonal_MPISBAIJ(Mat A,Vec v) 928 { 929 Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)A->data; 930 PetscErrorCode ierr; 931 932 PetscFunctionBegin; 933 /* if (a->rmap->N != a->cmap->N) SETERRQ(PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block"); */ 934 ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr); 935 PetscFunctionReturn(0); 936 } 937 938 #undef __FUNCT__ 939 #define __FUNCT__ "MatScale_MPISBAIJ" 940 PetscErrorCode MatScale_MPISBAIJ(Mat A,PetscScalar aa) 941 { 942 Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)A->data; 943 PetscErrorCode ierr; 944 945 PetscFunctionBegin; 946 ierr = MatScale(a->A,aa);CHKERRQ(ierr); 947 ierr = MatScale(a->B,aa);CHKERRQ(ierr); 948 PetscFunctionReturn(0); 949 } 950 951 #undef __FUNCT__ 952 #define __FUNCT__ "MatGetRow_MPISBAIJ" 953 PetscErrorCode MatGetRow_MPISBAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 954 { 955 Mat_MPISBAIJ *mat = (Mat_MPISBAIJ*)matin->data; 956 PetscScalar *vworkA,*vworkB,**pvA,**pvB,*v_p; 957 PetscErrorCode ierr; 958 PetscInt bs = matin->rmap->bs,bs2 = mat->bs2,i,*cworkA,*cworkB,**pcA,**pcB; 959 PetscInt nztot,nzA,nzB,lrow,brstart = matin->rmap->rstart,brend = matin->rmap->rend; 960 PetscInt *cmap,*idx_p,cstart = mat->rstartbs; 961 962 PetscFunctionBegin; 963 if (mat->getrowactive == PETSC_TRUE) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Already active"); 964 mat->getrowactive = PETSC_TRUE; 965 966 if (!mat->rowvalues && (idx || v)) { 967 /* 968 allocate enough space to hold information from the longest row. 969 */ 970 Mat_SeqSBAIJ *Aa = (Mat_SeqSBAIJ*)mat->A->data; 971 Mat_SeqBAIJ *Ba = (Mat_SeqBAIJ*)mat->B->data; 972 PetscInt max = 1,mbs = mat->mbs,tmp; 973 for (i=0; i<mbs; i++) { 974 tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i]; /* row length */ 975 if (max < tmp) { max = tmp; } 976 } 977 ierr = PetscMalloc(max*bs2*(sizeof(PetscInt)+sizeof(PetscScalar)),&mat->rowvalues);CHKERRQ(ierr); 978 mat->rowindices = (PetscInt*)(mat->rowvalues + max*bs2); 979 } 980 981 if (row < brstart || row >= brend) SETERRQ(PETSC_ERR_SUP,"Only local rows") 982 lrow = row - brstart; /* local row index */ 983 984 pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB; 985 if (!v) {pvA = 0; pvB = 0;} 986 if (!idx) {pcA = 0; if (!v) pcB = 0;} 987 ierr = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr); 988 ierr = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr); 989 nztot = nzA + nzB; 990 991 cmap = mat->garray; 992 if (v || idx) { 993 if (nztot) { 994 /* Sort by increasing column numbers, assuming A and B already sorted */ 995 PetscInt imark = -1; 996 if (v) { 997 *v = v_p = mat->rowvalues; 998 for (i=0; i<nzB; i++) { 999 if (cmap[cworkB[i]/bs] < cstart) v_p[i] = vworkB[i]; 1000 else break; 1001 } 1002 imark = i; 1003 for (i=0; i<nzA; i++) v_p[imark+i] = vworkA[i]; 1004 for (i=imark; i<nzB; i++) v_p[nzA+i] = vworkB[i]; 1005 } 1006 if (idx) { 1007 *idx = idx_p = mat->rowindices; 1008 if (imark > -1) { 1009 for (i=0; i<imark; i++) { 1010 idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs; 1011 } 1012 } else { 1013 for (i=0; i<nzB; i++) { 1014 if (cmap[cworkB[i]/bs] < cstart) 1015 idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs ; 1016 else break; 1017 } 1018 imark = i; 1019 } 1020 for (i=0; i<nzA; i++) idx_p[imark+i] = cstart*bs + cworkA[i]; 1021 for (i=imark; i<nzB; i++) idx_p[nzA+i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs ; 1022 } 1023 } else { 1024 if (idx) *idx = 0; 1025 if (v) *v = 0; 1026 } 1027 } 1028 *nz = nztot; 1029 ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr); 1030 ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr); 1031 PetscFunctionReturn(0); 1032 } 1033 1034 #undef __FUNCT__ 1035 #define __FUNCT__ "MatRestoreRow_MPISBAIJ" 1036 PetscErrorCode MatRestoreRow_MPISBAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 1037 { 1038 Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data; 1039 1040 PetscFunctionBegin; 1041 if (!baij->getrowactive) { 1042 SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"MatGetRow() must be called first"); 1043 } 1044 baij->getrowactive = PETSC_FALSE; 1045 PetscFunctionReturn(0); 1046 } 1047 1048 #undef __FUNCT__ 1049 #define __FUNCT__ "MatGetRowUpperTriangular_MPISBAIJ" 1050 PetscErrorCode MatGetRowUpperTriangular_MPISBAIJ(Mat A) 1051 { 1052 Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)A->data; 1053 Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ*)a->A->data; 1054 1055 PetscFunctionBegin; 1056 aA->getrow_utriangular = PETSC_TRUE; 1057 PetscFunctionReturn(0); 1058 } 1059 #undef __FUNCT__ 1060 #define __FUNCT__ "MatRestoreRowUpperTriangular_MPISBAIJ" 1061 PetscErrorCode MatRestoreRowUpperTriangular_MPISBAIJ(Mat A) 1062 { 1063 Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)A->data; 1064 Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ*)a->A->data; 1065 1066 PetscFunctionBegin; 1067 aA->getrow_utriangular = PETSC_FALSE; 1068 PetscFunctionReturn(0); 1069 } 1070 1071 #undef __FUNCT__ 1072 #define __FUNCT__ "MatRealPart_MPISBAIJ" 1073 PetscErrorCode MatRealPart_MPISBAIJ(Mat A) 1074 { 1075 Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)A->data; 1076 PetscErrorCode ierr; 1077 1078 PetscFunctionBegin; 1079 ierr = MatRealPart(a->A);CHKERRQ(ierr); 1080 ierr = MatRealPart(a->B);CHKERRQ(ierr); 1081 PetscFunctionReturn(0); 1082 } 1083 1084 #undef __FUNCT__ 1085 #define __FUNCT__ "MatImaginaryPart_MPISBAIJ" 1086 PetscErrorCode MatImaginaryPart_MPISBAIJ(Mat A) 1087 { 1088 Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)A->data; 1089 PetscErrorCode ierr; 1090 1091 PetscFunctionBegin; 1092 ierr = MatImaginaryPart(a->A);CHKERRQ(ierr); 1093 ierr = MatImaginaryPart(a->B);CHKERRQ(ierr); 1094 PetscFunctionReturn(0); 1095 } 1096 1097 #undef __FUNCT__ 1098 #define __FUNCT__ "MatZeroEntries_MPISBAIJ" 1099 PetscErrorCode MatZeroEntries_MPISBAIJ(Mat A) 1100 { 1101 Mat_MPISBAIJ *l = (Mat_MPISBAIJ*)A->data; 1102 PetscErrorCode ierr; 1103 1104 PetscFunctionBegin; 1105 ierr = MatZeroEntries(l->A);CHKERRQ(ierr); 1106 ierr = MatZeroEntries(l->B);CHKERRQ(ierr); 1107 PetscFunctionReturn(0); 1108 } 1109 1110 #undef __FUNCT__ 1111 #define __FUNCT__ "MatGetInfo_MPISBAIJ" 1112 PetscErrorCode MatGetInfo_MPISBAIJ(Mat matin,MatInfoType flag,MatInfo *info) 1113 { 1114 Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)matin->data; 1115 Mat A = a->A,B = a->B; 1116 PetscErrorCode ierr; 1117 PetscReal isend[5],irecv[5]; 1118 1119 PetscFunctionBegin; 1120 info->block_size = (PetscReal)matin->rmap->bs; 1121 ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr); 1122 isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded; 1123 isend[3] = info->memory; isend[4] = info->mallocs; 1124 ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr); 1125 isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded; 1126 isend[3] += info->memory; isend[4] += info->mallocs; 1127 if (flag == MAT_LOCAL) { 1128 info->nz_used = isend[0]; 1129 info->nz_allocated = isend[1]; 1130 info->nz_unneeded = isend[2]; 1131 info->memory = isend[3]; 1132 info->mallocs = isend[4]; 1133 } else if (flag == MAT_GLOBAL_MAX) { 1134 ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_MAX,((PetscObject)matin)->comm);CHKERRQ(ierr); 1135 info->nz_used = irecv[0]; 1136 info->nz_allocated = irecv[1]; 1137 info->nz_unneeded = irecv[2]; 1138 info->memory = irecv[3]; 1139 info->mallocs = irecv[4]; 1140 } else if (flag == MAT_GLOBAL_SUM) { 1141 ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_SUM,((PetscObject)matin)->comm);CHKERRQ(ierr); 1142 info->nz_used = irecv[0]; 1143 info->nz_allocated = irecv[1]; 1144 info->nz_unneeded = irecv[2]; 1145 info->memory = irecv[3]; 1146 info->mallocs = irecv[4]; 1147 } else { 1148 SETERRQ1(PETSC_ERR_ARG_WRONG,"Unknown MatInfoType argument %d",(int)flag); 1149 } 1150 info->fill_ratio_given = 0; /* no parallel LU/ILU/Cholesky */ 1151 info->fill_ratio_needed = 0; 1152 info->factor_mallocs = 0; 1153 PetscFunctionReturn(0); 1154 } 1155 1156 #undef __FUNCT__ 1157 #define __FUNCT__ "MatSetOption_MPISBAIJ" 1158 PetscErrorCode MatSetOption_MPISBAIJ(Mat A,MatOption op,PetscTruth flg) 1159 { 1160 Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)A->data; 1161 Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ*)a->A->data; 1162 PetscErrorCode ierr; 1163 1164 PetscFunctionBegin; 1165 switch (op) { 1166 case MAT_NEW_NONZERO_LOCATIONS: 1167 case MAT_NEW_NONZERO_ALLOCATION_ERR: 1168 case MAT_KEEP_ZEROED_ROWS: 1169 case MAT_NEW_NONZERO_LOCATION_ERR: 1170 ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 1171 ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr); 1172 break; 1173 case MAT_ROW_ORIENTED: 1174 a->roworiented = flg; 1175 ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 1176 ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr); 1177 break; 1178 case MAT_NEW_DIAGONALS: 1179 ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr); 1180 break; 1181 case MAT_IGNORE_OFF_PROC_ENTRIES: 1182 a->donotstash = flg; 1183 break; 1184 case MAT_USE_HASH_TABLE: 1185 a->ht_flag = flg; 1186 break; 1187 case MAT_HERMITIAN: 1188 if (flg) SETERRQ(PETSC_ERR_SUP,"Matrix must be symmetric"); 1189 case MAT_SYMMETRIC: 1190 case MAT_STRUCTURALLY_SYMMETRIC: 1191 case MAT_SYMMETRY_ETERNAL: 1192 if (!flg) SETERRQ(PETSC_ERR_SUP,"Matrix must be symmetric"); 1193 ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr); 1194 break; 1195 case MAT_IGNORE_LOWER_TRIANGULAR: 1196 aA->ignore_ltriangular = flg; 1197 break; 1198 case MAT_ERROR_LOWER_TRIANGULAR: 1199 aA->ignore_ltriangular = flg; 1200 break; 1201 case MAT_GETROW_UPPERTRIANGULAR: 1202 aA->getrow_utriangular = flg; 1203 break; 1204 default: 1205 SETERRQ1(PETSC_ERR_SUP,"unknown option %d",op); 1206 } 1207 PetscFunctionReturn(0); 1208 } 1209 1210 #undef __FUNCT__ 1211 #define __FUNCT__ "MatTranspose_MPISBAIJ" 1212 PetscErrorCode MatTranspose_MPISBAIJ(Mat A,MatReuse reuse,Mat *B) 1213 { 1214 PetscErrorCode ierr; 1215 PetscFunctionBegin; 1216 if (MAT_INITIAL_MATRIX || *B != A) { 1217 ierr = MatDuplicate(A,MAT_COPY_VALUES,B);CHKERRQ(ierr); 1218 } 1219 PetscFunctionReturn(0); 1220 } 1221 1222 #undef __FUNCT__ 1223 #define __FUNCT__ "MatDiagonalScale_MPISBAIJ" 1224 PetscErrorCode MatDiagonalScale_MPISBAIJ(Mat mat,Vec ll,Vec rr) 1225 { 1226 Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data; 1227 Mat a=baij->A, b=baij->B; 1228 PetscErrorCode ierr; 1229 PetscInt nv,m,n; 1230 PetscTruth flg; 1231 1232 PetscFunctionBegin; 1233 if (ll != rr){ 1234 ierr = VecEqual(ll,rr,&flg);CHKERRQ(ierr); 1235 if (!flg) 1236 SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"For symmetric format, left and right scaling vectors must be same\n"); 1237 } 1238 if (!ll) PetscFunctionReturn(0); 1239 1240 ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr); 1241 if (m != n) SETERRQ2(PETSC_ERR_ARG_SIZ,"For symmetric format, local size %d %d must be same",m,n); 1242 1243 ierr = VecGetLocalSize(rr,&nv);CHKERRQ(ierr); 1244 if (nv!=n) SETERRQ(PETSC_ERR_ARG_SIZ,"Left and right vector non-conforming local size"); 1245 1246 ierr = VecScatterBegin(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1247 1248 /* left diagonalscale the off-diagonal part */ 1249 ierr = (*b->ops->diagonalscale)(b,ll,PETSC_NULL);CHKERRQ(ierr); 1250 1251 /* scale the diagonal part */ 1252 ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr); 1253 1254 /* right diagonalscale the off-diagonal part */ 1255 ierr = VecScatterEnd(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1256 ierr = (*b->ops->diagonalscale)(b,PETSC_NULL,baij->lvec);CHKERRQ(ierr); 1257 PetscFunctionReturn(0); 1258 } 1259 1260 #undef __FUNCT__ 1261 #define __FUNCT__ "MatSetUnfactored_MPISBAIJ" 1262 PetscErrorCode MatSetUnfactored_MPISBAIJ(Mat A) 1263 { 1264 Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)A->data; 1265 PetscErrorCode ierr; 1266 1267 PetscFunctionBegin; 1268 ierr = MatSetUnfactored(a->A);CHKERRQ(ierr); 1269 PetscFunctionReturn(0); 1270 } 1271 1272 static PetscErrorCode MatDuplicate_MPISBAIJ(Mat,MatDuplicateOption,Mat *); 1273 1274 #undef __FUNCT__ 1275 #define __FUNCT__ "MatEqual_MPISBAIJ" 1276 PetscErrorCode MatEqual_MPISBAIJ(Mat A,Mat B,PetscTruth *flag) 1277 { 1278 Mat_MPISBAIJ *matB = (Mat_MPISBAIJ*)B->data,*matA = (Mat_MPISBAIJ*)A->data; 1279 Mat a,b,c,d; 1280 PetscTruth flg; 1281 PetscErrorCode ierr; 1282 1283 PetscFunctionBegin; 1284 a = matA->A; b = matA->B; 1285 c = matB->A; d = matB->B; 1286 1287 ierr = MatEqual(a,c,&flg);CHKERRQ(ierr); 1288 if (flg) { 1289 ierr = MatEqual(b,d,&flg);CHKERRQ(ierr); 1290 } 1291 ierr = MPI_Allreduce(&flg,flag,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr); 1292 PetscFunctionReturn(0); 1293 } 1294 1295 #undef __FUNCT__ 1296 #define __FUNCT__ "MatCopy_MPISBAIJ" 1297 PetscErrorCode MatCopy_MPISBAIJ(Mat A,Mat B,MatStructure str) 1298 { 1299 PetscErrorCode ierr; 1300 Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data; 1301 Mat_MPISBAIJ *b = (Mat_MPISBAIJ *)B->data; 1302 1303 PetscFunctionBegin; 1304 /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */ 1305 if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) { 1306 ierr = MatGetRowUpperTriangular(A);CHKERRQ(ierr); 1307 ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr); 1308 ierr = MatRestoreRowUpperTriangular(A);CHKERRQ(ierr); 1309 } else { 1310 ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr); 1311 ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr); 1312 } 1313 PetscFunctionReturn(0); 1314 } 1315 1316 #undef __FUNCT__ 1317 #define __FUNCT__ "MatSetUpPreallocation_MPISBAIJ" 1318 PetscErrorCode MatSetUpPreallocation_MPISBAIJ(Mat A) 1319 { 1320 PetscErrorCode ierr; 1321 1322 PetscFunctionBegin; 1323 ierr = MatMPISBAIJSetPreallocation(A,-PetscMax(A->rmap->bs,1),PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr); 1324 PetscFunctionReturn(0); 1325 } 1326 1327 #include "petscblaslapack.h" 1328 #undef __FUNCT__ 1329 #define __FUNCT__ "MatAXPY_MPISBAIJ" 1330 PetscErrorCode MatAXPY_MPISBAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str) 1331 { 1332 PetscErrorCode ierr; 1333 Mat_MPISBAIJ *xx=(Mat_MPISBAIJ *)X->data,*yy=(Mat_MPISBAIJ *)Y->data; 1334 PetscBLASInt bnz,one=1; 1335 Mat_SeqSBAIJ *xa,*ya; 1336 Mat_SeqBAIJ *xb,*yb; 1337 1338 PetscFunctionBegin; 1339 if (str == SAME_NONZERO_PATTERN) { 1340 PetscScalar alpha = a; 1341 xa = (Mat_SeqSBAIJ *)xx->A->data; 1342 ya = (Mat_SeqSBAIJ *)yy->A->data; 1343 bnz = PetscBLASIntCast(xa->nz); 1344 BLASaxpy_(&bnz,&alpha,xa->a,&one,ya->a,&one); 1345 xb = (Mat_SeqBAIJ *)xx->B->data; 1346 yb = (Mat_SeqBAIJ *)yy->B->data; 1347 bnz = PetscBLASIntCast(xb->nz); 1348 BLASaxpy_(&bnz,&alpha,xb->a,&one,yb->a,&one); 1349 } else { 1350 ierr = MatGetRowUpperTriangular(X);CHKERRQ(ierr); 1351 ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr); 1352 ierr = MatRestoreRowUpperTriangular(X);CHKERRQ(ierr); 1353 } 1354 PetscFunctionReturn(0); 1355 } 1356 1357 #undef __FUNCT__ 1358 #define __FUNCT__ "MatGetSubMatrices_MPISBAIJ" 1359 PetscErrorCode MatGetSubMatrices_MPISBAIJ(Mat A,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *B[]) 1360 { 1361 PetscErrorCode ierr; 1362 PetscInt i; 1363 PetscTruth flg; 1364 1365 PetscFunctionBegin; 1366 for (i=0; i<n; i++) { 1367 ierr = ISEqual(irow[i],icol[i],&flg);CHKERRQ(ierr); 1368 if (!flg) { 1369 SETERRQ(PETSC_ERR_SUP,"Can only get symmetric submatrix for MPISBAIJ matrices"); 1370 } 1371 } 1372 ierr = MatGetSubMatrices_MPIBAIJ(A,n,irow,icol,scall,B);CHKERRQ(ierr); 1373 PetscFunctionReturn(0); 1374 } 1375 1376 1377 /* -------------------------------------------------------------------*/ 1378 static struct _MatOps MatOps_Values = { 1379 MatSetValues_MPISBAIJ, 1380 MatGetRow_MPISBAIJ, 1381 MatRestoreRow_MPISBAIJ, 1382 MatMult_MPISBAIJ, 1383 /* 4*/ MatMultAdd_MPISBAIJ, 1384 MatMult_MPISBAIJ, /* transpose versions are same as non-transpose */ 1385 MatMultAdd_MPISBAIJ, 1386 0, 1387 0, 1388 0, 1389 /*10*/ 0, 1390 0, 1391 0, 1392 MatRelax_MPISBAIJ, 1393 MatTranspose_MPISBAIJ, 1394 /*15*/ MatGetInfo_MPISBAIJ, 1395 MatEqual_MPISBAIJ, 1396 MatGetDiagonal_MPISBAIJ, 1397 MatDiagonalScale_MPISBAIJ, 1398 MatNorm_MPISBAIJ, 1399 /*20*/ MatAssemblyBegin_MPISBAIJ, 1400 MatAssemblyEnd_MPISBAIJ, 1401 0, 1402 MatSetOption_MPISBAIJ, 1403 MatZeroEntries_MPISBAIJ, 1404 /*25*/ 0, 1405 0, 1406 0, 1407 0, 1408 0, 1409 /*30*/ MatSetUpPreallocation_MPISBAIJ, 1410 0, 1411 0, 1412 0, 1413 0, 1414 /*35*/ MatDuplicate_MPISBAIJ, 1415 0, 1416 0, 1417 0, 1418 0, 1419 /*40*/ MatAXPY_MPISBAIJ, 1420 MatGetSubMatrices_MPISBAIJ, 1421 MatIncreaseOverlap_MPISBAIJ, 1422 MatGetValues_MPISBAIJ, 1423 MatCopy_MPISBAIJ, 1424 /*45*/ 0, 1425 MatScale_MPISBAIJ, 1426 0, 1427 0, 1428 0, 1429 /*50*/ 0, 1430 0, 1431 0, 1432 0, 1433 0, 1434 /*55*/ 0, 1435 0, 1436 MatSetUnfactored_MPISBAIJ, 1437 0, 1438 MatSetValuesBlocked_MPISBAIJ, 1439 /*60*/ 0, 1440 0, 1441 0, 1442 0, 1443 0, 1444 /*65*/ 0, 1445 0, 1446 0, 1447 0, 1448 0, 1449 /*70*/ MatGetRowMaxAbs_MPISBAIJ, 1450 0, 1451 0, 1452 0, 1453 0, 1454 /*75*/ 0, 1455 0, 1456 0, 1457 0, 1458 0, 1459 /*80*/ 0, 1460 0, 1461 0, 1462 0, 1463 MatLoad_MPISBAIJ, 1464 /*85*/ 0, 1465 0, 1466 0, 1467 0, 1468 0, 1469 /*90*/ 0, 1470 0, 1471 0, 1472 0, 1473 0, 1474 /*95*/ 0, 1475 0, 1476 0, 1477 0, 1478 0, 1479 /*100*/0, 1480 0, 1481 0, 1482 0, 1483 0, 1484 /*105*/0, 1485 MatRealPart_MPISBAIJ, 1486 MatImaginaryPart_MPISBAIJ, 1487 MatGetRowUpperTriangular_MPISBAIJ, 1488 MatRestoreRowUpperTriangular_MPISBAIJ 1489 }; 1490 1491 1492 EXTERN_C_BEGIN 1493 #undef __FUNCT__ 1494 #define __FUNCT__ "MatGetDiagonalBlock_MPISBAIJ" 1495 PetscErrorCode PETSCMAT_DLLEXPORT MatGetDiagonalBlock_MPISBAIJ(Mat A,PetscTruth *iscopy,MatReuse reuse,Mat *a) 1496 { 1497 PetscFunctionBegin; 1498 *a = ((Mat_MPISBAIJ *)A->data)->A; 1499 *iscopy = PETSC_FALSE; 1500 PetscFunctionReturn(0); 1501 } 1502 EXTERN_C_END 1503 1504 EXTERN_C_BEGIN 1505 #undef __FUNCT__ 1506 #define __FUNCT__ "MatMPISBAIJSetPreallocation_MPISBAIJ" 1507 PetscErrorCode PETSCMAT_DLLEXPORT MatMPISBAIJSetPreallocation_MPISBAIJ(Mat B,PetscInt bs,PetscInt d_nz,PetscInt *d_nnz,PetscInt o_nz,PetscInt *o_nnz) 1508 { 1509 Mat_MPISBAIJ *b; 1510 PetscErrorCode ierr; 1511 PetscInt i,mbs,Mbs,newbs = PetscAbs(bs); 1512 1513 PetscFunctionBegin; 1514 if (bs < 0){ 1515 ierr = PetscOptionsBegin(((PetscObject)B)->comm,((PetscObject)B)->prefix,"Options for MPISBAIJ matrix","Mat");CHKERRQ(ierr); 1516 ierr = PetscOptionsInt("-mat_block_size","Set the blocksize used to store the matrix","MatMPIBAIJSetPreallocation",newbs,&newbs,PETSC_NULL);CHKERRQ(ierr); 1517 ierr = PetscOptionsEnd();CHKERRQ(ierr); 1518 bs = PetscAbs(bs); 1519 } 1520 if ((d_nnz || o_nnz) && newbs != bs) { 1521 SETERRQ(PETSC_ERR_ARG_WRONG,"Cannot change blocksize from command line if setting d_nnz or o_nnz"); 1522 } 1523 bs = newbs; 1524 1525 if (d_nz == PETSC_DECIDE || d_nz == PETSC_DEFAULT) d_nz = 3; 1526 if (o_nz == PETSC_DECIDE || o_nz == PETSC_DEFAULT) o_nz = 1; 1527 if (d_nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"d_nz cannot be less than 0: value %D",d_nz); 1528 if (o_nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"o_nz cannot be less than 0: value %D",o_nz); 1529 1530 B->rmap->bs = B->cmap->bs = bs; 1531 ierr = PetscMapSetUp(B->rmap);CHKERRQ(ierr); 1532 ierr = PetscMapSetUp(B->cmap);CHKERRQ(ierr); 1533 1534 if (d_nnz) { 1535 for (i=0; i<B->rmap->n/bs; i++) { 1536 if (d_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than -1: local row %D value %D",i,d_nnz[i]); 1537 } 1538 } 1539 if (o_nnz) { 1540 for (i=0; i<B->rmap->n/bs; i++) { 1541 if (o_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than -1: local row %D value %D",i,o_nnz[i]); 1542 } 1543 } 1544 B->preallocated = PETSC_TRUE; 1545 1546 b = (Mat_MPISBAIJ*)B->data; 1547 mbs = B->rmap->n/bs; 1548 Mbs = B->rmap->N/bs; 1549 if (mbs*bs != B->rmap->n) { 1550 SETERRQ2(PETSC_ERR_ARG_SIZ,"No of local rows %D must be divisible by blocksize %D",B->rmap->N,bs); 1551 } 1552 1553 B->rmap->bs = bs; 1554 b->bs2 = bs*bs; 1555 b->mbs = mbs; 1556 b->nbs = mbs; 1557 b->Mbs = Mbs; 1558 b->Nbs = Mbs; 1559 1560 for (i=0; i<=b->size; i++) { 1561 b->rangebs[i] = B->rmap->range[i]/bs; 1562 } 1563 b->rstartbs = B->rmap->rstart/bs; 1564 b->rendbs = B->rmap->rend/bs; 1565 1566 b->cstartbs = B->cmap->rstart/bs; 1567 b->cendbs = B->cmap->rend/bs; 1568 1569 ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr); 1570 ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr); 1571 ierr = MatSetType(b->A,MATSEQSBAIJ);CHKERRQ(ierr); 1572 ierr = MatSeqSBAIJSetPreallocation(b->A,bs,d_nz,d_nnz);CHKERRQ(ierr); 1573 ierr = PetscLogObjectParent(B,b->A);CHKERRQ(ierr); 1574 1575 ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr); 1576 ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr); 1577 ierr = MatSetType(b->B,MATSEQBAIJ);CHKERRQ(ierr); 1578 ierr = MatSeqBAIJSetPreallocation(b->B,bs,o_nz,o_nnz);CHKERRQ(ierr); 1579 ierr = PetscLogObjectParent(B,b->B);CHKERRQ(ierr); 1580 1581 /* build cache for off array entries formed */ 1582 ierr = MatStashCreate_Private(((PetscObject)B)->comm,bs,&B->bstash);CHKERRQ(ierr); 1583 1584 PetscFunctionReturn(0); 1585 } 1586 EXTERN_C_END 1587 1588 EXTERN_C_BEGIN 1589 #if defined(PETSC_HAVE_MUMPS) 1590 extern PetscErrorCode PETSCMAT_DLLEXPORT MatGetFactor_mpisbaij_mumps(Mat,MatFactorType,Mat*); 1591 #endif 1592 #if defined(PETSC_HAVE_SPOOLES) 1593 extern PetscErrorCode PETSCMAT_DLLEXPORT MatGetFactor_mpisbaij_spooles(Mat,MatFactorType,Mat*); 1594 #endif 1595 EXTERN_C_END 1596 1597 /*MC 1598 MATMPISBAIJ - MATMPISBAIJ = "mpisbaij" - A matrix type to be used for distributed symmetric sparse block matrices, 1599 based on block compressed sparse row format. Only the upper triangular portion of the matrix is stored. 1600 1601 Options Database Keys: 1602 . -mat_type mpisbaij - sets the matrix type to "mpisbaij" during a call to MatSetFromOptions() 1603 1604 Level: beginner 1605 1606 .seealso: MatCreateMPISBAIJ 1607 M*/ 1608 1609 EXTERN_C_BEGIN 1610 #undef __FUNCT__ 1611 #define __FUNCT__ "MatCreate_MPISBAIJ" 1612 PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPISBAIJ(Mat B) 1613 { 1614 Mat_MPISBAIJ *b; 1615 PetscErrorCode ierr; 1616 PetscTruth flg; 1617 1618 PetscFunctionBegin; 1619 1620 ierr = PetscNewLog(B,Mat_MPISBAIJ,&b);CHKERRQ(ierr); 1621 B->data = (void*)b; 1622 ierr = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr); 1623 1624 B->ops->destroy = MatDestroy_MPISBAIJ; 1625 B->ops->view = MatView_MPISBAIJ; 1626 B->mapping = 0; 1627 B->assembled = PETSC_FALSE; 1628 1629 B->insertmode = NOT_SET_VALUES; 1630 ierr = MPI_Comm_rank(((PetscObject)B)->comm,&b->rank);CHKERRQ(ierr); 1631 ierr = MPI_Comm_size(((PetscObject)B)->comm,&b->size);CHKERRQ(ierr); 1632 1633 /* build local table of row and column ownerships */ 1634 ierr = PetscMalloc((b->size+2)*sizeof(PetscInt),&b->rangebs);CHKERRQ(ierr); 1635 1636 /* build cache for off array entries formed */ 1637 ierr = MatStashCreate_Private(((PetscObject)B)->comm,1,&B->stash);CHKERRQ(ierr); 1638 b->donotstash = PETSC_FALSE; 1639 b->colmap = PETSC_NULL; 1640 b->garray = PETSC_NULL; 1641 b->roworiented = PETSC_TRUE; 1642 1643 /* stuff used in block assembly */ 1644 b->barray = 0; 1645 1646 /* stuff used for matrix vector multiply */ 1647 b->lvec = 0; 1648 b->Mvctx = 0; 1649 b->slvec0 = 0; 1650 b->slvec0b = 0; 1651 b->slvec1 = 0; 1652 b->slvec1a = 0; 1653 b->slvec1b = 0; 1654 b->sMvctx = 0; 1655 1656 /* stuff for MatGetRow() */ 1657 b->rowindices = 0; 1658 b->rowvalues = 0; 1659 b->getrowactive = PETSC_FALSE; 1660 1661 /* hash table stuff */ 1662 b->ht = 0; 1663 b->hd = 0; 1664 b->ht_size = 0; 1665 b->ht_flag = PETSC_FALSE; 1666 b->ht_fact = 0; 1667 b->ht_total_ct = 0; 1668 b->ht_insert_ct = 0; 1669 1670 b->in_loc = 0; 1671 b->v_loc = 0; 1672 b->n_loc = 0; 1673 ierr = PetscOptionsBegin(((PetscObject)B)->comm,PETSC_NULL,"Options for loading MPISBAIJ matrix 1","Mat");CHKERRQ(ierr); 1674 ierr = PetscOptionsTruth("-mat_use_hash_table","Use hash table to save memory in constructing matrix","MatSetOption",PETSC_FALSE,&flg,PETSC_NULL);CHKERRQ(ierr); 1675 if (flg) { 1676 PetscReal fact = 1.39; 1677 ierr = MatSetOption(B,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr); 1678 ierr = PetscOptionsReal("-mat_use_hash_table","Use hash table factor","MatMPIBAIJSetHashTableFactor",fact,&fact,PETSC_NULL);CHKERRQ(ierr); 1679 if (fact <= 1.0) fact = 1.39; 1680 ierr = MatMPIBAIJSetHashTableFactor(B,fact);CHKERRQ(ierr); 1681 ierr = PetscInfo1(B,"Hash table Factor used %5.2f\n",fact);CHKERRQ(ierr); 1682 } 1683 ierr = PetscOptionsEnd();CHKERRQ(ierr); 1684 1685 #if defined(PETSC_HAVE_MUMPS) 1686 ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpisbaij_mumps_C", 1687 "MatGetFactor_mpisbaij_mumps", 1688 MatGetFactor_mpisbaij_mumps);CHKERRQ(ierr); 1689 #endif 1690 #if defined(PETSC_HAVE_SPOOLES) 1691 ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpisbaij_spooles_C", 1692 "MatGetFactor_mpisbaij_spooles", 1693 MatGetFactor_mpisbaij_spooles);CHKERRQ(ierr); 1694 #endif 1695 ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatStoreValues_C", 1696 "MatStoreValues_MPISBAIJ", 1697 MatStoreValues_MPISBAIJ);CHKERRQ(ierr); 1698 ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatRetrieveValues_C", 1699 "MatRetrieveValues_MPISBAIJ", 1700 MatRetrieveValues_MPISBAIJ);CHKERRQ(ierr); 1701 ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetDiagonalBlock_C", 1702 "MatGetDiagonalBlock_MPISBAIJ", 1703 MatGetDiagonalBlock_MPISBAIJ);CHKERRQ(ierr); 1704 ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPISBAIJSetPreallocation_C", 1705 "MatMPISBAIJSetPreallocation_MPISBAIJ", 1706 MatMPISBAIJSetPreallocation_MPISBAIJ);CHKERRQ(ierr); 1707 B->symmetric = PETSC_TRUE; 1708 B->structurally_symmetric = PETSC_TRUE; 1709 B->symmetric_set = PETSC_TRUE; 1710 B->structurally_symmetric_set = PETSC_TRUE; 1711 ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPISBAIJ);CHKERRQ(ierr); 1712 PetscFunctionReturn(0); 1713 } 1714 EXTERN_C_END 1715 1716 /*MC 1717 MATSBAIJ - MATSBAIJ = "sbaij" - A matrix type to be used for symmetric block sparse matrices. 1718 1719 This matrix type is identical to MATSEQSBAIJ when constructed with a single process communicator, 1720 and MATMPISBAIJ otherwise. 1721 1722 Options Database Keys: 1723 . -mat_type sbaij - sets the matrix type to "sbaij" during a call to MatSetFromOptions() 1724 1725 Level: beginner 1726 1727 .seealso: MatCreateMPISBAIJ,MATSEQSBAIJ,MATMPISBAIJ 1728 M*/ 1729 1730 EXTERN_C_BEGIN 1731 #undef __FUNCT__ 1732 #define __FUNCT__ "MatCreate_SBAIJ" 1733 PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_SBAIJ(Mat A) 1734 { 1735 PetscErrorCode ierr; 1736 PetscMPIInt size; 1737 1738 PetscFunctionBegin; 1739 ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr); 1740 if (size == 1) { 1741 ierr = MatSetType(A,MATSEQSBAIJ);CHKERRQ(ierr); 1742 } else { 1743 ierr = MatSetType(A,MATMPISBAIJ);CHKERRQ(ierr); 1744 } 1745 PetscFunctionReturn(0); 1746 } 1747 EXTERN_C_END 1748 1749 #undef __FUNCT__ 1750 #define __FUNCT__ "MatMPISBAIJSetPreallocation" 1751 /*@C 1752 MatMPISBAIJSetPreallocation - For good matrix assembly performance 1753 the user should preallocate the matrix storage by setting the parameters 1754 d_nz (or d_nnz) and o_nz (or o_nnz). By setting these parameters accurately, 1755 performance can be increased by more than a factor of 50. 1756 1757 Collective on Mat 1758 1759 Input Parameters: 1760 + A - the matrix 1761 . bs - size of blockk 1762 . d_nz - number of block nonzeros per block row in diagonal portion of local 1763 submatrix (same for all local rows) 1764 . d_nnz - array containing the number of block nonzeros in the various block rows 1765 in the upper triangular and diagonal part of the in diagonal portion of the local 1766 (possibly different for each block row) or PETSC_NULL. You must leave room 1767 for the diagonal entry even if it is zero. 1768 . o_nz - number of block nonzeros per block row in the off-diagonal portion of local 1769 submatrix (same for all local rows). 1770 - o_nnz - array containing the number of nonzeros in the various block rows of the 1771 off-diagonal portion of the local submatrix (possibly different for 1772 each block row) or PETSC_NULL. 1773 1774 1775 Options Database Keys: 1776 . -mat_no_unroll - uses code that does not unroll the loops in the 1777 block calculations (much slower) 1778 . -mat_block_size - size of the blocks to use 1779 1780 Notes: 1781 1782 If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor 1783 than it must be used on all processors that share the object for that argument. 1784 1785 If the *_nnz parameter is given then the *_nz parameter is ignored 1786 1787 Storage Information: 1788 For a square global matrix we define each processor's diagonal portion 1789 to be its local rows and the corresponding columns (a square submatrix); 1790 each processor's off-diagonal portion encompasses the remainder of the 1791 local matrix (a rectangular submatrix). 1792 1793 The user can specify preallocated storage for the diagonal part of 1794 the local submatrix with either d_nz or d_nnz (not both). Set 1795 d_nz=PETSC_DEFAULT and d_nnz=PETSC_NULL for PETSc to control dynamic 1796 memory allocation. Likewise, specify preallocated storage for the 1797 off-diagonal part of the local submatrix with o_nz or o_nnz (not both). 1798 1799 You can call MatGetInfo() to get information on how effective the preallocation was; 1800 for example the fields mallocs,nz_allocated,nz_used,nz_unneeded; 1801 You can also run with the option -info and look for messages with the string 1802 malloc in them to see if additional memory allocation was needed. 1803 1804 Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In 1805 the figure below we depict these three local rows and all columns (0-11). 1806 1807 .vb 1808 0 1 2 3 4 5 6 7 8 9 10 11 1809 ------------------- 1810 row 3 | o o o d d d o o o o o o 1811 row 4 | o o o d d d o o o o o o 1812 row 5 | o o o d d d o o o o o o 1813 ------------------- 1814 .ve 1815 1816 Thus, any entries in the d locations are stored in the d (diagonal) 1817 submatrix, and any entries in the o locations are stored in the 1818 o (off-diagonal) submatrix. Note that the d matrix is stored in 1819 MatSeqSBAIJ format and the o submatrix in MATSEQBAIJ format. 1820 1821 Now d_nz should indicate the number of block nonzeros per row in the upper triangular 1822 plus the diagonal part of the d matrix, 1823 and o_nz should indicate the number of block nonzeros per row in the o matrix. 1824 In general, for PDE problems in which most nonzeros are near the diagonal, 1825 one expects d_nz >> o_nz. For large problems you MUST preallocate memory 1826 or you will get TERRIBLE performance; see the users' manual chapter on 1827 matrices. 1828 1829 Level: intermediate 1830 1831 .keywords: matrix, block, aij, compressed row, sparse, parallel 1832 1833 .seealso: MatCreate(), MatCreateSeqSBAIJ(), MatSetValues(), MatCreateMPIBAIJ() 1834 @*/ 1835 PetscErrorCode PETSCMAT_DLLEXPORT MatMPISBAIJSetPreallocation(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 1836 { 1837 PetscErrorCode ierr,(*f)(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]); 1838 1839 PetscFunctionBegin; 1840 ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPISBAIJSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr); 1841 if (f) { 1842 ierr = (*f)(B,bs,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr); 1843 } 1844 PetscFunctionReturn(0); 1845 } 1846 1847 #undef __FUNCT__ 1848 #define __FUNCT__ "MatCreateMPISBAIJ" 1849 /*@C 1850 MatCreateMPISBAIJ - Creates a sparse parallel matrix in symmetric block AIJ format 1851 (block compressed row). For good matrix assembly performance 1852 the user should preallocate the matrix storage by setting the parameters 1853 d_nz (or d_nnz) and o_nz (or o_nnz). By setting these parameters accurately, 1854 performance can be increased by more than a factor of 50. 1855 1856 Collective on MPI_Comm 1857 1858 Input Parameters: 1859 + comm - MPI communicator 1860 . bs - size of blockk 1861 . m - number of local rows (or PETSC_DECIDE to have calculated if M is given) 1862 This value should be the same as the local size used in creating the 1863 y vector for the matrix-vector product y = Ax. 1864 . n - number of local columns (or PETSC_DECIDE to have calculated if N is given) 1865 This value should be the same as the local size used in creating the 1866 x vector for the matrix-vector product y = Ax. 1867 . M - number of global rows (or PETSC_DETERMINE to have calculated if m is given) 1868 . N - number of global columns (or PETSC_DETERMINE to have calculated if n is given) 1869 . d_nz - number of block nonzeros per block row in diagonal portion of local 1870 submatrix (same for all local rows) 1871 . d_nnz - array containing the number of block nonzeros in the various block rows 1872 in the upper triangular portion of the in diagonal portion of the local 1873 (possibly different for each block block row) or PETSC_NULL. 1874 You must leave room for the diagonal entry even if it is zero. 1875 . o_nz - number of block nonzeros per block row in the off-diagonal portion of local 1876 submatrix (same for all local rows). 1877 - o_nnz - array containing the number of nonzeros in the various block rows of the 1878 off-diagonal portion of the local submatrix (possibly different for 1879 each block row) or PETSC_NULL. 1880 1881 Output Parameter: 1882 . A - the matrix 1883 1884 Options Database Keys: 1885 . -mat_no_unroll - uses code that does not unroll the loops in the 1886 block calculations (much slower) 1887 . -mat_block_size - size of the blocks to use 1888 . -mat_mpi - use the parallel matrix data structures even on one processor 1889 (defaults to using SeqBAIJ format on one processor) 1890 1891 It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(), 1892 MatXXXXSetPreallocation() paradgm instead of this routine directly. This is definitely 1893 true if you plan to use the external direct solvers such as SuperLU, MUMPS or Spooles. 1894 [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation] 1895 1896 Notes: 1897 The number of rows and columns must be divisible by blocksize. 1898 This matrix type does not support complex Hermitian operation. 1899 1900 The user MUST specify either the local or global matrix dimensions 1901 (possibly both). 1902 1903 If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor 1904 than it must be used on all processors that share the object for that argument. 1905 1906 If the *_nnz parameter is given then the *_nz parameter is ignored 1907 1908 Storage Information: 1909 For a square global matrix we define each processor's diagonal portion 1910 to be its local rows and the corresponding columns (a square submatrix); 1911 each processor's off-diagonal portion encompasses the remainder of the 1912 local matrix (a rectangular submatrix). 1913 1914 The user can specify preallocated storage for the diagonal part of 1915 the local submatrix with either d_nz or d_nnz (not both). Set 1916 d_nz=PETSC_DEFAULT and d_nnz=PETSC_NULL for PETSc to control dynamic 1917 memory allocation. Likewise, specify preallocated storage for the 1918 off-diagonal part of the local submatrix with o_nz or o_nnz (not both). 1919 1920 Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In 1921 the figure below we depict these three local rows and all columns (0-11). 1922 1923 .vb 1924 0 1 2 3 4 5 6 7 8 9 10 11 1925 ------------------- 1926 row 3 | o o o d d d o o o o o o 1927 row 4 | o o o d d d o o o o o o 1928 row 5 | o o o d d d o o o o o o 1929 ------------------- 1930 .ve 1931 1932 Thus, any entries in the d locations are stored in the d (diagonal) 1933 submatrix, and any entries in the o locations are stored in the 1934 o (off-diagonal) submatrix. Note that the d matrix is stored in 1935 MatSeqSBAIJ format and the o submatrix in MATSEQBAIJ format. 1936 1937 Now d_nz should indicate the number of block nonzeros per row in the upper triangular 1938 plus the diagonal part of the d matrix, 1939 and o_nz should indicate the number of block nonzeros per row in the o matrix. 1940 In general, for PDE problems in which most nonzeros are near the diagonal, 1941 one expects d_nz >> o_nz. For large problems you MUST preallocate memory 1942 or you will get TERRIBLE performance; see the users' manual chapter on 1943 matrices. 1944 1945 Level: intermediate 1946 1947 .keywords: matrix, block, aij, compressed row, sparse, parallel 1948 1949 .seealso: MatCreate(), MatCreateSeqSBAIJ(), MatSetValues(), MatCreateMPIBAIJ() 1950 @*/ 1951 1952 PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPISBAIJ(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A) 1953 { 1954 PetscErrorCode ierr; 1955 PetscMPIInt size; 1956 1957 PetscFunctionBegin; 1958 ierr = MatCreate(comm,A);CHKERRQ(ierr); 1959 ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr); 1960 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1961 if (size > 1) { 1962 ierr = MatSetType(*A,MATMPISBAIJ);CHKERRQ(ierr); 1963 ierr = MatMPISBAIJSetPreallocation(*A,bs,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr); 1964 } else { 1965 ierr = MatSetType(*A,MATSEQSBAIJ);CHKERRQ(ierr); 1966 ierr = MatSeqSBAIJSetPreallocation(*A,bs,d_nz,d_nnz);CHKERRQ(ierr); 1967 } 1968 PetscFunctionReturn(0); 1969 } 1970 1971 1972 #undef __FUNCT__ 1973 #define __FUNCT__ "MatDuplicate_MPISBAIJ" 1974 static PetscErrorCode MatDuplicate_MPISBAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat) 1975 { 1976 Mat mat; 1977 Mat_MPISBAIJ *a,*oldmat = (Mat_MPISBAIJ*)matin->data; 1978 PetscErrorCode ierr; 1979 PetscInt len=0,nt,bs=matin->rmap->bs,mbs=oldmat->mbs; 1980 PetscScalar *array; 1981 1982 PetscFunctionBegin; 1983 *newmat = 0; 1984 ierr = MatCreate(((PetscObject)matin)->comm,&mat);CHKERRQ(ierr); 1985 ierr = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr); 1986 ierr = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr); 1987 ierr = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr); 1988 ierr = PetscMapCopy(((PetscObject)matin)->comm,matin->rmap,mat->rmap);CHKERRQ(ierr); 1989 ierr = PetscMapCopy(((PetscObject)matin)->comm,matin->cmap,mat->cmap);CHKERRQ(ierr); 1990 1991 mat->factor = matin->factor; 1992 mat->preallocated = PETSC_TRUE; 1993 mat->assembled = PETSC_TRUE; 1994 mat->insertmode = NOT_SET_VALUES; 1995 1996 a = (Mat_MPISBAIJ*)mat->data; 1997 a->bs2 = oldmat->bs2; 1998 a->mbs = oldmat->mbs; 1999 a->nbs = oldmat->nbs; 2000 a->Mbs = oldmat->Mbs; 2001 a->Nbs = oldmat->Nbs; 2002 2003 2004 a->size = oldmat->size; 2005 a->rank = oldmat->rank; 2006 a->donotstash = oldmat->donotstash; 2007 a->roworiented = oldmat->roworiented; 2008 a->rowindices = 0; 2009 a->rowvalues = 0; 2010 a->getrowactive = PETSC_FALSE; 2011 a->barray = 0; 2012 a->rstartbs = oldmat->rstartbs; 2013 a->rendbs = oldmat->rendbs; 2014 a->cstartbs = oldmat->cstartbs; 2015 a->cendbs = oldmat->cendbs; 2016 2017 /* hash table stuff */ 2018 a->ht = 0; 2019 a->hd = 0; 2020 a->ht_size = 0; 2021 a->ht_flag = oldmat->ht_flag; 2022 a->ht_fact = oldmat->ht_fact; 2023 a->ht_total_ct = 0; 2024 a->ht_insert_ct = 0; 2025 2026 ierr = PetscMemcpy(a->rangebs,oldmat->rangebs,(a->size+2)*sizeof(PetscInt));CHKERRQ(ierr); 2027 ierr = MatStashCreate_Private(((PetscObject)matin)->comm,1,&mat->stash);CHKERRQ(ierr); 2028 ierr = MatStashCreate_Private(((PetscObject)matin)->comm,matin->rmap->bs,&mat->bstash);CHKERRQ(ierr); 2029 if (oldmat->colmap) { 2030 #if defined (PETSC_USE_CTABLE) 2031 ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr); 2032 #else 2033 ierr = PetscMalloc((a->Nbs)*sizeof(PetscInt),&a->colmap);CHKERRQ(ierr); 2034 ierr = PetscLogObjectMemory(mat,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr); 2035 ierr = PetscMemcpy(a->colmap,oldmat->colmap,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr); 2036 #endif 2037 } else a->colmap = 0; 2038 2039 if (oldmat->garray && (len = ((Mat_SeqBAIJ*)(oldmat->B->data))->nbs)) { 2040 ierr = PetscMalloc(len*sizeof(PetscInt),&a->garray);CHKERRQ(ierr); 2041 ierr = PetscLogObjectMemory(mat,len*sizeof(PetscInt));CHKERRQ(ierr); 2042 ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr); 2043 } else a->garray = 0; 2044 2045 ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr); 2046 ierr = PetscLogObjectParent(mat,a->lvec);CHKERRQ(ierr); 2047 ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr); 2048 ierr = PetscLogObjectParent(mat,a->Mvctx);CHKERRQ(ierr); 2049 2050 ierr = VecDuplicate(oldmat->slvec0,&a->slvec0);CHKERRQ(ierr); 2051 ierr = PetscLogObjectParent(mat,a->slvec0);CHKERRQ(ierr); 2052 ierr = VecDuplicate(oldmat->slvec1,&a->slvec1);CHKERRQ(ierr); 2053 ierr = PetscLogObjectParent(mat,a->slvec1);CHKERRQ(ierr); 2054 2055 ierr = VecGetLocalSize(a->slvec1,&nt);CHKERRQ(ierr); 2056 ierr = VecGetArray(a->slvec1,&array);CHKERRQ(ierr); 2057 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs*mbs,array,&a->slvec1a);CHKERRQ(ierr); 2058 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,nt-bs*mbs,array+bs*mbs,&a->slvec1b);CHKERRQ(ierr); 2059 ierr = VecRestoreArray(a->slvec1,&array);CHKERRQ(ierr); 2060 ierr = VecGetArray(a->slvec0,&array);CHKERRQ(ierr); 2061 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,nt-bs*mbs,array+bs*mbs,&a->slvec0b);CHKERRQ(ierr); 2062 ierr = VecRestoreArray(a->slvec0,&array);CHKERRQ(ierr); 2063 ierr = PetscLogObjectParent(mat,a->slvec0);CHKERRQ(ierr); 2064 ierr = PetscLogObjectParent(mat,a->slvec1);CHKERRQ(ierr); 2065 ierr = PetscLogObjectParent(mat,a->slvec0b);CHKERRQ(ierr); 2066 ierr = PetscLogObjectParent(mat,a->slvec1a);CHKERRQ(ierr); 2067 ierr = PetscLogObjectParent(mat,a->slvec1b);CHKERRQ(ierr); 2068 2069 /* ierr = VecScatterCopy(oldmat->sMvctx,&a->sMvctx); - not written yet, replaced by the lazy trick: */ 2070 ierr = PetscObjectReference((PetscObject)oldmat->sMvctx);CHKERRQ(ierr); 2071 a->sMvctx = oldmat->sMvctx; 2072 ierr = PetscLogObjectParent(mat,a->sMvctx);CHKERRQ(ierr); 2073 2074 ierr = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr); 2075 ierr = PetscLogObjectParent(mat,a->A);CHKERRQ(ierr); 2076 ierr = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr); 2077 ierr = PetscLogObjectParent(mat,a->B);CHKERRQ(ierr); 2078 ierr = PetscFListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr); 2079 *newmat = mat; 2080 PetscFunctionReturn(0); 2081 } 2082 2083 #include "petscsys.h" 2084 2085 #undef __FUNCT__ 2086 #define __FUNCT__ "MatLoad_MPISBAIJ" 2087 PetscErrorCode MatLoad_MPISBAIJ(PetscViewer viewer, const MatType type,Mat *newmat) 2088 { 2089 Mat A; 2090 PetscErrorCode ierr; 2091 PetscInt i,nz,j,rstart,rend; 2092 PetscScalar *vals,*buf; 2093 MPI_Comm comm = ((PetscObject)viewer)->comm; 2094 MPI_Status status; 2095 PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag,*sndcounts = 0,*browners,maxnz,*rowners,*locrowlens,mmbs; 2096 PetscInt header[4],*rowlengths = 0,M,N,m,*cols; 2097 PetscInt *procsnz = 0,jj,*mycols,*ibuf; 2098 PetscInt bs=1,Mbs,mbs,extra_rows; 2099 PetscInt *dlens,*odlens,*mask,*masked1,*masked2,rowcount,odcount; 2100 PetscInt dcount,kmax,k,nzcount,tmp; 2101 int fd; 2102 2103 PetscFunctionBegin; 2104 ierr = PetscOptionsBegin(comm,PETSC_NULL,"Options for loading MPISBAIJ matrix 2","Mat");CHKERRQ(ierr); 2105 ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,PETSC_NULL);CHKERRQ(ierr); 2106 ierr = PetscOptionsEnd();CHKERRQ(ierr); 2107 2108 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 2109 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 2110 if (!rank) { 2111 ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr); 2112 ierr = PetscBinaryRead(fd,(char *)header,4,PETSC_INT);CHKERRQ(ierr); 2113 if (header[0] != MAT_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"not matrix object"); 2114 if (header[3] < 0) { 2115 SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Matrix stored in special format, cannot load as MPISBAIJ"); 2116 } 2117 } 2118 2119 ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr); 2120 M = header[1]; N = header[2]; 2121 2122 if (M != N) SETERRQ(PETSC_ERR_SUP,"Can only do square matrices"); 2123 2124 /* 2125 This code adds extra rows to make sure the number of rows is 2126 divisible by the blocksize 2127 */ 2128 Mbs = M/bs; 2129 extra_rows = bs - M + bs*(Mbs); 2130 if (extra_rows == bs) extra_rows = 0; 2131 else Mbs++; 2132 if (extra_rows &&!rank) { 2133 ierr = PetscInfo(viewer,"Padding loaded matrix to match blocksize\n");CHKERRQ(ierr); 2134 } 2135 2136 /* determine ownership of all rows */ 2137 mbs = Mbs/size + ((Mbs % size) > rank); 2138 m = mbs*bs; 2139 ierr = PetscMalloc(2*(size+2)*sizeof(PetscMPIInt),&rowners);CHKERRQ(ierr); 2140 browners = rowners + size + 1; 2141 mmbs = PetscMPIIntCast(mbs); 2142 ierr = MPI_Allgather(&mmbs,1,MPI_INT,rowners+1,1,MPI_INT,comm);CHKERRQ(ierr); 2143 rowners[0] = 0; 2144 for (i=2; i<=size; i++) rowners[i] += rowners[i-1]; 2145 for (i=0; i<=size; i++) browners[i] = rowners[i]*bs; 2146 rstart = rowners[rank]; 2147 rend = rowners[rank+1]; 2148 2149 /* distribute row lengths to all processors */ 2150 ierr = PetscMalloc((rend-rstart)*bs*sizeof(PetscMPIInt),&locrowlens);CHKERRQ(ierr); 2151 if (!rank) { 2152 ierr = PetscMalloc((M+extra_rows)*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr); 2153 ierr = PetscBinaryRead(fd,rowlengths,M,PETSC_INT);CHKERRQ(ierr); 2154 for (i=0; i<extra_rows; i++) rowlengths[M+i] = 1; 2155 ierr = PetscMalloc(size*sizeof(PetscMPIInt),&sndcounts);CHKERRQ(ierr); 2156 for (i=0; i<size; i++) sndcounts[i] = browners[i+1] - browners[i]; 2157 ierr = MPI_Scatterv(rowlengths,sndcounts,browners,MPIU_INT,locrowlens,(rend-rstart)*bs,MPIU_INT,0,comm);CHKERRQ(ierr); 2158 ierr = PetscFree(sndcounts);CHKERRQ(ierr); 2159 } else { 2160 ierr = MPI_Scatterv(0,0,0,MPIU_INT,locrowlens,(rend-rstart)*bs,MPIU_INT,0,comm);CHKERRQ(ierr); 2161 } 2162 2163 if (!rank) { /* procs[0] */ 2164 /* calculate the number of nonzeros on each processor */ 2165 ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr); 2166 ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr); 2167 for (i=0; i<size; i++) { 2168 for (j=rowners[i]*bs; j< rowners[i+1]*bs; j++) { 2169 procsnz[i] += rowlengths[j]; 2170 } 2171 } 2172 ierr = PetscFree(rowlengths);CHKERRQ(ierr); 2173 2174 /* determine max buffer needed and allocate it */ 2175 maxnz = 0; 2176 for (i=0; i<size; i++) { 2177 maxnz = PetscMax(maxnz,procsnz[i]); 2178 } 2179 ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr); 2180 2181 /* read in my part of the matrix column indices */ 2182 nz = procsnz[0]; 2183 ierr = PetscMalloc(nz*sizeof(PetscInt),&ibuf);CHKERRQ(ierr); 2184 mycols = ibuf; 2185 if (size == 1) nz -= extra_rows; 2186 ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr); 2187 if (size == 1) for (i=0; i< extra_rows; i++) { mycols[nz+i] = M+i; } 2188 2189 /* read in every ones (except the last) and ship off */ 2190 for (i=1; i<size-1; i++) { 2191 nz = procsnz[i]; 2192 ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr); 2193 ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 2194 } 2195 /* read in the stuff for the last proc */ 2196 if (size != 1) { 2197 nz = procsnz[size-1] - extra_rows; /* the extra rows are not on the disk */ 2198 ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr); 2199 for (i=0; i<extra_rows; i++) cols[nz+i] = M+i; 2200 ierr = MPI_Send(cols,nz+extra_rows,MPIU_INT,size-1,tag,comm);CHKERRQ(ierr); 2201 } 2202 ierr = PetscFree(cols);CHKERRQ(ierr); 2203 } else { /* procs[i], i>0 */ 2204 /* determine buffer space needed for message */ 2205 nz = 0; 2206 for (i=0; i<m; i++) { 2207 nz += locrowlens[i]; 2208 } 2209 ierr = PetscMalloc(nz*sizeof(PetscInt),&ibuf);CHKERRQ(ierr); 2210 mycols = ibuf; 2211 /* receive message of column indices*/ 2212 ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 2213 ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr); 2214 if (maxnz != nz) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file"); 2215 } 2216 2217 /* loop over local rows, determining number of off diagonal entries */ 2218 ierr = PetscMalloc(2*(rend-rstart+1)*sizeof(PetscInt),&dlens);CHKERRQ(ierr); 2219 odlens = dlens + (rend-rstart); 2220 ierr = PetscMalloc(3*Mbs*sizeof(PetscInt),&mask);CHKERRQ(ierr); 2221 ierr = PetscMemzero(mask,3*Mbs*sizeof(PetscInt));CHKERRQ(ierr); 2222 masked1 = mask + Mbs; 2223 masked2 = masked1 + Mbs; 2224 rowcount = 0; nzcount = 0; 2225 for (i=0; i<mbs; i++) { 2226 dcount = 0; 2227 odcount = 0; 2228 for (j=0; j<bs; j++) { 2229 kmax = locrowlens[rowcount]; 2230 for (k=0; k<kmax; k++) { 2231 tmp = mycols[nzcount++]/bs; /* block col. index */ 2232 if (!mask[tmp]) { 2233 mask[tmp] = 1; 2234 if (tmp < rstart || tmp >= rend) masked2[odcount++] = tmp; /* entry in off-diag portion */ 2235 else masked1[dcount++] = tmp; /* entry in diag portion */ 2236 } 2237 } 2238 rowcount++; 2239 } 2240 2241 dlens[i] = dcount; /* d_nzz[i] */ 2242 odlens[i] = odcount; /* o_nzz[i] */ 2243 2244 /* zero out the mask elements we set */ 2245 for (j=0; j<dcount; j++) mask[masked1[j]] = 0; 2246 for (j=0; j<odcount; j++) mask[masked2[j]] = 0; 2247 } 2248 2249 /* create our matrix */ 2250 ierr = MatCreate(comm,&A);CHKERRQ(ierr); 2251 ierr = MatSetSizes(A,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 2252 ierr = MatSetType(A,type);CHKERRQ(ierr); 2253 ierr = MatMPISBAIJSetPreallocation(A,bs,0,dlens,0,odlens);CHKERRQ(ierr); 2254 2255 if (!rank) { 2256 ierr = PetscMalloc(maxnz*sizeof(PetscScalar),&buf);CHKERRQ(ierr); 2257 /* read in my part of the matrix numerical values */ 2258 nz = procsnz[0]; 2259 vals = buf; 2260 mycols = ibuf; 2261 if (size == 1) nz -= extra_rows; 2262 ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr); 2263 if (size == 1) for (i=0; i< extra_rows; i++) { vals[nz+i] = 1.0; } 2264 2265 /* insert into matrix */ 2266 jj = rstart*bs; 2267 for (i=0; i<m; i++) { 2268 ierr = MatSetValues(A,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr); 2269 mycols += locrowlens[i]; 2270 vals += locrowlens[i]; 2271 jj++; 2272 } 2273 2274 /* read in other processors (except the last one) and ship out */ 2275 for (i=1; i<size-1; i++) { 2276 nz = procsnz[i]; 2277 vals = buf; 2278 ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr); 2279 ierr = MPI_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)A)->tag,comm);CHKERRQ(ierr); 2280 } 2281 /* the last proc */ 2282 if (size != 1){ 2283 nz = procsnz[i] - extra_rows; 2284 vals = buf; 2285 ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr); 2286 for (i=0; i<extra_rows; i++) vals[nz+i] = 1.0; 2287 ierr = MPI_Send(vals,nz+extra_rows,MPIU_SCALAR,size-1,((PetscObject)A)->tag,comm);CHKERRQ(ierr); 2288 } 2289 ierr = PetscFree(procsnz);CHKERRQ(ierr); 2290 2291 } else { 2292 /* receive numeric values */ 2293 ierr = PetscMalloc(nz*sizeof(PetscScalar),&buf);CHKERRQ(ierr); 2294 2295 /* receive message of values*/ 2296 vals = buf; 2297 mycols = ibuf; 2298 ierr = MPI_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)A)->tag,comm,&status);CHKERRQ(ierr); 2299 ierr = MPI_Get_count(&status,MPIU_SCALAR,&maxnz);CHKERRQ(ierr); 2300 if (maxnz != nz) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file"); 2301 2302 /* insert into matrix */ 2303 jj = rstart*bs; 2304 for (i=0; i<m; i++) { 2305 ierr = MatSetValues_MPISBAIJ(A,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr); 2306 mycols += locrowlens[i]; 2307 vals += locrowlens[i]; 2308 jj++; 2309 } 2310 } 2311 2312 ierr = PetscFree(locrowlens);CHKERRQ(ierr); 2313 ierr = PetscFree(buf);CHKERRQ(ierr); 2314 ierr = PetscFree(ibuf);CHKERRQ(ierr); 2315 ierr = PetscFree(rowners);CHKERRQ(ierr); 2316 ierr = PetscFree(dlens);CHKERRQ(ierr); 2317 ierr = PetscFree(mask);CHKERRQ(ierr); 2318 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2319 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2320 *newmat = A; 2321 PetscFunctionReturn(0); 2322 } 2323 2324 #undef __FUNCT__ 2325 #define __FUNCT__ "MatMPISBAIJSetHashTableFactor" 2326 /*XXXXX@ 2327 MatMPISBAIJSetHashTableFactor - Sets the factor required to compute the size of the HashTable. 2328 2329 Input Parameters: 2330 . mat - the matrix 2331 . fact - factor 2332 2333 Collective on Mat 2334 2335 Level: advanced 2336 2337 Notes: 2338 This can also be set by the command line option: -mat_use_hash_table fact 2339 2340 .keywords: matrix, hashtable, factor, HT 2341 2342 .seealso: MatSetOption() 2343 @XXXXX*/ 2344 2345 2346 #undef __FUNCT__ 2347 #define __FUNCT__ "MatGetRowMaxAbs_MPISBAIJ" 2348 PetscErrorCode MatGetRowMaxAbs_MPISBAIJ(Mat A,Vec v,PetscInt idx[]) 2349 { 2350 Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)A->data; 2351 Mat_SeqBAIJ *b = (Mat_SeqBAIJ*)(a->B)->data; 2352 PetscReal atmp; 2353 PetscReal *work,*svalues,*rvalues; 2354 PetscErrorCode ierr; 2355 PetscInt i,bs,mbs,*bi,*bj,brow,j,ncols,krow,kcol,col,row,Mbs,bcol; 2356 PetscMPIInt rank,size; 2357 PetscInt *rowners_bs,dest,count,source; 2358 PetscScalar *va; 2359 MatScalar *ba; 2360 MPI_Status stat; 2361 2362 PetscFunctionBegin; 2363 if (idx) SETERRQ(PETSC_ERR_SUP,"Send email to petsc-maint@mcs.anl.gov"); 2364 ierr = MatGetRowMaxAbs(a->A,v,PETSC_NULL);CHKERRQ(ierr); 2365 ierr = VecGetArray(v,&va);CHKERRQ(ierr); 2366 2367 ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr); 2368 ierr = MPI_Comm_rank(((PetscObject)A)->comm,&rank);CHKERRQ(ierr); 2369 2370 bs = A->rmap->bs; 2371 mbs = a->mbs; 2372 Mbs = a->Mbs; 2373 ba = b->a; 2374 bi = b->i; 2375 bj = b->j; 2376 2377 /* find ownerships */ 2378 rowners_bs = A->rmap->range; 2379 2380 /* each proc creates an array to be distributed */ 2381 ierr = PetscMalloc(bs*Mbs*sizeof(PetscReal),&work);CHKERRQ(ierr); 2382 ierr = PetscMemzero(work,bs*Mbs*sizeof(PetscReal));CHKERRQ(ierr); 2383 2384 /* row_max for B */ 2385 if (rank != size-1){ 2386 for (i=0; i<mbs; i++) { 2387 ncols = bi[1] - bi[0]; bi++; 2388 brow = bs*i; 2389 for (j=0; j<ncols; j++){ 2390 bcol = bs*(*bj); 2391 for (kcol=0; kcol<bs; kcol++){ 2392 col = bcol + kcol; /* local col index */ 2393 col += rowners_bs[rank+1]; /* global col index */ 2394 for (krow=0; krow<bs; krow++){ 2395 atmp = PetscAbsScalar(*ba); ba++; 2396 row = brow + krow; /* local row index */ 2397 if (PetscRealPart(va[row]) < atmp) va[row] = atmp; 2398 if (work[col] < atmp) work[col] = atmp; 2399 } 2400 } 2401 bj++; 2402 } 2403 } 2404 2405 /* send values to its owners */ 2406 for (dest=rank+1; dest<size; dest++){ 2407 svalues = work + rowners_bs[dest]; 2408 count = rowners_bs[dest+1]-rowners_bs[dest]; 2409 ierr = MPI_Send(svalues,count,MPIU_REAL,dest,rank,((PetscObject)A)->comm);CHKERRQ(ierr); 2410 } 2411 } 2412 2413 /* receive values */ 2414 if (rank){ 2415 rvalues = work; 2416 count = rowners_bs[rank+1]-rowners_bs[rank]; 2417 for (source=0; source<rank; source++){ 2418 ierr = MPI_Recv(rvalues,count,MPIU_REAL,MPI_ANY_SOURCE,MPI_ANY_TAG,((PetscObject)A)->comm,&stat);CHKERRQ(ierr); 2419 /* process values */ 2420 for (i=0; i<count; i++){ 2421 if (PetscRealPart(va[i]) < rvalues[i]) va[i] = rvalues[i]; 2422 } 2423 } 2424 } 2425 2426 ierr = VecRestoreArray(v,&va);CHKERRQ(ierr); 2427 ierr = PetscFree(work);CHKERRQ(ierr); 2428 PetscFunctionReturn(0); 2429 } 2430 2431 #undef __FUNCT__ 2432 #define __FUNCT__ "MatRelax_MPISBAIJ" 2433 PetscErrorCode MatRelax_MPISBAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx) 2434 { 2435 Mat_MPISBAIJ *mat = (Mat_MPISBAIJ*)matin->data; 2436 PetscErrorCode ierr; 2437 PetscInt mbs=mat->mbs,bs=matin->rmap->bs; 2438 PetscScalar *x,*b,*ptr,zero=0.0; 2439 Vec bb1; 2440 2441 PetscFunctionBegin; 2442 if (its <= 0 || lits <= 0) SETERRQ2(PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits); 2443 if (bs > 1) 2444 SETERRQ(PETSC_ERR_SUP,"SSOR for block size > 1 is not yet implemented"); 2445 2446 if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP){ 2447 if ( flag & SOR_ZERO_INITIAL_GUESS ) { 2448 ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,lits,xx);CHKERRQ(ierr); 2449 its--; 2450 } 2451 2452 ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr); 2453 while (its--){ 2454 2455 /* lower triangular part: slvec0b = - B^T*xx */ 2456 ierr = (*mat->B->ops->multtranspose)(mat->B,xx,mat->slvec0b);CHKERRQ(ierr); 2457 2458 /* copy xx into slvec0a */ 2459 ierr = VecGetArray(mat->slvec0,&ptr);CHKERRQ(ierr); 2460 ierr = VecGetArray(xx,&x);CHKERRQ(ierr); 2461 ierr = PetscMemcpy(ptr,x,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr); 2462 ierr = VecRestoreArray(mat->slvec0,&ptr);CHKERRQ(ierr); 2463 2464 ierr = VecScale(mat->slvec0,-1.0);CHKERRQ(ierr); 2465 2466 /* copy bb into slvec1a */ 2467 ierr = VecGetArray(mat->slvec1,&ptr);CHKERRQ(ierr); 2468 ierr = VecGetArray(bb,&b);CHKERRQ(ierr); 2469 ierr = PetscMemcpy(ptr,b,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr); 2470 ierr = VecRestoreArray(mat->slvec1,&ptr);CHKERRQ(ierr); 2471 2472 /* set slvec1b = 0 */ 2473 ierr = VecSet(mat->slvec1b,zero);CHKERRQ(ierr); 2474 2475 ierr = VecScatterBegin(mat->sMvctx,mat->slvec0,mat->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 2476 ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr); 2477 ierr = VecRestoreArray(bb,&b);CHKERRQ(ierr); 2478 ierr = VecScatterEnd(mat->sMvctx,mat->slvec0,mat->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 2479 2480 /* upper triangular part: bb1 = bb1 - B*x */ 2481 ierr = (*mat->B->ops->multadd)(mat->B,mat->slvec1b,mat->slvec1a,bb1);CHKERRQ(ierr); 2482 2483 /* local diagonal sweep */ 2484 ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,lits,xx);CHKERRQ(ierr); 2485 } 2486 ierr = VecDestroy(bb1);CHKERRQ(ierr); 2487 } else { 2488 SETERRQ(PETSC_ERR_SUP,"MatSORType is not supported for SBAIJ matrix format"); 2489 } 2490 PetscFunctionReturn(0); 2491 } 2492 2493 #undef __FUNCT__ 2494 #define __FUNCT__ "MatRelax_MPISBAIJ_2comm" 2495 PetscErrorCode MatRelax_MPISBAIJ_2comm(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx) 2496 { 2497 Mat_MPISBAIJ *mat = (Mat_MPISBAIJ*)matin->data; 2498 PetscErrorCode ierr; 2499 Vec lvec1,bb1; 2500 2501 PetscFunctionBegin; 2502 if (its <= 0 || lits <= 0) SETERRQ2(PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits); 2503 if (matin->rmap->bs > 1) 2504 SETERRQ(PETSC_ERR_SUP,"SSOR for block size > 1 is not yet implemented"); 2505 2506 if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP){ 2507 if ( flag & SOR_ZERO_INITIAL_GUESS ) { 2508 ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,lits,xx);CHKERRQ(ierr); 2509 its--; 2510 } 2511 2512 ierr = VecDuplicate(mat->lvec,&lvec1);CHKERRQ(ierr); 2513 ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr); 2514 while (its--){ 2515 ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 2516 2517 /* lower diagonal part: bb1 = bb - B^T*xx */ 2518 ierr = (*mat->B->ops->multtranspose)(mat->B,xx,lvec1);CHKERRQ(ierr); 2519 ierr = VecScale(lvec1,-1.0);CHKERRQ(ierr); 2520 2521 ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 2522 ierr = VecCopy(bb,bb1);CHKERRQ(ierr); 2523 ierr = VecScatterBegin(mat->Mvctx,lvec1,bb1,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 2524 2525 /* upper diagonal part: bb1 = bb1 - B*x */ 2526 ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 2527 ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb1,bb1);CHKERRQ(ierr); 2528 2529 ierr = VecScatterEnd(mat->Mvctx,lvec1,bb1,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 2530 2531 /* diagonal sweep */ 2532 ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,lits,xx);CHKERRQ(ierr); 2533 } 2534 ierr = VecDestroy(lvec1);CHKERRQ(ierr); 2535 ierr = VecDestroy(bb1);CHKERRQ(ierr); 2536 } else { 2537 SETERRQ(PETSC_ERR_SUP,"MatSORType is not supported for SBAIJ matrix format"); 2538 } 2539 PetscFunctionReturn(0); 2540 } 2541 2542