#define PETSCMAT_DLL

/*
   This provides a matrix that consists of Mats
*/

#include "src/mat/matimpl.h"              /*I "petscmat.h" I*/
#include "src/mat/impls/baij/seq/baij.h"  /* use the common AIJ data-structure */
#include "petscksp.h"

#define CHUNKSIZE 15

/*
   Data for a "matrix of matrices": standard CSR layout (from SEQAIJHEADER)
   whose stored entries are Mat objects (usually sparse bs-by-bs blocks)
   rather than scalars.
*/
typedef struct {
  SEQAIJHEADER(Mat);   /* CSR bookkeeping: i, j, a (Mat-valued), ilen, imax, diag, ... */
  SEQBAIJHEADER;       /* block-size bookkeeping shared with BAIJ */
  Mat *diags;          /* cached LU factorizations of the diagonal blocks; built lazily below */

  Vec left,right;      /* dummy vectors to perform local parts of product */
} Mat_BlockMat;

#undef __FUNCT__
#define __FUNCT__ "MatRelax_BlockMat"
/*
   Block SOR (Gauss-Seidel) relaxation: each "point" of the sweep is a bs-by-bs
   block, solved with a cached LU factorization of the corresponding diagonal
   block.

   Restrictions (enforced below): omega must be 1.0, fshift must be 0, and the
   Eisenstat trick is not supported.

   NOTE(review): xx is zeroed on entry (VecSet below), so any nonzero initial
   guess passed in by the caller is discarded -- see the existing comment
   referencing MatRelax_SeqAIJ.
*/
PetscErrorCode MatRelax_BlockMat(Mat A,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_BlockMat      *a = (Mat_BlockMat*)A->data;
  PetscScalar       *x;
  const Mat         *v = a->a;
  const PetscScalar *b;
  PetscErrorCode    ierr;
  PetscInt          n = A->cmap.n,i,mbs = n/A->rmap.bs,j,bs = A->rmap.bs;
  const PetscInt    *idx;
  IS                row,col;
  MatFactorInfo     info;
  Vec               left = a->left,right = a->right;
  Mat               *diag;

  PetscFunctionBegin;
  its = its*lits;
  if (its <= 0) SETERRQ2(PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (flag & SOR_EISENSTAT) SETERRQ(PETSC_ERR_SUP,"No support yet for Eisenstat");
  if (omega != 1.0) SETERRQ(PETSC_ERR_SUP,"No support yet for omega not equal to 1.0");
  if (fshift) SETERRQ(PETSC_ERR_SUP,"No support yet for fshift");

  /* First call only: LU-factor every diagonal block once and cache the factors.
     Assumes a->diag[] was filled by MatMarkDiagonal_BlockMat during assembly. */
  if (!a->diags) {
    ierr = PetscMalloc(mbs*sizeof(Mat),&a->diags);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    for (i=0; i<mbs; i++) {
      ierr = MatGetOrdering(a->a[a->diag[i]], MATORDERING_ND,&row,&col);CHKERRQ(ierr);
      ierr = MatLUFactorSymbolic(a->a[a->diag[i]],row,col,&info,a->diags+i);CHKERRQ(ierr);
      ierr = MatLUFactorNumeric(a->a[a->diag[i]],&info,a->diags+i);CHKERRQ(ierr);
      ierr = ISDestroy(row);CHKERRQ(ierr);
      ierr = ISDestroy(col);CHKERRQ(ierr);
    }
  }
  diag = a->diags;

  ierr = VecSet(xx,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
  ierr = VecGetArray(bb,(PetscScalar**)&b);CHKERRQ(ierr);

  /* need to add code for when initial guess is zero, see MatRelax_SeqAIJ */
  while (its--) {
    if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP){
      /* Forward sweep over block rows: x_i = D_i^{-1} (b_i - sum_{j!=i} A_ij x_j).
         Updated x entries are used immediately (Gauss-Seidel), since x is
         modified in place. */
      for (i=0; i<mbs; i++) {
        n   = a->i[i+1] - a->i[i];   /* number of blocks in block row i */
        idx = a->j + a->i[i];
        v   = a->a + a->i[i];

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        /* accumulate off-diagonal contributions; "right" aliases x via VecPlaceArray */
        for (j=0; j<n; j++) {
          if (idx[j] != i) {
            ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
            ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
            ierr = VecResetArray(right);CHKERRQ(ierr);
          }
        }
        /* left = b_i - left */
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        /* solve with the cached LU of the diagonal block, writing straight into x_i */
        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);
      }
    }
    if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP){
      /* Backward sweep: identical block update, visiting block rows in reverse */
      for (i=mbs-1; i>=0; i--) {
        n   = a->i[i+1] - a->i[i];
        idx = a->j + a->i[i];
        v   = a->a + a->i[i];

        ierr = VecSet(left,0.0);CHKERRQ(ierr);
        for (j=0; j<n; j++) {
          if (idx[j] != i) {
            ierr = VecPlaceArray(right,x + idx[j]*bs);CHKERRQ(ierr);
            ierr = MatMultAdd(v[j],right,left,left);CHKERRQ(ierr);
            ierr = VecResetArray(right);CHKERRQ(ierr);
          }
        }
        ierr = VecPlaceArray(right,b + i*bs);CHKERRQ(ierr);
        ierr = VecAYPX(left,-1.0,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

        ierr = VecPlaceArray(right,x + i*bs);CHKERRQ(ierr);
        ierr = MatSolve(diag[i],left,right);CHKERRQ(ierr);
        ierr = VecResetArray(right);CHKERRQ(ierr);

      }
    }
  }
  ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
  ierr = VecRestoreArray(bb,(PetscScalar**)&b);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_BlockMat"
PetscErrorCode MatSetValues_BlockMat(Mat A,PetscInt m,const PetscInt im[],PetscInt
n,const PetscInt in[],const PetscScalar v[],InsertMode is) 121 { 122 Mat_BlockMat *a = (Mat_BlockMat*)A->data; 123 PetscInt *rp,k,low,high,t,ii,row,nrow,i,col,l,rmax,N,lastcol = -1; 124 PetscInt *imax=a->imax,*ai=a->i,*ailen=a->ilen; 125 PetscInt *aj=a->j,nonew=a->nonew,bs=A->rmap.bs,brow,bcol; 126 PetscErrorCode ierr; 127 PetscInt ridx,cidx; 128 PetscTruth roworiented=a->roworiented; 129 MatScalar value; 130 Mat *ap,*aa = a->a; 131 132 PetscFunctionBegin; 133 for (k=0; k<m; k++) { /* loop over added rows */ 134 row = im[k]; 135 brow = row/bs; 136 if (row < 0) continue; 137 #if defined(PETSC_USE_DEBUG) 138 if (row >= A->rmap.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",row,A->rmap.N-1); 139 #endif 140 rp = aj + ai[brow]; 141 ap = aa + ai[brow]; 142 rmax = imax[brow]; 143 nrow = ailen[brow]; 144 low = 0; 145 high = nrow; 146 for (l=0; l<n; l++) { /* loop over added columns */ 147 if (in[l] < 0) continue; 148 #if defined(PETSC_USE_DEBUG) 149 if (in[l] >= A->cmap.n) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[l],A->cmap.n-1); 150 #endif 151 col = in[l]; bcol = col/bs; 152 ridx = row % bs; cidx = col % bs; 153 if (roworiented) { 154 value = v[l + k*n]; 155 } else { 156 value = v[k + l*m]; 157 } 158 if (col <= lastcol) low = 0; else high = nrow; 159 lastcol = col; 160 while (high-low > 7) { 161 t = (low+high)/2; 162 if (rp[t] > bcol) high = t; 163 else low = t; 164 } 165 for (i=low; i<high; i++) { 166 if (rp[i] > bcol) break; 167 if (rp[i] == bcol) { 168 /* printf("row %d col %d found i %d\n",brow,bcol,i);*/ 169 goto noinsert1; 170 } 171 } 172 if (nonew == 1) goto noinsert1; 173 if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) in the matrix", row, col); 174 MatSeqXAIJReallocateAIJ(A,a->mbs,1,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,imax,nonew,Mat); 175 N = nrow++ - 1; high++; 176 /* shift up all the later entries in this row */ 177 /* printf("N %d i %d\n",N,i);*/ 178 for (ii=N; ii>=i; ii--) 
{ 179 rp[ii+1] = rp[ii]; 180 ap[ii+1] = ap[ii]; 181 } 182 if (N>=i) ap[i] = 0; 183 rp[i] = bcol; 184 a->nz++; 185 noinsert1:; 186 if (!*(ap+i)) { 187 /* printf("create matrix at i %d rw %d col %d\n",i,brow,bcol);*/ 188 ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,bs,bs,0,0,ap+i);CHKERRQ(ierr); 189 } 190 /* printf("numerical value at i %d row %d col %d cidx %d ridx %d value %g\n",i,brow,bcol,cidx,ridx,value);*/ 191 ierr = MatSetValues(ap[i],1,&ridx,1,&cidx,&value,is);CHKERRQ(ierr); 192 low = i; 193 } 194 /* printf("nrow for row %d %d\n",nrow,brow);*/ 195 ailen[brow] = nrow; 196 } 197 A->same_nonzero = PETSC_FALSE; 198 PetscFunctionReturn(0); 199 } 200 201 #undef __FUNCT__ 202 #define __FUNCT__ "MatLoad_BlockMat" 203 PetscErrorCode MatLoad_BlockMat(PetscViewer viewer, MatType type,Mat *A) 204 { 205 PetscErrorCode ierr; 206 Mat tmpA; 207 PetscInt i,m,n,bs = 1,ncols; 208 const PetscInt *cols; 209 const PetscScalar *values; 210 211 PetscFunctionBegin; 212 ierr = MatLoad_SeqAIJ(viewer,MATSEQAIJ,&tmpA);CHKERRQ(ierr); 213 214 ierr = MatGetLocalSize(tmpA,&m,&n);CHKERRQ(ierr); 215 ierr = PetscOptionsBegin(PETSC_COMM_SELF,PETSC_NULL,"Options for loading BlockMat matrix","Mat");CHKERRQ(ierr); 216 ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,PETSC_NULL);CHKERRQ(ierr); 217 ierr = PetscOptionsEnd();CHKERRQ(ierr); 218 ierr = MatCreateBlockMat(PETSC_COMM_SELF,m,n,bs,A);CHKERRQ(ierr); 219 for (i=0; i<m; i++) { 220 ierr = MatGetRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr); 221 ierr = MatSetValues(*A,1,&i,ncols,cols,values,INSERT_VALUES);CHKERRQ(ierr); 222 ierr = MatRestoreRow(tmpA,i,&ncols,&cols,&values);CHKERRQ(ierr); 223 } 224 ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 225 ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 226 PetscFunctionReturn(0); 227 } 228 229 #undef __FUNCT__ 230 #define __FUNCT__ "MatView_BlockMat" 231 PetscErrorCode MatView_BlockMat(Mat A,PetscViewer viewer) 232 { 233 
Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data; 234 PetscErrorCode ierr; 235 const char *name; 236 PetscViewerFormat format; 237 238 PetscFunctionBegin; 239 ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr); 240 ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 241 if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO) { 242 ierr = PetscViewerASCIIPrintf(viewer,"Nonzero block matrices = %D \n",a->nz);CHKERRQ(ierr); 243 } 244 PetscFunctionReturn(0); 245 } 246 247 #undef __FUNCT__ 248 #define __FUNCT__ "MatDestroy_BlockMat" 249 PetscErrorCode MatDestroy_BlockMat(Mat mat) 250 { 251 PetscErrorCode ierr; 252 Mat_BlockMat *bmat = (Mat_BlockMat*)mat->data; 253 PetscInt i; 254 255 PetscFunctionBegin; 256 if (bmat->right) { 257 ierr = VecDestroy(bmat->right);CHKERRQ(ierr); 258 } 259 if (bmat->left) { 260 ierr = VecDestroy(bmat->left);CHKERRQ(ierr); 261 } 262 if (bmat->diags) { 263 for (i=0; i<mat->rmap.n/mat->rmap.bs; i++) { 264 if (bmat->diags[i]) {ierr = MatDestroy(bmat->diags[i]);CHKERRQ(ierr);} 265 } 266 } 267 if (bmat->a) { 268 for (i=0; i<bmat->nz; i++) { 269 if (bmat->a[i]) {ierr = MatDestroy(bmat->a[i]);CHKERRQ(ierr);} 270 } 271 } 272 ierr = MatSeqXAIJFreeAIJ(mat,(PetscScalar**)&bmat->a,&bmat->j,&bmat->i);CHKERRQ(ierr); 273 ierr = PetscFree(bmat);CHKERRQ(ierr); 274 PetscFunctionReturn(0); 275 } 276 277 #undef __FUNCT__ 278 #define __FUNCT__ "MatMult_BlockMat" 279 PetscErrorCode MatMult_BlockMat(Mat A,Vec x,Vec y) 280 { 281 Mat_BlockMat *bmat = (Mat_BlockMat*)A->data; 282 PetscErrorCode ierr; 283 PetscScalar *xx,*yy; 284 PetscInt *aj,i,*ii,jrow,m = A->rmap.n/A->rmap.bs,bs = A->rmap.bs,n,j; 285 Mat *aa; 286 287 PetscFunctionBegin; 288 CHKMEMQ; 289 /* 290 Standard CSR multiply except each entry is a Mat 291 */ 292 ierr = VecGetArray(x,&xx);CHKERRQ(ierr); 293 294 ierr = VecSet(y,0.0);CHKERRQ(ierr); 295 ierr = VecGetArray(y,&yy);CHKERRQ(ierr); 296 aj = bmat->j; 297 aa = bmat->a; 298 ii = bmat->i; 299 for (i=0; i<m; i++) { 300 jrow = 
ii[i];                                    /* start offset of block row i in aj/aa */
    ierr = VecPlaceArray(bmat->left,yy + bs*i);CHKERRQ(ierr);
    n    = ii[i+1] - jrow;                /* number of blocks in this block row */
    ierr = VecSet(bmat->left,0.0);CHKERRQ(ierr);
    for (j=0; j<n; j++) {
      /* y_i += A_{i,aj[jrow]} * x_{aj[jrow]} */
      ierr = VecPlaceArray(bmat->right,xx + bs*aj[jrow]);CHKERRQ(ierr);
      ierr = MatMultAdd(aa[jrow],bmat->right,bmat->left,bmat->left);CHKERRQ(ierr);
      ierr = VecResetArray(bmat->right);CHKERRQ(ierr);
      jrow++;
    }
    ierr = VecResetArray(bmat->left);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xx);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
  CHKMEMQ;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_BlockMat"
/*
   NOTE(review): unimplemented stub -- returns success without computing
   z = A*x + y, so callers silently receive an unchanged z.  Consider raising
   SETERRQ(PETSC_ERR_SUP,...) until a real implementation exists.
*/
PetscErrorCode MatMultAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_BlockMat"
/* NOTE(review): unimplemented stub -- y is left untouched (no transpose multiply). */
PetscErrorCode MatMultTranspose_BlockMat(Mat A,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_BlockMat"
/* NOTE(review): unimplemented stub -- z is left untouched. */
PetscErrorCode MatMultTransposeAdd_BlockMat(Mat A,Vec x,Vec y,Vec z)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetBlockSize_BlockMat"
/*
   Sets the block size and allocates all CSR bookkeeping for the BlockMat:
   the two length-bs work vectors, the per-row capacity (imax) and current
   length (ilen) arrays, and the a/j/i arrays sized for a default of 10
   blocks per block row.  Must divide both the row and column dimensions.
*/
PetscErrorCode MatSetBlockSize_BlockMat(Mat A,PetscInt bs)
{
  Mat_BlockMat   *bmat = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscInt       nz = 10,i;   /* default preallocation: 10 blocks per block row */

  PetscFunctionBegin;
  if (A->rmap.n % bs) SETERRQ2(PETSC_ERR_ARG_INCOMP,"Blocksize %D does not divide number of rows %D",bs,A->rmap.n);
  if (A->cmap.n % bs) SETERRQ2(PETSC_ERR_ARG_INCOMP,"Blocksize %D does not divide number of columns %D",bs,A->cmap.n);
  A->rmap.bs = A->cmap.bs = bs;

  /* "right" aliases caller storage via VecPlaceArray; "left" owns its array */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,PETSC_NULL,&bmat->right);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,bs,&bmat->left);CHKERRQ(ierr);


  ierr =
PetscMalloc2(A->rmap.n,PetscInt,&bmat->imax,A->rmap.n,PetscInt,&bmat->ilen);CHKERRQ(ierr);
  /* NOTE(review): imax/ilen and nz below are sized with A->rmap.n (point rows)
     although only mbs = A->rmap.n/bs block rows are ever used -- this looks
     like over-allocation by a factor of bs; apparently harmless, verify before
     tightening */
  for (i=0; i<A->rmap.n; i++) bmat->imax[i] = nz;
  nz = nz*A->rmap.n;

  bmat->mbs = A->rmap.n/A->rmap.bs;

  /* bmat->ilen will count nonzeros in each row so far. */
  for (i=0; i<bmat->mbs; i++) { bmat->ilen[i] = 0;}

  /* allocate the matrix space */
  ierr = PetscMalloc3(nz,Mat,&bmat->a,nz,PetscInt,&bmat->j,A->rmap.n+1,PetscInt,&bmat->i);CHKERRQ(ierr);
  bmat->i[0] = 0;
  /* row pointers assume each block row holds imax[] entries until assembly compacts them */
  for (i=1; i<bmat->mbs+1; i++) {
    bmat->i[i] = bmat->i[i-1] + bmat->imax[i-1];
  }
  bmat->singlemalloc = PETSC_TRUE;
  bmat->free_a       = PETSC_TRUE;
  bmat->free_ij      = PETSC_TRUE;

  bmat->nz            = 0;
  bmat->maxnz         = nz;
  A->info.nz_unneeded = (double)bmat->maxnz;

  PetscFunctionReturn(0);
}

/*
    Adds diagonal pointers to sparse matrix structure.
*/
#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_BlockMat"
/*
   Records, for each block row i, the index into a->j/a->a of its diagonal
   block in a->diag[i].  Rows without a diagonal block get the sentinel
   a->i[i+1] (one past the row's last entry).
*/
PetscErrorCode MatMarkDiagonal_BlockMat(Mat A)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,mbs = A->rmap.n/A->rmap.bs;

  PetscFunctionBegin;
  if (!a->diag) {
    ierr = PetscMalloc(mbs*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
  }
  for (i=0; i<mbs; i++) {
    a->diag[i] = a->i[i+1];   /* sentinel: no diagonal block found in this row */
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_BlockMat"
/*
   Extracts a SeqAIJ submatrix from the BlockMat; only supports identical
   row/column index sets that are strided with step equal to the block size
   (i.e. one scalar entry selected from every block).
*/
PetscErrorCode MatGetSubMatrix_BlockMat(Mat A,IS isrow,IS iscol,PetscInt csize,MatReuse scall,Mat *B)
{
  Mat_BlockMat   *a = (Mat_BlockMat*)A->data;
  Mat_SeqAIJ     *c;
  PetscErrorCode ierr;
  PetscInt       i,k,first,step,lensi,nrows,ncols;
  PetscInt       *j_new,*i_new,*aj = a->j,*ai = a->i,ii,*ailen = a->ilen;
  PetscScalar    *a_new,value;
  Mat            C,*aa = a->a;
  PetscTruth     stride,equal;

  PetscFunctionBegin;
  ierr =
ISEqual(isrow,iscol,&equal);CHKERRQ(ierr); 427 if (!equal) SETERRQ(PETSC_ERR_SUP,"Only for idential column and row indices"); 428 ierr = ISStride(iscol,&stride);CHKERRQ(ierr); 429 if (!stride) SETERRQ(PETSC_ERR_SUP,"Only for stride indices"); 430 ierr = ISStrideGetInfo(iscol,&first,&step);CHKERRQ(ierr); 431 if (step != A->rmap.bs) SETERRQ(PETSC_ERR_SUP,"Can only select one entry from each block"); 432 433 ierr = ISGetLocalSize(isrow,&nrows);CHKERRQ(ierr); 434 ncols = nrows; 435 436 /* create submatrix */ 437 if (scall == MAT_REUSE_MATRIX) { 438 PetscInt n_cols,n_rows; 439 C = *B; 440 ierr = MatGetSize(C,&n_rows,&n_cols);CHKERRQ(ierr); 441 if (n_rows != nrows || n_cols != ncols) SETERRQ(PETSC_ERR_ARG_SIZ,"Reused submatrix wrong size"); 442 ierr = MatZeroEntries(C);CHKERRQ(ierr); 443 } else { 444 ierr = MatCreate(A->comm,&C);CHKERRQ(ierr); 445 ierr = MatSetSizes(C,nrows,ncols,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 446 ierr = MatSetType(C,MATSEQAIJ);CHKERRQ(ierr); 447 ierr = MatSeqAIJSetPreallocation_SeqAIJ(C,0,ailen);CHKERRQ(ierr); 448 } 449 c = (Mat_SeqAIJ*)C->data; 450 451 /* loop over rows inserting into submatrix */ 452 a_new = c->a; 453 j_new = c->j; 454 i_new = c->i; 455 456 for (i=0; i<nrows; i++) { 457 ii = ai[i]; 458 lensi = ailen[i]; 459 for (k=0; k<lensi; k++) { 460 *j_new++ = *aj++; 461 ierr = MatGetValue(*aa++,first,first,value);CHKERRQ(ierr); 462 *a_new++ = value; 463 } 464 i_new[i+1] = i_new[i] + lensi; 465 c->ilen[i] = lensi; 466 } 467 468 ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 469 ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 470 *B = C; 471 PetscFunctionReturn(0); 472 } 473 474 #undef __FUNCT__ 475 #define __FUNCT__ "MatAssemblyEnd_BlockMat" 476 PetscErrorCode MatAssemblyEnd_BlockMat(Mat A,MatAssemblyType mode) 477 { 478 Mat_BlockMat *a = (Mat_BlockMat*)A->data; 479 PetscErrorCode ierr; 480 PetscInt fshift = 0,i,j,*ai = a->i,*aj = a->j,*imax = a->imax; 481 PetscInt m = a->mbs,*ip,N,*ailen = a->ilen,rmax = 
0;
  Mat            *aa = a->a,*ap;

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) PetscFunctionReturn(0);

  if (m) rmax = ailen[0]; /* determine row with most nonzeros */
  for (i=1; i<m; i++) {
    /* move each row back by the amount of empty slots (fshift) before it*/
    fshift += imax[i-1] - ailen[i-1];
    rmax    = PetscMax(rmax,ailen[i]);
    if (fshift) {
      ip = aj + ai[i] ;
      ap = aa + ai[i] ;
      N  = ailen[i];
      for (j=0; j<N; j++) {
        ip[j-fshift] = ip[j];
        ap[j-fshift] = ap[j];
      }
    }
    ai[i] = ai[i-1] + ailen[i-1];
  }
  if (m) {
    fshift += imax[m-1] - ailen[m-1];
    ai[m]   = ai[m-1] + ailen[m-1];
  }
  /* reset ilen and imax for each row */
  for (i=0; i<m; i++) {
    ailen[i] = imax[i] = ai[i+1] - ai[i];
  }
  a->nz = ai[m];
  /* assemble each stored block so it is ready for MatMult/MatSolve */
  for (i=0; i<a->nz; i++) {
#if defined(PETSC_USE_DEBUG)
    if (!aa[i]) SETERRQ3(PETSC_ERR_PLIB,"Null matrix at location %D column %D nz %D",i,aj[i],a->nz);
#endif
    ierr = MatAssemblyBegin(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(aa[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  CHKMEMQ;
  ierr = PetscInfo4(A,"Matrix size: %D X %D; storage space: %D unneeded,%D used\n",m,A->cmap.n,fshift,a->nz);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Number of mallocs during MatSetValues() is %D\n",a->reallocs);CHKERRQ(ierr);
  ierr = PetscInfo1(A,"Maximum nonzeros in any row is %D\n",rmax);CHKERRQ(ierr);
  a->reallocs         = 0;
  A->info.nz_unneeded = (double)fshift;
  a->rmax             = rmax;

  A->same_nonzero = PETSC_TRUE;
  ierr = MatMarkDiagonal_BlockMat(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Function table for MATBLOCKMAT: only the operations implemented above are
   filled in; slot numbers follow the MatOps ordering in matimpl.h */
static struct _MatOps MatOps_Values = {MatSetValues_BlockMat,
       0,
       0,
       MatMult_BlockMat,
/* 4*/ MatMultAdd_BlockMat,
       MatMultTranspose_BlockMat,
       MatMultTransposeAdd_BlockMat,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       MatRelax_BlockMat,
       0,
/*15*/ 0,
       0,
       0,
       0,
       0,
/*20*/ 0,
       MatAssemblyEnd_BlockMat,
       0,
       0,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ MatSetBlockSize_BlockMat,
       0,
       0,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ MatGetSubMatrix_BlockMat,
       MatDestroy_BlockMat,
       MatView_BlockMat,
       0,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       MatLoad_BlockMat,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};

/*MC
   MATBLOCKMAT - A matrix that is defined by a set of Mat's that represents a sparse block matrix
                 consisting of (usually) sparse blocks.

  Level: advanced

.seealso: MatCreateBlockMat()

M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_BlockMat"
/*
   Type constructor registered for MATBLOCKMAT: installs the function table
   and creates the (empty) per-type data structure.  No matrix storage is
   allocated until MatSetBlockSize() is called.
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_BlockMat(Mat A)
{
  Mat_BlockMat   *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  ierr = PetscNew(Mat_BlockMat,&b);CHKERRQ(ierr);

  A->data = (void*)b;

  ierr = PetscMapInitialize(A->comm,&A->rmap);CHKERRQ(ierr);
  ierr = PetscMapInitialize(A->comm,&A->cmap);CHKERRQ(ierr);

  /* NOTE(review): marked assembled at creation even though no values have been
     set -- presumably deliberate so a fresh matrix is immediately usable;
     confirm intent before changing */
  A->assembled    = PETSC_TRUE;
  A->preallocated = PETSC_FALSE;
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATBLOCKMAT);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatCreateBlockMat"
/*@C
   MatCreateBlockMat - Creates a new matrix whose nonzero entries are themselves
   (usually sparse) Mat objects of size bs by bs

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of rows
.  n - number of columns
-  bs - size of each submatrix


   Output Parameter:
.  A - the matrix

   Level: intermediate

   PETSc requires that matrices and vectors being used for certain
   operations are partitioned accordingly.  For example, when
   creating a bmat matrix, A, that supports parallel matrix-vector
   products using MatMult(A,x,y) the user should set the number
   of local matrix rows to be the number of local elements of the
   corresponding result vector, y.  Note that this information is
   required for use of the matrix interface routines, even though
   the bmat matrix may not actually be physically partitioned.

.keywords: matrix, bmat, create

.seealso: MATBLOCKMAT
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateBlockMat(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt bs,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATBLOCKMAT);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}