18a729477SBarry Smith 2c6db04a5SJed Brown #include <../src/mat/impls/aij/mpi/mpiaij.h> /*I "petscmat.h" I*/ 39a6d0b0bSJed Brown #include <petsc-private/vecimpl.h> 4c6db04a5SJed Brown #include <petscblaslapack.h> 50c312b8eSJed Brown #include <petscsf.h> 68a729477SBarry Smith 701bebe75SBarry Smith /*MC 801bebe75SBarry Smith MATAIJ - MATAIJ = "aij" - A matrix type to be used for sparse matrices. 901bebe75SBarry Smith 1001bebe75SBarry Smith This matrix type is identical to MATSEQAIJ when constructed with a single process communicator, 1101bebe75SBarry Smith and MATMPIAIJ otherwise. As a result, for single process communicators, 1201bebe75SBarry Smith MatSeqAIJSetPreallocation is supported, and similarly MatMPIAIJSetPreallocation is supported 1301bebe75SBarry Smith for communicators controlling multiple processes. It is recommended that you call both of 1401bebe75SBarry Smith the above preallocation routines for simplicity. 1501bebe75SBarry Smith 1601bebe75SBarry Smith Options Database Keys: 1701bebe75SBarry Smith . -mat_type aij - sets the matrix type to "aij" during a call to MatSetFromOptions() 1801bebe75SBarry Smith 199ae82921SPaul Mullowney Developer Notes: Subclasses include MATAIJCUSP, MATAIJCUSPARSE, MATAIJPERM, MATAIJCRL, and also automatically switches over to use inodes when 2001bebe75SBarry Smith enough exist. 2101bebe75SBarry Smith 2201bebe75SBarry Smith Level: beginner 2301bebe75SBarry Smith 2469b1f4b7SBarry Smith .seealso: MatCreateAIJ(), MatCreateSeqAIJ(), MATSEQAIJ,MATMPIAIJ 2501bebe75SBarry Smith M*/ 2601bebe75SBarry Smith 2701bebe75SBarry Smith /*MC 2801bebe75SBarry Smith MATAIJCRL - MATAIJCRL = "aijcrl" - A matrix type to be used for sparse matrices. 2901bebe75SBarry Smith 3001bebe75SBarry Smith This matrix type is identical to MATSEQAIJCRL when constructed with a single process communicator, 3101bebe75SBarry Smith and MATMPIAIJCRL otherwise. 
As a result, for single process communicators, 3201bebe75SBarry Smith MatSeqAIJSetPreallocation() is supported, and similarly MatMPIAIJSetPreallocation() is supported 3301bebe75SBarry Smith for communicators controlling multiple processes. It is recommended that you call both of 3401bebe75SBarry Smith the above preallocation routines for simplicity. 3501bebe75SBarry Smith 3601bebe75SBarry Smith Options Database Keys: 3701bebe75SBarry Smith . -mat_type aijcrl - sets the matrix type to "aijcrl" during a call to MatSetFromOptions() 3801bebe75SBarry Smith 3901bebe75SBarry Smith Level: beginner 4001bebe75SBarry Smith 4101bebe75SBarry Smith .seealso: MatCreateMPIAIJCRL,MATSEQAIJCRL,MATMPIAIJCRL, MATSEQAIJCRL, MATMPIAIJCRL 4201bebe75SBarry Smith M*/ 4301bebe75SBarry Smith 44dd6ea824SBarry Smith #undef __FUNCT__ 45f2c98031SJed Brown #define __FUNCT__ "MatFindNonzeroRows_MPIAIJ" 46f2c98031SJed Brown PetscErrorCode MatFindNonzeroRows_MPIAIJ(Mat M,IS *keptrows) 4727d4218bSShri Abhyankar { 4827d4218bSShri Abhyankar PetscErrorCode ierr; 4927d4218bSShri Abhyankar Mat_MPIAIJ *mat = (Mat_MPIAIJ*)M->data; 5027d4218bSShri Abhyankar Mat_SeqAIJ *a = (Mat_SeqAIJ*)mat->A->data; 5127d4218bSShri Abhyankar Mat_SeqAIJ *b = (Mat_SeqAIJ*)mat->B->data; 5227d4218bSShri Abhyankar const PetscInt *ia,*ib; 5327d4218bSShri Abhyankar const MatScalar *aa,*bb; 5427d4218bSShri Abhyankar PetscInt na,nb,i,j,*rows,cnt=0,n0rows; 5527d4218bSShri Abhyankar PetscInt m = M->rmap->n,rstart = M->rmap->rstart; 5627d4218bSShri Abhyankar 5727d4218bSShri Abhyankar PetscFunctionBegin; 5827d4218bSShri Abhyankar *keptrows = 0; 5927d4218bSShri Abhyankar ia = a->i; 6027d4218bSShri Abhyankar ib = b->i; 6127d4218bSShri Abhyankar for (i=0; i<m; i++) { 6227d4218bSShri Abhyankar na = ia[i+1] - ia[i]; 6327d4218bSShri Abhyankar nb = ib[i+1] - ib[i]; 6427d4218bSShri Abhyankar if (!na && !nb) { 6527d4218bSShri Abhyankar cnt++; 6627d4218bSShri Abhyankar goto ok1; 6727d4218bSShri Abhyankar } 6827d4218bSShri Abhyankar aa = a->a + 
ia[i]; 6927d4218bSShri Abhyankar for (j=0; j<na; j++) { 7027d4218bSShri Abhyankar if (aa[j] != 0.0) goto ok1; 7127d4218bSShri Abhyankar } 7227d4218bSShri Abhyankar bb = b->a + ib[i]; 7327d4218bSShri Abhyankar for (j=0; j <nb; j++) { 7427d4218bSShri Abhyankar if (bb[j] != 0.0) goto ok1; 7527d4218bSShri Abhyankar } 7627d4218bSShri Abhyankar cnt++; 7727d4218bSShri Abhyankar ok1:; 7827d4218bSShri Abhyankar } 79ce94432eSBarry Smith ierr = MPI_Allreduce(&cnt,&n0rows,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)M));CHKERRQ(ierr); 8027d4218bSShri Abhyankar if (!n0rows) PetscFunctionReturn(0); 8127d4218bSShri Abhyankar ierr = PetscMalloc((M->rmap->n-cnt)*sizeof(PetscInt),&rows);CHKERRQ(ierr); 8227d4218bSShri Abhyankar cnt = 0; 8327d4218bSShri Abhyankar for (i=0; i<m; i++) { 8427d4218bSShri Abhyankar na = ia[i+1] - ia[i]; 8527d4218bSShri Abhyankar nb = ib[i+1] - ib[i]; 8627d4218bSShri Abhyankar if (!na && !nb) continue; 8727d4218bSShri Abhyankar aa = a->a + ia[i]; 8827d4218bSShri Abhyankar for (j=0; j<na;j++) { 8927d4218bSShri Abhyankar if (aa[j] != 0.0) { 9027d4218bSShri Abhyankar rows[cnt++] = rstart + i; 9127d4218bSShri Abhyankar goto ok2; 9227d4218bSShri Abhyankar } 9327d4218bSShri Abhyankar } 9427d4218bSShri Abhyankar bb = b->a + ib[i]; 9527d4218bSShri Abhyankar for (j=0; j<nb; j++) { 9627d4218bSShri Abhyankar if (bb[j] != 0.0) { 9727d4218bSShri Abhyankar rows[cnt++] = rstart + i; 9827d4218bSShri Abhyankar goto ok2; 9927d4218bSShri Abhyankar } 10027d4218bSShri Abhyankar } 10127d4218bSShri Abhyankar ok2:; 10227d4218bSShri Abhyankar } 103ce94432eSBarry Smith ierr = ISCreateGeneral(PetscObjectComm((PetscObject)M),cnt,rows,PETSC_OWN_POINTER,keptrows);CHKERRQ(ierr); 10427d4218bSShri Abhyankar PetscFunctionReturn(0); 10527d4218bSShri Abhyankar } 10627d4218bSShri Abhyankar 10727d4218bSShri Abhyankar #undef __FUNCT__ 108f1f41ecbSJed Brown #define __FUNCT__ "MatFindZeroDiagonals_MPIAIJ" 109f1f41ecbSJed Brown PetscErrorCode MatFindZeroDiagonals_MPIAIJ(Mat M,IS *zrows) 
110f1f41ecbSJed Brown { 111f1f41ecbSJed Brown Mat_MPIAIJ *aij = (Mat_MPIAIJ*)M->data; 112f1f41ecbSJed Brown PetscErrorCode ierr; 113f1f41ecbSJed Brown PetscInt i,rstart,nrows,*rows; 114f1f41ecbSJed Brown 115f1f41ecbSJed Brown PetscFunctionBegin; 1160298fd71SBarry Smith *zrows = NULL; 117f1f41ecbSJed Brown ierr = MatFindZeroDiagonals_SeqAIJ_Private(aij->A,&nrows,&rows);CHKERRQ(ierr); 1180298fd71SBarry Smith ierr = MatGetOwnershipRange(M,&rstart,NULL);CHKERRQ(ierr); 119f1f41ecbSJed Brown for (i=0; i<nrows; i++) rows[i] += rstart; 120ce94432eSBarry Smith ierr = ISCreateGeneral(PetscObjectComm((PetscObject)M),nrows,rows,PETSC_OWN_POINTER,zrows);CHKERRQ(ierr); 121f1f41ecbSJed Brown PetscFunctionReturn(0); 122f1f41ecbSJed Brown } 123f1f41ecbSJed Brown 124f1f41ecbSJed Brown #undef __FUNCT__ 1250716a85fSBarry Smith #define __FUNCT__ "MatGetColumnNorms_MPIAIJ" 1260716a85fSBarry Smith PetscErrorCode MatGetColumnNorms_MPIAIJ(Mat A,NormType type,PetscReal *norms) 1270716a85fSBarry Smith { 1280716a85fSBarry Smith PetscErrorCode ierr; 1290716a85fSBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)A->data; 1300716a85fSBarry Smith PetscInt i,n,*garray = aij->garray; 1310716a85fSBarry Smith Mat_SeqAIJ *a_aij = (Mat_SeqAIJ*) aij->A->data; 1320716a85fSBarry Smith Mat_SeqAIJ *b_aij = (Mat_SeqAIJ*) aij->B->data; 1330716a85fSBarry Smith PetscReal *work; 1340716a85fSBarry Smith 1350716a85fSBarry Smith PetscFunctionBegin; 1360298fd71SBarry Smith ierr = MatGetSize(A,NULL,&n);CHKERRQ(ierr); 1370716a85fSBarry Smith ierr = PetscMalloc(n*sizeof(PetscReal),&work);CHKERRQ(ierr); 1380716a85fSBarry Smith ierr = PetscMemzero(work,n*sizeof(PetscReal));CHKERRQ(ierr); 1390716a85fSBarry Smith if (type == NORM_2) { 1400716a85fSBarry Smith for (i=0; i<a_aij->i[aij->A->rmap->n]; i++) { 1410716a85fSBarry Smith work[A->cmap->rstart + a_aij->j[i]] += PetscAbsScalar(a_aij->a[i]*a_aij->a[i]); 1420716a85fSBarry Smith } 1430716a85fSBarry Smith for (i=0; i<b_aij->i[aij->B->rmap->n]; i++) { 1440716a85fSBarry Smith 
work[garray[b_aij->j[i]]] += PetscAbsScalar(b_aij->a[i]*b_aij->a[i]); 1450716a85fSBarry Smith } 1460716a85fSBarry Smith } else if (type == NORM_1) { 1470716a85fSBarry Smith for (i=0; i<a_aij->i[aij->A->rmap->n]; i++) { 1480716a85fSBarry Smith work[A->cmap->rstart + a_aij->j[i]] += PetscAbsScalar(a_aij->a[i]); 1490716a85fSBarry Smith } 1500716a85fSBarry Smith for (i=0; i<b_aij->i[aij->B->rmap->n]; i++) { 1510716a85fSBarry Smith work[garray[b_aij->j[i]]] += PetscAbsScalar(b_aij->a[i]); 1520716a85fSBarry Smith } 1530716a85fSBarry Smith } else if (type == NORM_INFINITY) { 1540716a85fSBarry Smith for (i=0; i<a_aij->i[aij->A->rmap->n]; i++) { 1550716a85fSBarry Smith work[A->cmap->rstart + a_aij->j[i]] = PetscMax(PetscAbsScalar(a_aij->a[i]), work[A->cmap->rstart + a_aij->j[i]]); 1560716a85fSBarry Smith } 1570716a85fSBarry Smith for (i=0; i<b_aij->i[aij->B->rmap->n]; i++) { 1580716a85fSBarry Smith work[garray[b_aij->j[i]]] = PetscMax(PetscAbsScalar(b_aij->a[i]),work[garray[b_aij->j[i]]]); 1590716a85fSBarry Smith } 1600716a85fSBarry Smith 161ce94432eSBarry Smith } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Unknown NormType"); 1620716a85fSBarry Smith if (type == NORM_INFINITY) { 1630716a85fSBarry Smith ierr = MPI_Allreduce(work,norms,n,MPIU_REAL,MPIU_MAX,A->hdr.comm);CHKERRQ(ierr); 1640716a85fSBarry Smith } else { 1650716a85fSBarry Smith ierr = MPI_Allreduce(work,norms,n,MPIU_REAL,MPIU_SUM,A->hdr.comm);CHKERRQ(ierr); 1660716a85fSBarry Smith } 1670716a85fSBarry Smith ierr = PetscFree(work);CHKERRQ(ierr); 1680716a85fSBarry Smith if (type == NORM_2) { 1698f1a2a5eSBarry Smith for (i=0; i<n; i++) norms[i] = PetscSqrtReal(norms[i]); 1700716a85fSBarry Smith } 1710716a85fSBarry Smith PetscFunctionReturn(0); 1720716a85fSBarry Smith } 1730716a85fSBarry Smith 1740716a85fSBarry Smith #undef __FUNCT__ 175dd6ea824SBarry Smith #define __FUNCT__ "MatDistribute_MPIAIJ" 176dd6ea824SBarry Smith /* 177dd6ea824SBarry Smith Distributes a SeqAIJ matrix across a set of 
processes. Code stolen from 178dd6ea824SBarry Smith MatLoad_MPIAIJ(). Horrible lack of reuse. Should be a routine for each matrix type. 179dd6ea824SBarry Smith 180dd6ea824SBarry Smith Only for square matrices 181b30237c6SBarry Smith 182b30237c6SBarry Smith Used by a preconditioner, hence PETSC_EXTERN 183dd6ea824SBarry Smith */ 1845a576424SJed Brown PETSC_EXTERN PetscErrorCode MatDistribute_MPIAIJ(MPI_Comm comm,Mat gmat,PetscInt m,MatReuse reuse,Mat *inmat) 185dd6ea824SBarry Smith { 186dd6ea824SBarry Smith PetscMPIInt rank,size; 187efcf75d5SBarry Smith PetscInt *rowners,*dlens,*olens,i,rstart,rend,j,jj,nz,*gmataj,cnt,row,*ld,bses[2]; 188dd6ea824SBarry Smith PetscErrorCode ierr; 189dd6ea824SBarry Smith Mat mat; 190dd6ea824SBarry Smith Mat_SeqAIJ *gmata; 191dd6ea824SBarry Smith PetscMPIInt tag; 192dd6ea824SBarry Smith MPI_Status status; 193ace3abfcSBarry Smith PetscBool aij; 194dd6ea824SBarry Smith MatScalar *gmataa,*ao,*ad,*gmataarestore=0; 195dd6ea824SBarry Smith 196dd6ea824SBarry Smith PetscFunctionBegin; 197dd6ea824SBarry Smith ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 198dd6ea824SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 199dd6ea824SBarry Smith if (!rank) { 200251f4c67SDmitry Karpeev ierr = PetscObjectTypeCompare((PetscObject)gmat,MATSEQAIJ,&aij);CHKERRQ(ierr); 201ce94432eSBarry Smith if (!aij) SETERRQ1(PetscObjectComm((PetscObject)gmat),PETSC_ERR_SUP,"Currently no support for input matrix of type %s\n",((PetscObject)gmat)->type_name); 202dd6ea824SBarry Smith } 203dd6ea824SBarry Smith if (reuse == MAT_INITIAL_MATRIX) { 204dd6ea824SBarry Smith ierr = MatCreate(comm,&mat);CHKERRQ(ierr); 205dd6ea824SBarry Smith ierr = MatSetSizes(mat,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 206efcf75d5SBarry Smith if (!rank) { 207efcf75d5SBarry Smith bses[0] = gmat->rmap->bs; 208efcf75d5SBarry Smith bses[1] = gmat->cmap->bs; 209efcf75d5SBarry Smith } 210efcf75d5SBarry Smith ierr = MPI_Bcast(bses,2,MPIU_INT,0,comm);CHKERRQ(ierr); 211efcf75d5SBarry 
Smith ierr = MatSetBlockSizes(mat,bses[0],bses[1]);CHKERRQ(ierr); 212dd6ea824SBarry Smith ierr = MatSetType(mat,MATAIJ);CHKERRQ(ierr); 213dd6ea824SBarry Smith ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr); 214dd6ea824SBarry Smith ierr = PetscMalloc2(m,PetscInt,&dlens,m,PetscInt,&olens);CHKERRQ(ierr); 215dd6ea824SBarry Smith ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr); 2162205254eSKarl Rupp 217dd6ea824SBarry Smith rowners[0] = 0; 2182205254eSKarl Rupp for (i=2; i<=size; i++) rowners[i] += rowners[i-1]; 219dd6ea824SBarry Smith rstart = rowners[rank]; 220dd6ea824SBarry Smith rend = rowners[rank+1]; 221dd6ea824SBarry Smith ierr = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr); 222dd6ea824SBarry Smith if (!rank) { 223dd6ea824SBarry Smith gmata = (Mat_SeqAIJ*) gmat->data; 224dd6ea824SBarry Smith /* send row lengths to all processors */ 225dd6ea824SBarry Smith for (i=0; i<m; i++) dlens[i] = gmata->ilen[i]; 226dd6ea824SBarry Smith for (i=1; i<size; i++) { 227dd6ea824SBarry Smith ierr = MPI_Send(gmata->ilen + rowners[i],rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr); 228dd6ea824SBarry Smith } 229dd6ea824SBarry Smith /* determine number diagonal and off-diagonal counts */ 230dd6ea824SBarry Smith ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr); 231dd6ea824SBarry Smith ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr); 232dd6ea824SBarry Smith ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr); 233dd6ea824SBarry Smith jj = 0; 234dd6ea824SBarry Smith for (i=0; i<m; i++) { 235dd6ea824SBarry Smith for (j=0; j<dlens[i]; j++) { 236dd6ea824SBarry Smith if (gmata->j[jj] < rstart) ld[i]++; 237dd6ea824SBarry Smith if (gmata->j[jj] < rstart || gmata->j[jj] >= rend) olens[i]++; 238dd6ea824SBarry Smith jj++; 239dd6ea824SBarry Smith } 240dd6ea824SBarry Smith } 241dd6ea824SBarry Smith /* send column indices to other processes */ 242dd6ea824SBarry Smith for (i=1; i<size; i++) { 
243dd6ea824SBarry Smith nz = gmata->i[rowners[i+1]]-gmata->i[rowners[i]]; 244dd6ea824SBarry Smith ierr = MPI_Send(&nz,1,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 245dd6ea824SBarry Smith ierr = MPI_Send(gmata->j + gmata->i[rowners[i]],nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 246dd6ea824SBarry Smith } 247dd6ea824SBarry Smith 248dd6ea824SBarry Smith /* send numerical values to other processes */ 249dd6ea824SBarry Smith for (i=1; i<size; i++) { 250dd6ea824SBarry Smith nz = gmata->i[rowners[i+1]]-gmata->i[rowners[i]]; 251dd6ea824SBarry Smith ierr = MPI_Send(gmata->a + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr); 252dd6ea824SBarry Smith } 253dd6ea824SBarry Smith gmataa = gmata->a; 254dd6ea824SBarry Smith gmataj = gmata->j; 255dd6ea824SBarry Smith 256dd6ea824SBarry Smith } else { 257dd6ea824SBarry Smith /* receive row lengths */ 258dd6ea824SBarry Smith ierr = MPI_Recv(dlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 259dd6ea824SBarry Smith /* receive column indices */ 260dd6ea824SBarry Smith ierr = MPI_Recv(&nz,1,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 261dd6ea824SBarry Smith ierr = PetscMalloc2(nz,PetscScalar,&gmataa,nz,PetscInt,&gmataj);CHKERRQ(ierr); 262dd6ea824SBarry Smith ierr = MPI_Recv(gmataj,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 263dd6ea824SBarry Smith /* determine number diagonal and off-diagonal counts */ 264dd6ea824SBarry Smith ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr); 265dd6ea824SBarry Smith ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr); 266dd6ea824SBarry Smith ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr); 267dd6ea824SBarry Smith jj = 0; 268dd6ea824SBarry Smith for (i=0; i<m; i++) { 269dd6ea824SBarry Smith for (j=0; j<dlens[i]; j++) { 270dd6ea824SBarry Smith if (gmataj[jj] < rstart) ld[i]++; 271dd6ea824SBarry Smith if (gmataj[jj] < rstart || gmataj[jj] >= rend) olens[i]++; 272dd6ea824SBarry Smith jj++; 273dd6ea824SBarry Smith } 274dd6ea824SBarry Smith } 275dd6ea824SBarry Smith /* receive 
numerical values */ 276dd6ea824SBarry Smith ierr = PetscMemzero(gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); 277dd6ea824SBarry Smith ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr); 278dd6ea824SBarry Smith } 279dd6ea824SBarry Smith /* set preallocation */ 280dd6ea824SBarry Smith for (i=0; i<m; i++) { 281dd6ea824SBarry Smith dlens[i] -= olens[i]; 282dd6ea824SBarry Smith } 283dd6ea824SBarry Smith ierr = MatSeqAIJSetPreallocation(mat,0,dlens);CHKERRQ(ierr); 284dd6ea824SBarry Smith ierr = MatMPIAIJSetPreallocation(mat,0,dlens,0,olens);CHKERRQ(ierr); 285dd6ea824SBarry Smith 286dd6ea824SBarry Smith for (i=0; i<m; i++) { 287dd6ea824SBarry Smith dlens[i] += olens[i]; 288dd6ea824SBarry Smith } 289dd6ea824SBarry Smith cnt = 0; 290dd6ea824SBarry Smith for (i=0; i<m; i++) { 291dd6ea824SBarry Smith row = rstart + i; 292dd6ea824SBarry Smith ierr = MatSetValues(mat,1,&row,dlens[i],gmataj+cnt,gmataa+cnt,INSERT_VALUES);CHKERRQ(ierr); 293dd6ea824SBarry Smith cnt += dlens[i]; 294dd6ea824SBarry Smith } 295dd6ea824SBarry Smith if (rank) { 296dd6ea824SBarry Smith ierr = PetscFree2(gmataa,gmataj);CHKERRQ(ierr); 297dd6ea824SBarry Smith } 298dd6ea824SBarry Smith ierr = PetscFree2(dlens,olens);CHKERRQ(ierr); 299dd6ea824SBarry Smith ierr = PetscFree(rowners);CHKERRQ(ierr); 3002205254eSKarl Rupp 301dd6ea824SBarry Smith ((Mat_MPIAIJ*)(mat->data))->ld = ld; 3022205254eSKarl Rupp 303dd6ea824SBarry Smith *inmat = mat; 304dd6ea824SBarry Smith } else { /* column indices are already set; only need to move over numerical values from process 0 */ 305dd6ea824SBarry Smith Mat_SeqAIJ *Ad = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->A->data; 306dd6ea824SBarry Smith Mat_SeqAIJ *Ao = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->B->data; 307dd6ea824SBarry Smith mat = *inmat; 308dd6ea824SBarry Smith ierr = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr); 309dd6ea824SBarry Smith if (!rank) { 310dd6ea824SBarry Smith /* send numerical values to other processes */ 
311dd6ea824SBarry Smith gmata = (Mat_SeqAIJ*) gmat->data; 312dd6ea824SBarry Smith ierr = MatGetOwnershipRanges(mat,(const PetscInt**)&rowners);CHKERRQ(ierr); 313dd6ea824SBarry Smith gmataa = gmata->a; 314dd6ea824SBarry Smith for (i=1; i<size; i++) { 315dd6ea824SBarry Smith nz = gmata->i[rowners[i+1]]-gmata->i[rowners[i]]; 316dd6ea824SBarry Smith ierr = MPI_Send(gmataa + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr); 317dd6ea824SBarry Smith } 318dd6ea824SBarry Smith nz = gmata->i[rowners[1]]-gmata->i[rowners[0]]; 319dd6ea824SBarry Smith } else { 320dd6ea824SBarry Smith /* receive numerical values from process 0*/ 321dd6ea824SBarry Smith nz = Ad->nz + Ao->nz; 322dd6ea824SBarry Smith ierr = PetscMalloc(nz*sizeof(PetscScalar),&gmataa);CHKERRQ(ierr); gmataarestore = gmataa; 323dd6ea824SBarry Smith ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr); 324dd6ea824SBarry Smith } 325dd6ea824SBarry Smith /* transfer numerical values into the diagonal A and off diagonal B parts of mat */ 326dd6ea824SBarry Smith ld = ((Mat_MPIAIJ*)(mat->data))->ld; 327dd6ea824SBarry Smith ad = Ad->a; 328dd6ea824SBarry Smith ao = Ao->a; 329d0f46423SBarry Smith if (mat->rmap->n) { 330dd6ea824SBarry Smith i = 0; 331dd6ea824SBarry Smith nz = ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz; 332dd6ea824SBarry Smith nz = Ad->i[i+1] - Ad->i[i]; ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz; 333dd6ea824SBarry Smith } 334d0f46423SBarry Smith for (i=1; i<mat->rmap->n; i++) { 335dd6ea824SBarry Smith nz = Ao->i[i] - Ao->i[i-1] - ld[i-1] + ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz; 336dd6ea824SBarry Smith nz = Ad->i[i+1] - Ad->i[i]; ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz; 337dd6ea824SBarry Smith } 338dd6ea824SBarry Smith i--; 339d0f46423SBarry Smith if (mat->rmap->n) { 
34022d28d08SBarry Smith nz = Ao->i[i+1] - Ao->i[i] - ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); 341dd6ea824SBarry Smith } 342dd6ea824SBarry Smith if (rank) { 343dd6ea824SBarry Smith ierr = PetscFree(gmataarestore);CHKERRQ(ierr); 344dd6ea824SBarry Smith } 345dd6ea824SBarry Smith } 346dd6ea824SBarry Smith ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 347dd6ea824SBarry Smith ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 348dd6ea824SBarry Smith PetscFunctionReturn(0); 349dd6ea824SBarry Smith } 350dd6ea824SBarry Smith 3510f5bd95cSBarry Smith /* 3520f5bd95cSBarry Smith Local utility routine that creates a mapping from the global column 3539e25ed09SBarry Smith number to the local number in the off-diagonal part of the local 3540f5bd95cSBarry Smith storage of the matrix. When PETSC_USE_CTABLE is used this is scalable at 3550f5bd95cSBarry Smith a slightly higher hash table cost; without it it is not scalable (each processor 3560f5bd95cSBarry Smith has an order N integer array but is fast to acess. 
3579e25ed09SBarry Smith */ 3584a2ae208SSatish Balay #undef __FUNCT__ 359ab9863d7SBarry Smith #define __FUNCT__ "MatCreateColmap_MPIAIJ_Private" 360ab9863d7SBarry Smith PetscErrorCode MatCreateColmap_MPIAIJ_Private(Mat mat) 3619e25ed09SBarry Smith { 36244a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 3636849ba73SBarry Smith PetscErrorCode ierr; 364d0f46423SBarry Smith PetscInt n = aij->B->cmap->n,i; 365dbb450caSBarry Smith 3663a40ed3dSBarry Smith PetscFunctionBegin; 3675e1f6667SBarry Smith if (!aij->garray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MPIAIJ Matrix was assembled but is missing garray"); 368aa482453SBarry Smith #if defined(PETSC_USE_CTABLE) 369e23dfa41SBarry Smith ierr = PetscTableCreate(n,mat->cmap->N+1,&aij->colmap);CHKERRQ(ierr); 370b1fc9764SSatish Balay for (i=0; i<n; i++) { 3713861aac3SJed Brown ierr = PetscTableAdd(aij->colmap,aij->garray[i]+1,i+1,INSERT_VALUES);CHKERRQ(ierr); 372b1fc9764SSatish Balay } 373b1fc9764SSatish Balay #else 374d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscInt),&aij->colmap);CHKERRQ(ierr); 375d0f46423SBarry Smith ierr = PetscLogObjectMemory(mat,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr); 376d0f46423SBarry Smith ierr = PetscMemzero(aij->colmap,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr); 377905e6a2fSBarry Smith for (i=0; i<n; i++) aij->colmap[aij->garray[i]] = i+1; 378b1fc9764SSatish Balay #endif 3793a40ed3dSBarry Smith PetscFunctionReturn(0); 3809e25ed09SBarry Smith } 3819e25ed09SBarry Smith 38230770e4dSSatish Balay #define MatSetValues_SeqAIJ_A_Private(row,col,value,addv) \ 3830520107fSSatish Balay { \ 384db4deed7SKarl Rupp if (col <= lastcol1) low1 = 0; \ 385db4deed7SKarl Rupp else high1 = nrow1; \ 386fd3458f5SBarry Smith lastcol1 = col;\ 387fd3458f5SBarry Smith while (high1-low1 > 5) { \ 388fd3458f5SBarry Smith t = (low1+high1)/2; \ 389fd3458f5SBarry Smith if (rp1[t] > col) high1 = t; \ 390fd3458f5SBarry Smith else low1 = t; \ 391ba4e3ef2SSatish Balay } \ 392fd3458f5SBarry 
Smith for (_i=low1; _i<high1; _i++) { \ 393fd3458f5SBarry Smith if (rp1[_i] > col) break; \ 394fd3458f5SBarry Smith if (rp1[_i] == col) { \ 395fd3458f5SBarry Smith if (addv == ADD_VALUES) ap1[_i] += value; \ 396fd3458f5SBarry Smith else ap1[_i] = value; \ 39730770e4dSSatish Balay goto a_noinsert; \ 3980520107fSSatish Balay } \ 3990520107fSSatish Balay } \ 400e44c0bd4SBarry Smith if (value == 0.0 && ignorezeroentries) {low1 = 0; high1 = nrow1;goto a_noinsert;} \ 401e44c0bd4SBarry Smith if (nonew == 1) {low1 = 0; high1 = nrow1; goto a_noinsert;} \ 402e32f2f54SBarry Smith if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \ 403fef13f97SBarry Smith MatSeqXAIJReallocateAIJ(A,am,1,nrow1,row,col,rmax1,aa,ai,aj,rp1,ap1,aimax,nonew,MatScalar); \ 404669a8dbcSSatish Balay N = nrow1++ - 1; a->nz++; high1++; \ 4050520107fSSatish Balay /* shift up all the later entries in this row */ \ 4060520107fSSatish Balay for (ii=N; ii>=_i; ii--) { \ 407fd3458f5SBarry Smith rp1[ii+1] = rp1[ii]; \ 408fd3458f5SBarry Smith ap1[ii+1] = ap1[ii]; \ 4090520107fSSatish Balay } \ 410fd3458f5SBarry Smith rp1[_i] = col; \ 411fd3458f5SBarry Smith ap1[_i] = value; \ 41230770e4dSSatish Balay a_noinsert: ; \ 413fd3458f5SBarry Smith ailen[row] = nrow1; \ 4140520107fSSatish Balay } 4150a198c4cSBarry Smith 416085a36d4SBarry Smith 41730770e4dSSatish Balay #define MatSetValues_SeqAIJ_B_Private(row,col,value,addv) \ 41830770e4dSSatish Balay { \ 419db4deed7SKarl Rupp if (col <= lastcol2) low2 = 0; \ 420db4deed7SKarl Rupp else high2 = nrow2; \ 421fd3458f5SBarry Smith lastcol2 = col; \ 422fd3458f5SBarry Smith while (high2-low2 > 5) { \ 423fd3458f5SBarry Smith t = (low2+high2)/2; \ 424fd3458f5SBarry Smith if (rp2[t] > col) high2 = t; \ 425fd3458f5SBarry Smith else low2 = t; \ 426ba4e3ef2SSatish Balay } \ 427fd3458f5SBarry Smith for (_i=low2; _i<high2; _i++) { \ 428fd3458f5SBarry Smith if (rp2[_i] > col) break; \ 429fd3458f5SBarry Smith if 
(rp2[_i] == col) { \ 430fd3458f5SBarry Smith if (addv == ADD_VALUES) ap2[_i] += value; \ 431fd3458f5SBarry Smith else ap2[_i] = value; \ 43230770e4dSSatish Balay goto b_noinsert; \ 43330770e4dSSatish Balay } \ 43430770e4dSSatish Balay } \ 435e44c0bd4SBarry Smith if (value == 0.0 && ignorezeroentries) {low2 = 0; high2 = nrow2; goto b_noinsert;} \ 436e44c0bd4SBarry Smith if (nonew == 1) {low2 = 0; high2 = nrow2; goto b_noinsert;} \ 437e32f2f54SBarry Smith if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \ 438fef13f97SBarry Smith MatSeqXAIJReallocateAIJ(B,bm,1,nrow2,row,col,rmax2,ba,bi,bj,rp2,ap2,bimax,nonew,MatScalar); \ 439669a8dbcSSatish Balay N = nrow2++ - 1; b->nz++; high2++; \ 44030770e4dSSatish Balay /* shift up all the later entries in this row */ \ 44130770e4dSSatish Balay for (ii=N; ii>=_i; ii--) { \ 442fd3458f5SBarry Smith rp2[ii+1] = rp2[ii]; \ 443fd3458f5SBarry Smith ap2[ii+1] = ap2[ii]; \ 44430770e4dSSatish Balay } \ 445fd3458f5SBarry Smith rp2[_i] = col; \ 446fd3458f5SBarry Smith ap2[_i] = value; \ 44730770e4dSSatish Balay b_noinsert: ; \ 448fd3458f5SBarry Smith bilen[row] = nrow2; \ 44930770e4dSSatish Balay } 45030770e4dSSatish Balay 4514a2ae208SSatish Balay #undef __FUNCT__ 4522fd7e33dSBarry Smith #define __FUNCT__ "MatSetValuesRow_MPIAIJ" 4532fd7e33dSBarry Smith PetscErrorCode MatSetValuesRow_MPIAIJ(Mat A,PetscInt row,const PetscScalar v[]) 4542fd7e33dSBarry Smith { 4552fd7e33dSBarry Smith Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data; 4562fd7e33dSBarry Smith Mat_SeqAIJ *a = (Mat_SeqAIJ*)mat->A->data,*b = (Mat_SeqAIJ*)mat->B->data; 4572fd7e33dSBarry Smith PetscErrorCode ierr; 4582fd7e33dSBarry Smith PetscInt l,*garray = mat->garray,diag; 4592fd7e33dSBarry Smith 4602fd7e33dSBarry Smith PetscFunctionBegin; 4612fd7e33dSBarry Smith /* code only works for square matrices A */ 4622fd7e33dSBarry Smith 4632fd7e33dSBarry Smith /* find size of row to the left of the diagonal part */ 
4642fd7e33dSBarry Smith ierr = MatGetOwnershipRange(A,&diag,0);CHKERRQ(ierr); 4652fd7e33dSBarry Smith row = row - diag; 4662fd7e33dSBarry Smith for (l=0; l<b->i[row+1]-b->i[row]; l++) { 4672fd7e33dSBarry Smith if (garray[b->j[b->i[row]+l]] > diag) break; 4682fd7e33dSBarry Smith } 4692fd7e33dSBarry Smith ierr = PetscMemcpy(b->a+b->i[row],v,l*sizeof(PetscScalar));CHKERRQ(ierr); 4702fd7e33dSBarry Smith 4712fd7e33dSBarry Smith /* diagonal part */ 4722fd7e33dSBarry Smith ierr = PetscMemcpy(a->a+a->i[row],v+l,(a->i[row+1]-a->i[row])*sizeof(PetscScalar));CHKERRQ(ierr); 4732fd7e33dSBarry Smith 4742fd7e33dSBarry Smith /* right of diagonal part */ 4752fd7e33dSBarry Smith ierr = PetscMemcpy(b->a+b->i[row]+l,v+l+a->i[row+1]-a->i[row],(b->i[row+1]-b->i[row]-l)*sizeof(PetscScalar));CHKERRQ(ierr); 4762fd7e33dSBarry Smith PetscFunctionReturn(0); 4772fd7e33dSBarry Smith } 4782fd7e33dSBarry Smith 4792fd7e33dSBarry Smith #undef __FUNCT__ 4804a2ae208SSatish Balay #define __FUNCT__ "MatSetValues_MPIAIJ" 481b1d57f15SBarry Smith PetscErrorCode MatSetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv) 4828a729477SBarry Smith { 48344a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 48487828ca2SBarry Smith PetscScalar value; 485dfbe8321SBarry Smith PetscErrorCode ierr; 486d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend; 487d0f46423SBarry Smith PetscInt cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col; 488ace3abfcSBarry Smith PetscBool roworiented = aij->roworiented; 4898a729477SBarry Smith 4900520107fSSatish Balay /* Some Variables required in the macro */ 4914ee7247eSSatish Balay Mat A = aij->A; 4924ee7247eSSatish Balay Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data; 49357809a77SBarry Smith PetscInt *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j; 494a77337e4SBarry Smith MatScalar *aa = a->a; 495ace3abfcSBarry Smith PetscBool ignorezeroentries = 
a->ignorezeroentries; 49630770e4dSSatish Balay Mat B = aij->B; 49730770e4dSSatish Balay Mat_SeqAIJ *b = (Mat_SeqAIJ*)B->data; 498d0f46423SBarry Smith PetscInt *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n; 499a77337e4SBarry Smith MatScalar *ba = b->a; 50030770e4dSSatish Balay 501fd3458f5SBarry Smith PetscInt *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2; 5028d76821aSHong Zhang PetscInt nonew; 503a77337e4SBarry Smith MatScalar *ap1,*ap2; 5044ee7247eSSatish Balay 5053a40ed3dSBarry Smith PetscFunctionBegin; 50671fd2e92SBarry Smith if (v) PetscValidScalarPointer(v,6); 5078a729477SBarry Smith for (i=0; i<m; i++) { 5085ef9f2a5SBarry Smith if (im[i] < 0) continue; 5092515c552SBarry Smith #if defined(PETSC_USE_DEBUG) 510e32f2f54SBarry Smith if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1); 5110a198c4cSBarry Smith #endif 5124b0e389bSBarry Smith if (im[i] >= rstart && im[i] < rend) { 5134b0e389bSBarry Smith row = im[i] - rstart; 514fd3458f5SBarry Smith lastcol1 = -1; 515fd3458f5SBarry Smith rp1 = aj + ai[row]; 516fd3458f5SBarry Smith ap1 = aa + ai[row]; 517fd3458f5SBarry Smith rmax1 = aimax[row]; 518fd3458f5SBarry Smith nrow1 = ailen[row]; 519fd3458f5SBarry Smith low1 = 0; 520fd3458f5SBarry Smith high1 = nrow1; 521fd3458f5SBarry Smith lastcol2 = -1; 522fd3458f5SBarry Smith rp2 = bj + bi[row]; 523d498b1e9SBarry Smith ap2 = ba + bi[row]; 524fd3458f5SBarry Smith rmax2 = bimax[row]; 525d498b1e9SBarry Smith nrow2 = bilen[row]; 526fd3458f5SBarry Smith low2 = 0; 527fd3458f5SBarry Smith high2 = nrow2; 528fd3458f5SBarry Smith 5291eb62cbbSBarry Smith for (j=0; j<n; j++) { 530db4deed7SKarl Rupp if (v) { 531db4deed7SKarl Rupp if (roworiented) value = v[i*n+j]; 532db4deed7SKarl Rupp else value = v[i+j*m]; 533db4deed7SKarl Rupp } else value = 0.0; 534abc0a331SBarry Smith if (ignorezeroentries && value == 0.0 && (addv == 
ADD_VALUES)) continue; 535fd3458f5SBarry Smith if (in[j] >= cstart && in[j] < cend) { 536fd3458f5SBarry Smith col = in[j] - cstart; 5378d76821aSHong Zhang nonew = a->nonew; 53830770e4dSSatish Balay MatSetValues_SeqAIJ_A_Private(row,col,value,addv); 539273d9f13SBarry Smith } else if (in[j] < 0) continue; 5402515c552SBarry Smith #if defined(PETSC_USE_DEBUG) 541cb9801acSJed Brown else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1); 5420a198c4cSBarry Smith #endif 5431eb62cbbSBarry Smith else { 544227d817aSBarry Smith if (mat->was_assembled) { 545905e6a2fSBarry Smith if (!aij->colmap) { 546ab9863d7SBarry Smith ierr = MatCreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 547905e6a2fSBarry Smith } 548aa482453SBarry Smith #if defined(PETSC_USE_CTABLE) 5490f5bd95cSBarry Smith ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr); 550fa46199cSSatish Balay col--; 551b1fc9764SSatish Balay #else 552905e6a2fSBarry Smith col = aij->colmap[in[j]] - 1; 553b1fc9764SSatish Balay #endif 5540e9bae81SBarry Smith if (col < 0 && !((Mat_SeqAIJ*)(aij->B->data))->nonew) { 555ab9863d7SBarry Smith ierr = MatDisAssemble_MPIAIJ(mat);CHKERRQ(ierr); 5564b0e389bSBarry Smith col = in[j]; 5579bf004c3SSatish Balay /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */ 558f9508a3cSSatish Balay B = aij->B; 559f9508a3cSSatish Balay b = (Mat_SeqAIJ*)B->data; 560e44c0bd4SBarry Smith bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; ba = b->a; 561d498b1e9SBarry Smith rp2 = bj + bi[row]; 562d498b1e9SBarry Smith ap2 = ba + bi[row]; 563d498b1e9SBarry Smith rmax2 = bimax[row]; 564d498b1e9SBarry Smith nrow2 = bilen[row]; 565d498b1e9SBarry Smith low2 = 0; 566d498b1e9SBarry Smith high2 = nrow2; 567d0f46423SBarry Smith bm = aij->B->rmap->n; 568f9508a3cSSatish Balay ba = b->a; 5690e9bae81SBarry Smith } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, 
%D) into matrix", im[i], in[j]); 570c48de900SBarry Smith } else col = in[j]; 5718d76821aSHong Zhang nonew = b->nonew; 57230770e4dSSatish Balay MatSetValues_SeqAIJ_B_Private(row,col,value,addv); 5731eb62cbbSBarry Smith } 5741eb62cbbSBarry Smith } 5755ef9f2a5SBarry Smith } else { 5764cb17eb5SBarry Smith if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]); 57790f02eecSBarry Smith if (!aij->donotstash) { 5785080c13bSMatthew G Knepley mat->assembled = PETSC_FALSE; 579d36fbae8SSatish Balay if (roworiented) { 580ace3abfcSBarry Smith ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 581d36fbae8SSatish Balay } else { 582ace3abfcSBarry Smith ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 5834b0e389bSBarry Smith } 5841eb62cbbSBarry Smith } 5858a729477SBarry Smith } 58690f02eecSBarry Smith } 5873a40ed3dSBarry Smith PetscFunctionReturn(0); 5888a729477SBarry Smith } 5898a729477SBarry Smith 5904a2ae208SSatish Balay #undef __FUNCT__ 5914a2ae208SSatish Balay #define __FUNCT__ "MatGetValues_MPIAIJ" 592b1d57f15SBarry Smith PetscErrorCode MatGetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[]) 593b49de8d1SLois Curfman McInnes { 594b49de8d1SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 595dfbe8321SBarry Smith PetscErrorCode ierr; 596d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend; 597d0f46423SBarry Smith PetscInt cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col; 598b49de8d1SLois Curfman McInnes 5993a40ed3dSBarry Smith PetscFunctionBegin; 600b49de8d1SLois Curfman McInnes for (i=0; i<m; i++) { 601e32f2f54SBarry Smith if (idxm[i] < 0) continue; /* 
SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/ 602e32f2f54SBarry Smith if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1); 603b49de8d1SLois Curfman McInnes if (idxm[i] >= rstart && idxm[i] < rend) { 604b49de8d1SLois Curfman McInnes row = idxm[i] - rstart; 605b49de8d1SLois Curfman McInnes for (j=0; j<n; j++) { 606e32f2f54SBarry Smith if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */ 607e32f2f54SBarry Smith if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1); 608b49de8d1SLois Curfman McInnes if (idxn[j] >= cstart && idxn[j] < cend) { 609b49de8d1SLois Curfman McInnes col = idxn[j] - cstart; 610b49de8d1SLois Curfman McInnes ierr = MatGetValues(aij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr); 611fa852ad4SSatish Balay } else { 612905e6a2fSBarry Smith if (!aij->colmap) { 613ab9863d7SBarry Smith ierr = MatCreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 614905e6a2fSBarry Smith } 615aa482453SBarry Smith #if defined(PETSC_USE_CTABLE) 6160f5bd95cSBarry Smith ierr = PetscTableFind(aij->colmap,idxn[j]+1,&col);CHKERRQ(ierr); 617fa46199cSSatish Balay col--; 618b1fc9764SSatish Balay #else 619905e6a2fSBarry Smith col = aij->colmap[idxn[j]] - 1; 620b1fc9764SSatish Balay #endif 621e60e1c95SSatish Balay if ((col < 0) || (aij->garray[col] != idxn[j])) *(v+i*n+j) = 0.0; 622d9d09a02SSatish Balay else { 623b49de8d1SLois Curfman McInnes ierr = MatGetValues(aij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr); 624b49de8d1SLois Curfman McInnes } 625b49de8d1SLois Curfman McInnes } 626b49de8d1SLois Curfman McInnes } 627f23aa3ddSBarry Smith } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported"); 628b49de8d1SLois Curfman McInnes } 6293a40ed3dSBarry Smith PetscFunctionReturn(0); 630b49de8d1SLois Curfman McInnes } 
631bc5ccf88SSatish Balay 632bd0c2dcbSBarry Smith extern PetscErrorCode MatMultDiagonalBlock_MPIAIJ(Mat,Vec,Vec); 633bd0c2dcbSBarry Smith 6344a2ae208SSatish Balay #undef __FUNCT__ 6354a2ae208SSatish Balay #define __FUNCT__ "MatAssemblyBegin_MPIAIJ" 636dfbe8321SBarry Smith PetscErrorCode MatAssemblyBegin_MPIAIJ(Mat mat,MatAssemblyType mode) 637bc5ccf88SSatish Balay { 638bc5ccf88SSatish Balay Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 639dfbe8321SBarry Smith PetscErrorCode ierr; 640b1d57f15SBarry Smith PetscInt nstash,reallocs; 641bc5ccf88SSatish Balay InsertMode addv; 642bc5ccf88SSatish Balay 643bc5ccf88SSatish Balay PetscFunctionBegin; 6442205254eSKarl Rupp if (aij->donotstash || mat->nooffprocentries) PetscFunctionReturn(0); 645bc5ccf88SSatish Balay 646bc5ccf88SSatish Balay /* make sure all processors are either in INSERTMODE or ADDMODE */ 647ce94432eSBarry Smith ierr = MPI_Allreduce((PetscEnum*)&mat->insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 648ce94432eSBarry Smith if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added"); 649bc5ccf88SSatish Balay mat->insertmode = addv; /* in case this processor had no cache */ 650bc5ccf88SSatish Balay 651d0f46423SBarry Smith ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr); 6528798bf22SSatish Balay ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr); 653ae15b995SBarry Smith ierr = PetscInfo2(aij->A,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr); 654bc5ccf88SSatish Balay PetscFunctionReturn(0); 655bc5ccf88SSatish Balay } 656bc5ccf88SSatish Balay 6574a2ae208SSatish Balay #undef __FUNCT__ 6584a2ae208SSatish Balay #define __FUNCT__ "MatAssemblyEnd_MPIAIJ" 659dfbe8321SBarry Smith PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat mat,MatAssemblyType mode) 660bc5ccf88SSatish Balay { 661bc5ccf88SSatish Balay 
Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 66291c97fd4SSatish Balay Mat_SeqAIJ *a = (Mat_SeqAIJ*)aij->A->data; 6636849ba73SBarry Smith PetscErrorCode ierr; 664b1d57f15SBarry Smith PetscMPIInt n; 665b1d57f15SBarry Smith PetscInt i,j,rstart,ncols,flg; 666e44c0bd4SBarry Smith PetscInt *row,*col; 667ace3abfcSBarry Smith PetscBool other_disassembled; 66887828ca2SBarry Smith PetscScalar *val; 669bc5ccf88SSatish Balay InsertMode addv = mat->insertmode; 670bc5ccf88SSatish Balay 67191c97fd4SSatish Balay /* do not use 'b = (Mat_SeqAIJ*)aij->B->data' as B can be reset in disassembly */ 6726e111a19SKarl Rupp 673bc5ccf88SSatish Balay PetscFunctionBegin; 6744cb17eb5SBarry Smith if (!aij->donotstash && !mat->nooffprocentries) { 675a2d1c673SSatish Balay while (1) { 6768798bf22SSatish Balay ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr); 677a2d1c673SSatish Balay if (!flg) break; 678a2d1c673SSatish Balay 679bc5ccf88SSatish Balay for (i=0; i<n; ) { 680bc5ccf88SSatish Balay /* Now identify the consecutive vals belonging to the same row */ 6812205254eSKarl Rupp for (j=i,rstart=row[j]; j<n; j++) { 6822205254eSKarl Rupp if (row[j] != rstart) break; 6832205254eSKarl Rupp } 684bc5ccf88SSatish Balay if (j < n) ncols = j-i; 685bc5ccf88SSatish Balay else ncols = n-i; 686bc5ccf88SSatish Balay /* Now assemble all these values with a single function call */ 687bc5ccf88SSatish Balay ierr = MatSetValues_MPIAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr); 6882205254eSKarl Rupp 689bc5ccf88SSatish Balay i = j; 690bc5ccf88SSatish Balay } 691bc5ccf88SSatish Balay } 6928798bf22SSatish Balay ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr); 693bc5ccf88SSatish Balay } 694bc5ccf88SSatish Balay ierr = MatAssemblyBegin(aij->A,mode);CHKERRQ(ierr); 695bc5ccf88SSatish Balay ierr = MatAssemblyEnd(aij->A,mode);CHKERRQ(ierr); 696bc5ccf88SSatish Balay 697bc5ccf88SSatish Balay /* determine if any processor has disassembled, if so we must 698bc5ccf88SSatish 
Balay also disassemble ourselfs, in order that we may reassemble. */ 699bc5ccf88SSatish Balay /* 700bc5ccf88SSatish Balay if nonzero structure of submatrix B cannot change then we know that 701bc5ccf88SSatish Balay no processor disassembled thus we can skip this stuff 702bc5ccf88SSatish Balay */ 703bc5ccf88SSatish Balay if (!((Mat_SeqAIJ*)aij->B->data)->nonew) { 704ce94432eSBarry Smith ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPIU_BOOL,MPI_PROD,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 705bc5ccf88SSatish Balay if (mat->was_assembled && !other_disassembled) { 706ab9863d7SBarry Smith ierr = MatDisAssemble_MPIAIJ(mat);CHKERRQ(ierr); 707ad59fb31SSatish Balay } 708ad59fb31SSatish Balay } 709bc5ccf88SSatish Balay if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) { 710bc5ccf88SSatish Balay ierr = MatSetUpMultiply_MPIAIJ(mat);CHKERRQ(ierr); 711bc5ccf88SSatish Balay } 7124e0d8c25SBarry Smith ierr = MatSetOption(aij->B,MAT_USE_INODES,PETSC_FALSE);CHKERRQ(ierr); 7134e35b6f3SSatish Balay ierr = MatSetOption(aij->B,MAT_CHECK_COMPRESSED_ROW,PETSC_FALSE);CHKERRQ(ierr); 714bc5ccf88SSatish Balay ierr = MatAssemblyBegin(aij->B,mode);CHKERRQ(ierr); 715bc5ccf88SSatish Balay ierr = MatAssemblyEnd(aij->B,mode);CHKERRQ(ierr); 716bc5ccf88SSatish Balay 7171d79065fSBarry Smith ierr = PetscFree2(aij->rowvalues,aij->rowindices);CHKERRQ(ierr); 7182205254eSKarl Rupp 719606d414cSSatish Balay aij->rowvalues = 0; 720a30b2313SHong Zhang 721a30b2313SHong Zhang /* used by MatAXPY() */ 72291c97fd4SSatish Balay a->xtoy = 0; ((Mat_SeqAIJ*)aij->B->data)->xtoy = 0; /* b->xtoy = 0 */ 72391c97fd4SSatish Balay a->XtoY = 0; ((Mat_SeqAIJ*)aij->B->data)->XtoY = 0; /* b->XtoY = 0 */ 724a30b2313SHong Zhang 7256bf464f9SBarry Smith ierr = VecDestroy(&aij->diag);CHKERRQ(ierr); 726bd0c2dcbSBarry Smith if (a->inode.size) mat->ops->multdiagonalblock = MatMultDiagonalBlock_MPIAIJ; 727bc5ccf88SSatish Balay PetscFunctionReturn(0); 728bc5ccf88SSatish Balay } 729bc5ccf88SSatish Balay 
7304a2ae208SSatish Balay #undef __FUNCT__ 7314a2ae208SSatish Balay #define __FUNCT__ "MatZeroEntries_MPIAIJ" 732dfbe8321SBarry Smith PetscErrorCode MatZeroEntries_MPIAIJ(Mat A) 7331eb62cbbSBarry Smith { 73444a69424SLois Curfman McInnes Mat_MPIAIJ *l = (Mat_MPIAIJ*)A->data; 735dfbe8321SBarry Smith PetscErrorCode ierr; 7363a40ed3dSBarry Smith 7373a40ed3dSBarry Smith PetscFunctionBegin; 73878b31e54SBarry Smith ierr = MatZeroEntries(l->A);CHKERRQ(ierr); 73978b31e54SBarry Smith ierr = MatZeroEntries(l->B);CHKERRQ(ierr); 7403a40ed3dSBarry Smith PetscFunctionReturn(0); 7411eb62cbbSBarry Smith } 7421eb62cbbSBarry Smith 7434a2ae208SSatish Balay #undef __FUNCT__ 7444a2ae208SSatish Balay #define __FUNCT__ "MatZeroRows_MPIAIJ" 7452b40b63fSBarry Smith PetscErrorCode MatZeroRows_MPIAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b) 7461eb62cbbSBarry Smith { 74744a69424SLois Curfman McInnes Mat_MPIAIJ *l = (Mat_MPIAIJ*)A->data; 7486849ba73SBarry Smith PetscErrorCode ierr; 7497adad957SLisandro Dalcin PetscMPIInt size = l->size,imdex,n,rank = l->rank,tag = ((PetscObject)A)->tag,lastidx = -1; 750d0f46423SBarry Smith PetscInt i,*owners = A->rmap->range; 751b1d57f15SBarry Smith PetscInt *nprocs,j,idx,nsends,row; 752b1d57f15SBarry Smith PetscInt nmax,*svalues,*starts,*owner,nrecvs; 753b1d57f15SBarry Smith PetscInt *rvalues,count,base,slen,*source; 754d0f46423SBarry Smith PetscInt *lens,*lrows,*values,rstart=A->rmap->rstart; 755ce94432eSBarry Smith MPI_Comm comm; 7561eb62cbbSBarry Smith MPI_Request *send_waits,*recv_waits; 7571eb62cbbSBarry Smith MPI_Status recv_status,*send_status; 75897b48c8fSBarry Smith const PetscScalar *xx; 75997b48c8fSBarry Smith PetscScalar *bb; 7606543fbbaSBarry Smith #if defined(PETSC_DEBUG) 761ace3abfcSBarry Smith PetscBool found = PETSC_FALSE; 7626543fbbaSBarry Smith #endif 7631eb62cbbSBarry Smith 7643a40ed3dSBarry Smith PetscFunctionBegin; 765ce94432eSBarry Smith ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr); 
7661eb62cbbSBarry Smith /* first count number of contributors to each processor */ 767b1d57f15SBarry Smith ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); 768b1d57f15SBarry Smith ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr); 769b1d57f15SBarry Smith ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/ 7706543fbbaSBarry Smith j = 0; 7711eb62cbbSBarry Smith for (i=0; i<N; i++) { 7726543fbbaSBarry Smith if (lastidx > (idx = rows[i])) j = 0; 7736543fbbaSBarry Smith lastidx = idx; 7746543fbbaSBarry Smith for (; j<size; j++) { 7751eb62cbbSBarry Smith if (idx >= owners[j] && idx < owners[j+1]) { 7766543fbbaSBarry Smith nprocs[2*j]++; 7776543fbbaSBarry Smith nprocs[2*j+1] = 1; 7786543fbbaSBarry Smith owner[i] = j; 7796543fbbaSBarry Smith #if defined(PETSC_DEBUG) 7806543fbbaSBarry Smith found = PETSC_TRUE; 7816543fbbaSBarry Smith #endif 7826543fbbaSBarry Smith break; 7831eb62cbbSBarry Smith } 7841eb62cbbSBarry Smith } 7856543fbbaSBarry Smith #if defined(PETSC_DEBUG) 786e32f2f54SBarry Smith if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index out of range"); 7876543fbbaSBarry Smith found = PETSC_FALSE; 7886543fbbaSBarry Smith #endif 7891eb62cbbSBarry Smith } 7902205254eSKarl Rupp nsends = 0; 7912205254eSKarl Rupp for (i=0; i<size; i++) nsends += nprocs[2*i+1]; 7921eb62cbbSBarry Smith 7937367270fSBarry Smith if (A->nooffproczerorows) { 7947367270fSBarry Smith if (nsends > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"You called MatSetOption(,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) but set an off process zero row"); 7957367270fSBarry Smith nrecvs = nsends; 7967367270fSBarry Smith nmax = N; 7977367270fSBarry Smith } else { 7981eb62cbbSBarry Smith /* inform other processors of number of messages and max length*/ 799c1dc657dSBarry Smith ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr); 8007367270fSBarry Smith } 8011eb62cbbSBarry Smith 8021eb62cbbSBarry Smith /* post receives: */ 
803b1d57f15SBarry Smith ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr); 804b0a32e0cSBarry Smith ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr); 8051eb62cbbSBarry Smith for (i=0; i<nrecvs; i++) { 806b1d57f15SBarry Smith ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr); 8071eb62cbbSBarry Smith } 8081eb62cbbSBarry Smith 8091eb62cbbSBarry Smith /* do sends: 8101eb62cbbSBarry Smith 1) starts[i] gives the starting index in svalues for stuff going to 8111eb62cbbSBarry Smith the ith processor 8121eb62cbbSBarry Smith */ 813b1d57f15SBarry Smith ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr); 814b0a32e0cSBarry Smith ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr); 815b1d57f15SBarry Smith ierr = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr); 8161eb62cbbSBarry Smith 8171eb62cbbSBarry Smith starts[0] = 0; 8182205254eSKarl Rupp for (i=1; i<size; i++) starts[i] = starts[i-1] + nprocs[2*i-2]; 8192205254eSKarl Rupp for (i=0; i<N; i++) svalues[starts[owner[i]]++] = rows[i]; 8202205254eSKarl Rupp 8212205254eSKarl Rupp starts[0] = 0; 8222205254eSKarl Rupp for (i=1; i<size+1; i++) starts[i] = starts[i-1] + nprocs[2*i-2]; 8231eb62cbbSBarry Smith count = 0; 82417699dbbSLois Curfman McInnes for (i=0; i<size; i++) { 825c1dc657dSBarry Smith if (nprocs[2*i+1]) { 826b1d57f15SBarry Smith ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr); 8271eb62cbbSBarry Smith } 8281eb62cbbSBarry Smith } 829606d414cSSatish Balay ierr = PetscFree(starts);CHKERRQ(ierr); 8301eb62cbbSBarry Smith 83117699dbbSLois Curfman McInnes base = owners[rank]; 8321eb62cbbSBarry Smith 8331eb62cbbSBarry Smith /* wait on receives */ 8341d79065fSBarry Smith ierr = PetscMalloc2(nrecvs,PetscInt,&lens,nrecvs,PetscInt,&source);CHKERRQ(ierr); 8351eb62cbbSBarry Smith count = nrecvs; slen = 0; 8361eb62cbbSBarry 
Smith while (count) { 837ca161407SBarry Smith ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr); 8381eb62cbbSBarry Smith /* unpack receives into our local space */ 839b1d57f15SBarry Smith ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr); 8402205254eSKarl Rupp 841d6dfbf8fSBarry Smith source[imdex] = recv_status.MPI_SOURCE; 842d6dfbf8fSBarry Smith lens[imdex] = n; 8431eb62cbbSBarry Smith slen += n; 8441eb62cbbSBarry Smith count--; 8451eb62cbbSBarry Smith } 846606d414cSSatish Balay ierr = PetscFree(recv_waits);CHKERRQ(ierr); 8471eb62cbbSBarry Smith 8481eb62cbbSBarry Smith /* move the data into the send scatter */ 849b1d57f15SBarry Smith ierr = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr); 8501eb62cbbSBarry Smith count = 0; 8511eb62cbbSBarry Smith for (i=0; i<nrecvs; i++) { 8521eb62cbbSBarry Smith values = rvalues + i*nmax; 8532205254eSKarl Rupp for (j=0; j<lens[i]; j++) lrows[count++] = values[j] - base; 8541eb62cbbSBarry Smith } 855606d414cSSatish Balay ierr = PetscFree(rvalues);CHKERRQ(ierr); 8561d79065fSBarry Smith ierr = PetscFree2(lens,source);CHKERRQ(ierr); 857606d414cSSatish Balay ierr = PetscFree(owner);CHKERRQ(ierr); 858606d414cSSatish Balay ierr = PetscFree(nprocs);CHKERRQ(ierr); 8591eb62cbbSBarry Smith 86097b48c8fSBarry Smith /* fix right hand side if needed */ 86197b48c8fSBarry Smith if (x && b) { 86297b48c8fSBarry Smith ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr); 86397b48c8fSBarry Smith ierr = VecGetArray(b,&bb);CHKERRQ(ierr); 8642205254eSKarl Rupp for (i=0; i<slen; i++) bb[lrows[i]] = diag*xx[lrows[i]]; 86597b48c8fSBarry Smith ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr); 86697b48c8fSBarry Smith ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr); 86797b48c8fSBarry Smith } 8686eb55b6aSBarry Smith /* 8696eb55b6aSBarry Smith Zero the required rows. 
If the "diagonal block" of the matrix 870a8c7a070SBarry Smith is square and the user wishes to set the diagonal we use separate 8716eb55b6aSBarry Smith code so that MatSetValues() is not called for each diagonal allocating 8726eb55b6aSBarry Smith new memory, thus calling lots of mallocs and slowing things down. 8736eb55b6aSBarry Smith 8746eb55b6aSBarry Smith */ 875e2d53e46SBarry Smith /* must zero l->B before l->A because the (diag) case below may put values into l->B*/ 8762b40b63fSBarry Smith ierr = MatZeroRows(l->B,slen,lrows,0.0,0,0);CHKERRQ(ierr); 877d0f46423SBarry Smith if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) { 8782b40b63fSBarry Smith ierr = MatZeroRows(l->A,slen,lrows,diag,0,0);CHKERRQ(ierr); 879f4df32b1SMatthew Knepley } else if (diag != 0.0) { 8802b40b63fSBarry Smith ierr = MatZeroRows(l->A,slen,lrows,0.0,0,0);CHKERRQ(ierr); 8812205254eSKarl Rupp if (((Mat_SeqAIJ*)l->A->data)->nonew) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options\nMAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR"); 882e2d53e46SBarry Smith for (i = 0; i < slen; i++) { 883e2d53e46SBarry Smith row = lrows[i] + rstart; 884f4df32b1SMatthew Knepley ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr); 885e2d53e46SBarry Smith } 886e2d53e46SBarry Smith ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 887e2d53e46SBarry Smith ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 8886eb55b6aSBarry Smith } else { 8892b40b63fSBarry Smith ierr = MatZeroRows(l->A,slen,lrows,0.0,0,0);CHKERRQ(ierr); 8906eb55b6aSBarry Smith } 891606d414cSSatish Balay ierr = PetscFree(lrows);CHKERRQ(ierr); 89272dacd9aSBarry Smith 8931eb62cbbSBarry Smith /* wait on sends */ 8941eb62cbbSBarry Smith if (nsends) { 895b0a32e0cSBarry Smith ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr); 896ca161407SBarry Smith ierr = 
MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr); 897606d414cSSatish Balay ierr = PetscFree(send_status);CHKERRQ(ierr); 8981eb62cbbSBarry Smith } 899606d414cSSatish Balay ierr = PetscFree(send_waits);CHKERRQ(ierr); 900606d414cSSatish Balay ierr = PetscFree(svalues);CHKERRQ(ierr); 9013a40ed3dSBarry Smith PetscFunctionReturn(0); 9021eb62cbbSBarry Smith } 9031eb62cbbSBarry Smith 9044a2ae208SSatish Balay #undef __FUNCT__ 9059c7c4993SBarry Smith #define __FUNCT__ "MatZeroRowsColumns_MPIAIJ" 9069c7c4993SBarry Smith PetscErrorCode MatZeroRowsColumns_MPIAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b) 9079c7c4993SBarry Smith { 9089c7c4993SBarry Smith Mat_MPIAIJ *l = (Mat_MPIAIJ*)A->data; 9099c7c4993SBarry Smith PetscErrorCode ierr; 9109c7c4993SBarry Smith PetscMPIInt size = l->size,imdex,n,rank = l->rank,tag = ((PetscObject)A)->tag,lastidx = -1; 9119c7c4993SBarry Smith PetscInt i,*owners = A->rmap->range; 912564f14d6SBarry Smith PetscInt *nprocs,j,idx,nsends; 9139c7c4993SBarry Smith PetscInt nmax,*svalues,*starts,*owner,nrecvs; 9149c7c4993SBarry Smith PetscInt *rvalues,count,base,slen,*source; 915564f14d6SBarry Smith PetscInt *lens,*lrows,*values,m; 916ce94432eSBarry Smith MPI_Comm comm; 9179c7c4993SBarry Smith MPI_Request *send_waits,*recv_waits; 9189c7c4993SBarry Smith MPI_Status recv_status,*send_status; 9199c7c4993SBarry Smith const PetscScalar *xx; 920564f14d6SBarry Smith PetscScalar *bb,*mask; 921564f14d6SBarry Smith Vec xmask,lmask; 922564f14d6SBarry Smith Mat_SeqAIJ *aij = (Mat_SeqAIJ*)l->B->data; 923564f14d6SBarry Smith const PetscInt *aj, *ii,*ridx; 924564f14d6SBarry Smith PetscScalar *aa; 9259c7c4993SBarry Smith #if defined(PETSC_DEBUG) 9269c7c4993SBarry Smith PetscBool found = PETSC_FALSE; 9279c7c4993SBarry Smith #endif 9289c7c4993SBarry Smith 9299c7c4993SBarry Smith PetscFunctionBegin; 930ce94432eSBarry Smith ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr); 9319c7c4993SBarry Smith /* first count number of 
contributors to each processor */ 9329c7c4993SBarry Smith ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); 9339c7c4993SBarry Smith ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr); 9349c7c4993SBarry Smith ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/ 9359c7c4993SBarry Smith j = 0; 9369c7c4993SBarry Smith for (i=0; i<N; i++) { 9379c7c4993SBarry Smith if (lastidx > (idx = rows[i])) j = 0; 9389c7c4993SBarry Smith lastidx = idx; 9399c7c4993SBarry Smith for (; j<size; j++) { 9409c7c4993SBarry Smith if (idx >= owners[j] && idx < owners[j+1]) { 9419c7c4993SBarry Smith nprocs[2*j]++; 9429c7c4993SBarry Smith nprocs[2*j+1] = 1; 9439c7c4993SBarry Smith owner[i] = j; 9449c7c4993SBarry Smith #if defined(PETSC_DEBUG) 9459c7c4993SBarry Smith found = PETSC_TRUE; 9469c7c4993SBarry Smith #endif 9479c7c4993SBarry Smith break; 9489c7c4993SBarry Smith } 9499c7c4993SBarry Smith } 9509c7c4993SBarry Smith #if defined(PETSC_DEBUG) 9519c7c4993SBarry Smith if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index out of range"); 9529c7c4993SBarry Smith found = PETSC_FALSE; 9539c7c4993SBarry Smith #endif 9549c7c4993SBarry Smith } 9552205254eSKarl Rupp nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1]; 9569c7c4993SBarry Smith 9579c7c4993SBarry Smith /* inform other processors of number of messages and max length*/ 9589c7c4993SBarry Smith ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr); 9599c7c4993SBarry Smith 9609c7c4993SBarry Smith /* post receives: */ 9619c7c4993SBarry Smith ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr); 9629c7c4993SBarry Smith ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr); 9639c7c4993SBarry Smith for (i=0; i<nrecvs; i++) { 9649c7c4993SBarry Smith ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr); 9659c7c4993SBarry Smith } 9669c7c4993SBarry Smith 9679c7c4993SBarry 
Smith /* do sends: 9689c7c4993SBarry Smith 1) starts[i] gives the starting index in svalues for stuff going to 9699c7c4993SBarry Smith the ith processor 9709c7c4993SBarry Smith */ 9719c7c4993SBarry Smith ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr); 9729c7c4993SBarry Smith ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr); 9739c7c4993SBarry Smith ierr = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr); 9749c7c4993SBarry Smith 9759c7c4993SBarry Smith starts[0] = 0; 9762205254eSKarl Rupp for (i=1; i<size; i++) starts[i] = starts[i-1] + nprocs[2*i-2]; 9772205254eSKarl Rupp for (i=0; i<N; i++) svalues[starts[owner[i]]++] = rows[i]; 9782205254eSKarl Rupp 9792205254eSKarl Rupp starts[0] = 0; 9802205254eSKarl Rupp for (i=1; i<size+1; i++) starts[i] = starts[i-1] + nprocs[2*i-2]; 9819c7c4993SBarry Smith count = 0; 9829c7c4993SBarry Smith for (i=0; i<size; i++) { 9839c7c4993SBarry Smith if (nprocs[2*i+1]) { 9849c7c4993SBarry Smith ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr); 9859c7c4993SBarry Smith } 9869c7c4993SBarry Smith } 9879c7c4993SBarry Smith ierr = PetscFree(starts);CHKERRQ(ierr); 9889c7c4993SBarry Smith 9899c7c4993SBarry Smith base = owners[rank]; 9909c7c4993SBarry Smith 9919c7c4993SBarry Smith /* wait on receives */ 9929c7c4993SBarry Smith ierr = PetscMalloc2(nrecvs,PetscInt,&lens,nrecvs,PetscInt,&source);CHKERRQ(ierr); 9939c7c4993SBarry Smith count = nrecvs; slen = 0; 9949c7c4993SBarry Smith while (count) { 9959c7c4993SBarry Smith ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr); 9969c7c4993SBarry Smith /* unpack receives into our local space */ 9979c7c4993SBarry Smith ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr); 9982205254eSKarl Rupp 9999c7c4993SBarry Smith source[imdex] = recv_status.MPI_SOURCE; 10009c7c4993SBarry Smith lens[imdex] = n; 10019c7c4993SBarry Smith slen += n; 10029c7c4993SBarry Smith count--; 
10039c7c4993SBarry Smith } 10049c7c4993SBarry Smith ierr = PetscFree(recv_waits);CHKERRQ(ierr); 10059c7c4993SBarry Smith 10069c7c4993SBarry Smith /* move the data into the send scatter */ 10079c7c4993SBarry Smith ierr = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr); 10089c7c4993SBarry Smith count = 0; 10099c7c4993SBarry Smith for (i=0; i<nrecvs; i++) { 10109c7c4993SBarry Smith values = rvalues + i*nmax; 10112205254eSKarl Rupp for (j=0; j<lens[i]; j++) lrows[count++] = values[j] - base; 10129c7c4993SBarry Smith } 10139c7c4993SBarry Smith ierr = PetscFree(rvalues);CHKERRQ(ierr); 10149c7c4993SBarry Smith ierr = PetscFree2(lens,source);CHKERRQ(ierr); 10159c7c4993SBarry Smith ierr = PetscFree(owner);CHKERRQ(ierr); 10169c7c4993SBarry Smith ierr = PetscFree(nprocs);CHKERRQ(ierr); 1017564f14d6SBarry Smith /* lrows are the local rows to be zeroed, slen is the number of local rows */ 10189c7c4993SBarry Smith 1019564f14d6SBarry Smith /* zero diagonal part of matrix */ 1020564f14d6SBarry Smith ierr = MatZeroRowsColumns(l->A,slen,lrows,diag,x,b);CHKERRQ(ierr); 10219c7c4993SBarry Smith 1022564f14d6SBarry Smith /* handle off diagonal part of matrix */ 10230298fd71SBarry Smith ierr = MatGetVecs(A,&xmask,NULL);CHKERRQ(ierr); 1024564f14d6SBarry Smith ierr = VecDuplicate(l->lvec,&lmask);CHKERRQ(ierr); 1025564f14d6SBarry Smith ierr = VecGetArray(xmask,&bb);CHKERRQ(ierr); 10262205254eSKarl Rupp for (i=0; i<slen; i++) bb[lrows[i]] = 1; 1027564f14d6SBarry Smith ierr = VecRestoreArray(xmask,&bb);CHKERRQ(ierr); 1028564f14d6SBarry Smith ierr = VecScatterBegin(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1029564f14d6SBarry Smith ierr = VecScatterEnd(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 10306bf464f9SBarry Smith ierr = VecDestroy(&xmask);CHKERRQ(ierr); 1031377aa5a1SBarry Smith if (x) { 1032564f14d6SBarry Smith ierr = VecScatterBegin(l->Mvctx,x,l->lvec,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1033564f14d6SBarry Smith ierr = 
VecScatterEnd(l->Mvctx,x,l->lvec,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1034564f14d6SBarry Smith ierr = VecGetArrayRead(l->lvec,&xx);CHKERRQ(ierr); 1035564f14d6SBarry Smith ierr = VecGetArray(b,&bb);CHKERRQ(ierr); 1036377aa5a1SBarry Smith } 1037377aa5a1SBarry Smith ierr = VecGetArray(lmask,&mask);CHKERRQ(ierr); 1038564f14d6SBarry Smith 1039564f14d6SBarry Smith /* remove zeroed rows of off diagonal matrix */ 1040564f14d6SBarry Smith ii = aij->i; 1041564f14d6SBarry Smith for (i=0; i<slen; i++) { 1042564f14d6SBarry Smith ierr = PetscMemzero(aij->a + ii[lrows[i]],(ii[lrows[i]+1] - ii[lrows[i]])*sizeof(PetscScalar));CHKERRQ(ierr); 10439c7c4993SBarry Smith } 1044564f14d6SBarry Smith 1045564f14d6SBarry Smith /* loop over all elements of off process part of matrix zeroing removed columns*/ 1046564f14d6SBarry Smith if (aij->compressedrow.use) { 1047564f14d6SBarry Smith m = aij->compressedrow.nrows; 1048564f14d6SBarry Smith ii = aij->compressedrow.i; 1049564f14d6SBarry Smith ridx = aij->compressedrow.rindex; 1050564f14d6SBarry Smith for (i=0; i<m; i++) { 1051564f14d6SBarry Smith n = ii[i+1] - ii[i]; 1052564f14d6SBarry Smith aj = aij->j + ii[i]; 1053564f14d6SBarry Smith aa = aij->a + ii[i]; 1054564f14d6SBarry Smith 1055564f14d6SBarry Smith for (j=0; j<n; j++) { 105625266a92SSatish Balay if (PetscAbsScalar(mask[*aj])) { 1057377aa5a1SBarry Smith if (b) bb[*ridx] -= *aa*xx[*aj]; 1058564f14d6SBarry Smith *aa = 0.0; 1059564f14d6SBarry Smith } 1060564f14d6SBarry Smith aa++; 1061564f14d6SBarry Smith aj++; 1062564f14d6SBarry Smith } 1063564f14d6SBarry Smith ridx++; 1064564f14d6SBarry Smith } 1065564f14d6SBarry Smith } else { /* do not use compressed row format */ 1066564f14d6SBarry Smith m = l->B->rmap->n; 1067564f14d6SBarry Smith for (i=0; i<m; i++) { 1068564f14d6SBarry Smith n = ii[i+1] - ii[i]; 1069564f14d6SBarry Smith aj = aij->j + ii[i]; 1070564f14d6SBarry Smith aa = aij->a + ii[i]; 1071564f14d6SBarry Smith for (j=0; j<n; j++) { 107225266a92SSatish Balay if 
(PetscAbsScalar(mask[*aj])) { 1073377aa5a1SBarry Smith if (b) bb[i] -= *aa*xx[*aj]; 1074564f14d6SBarry Smith *aa = 0.0; 1075564f14d6SBarry Smith } 1076564f14d6SBarry Smith aa++; 1077564f14d6SBarry Smith aj++; 1078564f14d6SBarry Smith } 1079564f14d6SBarry Smith } 1080564f14d6SBarry Smith } 1081377aa5a1SBarry Smith if (x) { 1082564f14d6SBarry Smith ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr); 1083564f14d6SBarry Smith ierr = VecRestoreArrayRead(l->lvec,&xx);CHKERRQ(ierr); 1084377aa5a1SBarry Smith } 1085377aa5a1SBarry Smith ierr = VecRestoreArray(lmask,&mask);CHKERRQ(ierr); 10866bf464f9SBarry Smith ierr = VecDestroy(&lmask);CHKERRQ(ierr); 10879c7c4993SBarry Smith ierr = PetscFree(lrows);CHKERRQ(ierr); 10889c7c4993SBarry Smith 10899c7c4993SBarry Smith /* wait on sends */ 10909c7c4993SBarry Smith if (nsends) { 10919c7c4993SBarry Smith ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr); 10929c7c4993SBarry Smith ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr); 10939c7c4993SBarry Smith ierr = PetscFree(send_status);CHKERRQ(ierr); 10949c7c4993SBarry Smith } 10959c7c4993SBarry Smith ierr = PetscFree(send_waits);CHKERRQ(ierr); 10969c7c4993SBarry Smith ierr = PetscFree(svalues);CHKERRQ(ierr); 10979c7c4993SBarry Smith PetscFunctionReturn(0); 10989c7c4993SBarry Smith } 10999c7c4993SBarry Smith 11009c7c4993SBarry Smith #undef __FUNCT__ 11014a2ae208SSatish Balay #define __FUNCT__ "MatMult_MPIAIJ" 1102dfbe8321SBarry Smith PetscErrorCode MatMult_MPIAIJ(Mat A,Vec xx,Vec yy) 11031eb62cbbSBarry Smith { 1104416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1105dfbe8321SBarry Smith PetscErrorCode ierr; 1106b1d57f15SBarry Smith PetscInt nt; 1107416022c9SBarry Smith 11083a40ed3dSBarry Smith PetscFunctionBegin; 1109a2ce50c7SBarry Smith ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr); 111065e19b50SBarry Smith if (nt != A->cmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A (%D) and xx (%D)",A->cmap->n,nt); 
1111ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1112f830108cSBarry Smith ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr); 1113ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1114f830108cSBarry Smith ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr); 11153a40ed3dSBarry Smith PetscFunctionReturn(0); 11161eb62cbbSBarry Smith } 11171eb62cbbSBarry Smith 11184a2ae208SSatish Balay #undef __FUNCT__ 1119bd0c2dcbSBarry Smith #define __FUNCT__ "MatMultDiagonalBlock_MPIAIJ" 1120bd0c2dcbSBarry Smith PetscErrorCode MatMultDiagonalBlock_MPIAIJ(Mat A,Vec bb,Vec xx) 1121bd0c2dcbSBarry Smith { 1122bd0c2dcbSBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1123bd0c2dcbSBarry Smith PetscErrorCode ierr; 1124bd0c2dcbSBarry Smith 1125bd0c2dcbSBarry Smith PetscFunctionBegin; 1126bd0c2dcbSBarry Smith ierr = MatMultDiagonalBlock(a->A,bb,xx);CHKERRQ(ierr); 1127bd0c2dcbSBarry Smith PetscFunctionReturn(0); 1128bd0c2dcbSBarry Smith } 1129bd0c2dcbSBarry Smith 1130bd0c2dcbSBarry Smith #undef __FUNCT__ 11314a2ae208SSatish Balay #define __FUNCT__ "MatMultAdd_MPIAIJ" 1132dfbe8321SBarry Smith PetscErrorCode MatMultAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz) 1133da3a660dSBarry Smith { 1134416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1135dfbe8321SBarry Smith PetscErrorCode ierr; 11363a40ed3dSBarry Smith 11373a40ed3dSBarry Smith PetscFunctionBegin; 1138ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1139f830108cSBarry Smith ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr); 1140ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1141f830108cSBarry Smith ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr); 11423a40ed3dSBarry Smith PetscFunctionReturn(0); 1143da3a660dSBarry Smith } 1144da3a660dSBarry Smith 
11454a2ae208SSatish Balay #undef __FUNCT__ 11464a2ae208SSatish Balay #define __FUNCT__ "MatMultTranspose_MPIAIJ" 1147dfbe8321SBarry Smith PetscErrorCode MatMultTranspose_MPIAIJ(Mat A,Vec xx,Vec yy) 1148da3a660dSBarry Smith { 1149416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1150dfbe8321SBarry Smith PetscErrorCode ierr; 1151ace3abfcSBarry Smith PetscBool merged; 1152da3a660dSBarry Smith 11533a40ed3dSBarry Smith PetscFunctionBegin; 1154a5ff213dSBarry Smith ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr); 1155da3a660dSBarry Smith /* do nondiagonal part */ 11567c922b88SBarry Smith ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr); 1157a5ff213dSBarry Smith if (!merged) { 1158da3a660dSBarry Smith /* send it on its way */ 1159ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1160da3a660dSBarry Smith /* do local part */ 11617c922b88SBarry Smith ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr); 1162da3a660dSBarry Smith /* receive remote parts: note this assumes the values are not actually */ 1163a5ff213dSBarry Smith /* added in yy until the next line, */ 1164ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1165a5ff213dSBarry Smith } else { 1166a5ff213dSBarry Smith /* do local part */ 1167a5ff213dSBarry Smith ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr); 1168a5ff213dSBarry Smith /* send it on its way */ 1169ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1170a5ff213dSBarry Smith /* values actually were received in the Begin() but we need to call this nop */ 1171ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1172a5ff213dSBarry Smith } 11733a40ed3dSBarry Smith PetscFunctionReturn(0); 1174da3a660dSBarry Smith } 1175da3a660dSBarry Smith 1176cd0d46ebSvictorle #undef __FUNCT__ 
11775fbd3699SBarry Smith #define __FUNCT__ "MatIsTranspose_MPIAIJ" 11787087cfbeSBarry Smith PetscErrorCode MatIsTranspose_MPIAIJ(Mat Amat,Mat Bmat,PetscReal tol,PetscBool *f) 1179cd0d46ebSvictorle { 11804f423910Svictorle MPI_Comm comm; 1181cd0d46ebSvictorle Mat_MPIAIJ *Aij = (Mat_MPIAIJ*) Amat->data, *Bij; 118266501d38Svictorle Mat Adia = Aij->A, Bdia, Aoff,Boff,*Aoffs,*Boffs; 1183cd0d46ebSvictorle IS Me,Notme; 11846849ba73SBarry Smith PetscErrorCode ierr; 1185b1d57f15SBarry Smith PetscInt M,N,first,last,*notme,i; 1186b1d57f15SBarry Smith PetscMPIInt size; 1187cd0d46ebSvictorle 1188cd0d46ebSvictorle PetscFunctionBegin; 118942e5f5b4Svictorle /* Easy test: symmetric diagonal block */ 119066501d38Svictorle Bij = (Mat_MPIAIJ*) Bmat->data; Bdia = Bij->A; 11915485867bSBarry Smith ierr = MatIsTranspose(Adia,Bdia,tol,f);CHKERRQ(ierr); 1192cd0d46ebSvictorle if (!*f) PetscFunctionReturn(0); 11934f423910Svictorle ierr = PetscObjectGetComm((PetscObject)Amat,&comm);CHKERRQ(ierr); 1194b1d57f15SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1195b1d57f15SBarry Smith if (size == 1) PetscFunctionReturn(0); 119642e5f5b4Svictorle 119742e5f5b4Svictorle /* Hard test: off-diagonal block. This takes a MatGetSubMatrix. 
*/ 1198cd0d46ebSvictorle ierr = MatGetSize(Amat,&M,&N);CHKERRQ(ierr); 1199cd0d46ebSvictorle ierr = MatGetOwnershipRange(Amat,&first,&last);CHKERRQ(ierr); 1200b1d57f15SBarry Smith ierr = PetscMalloc((N-last+first)*sizeof(PetscInt),¬me);CHKERRQ(ierr); 1201cd0d46ebSvictorle for (i=0; i<first; i++) notme[i] = i; 1202cd0d46ebSvictorle for (i=last; i<M; i++) notme[i-last+first] = i; 120370b3c8c7SBarry Smith ierr = ISCreateGeneral(MPI_COMM_SELF,N-last+first,notme,PETSC_COPY_VALUES,&Notme);CHKERRQ(ierr); 1204268466fbSBarry Smith ierr = ISCreateStride(MPI_COMM_SELF,last-first,first,1,&Me);CHKERRQ(ierr); 1205268466fbSBarry Smith ierr = MatGetSubMatrices(Amat,1,&Me,&Notme,MAT_INITIAL_MATRIX,&Aoffs);CHKERRQ(ierr); 120666501d38Svictorle Aoff = Aoffs[0]; 1207268466fbSBarry Smith ierr = MatGetSubMatrices(Bmat,1,&Notme,&Me,MAT_INITIAL_MATRIX,&Boffs);CHKERRQ(ierr); 120866501d38Svictorle Boff = Boffs[0]; 12095485867bSBarry Smith ierr = MatIsTranspose(Aoff,Boff,tol,f);CHKERRQ(ierr); 121066501d38Svictorle ierr = MatDestroyMatrices(1,&Aoffs);CHKERRQ(ierr); 121166501d38Svictorle ierr = MatDestroyMatrices(1,&Boffs);CHKERRQ(ierr); 12126bf464f9SBarry Smith ierr = ISDestroy(&Me);CHKERRQ(ierr); 12136bf464f9SBarry Smith ierr = ISDestroy(&Notme);CHKERRQ(ierr); 12143e0d0d19SHong Zhang ierr = PetscFree(notme);CHKERRQ(ierr); 1215cd0d46ebSvictorle PetscFunctionReturn(0); 1216cd0d46ebSvictorle } 1217cd0d46ebSvictorle 12184a2ae208SSatish Balay #undef __FUNCT__ 12194a2ae208SSatish Balay #define __FUNCT__ "MatMultTransposeAdd_MPIAIJ" 1220dfbe8321SBarry Smith PetscErrorCode MatMultTransposeAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz) 1221da3a660dSBarry Smith { 1222416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1223dfbe8321SBarry Smith PetscErrorCode ierr; 1224da3a660dSBarry Smith 12253a40ed3dSBarry Smith PetscFunctionBegin; 1226da3a660dSBarry Smith /* do nondiagonal part */ 12277c922b88SBarry Smith ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr); 1228da3a660dSBarry Smith /* send 
it on its way */ 1229ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1230da3a660dSBarry Smith /* do local part */ 12317c922b88SBarry Smith ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr); 1232a5ff213dSBarry Smith /* receive remote parts */ 1233ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 12343a40ed3dSBarry Smith PetscFunctionReturn(0); 1235da3a660dSBarry Smith } 1236da3a660dSBarry Smith 12371eb62cbbSBarry Smith /* 12381eb62cbbSBarry Smith This only works correctly for square matrices where the subblock A->A is the 12391eb62cbbSBarry Smith diagonal block 12401eb62cbbSBarry Smith */ 12414a2ae208SSatish Balay #undef __FUNCT__ 12424a2ae208SSatish Balay #define __FUNCT__ "MatGetDiagonal_MPIAIJ" 1243dfbe8321SBarry Smith PetscErrorCode MatGetDiagonal_MPIAIJ(Mat A,Vec v) 12441eb62cbbSBarry Smith { 1245dfbe8321SBarry Smith PetscErrorCode ierr; 1246416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 12473a40ed3dSBarry Smith 12483a40ed3dSBarry Smith PetscFunctionBegin; 1249ce94432eSBarry Smith if (A->rmap->N != A->cmap->N) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block"); 1250e7e72b3dSBarry Smith if (A->rmap->rstart != A->cmap->rstart || A->rmap->rend != A->cmap->rend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"row partition must equal col partition"); 12513a40ed3dSBarry Smith ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr); 12523a40ed3dSBarry Smith PetscFunctionReturn(0); 12531eb62cbbSBarry Smith } 12541eb62cbbSBarry Smith 12554a2ae208SSatish Balay #undef __FUNCT__ 12564a2ae208SSatish Balay #define __FUNCT__ "MatScale_MPIAIJ" 1257f4df32b1SMatthew Knepley PetscErrorCode MatScale_MPIAIJ(Mat A,PetscScalar aa) 1258052efed2SBarry Smith { 1259052efed2SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1260dfbe8321SBarry Smith PetscErrorCode ierr; 12613a40ed3dSBarry Smith 
12623a40ed3dSBarry Smith PetscFunctionBegin; 1263f4df32b1SMatthew Knepley ierr = MatScale(a->A,aa);CHKERRQ(ierr); 1264f4df32b1SMatthew Knepley ierr = MatScale(a->B,aa);CHKERRQ(ierr); 12653a40ed3dSBarry Smith PetscFunctionReturn(0); 1266052efed2SBarry Smith } 1267052efed2SBarry Smith 12684a2ae208SSatish Balay #undef __FUNCT__ 12694a2ae208SSatish Balay #define __FUNCT__ "MatDestroy_MPIAIJ" 1270dfbe8321SBarry Smith PetscErrorCode MatDestroy_MPIAIJ(Mat mat) 12711eb62cbbSBarry Smith { 127244a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 1273dfbe8321SBarry Smith PetscErrorCode ierr; 127483e2fdc7SBarry Smith 12753a40ed3dSBarry Smith PetscFunctionBegin; 1276aa482453SBarry Smith #if defined(PETSC_USE_LOG) 1277d0f46423SBarry Smith PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D",mat->rmap->N,mat->cmap->N); 1278a5a9c739SBarry Smith #endif 12798798bf22SSatish Balay ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr); 12806bf464f9SBarry Smith ierr = VecDestroy(&aij->diag);CHKERRQ(ierr); 12816bf464f9SBarry Smith ierr = MatDestroy(&aij->A);CHKERRQ(ierr); 12826bf464f9SBarry Smith ierr = MatDestroy(&aij->B);CHKERRQ(ierr); 1283aa482453SBarry Smith #if defined(PETSC_USE_CTABLE) 12846bc0bbbfSBarry Smith ierr = PetscTableDestroy(&aij->colmap);CHKERRQ(ierr); 1285b1fc9764SSatish Balay #else 128605b42c5fSBarry Smith ierr = PetscFree(aij->colmap);CHKERRQ(ierr); 1287b1fc9764SSatish Balay #endif 128805b42c5fSBarry Smith ierr = PetscFree(aij->garray);CHKERRQ(ierr); 12896bf464f9SBarry Smith ierr = VecDestroy(&aij->lvec);CHKERRQ(ierr); 12906bf464f9SBarry Smith ierr = VecScatterDestroy(&aij->Mvctx);CHKERRQ(ierr); 129103095fedSBarry Smith ierr = PetscFree2(aij->rowvalues,aij->rowindices);CHKERRQ(ierr); 12928aa348c1SBarry Smith ierr = PetscFree(aij->ld);CHKERRQ(ierr); 1293bf0cc555SLisandro Dalcin ierr = PetscFree(mat->data);CHKERRQ(ierr); 1294901853e0SKris Buschelman 1295dbd8c25aSHong Zhang ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr); 
1296bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C",NULL);CHKERRQ(ierr); 1297bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C",NULL);CHKERRQ(ierr); 1298bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C",NULL);CHKERRQ(ierr); 1299bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)mat,"MatIsTranspose_C",NULL);CHKERRQ(ierr); 1300bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocation_C",NULL);CHKERRQ(ierr); 1301bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocationCSR_C",NULL);CHKERRQ(ierr); 1302bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C",NULL);CHKERRQ(ierr); 1303bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpiaij_mpisbaij_C",NULL);CHKERRQ(ierr); 13043a40ed3dSBarry Smith PetscFunctionReturn(0); 13051eb62cbbSBarry Smith } 1306ee50ffe9SBarry Smith 13074a2ae208SSatish Balay #undef __FUNCT__ 13088e2fed03SBarry Smith #define __FUNCT__ "MatView_MPIAIJ_Binary" 1309dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_Binary(Mat mat,PetscViewer viewer) 13108e2fed03SBarry Smith { 13118e2fed03SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 13128e2fed03SBarry Smith Mat_SeqAIJ *A = (Mat_SeqAIJ*)aij->A->data; 13138e2fed03SBarry Smith Mat_SeqAIJ *B = (Mat_SeqAIJ*)aij->B->data; 13146849ba73SBarry Smith PetscErrorCode ierr; 131532dcc486SBarry Smith PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag; 13166f69ff64SBarry Smith int fd; 1317a788621eSSatish Balay PetscInt nz,header[4],*row_lengths,*range=0,rlen,i; 1318d0f46423SBarry Smith PetscInt nzmax,*column_indices,j,k,col,*garray = aij->garray,cnt,cstart = mat->cmap->rstart,rnz; 13198e2fed03SBarry Smith PetscScalar *column_values; 132085ebf7a4SBarry Smith PetscInt message_count,flowcontrolcount; 
1321b37d52dbSMark F. Adams FILE *file; 13228e2fed03SBarry Smith 13238e2fed03SBarry Smith PetscFunctionBegin; 1324ce94432eSBarry Smith ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr); 1325ce94432eSBarry Smith ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr); 13268e2fed03SBarry Smith nz = A->nz + B->nz; 1327958c9bccSBarry Smith if (!rank) { 13280700a824SBarry Smith header[0] = MAT_FILE_CLASSID; 1329d0f46423SBarry Smith header[1] = mat->rmap->N; 1330d0f46423SBarry Smith header[2] = mat->cmap->N; 13312205254eSKarl Rupp 1332ce94432eSBarry Smith ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 13338e2fed03SBarry Smith ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr); 13346f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 13358e2fed03SBarry Smith /* get largest number of rows any processor has */ 1336d0f46423SBarry Smith rlen = mat->rmap->n; 1337d0f46423SBarry Smith range = mat->rmap->range; 13382205254eSKarl Rupp for (i=1; i<size; i++) rlen = PetscMax(rlen,range[i+1] - range[i]); 13398e2fed03SBarry Smith } else { 1340ce94432eSBarry Smith ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 1341d0f46423SBarry Smith rlen = mat->rmap->n; 13428e2fed03SBarry Smith } 13438e2fed03SBarry Smith 13448e2fed03SBarry Smith /* load up the local row counts */ 1345b1d57f15SBarry Smith ierr = PetscMalloc((rlen+1)*sizeof(PetscInt),&row_lengths);CHKERRQ(ierr); 13462205254eSKarl Rupp for (i=0; i<mat->rmap->n; i++) row_lengths[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i]; 13478e2fed03SBarry Smith 13488e2fed03SBarry Smith /* store the row lengths to the file */ 134985ebf7a4SBarry Smith ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr); 1350958c9bccSBarry Smith if (!rank) { 1351d0f46423SBarry Smith ierr = 
PetscBinaryWrite(fd,row_lengths,mat->rmap->n,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 13528e2fed03SBarry Smith for (i=1; i<size; i++) { 1353639ff905SBarry Smith ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr); 13548e2fed03SBarry Smith rlen = range[i+1] - range[i]; 1355ce94432eSBarry Smith ierr = MPIULong_Recv(row_lengths,rlen,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 13566f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,row_lengths,rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 13578e2fed03SBarry Smith } 1358639ff905SBarry Smith ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr); 13598e2fed03SBarry Smith } else { 1360639ff905SBarry Smith ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr); 1361ce94432eSBarry Smith ierr = MPIULong_Send(row_lengths,mat->rmap->n,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 1362639ff905SBarry Smith ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr); 13638e2fed03SBarry Smith } 13648e2fed03SBarry Smith ierr = PetscFree(row_lengths);CHKERRQ(ierr); 13658e2fed03SBarry Smith 13668e2fed03SBarry Smith /* load up the local column indices */ 13671147fc2aSKarl Rupp nzmax = nz; /* th processor needs space a largest processor needs */ 1368ce94432eSBarry Smith ierr = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 1369b1d57f15SBarry Smith ierr = PetscMalloc((nzmax+1)*sizeof(PetscInt),&column_indices);CHKERRQ(ierr); 13708e2fed03SBarry Smith cnt = 0; 1371d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 13728e2fed03SBarry Smith for (j=B->i[i]; j<B->i[i+1]; j++) { 13738e2fed03SBarry Smith if ((col = garray[B->j[j]]) > cstart) break; 13748e2fed03SBarry Smith column_indices[cnt++] = col; 13758e2fed03SBarry Smith } 13762205254eSKarl Rupp for (k=A->i[i]; k<A->i[i+1]; k++) column_indices[cnt++] = A->j[k] + cstart; 13772205254eSKarl Rupp for (; 
j<B->i[i+1]; j++) column_indices[cnt++] = garray[B->j[j]]; 13788e2fed03SBarry Smith } 1379e32f2f54SBarry Smith if (cnt != A->nz + B->nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz); 13808e2fed03SBarry Smith 13818e2fed03SBarry Smith /* store the column indices to the file */ 138285ebf7a4SBarry Smith ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr); 1383958c9bccSBarry Smith if (!rank) { 13848e2fed03SBarry Smith MPI_Status status; 13856f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 13868e2fed03SBarry Smith for (i=1; i<size; i++) { 1387639ff905SBarry Smith ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr); 1388ce94432eSBarry Smith ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr); 1389e32f2f54SBarry Smith if (rnz > nzmax) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax); 1390ce94432eSBarry Smith ierr = MPIULong_Recv(column_indices,rnz,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 13916f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_indices,rnz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 13928e2fed03SBarry Smith } 1393639ff905SBarry Smith ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr); 13948e2fed03SBarry Smith } else { 1395639ff905SBarry Smith ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr); 1396ce94432eSBarry Smith ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 1397ce94432eSBarry Smith ierr = MPIULong_Send(column_indices,nz,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 1398639ff905SBarry Smith ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr); 13998e2fed03SBarry Smith } 14008e2fed03SBarry Smith ierr = 
PetscFree(column_indices);CHKERRQ(ierr); 14018e2fed03SBarry Smith 14028e2fed03SBarry Smith /* load up the local column values */ 14038e2fed03SBarry Smith ierr = PetscMalloc((nzmax+1)*sizeof(PetscScalar),&column_values);CHKERRQ(ierr); 14048e2fed03SBarry Smith cnt = 0; 1405d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 14068e2fed03SBarry Smith for (j=B->i[i]; j<B->i[i+1]; j++) { 14078e2fed03SBarry Smith if (garray[B->j[j]] > cstart) break; 14088e2fed03SBarry Smith column_values[cnt++] = B->a[j]; 14098e2fed03SBarry Smith } 14102205254eSKarl Rupp for (k=A->i[i]; k<A->i[i+1]; k++) column_values[cnt++] = A->a[k]; 14112205254eSKarl Rupp for (; j<B->i[i+1]; j++) column_values[cnt++] = B->a[j]; 14128e2fed03SBarry Smith } 1413e32f2f54SBarry Smith if (cnt != A->nz + B->nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz); 14148e2fed03SBarry Smith 14158e2fed03SBarry Smith /* store the column values to the file */ 141685ebf7a4SBarry Smith ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr); 1417958c9bccSBarry Smith if (!rank) { 14188e2fed03SBarry Smith MPI_Status status; 14196f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr); 14208e2fed03SBarry Smith for (i=1; i<size; i++) { 1421639ff905SBarry Smith ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr); 1422ce94432eSBarry Smith ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr); 1423e32f2f54SBarry Smith if (rnz > nzmax) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax); 1424ce94432eSBarry Smith ierr = MPIULong_Recv(column_values,rnz,MPIU_SCALAR,i,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 14256f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_values,rnz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr); 14268e2fed03SBarry Smith } 1427639ff905SBarry Smith 
ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr); 14288e2fed03SBarry Smith } else { 1429639ff905SBarry Smith ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr); 1430ce94432eSBarry Smith ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 1431ce94432eSBarry Smith ierr = MPIULong_Send(column_values,nz,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 1432639ff905SBarry Smith ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr); 14338e2fed03SBarry Smith } 14348e2fed03SBarry Smith ierr = PetscFree(column_values);CHKERRQ(ierr); 1435b37d52dbSMark F. Adams 1436b37d52dbSMark F. Adams ierr = PetscViewerBinaryGetInfoPointer(viewer,&file);CHKERRQ(ierr); 14372205254eSKarl Rupp if (file) fprintf(file,"-matload_block_size %d\n",(int)mat->rmap->bs); 14388e2fed03SBarry Smith PetscFunctionReturn(0); 14398e2fed03SBarry Smith } 14408e2fed03SBarry Smith 14419804daf3SBarry Smith #include <petscdraw.h> 14428e2fed03SBarry Smith #undef __FUNCT__ 14434a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ_ASCIIorDraworSocket" 1444dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer) 1445416022c9SBarry Smith { 144644a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 1447dfbe8321SBarry Smith PetscErrorCode ierr; 144832dcc486SBarry Smith PetscMPIInt rank = aij->rank,size = aij->size; 1449ace3abfcSBarry Smith PetscBool isdraw,iascii,isbinary; 1450b0a32e0cSBarry Smith PetscViewer sviewer; 1451f3ef73ceSBarry Smith PetscViewerFormat format; 1452416022c9SBarry Smith 14533a40ed3dSBarry Smith PetscFunctionBegin; 1454251f4c67SDmitry Karpeev ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 1455251f4c67SDmitry Karpeev ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 1456251f4c67SDmitry Karpeev ierr = 
PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 145732077d6dSBarry Smith if (iascii) { 1458b0a32e0cSBarry Smith ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 1459456192e2SBarry Smith if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) { 14604e220ebcSLois Curfman McInnes MatInfo info; 1461ace3abfcSBarry Smith PetscBool inodes; 1462923f20ffSKris Buschelman 1463ce94432eSBarry Smith ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr); 1464888f2ed8SSatish Balay ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr); 14650298fd71SBarry Smith ierr = MatInodeGetInodeSizes(aij->A,NULL,(PetscInt**)&inodes,NULL);CHKERRQ(ierr); 14667b23a99aSBarry Smith ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr); 1467923f20ffSKris Buschelman if (!inodes) { 146877431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, not using I-node routines\n", 1469d0f46423SBarry Smith rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr); 14706831982aSBarry Smith } else { 147177431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, using I-node routines\n", 1472d0f46423SBarry Smith rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr); 14736831982aSBarry Smith } 1474888f2ed8SSatish Balay ierr = MatGetInfo(aij->A,MAT_LOCAL,&info);CHKERRQ(ierr); 147577431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr); 1476888f2ed8SSatish Balay ierr = MatGetInfo(aij->B,MAT_LOCAL,&info);CHKERRQ(ierr); 147777431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr); 1478b0a32e0cSBarry Smith ierr = 
PetscViewerFlush(viewer);CHKERRQ(ierr); 14797b23a99aSBarry Smith ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr); 148007d81ca4SBarry Smith ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr); 1481a40aa06bSLois Curfman McInnes ierr = VecScatterView(aij->Mvctx,viewer);CHKERRQ(ierr); 14823a40ed3dSBarry Smith PetscFunctionReturn(0); 1483fb9695e5SSatish Balay } else if (format == PETSC_VIEWER_ASCII_INFO) { 1484923f20ffSKris Buschelman PetscInt inodecount,inodelimit,*inodes; 1485923f20ffSKris Buschelman ierr = MatInodeGetInodeSizes(aij->A,&inodecount,&inodes,&inodelimit);CHKERRQ(ierr); 1486923f20ffSKris Buschelman if (inodes) { 1487923f20ffSKris Buschelman ierr = PetscViewerASCIIPrintf(viewer,"using I-node (on process 0) routines: found %D nodes, limit used is %D\n",inodecount,inodelimit);CHKERRQ(ierr); 1488d38fa0fbSBarry Smith } else { 1489d38fa0fbSBarry Smith ierr = PetscViewerASCIIPrintf(viewer,"not using I-node (on process 0) routines\n");CHKERRQ(ierr); 1490d38fa0fbSBarry Smith } 14913a40ed3dSBarry Smith PetscFunctionReturn(0); 14924aedb280SBarry Smith } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) { 14934aedb280SBarry Smith PetscFunctionReturn(0); 149408480c60SBarry Smith } 14958e2fed03SBarry Smith } else if (isbinary) { 14968e2fed03SBarry Smith if (size == 1) { 14977adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr); 14988e2fed03SBarry Smith ierr = MatView(aij->A,viewer);CHKERRQ(ierr); 14998e2fed03SBarry Smith } else { 15008e2fed03SBarry Smith ierr = MatView_MPIAIJ_Binary(mat,viewer);CHKERRQ(ierr); 15018e2fed03SBarry Smith } 15028e2fed03SBarry Smith PetscFunctionReturn(0); 15030f5bd95cSBarry Smith } else if (isdraw) { 1504b0a32e0cSBarry Smith PetscDraw draw; 1505ace3abfcSBarry Smith PetscBool isnull; 1506b0a32e0cSBarry Smith ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr); 1507b0a32e0cSBarry Smith ierr 
= PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0); 150819bcc07fSBarry Smith } 150919bcc07fSBarry Smith 151017699dbbSLois Curfman McInnes if (size == 1) { 15117adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr); 151278b31e54SBarry Smith ierr = MatView(aij->A,viewer);CHKERRQ(ierr); 15133a40ed3dSBarry Smith } else { 151495373324SBarry Smith /* assemble the entire matrix onto first processor. */ 151595373324SBarry Smith Mat A; 1516ec8511deSBarry Smith Mat_SeqAIJ *Aloc; 1517d0f46423SBarry Smith PetscInt M = mat->rmap->N,N = mat->cmap->N,m,*ai,*aj,row,*cols,i,*ct; 1518dd6ea824SBarry Smith MatScalar *a; 15192ee70a88SLois Curfman McInnes 152032a366e4SMatthew Knepley if (mat->rmap->N > 1024) { 1521ace3abfcSBarry Smith PetscBool flg = PETSC_FALSE; 152232a366e4SMatthew Knepley 15230298fd71SBarry Smith ierr = PetscOptionsGetBool(((PetscObject) mat)->prefix, "-mat_ascii_output_large", &flg,NULL);CHKERRQ(ierr); 1524ce94432eSBarry Smith if (!flg) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"ASCII matrix output not allowed for matrices with more than 1024 rows, use binary format instead.\nYou can override this restriction using -mat_ascii_output_large."); 152532a366e4SMatthew Knepley } 15260805154bSBarry Smith 1527ce94432eSBarry Smith ierr = MatCreate(PetscObjectComm((PetscObject)mat),&A);CHKERRQ(ierr); 152817699dbbSLois Curfman McInnes if (!rank) { 1529f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr); 15303a40ed3dSBarry Smith } else { 1531f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr); 153295373324SBarry Smith } 1533f204ca49SKris Buschelman /* This is just a temporary matrix, so explicitly using MATMPIAIJ is probably best */ 1534f204ca49SKris Buschelman ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr); 15350298fd71SBarry Smith ierr = MatMPIAIJSetPreallocation(A,0,NULL,0,NULL);CHKERRQ(ierr); 15362b82e772SSatish Balay ierr = 
MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr); 153752e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,A);CHKERRQ(ierr); 1538416022c9SBarry Smith 153995373324SBarry Smith /* copy over the A part */ 1540ec8511deSBarry Smith Aloc = (Mat_SeqAIJ*)aij->A->data; 1541d0f46423SBarry Smith m = aij->A->rmap->n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a; 1542d0f46423SBarry Smith row = mat->rmap->rstart; 15432205254eSKarl Rupp for (i=0; i<ai[m]; i++) aj[i] += mat->cmap->rstart; 154495373324SBarry Smith for (i=0; i<m; i++) { 1545416022c9SBarry Smith ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],aj,a,INSERT_VALUES);CHKERRQ(ierr); 154626fbe8dcSKarl Rupp row++; 154726fbe8dcSKarl Rupp a += ai[i+1]-ai[i]; aj += ai[i+1]-ai[i]; 154895373324SBarry Smith } 15492ee70a88SLois Curfman McInnes aj = Aloc->j; 15502205254eSKarl Rupp for (i=0; i<ai[m]; i++) aj[i] -= mat->cmap->rstart; 155195373324SBarry Smith 155295373324SBarry Smith /* copy over the B part */ 1553ec8511deSBarry Smith Aloc = (Mat_SeqAIJ*)aij->B->data; 1554d0f46423SBarry Smith m = aij->B->rmap->n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a; 1555d0f46423SBarry Smith row = mat->rmap->rstart; 1556b1d57f15SBarry Smith ierr = PetscMalloc((ai[m]+1)*sizeof(PetscInt),&cols);CHKERRQ(ierr); 1557b0a32e0cSBarry Smith ct = cols; 15582205254eSKarl Rupp for (i=0; i<ai[m]; i++) cols[i] = aij->garray[aj[i]]; 155995373324SBarry Smith for (i=0; i<m; i++) { 1560416022c9SBarry Smith ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],cols,a,INSERT_VALUES);CHKERRQ(ierr); 15612205254eSKarl Rupp row++; 15622205254eSKarl Rupp a += ai[i+1]-ai[i]; cols += ai[i+1]-ai[i]; 156395373324SBarry Smith } 1564606d414cSSatish Balay ierr = PetscFree(ct);CHKERRQ(ierr); 15656d4a8577SBarry Smith ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 15666d4a8577SBarry Smith ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 156755843e3eSBarry Smith /* 156855843e3eSBarry Smith Everyone has to call to draw the matrix since the graphics waits are 
1569b0a32e0cSBarry Smith synchronized across all processors that share the PetscDraw object 157055843e3eSBarry Smith */ 1571b0a32e0cSBarry Smith ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr); 1572e03a110bSBarry Smith if (!rank) { 15737adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)((Mat_MPIAIJ*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr); 15747566de4bSShri Abhyankar /* Set the type name to MATMPIAIJ so that the correct type can be printed out by PetscObjectPrintClassNamePrefixType() in MatView_SeqAIJ_ASCII()*/ 15757566de4bSShri Abhyankar PetscStrcpy(((PetscObject)((Mat_MPIAIJ*)(A->data))->A)->type_name,MATMPIAIJ); 15766831982aSBarry Smith ierr = MatView(((Mat_MPIAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr); 157795373324SBarry Smith } 1578b0a32e0cSBarry Smith ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr); 15796bf464f9SBarry Smith ierr = MatDestroy(&A);CHKERRQ(ierr); 158095373324SBarry Smith } 15813a40ed3dSBarry Smith PetscFunctionReturn(0); 15821eb62cbbSBarry Smith } 15831eb62cbbSBarry Smith 15844a2ae208SSatish Balay #undef __FUNCT__ 15854a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ" 1586dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ(Mat mat,PetscViewer viewer) 1587416022c9SBarry Smith { 1588dfbe8321SBarry Smith PetscErrorCode ierr; 1589ace3abfcSBarry Smith PetscBool iascii,isdraw,issocket,isbinary; 1590416022c9SBarry Smith 15913a40ed3dSBarry Smith PetscFunctionBegin; 1592251f4c67SDmitry Karpeev ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 1593251f4c67SDmitry Karpeev ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 1594251f4c67SDmitry Karpeev ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 1595251f4c67SDmitry Karpeev ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr); 159632077d6dSBarry Smith if (iascii || isdraw || 
isbinary || issocket) { 15977b2a1423SBarry Smith ierr = MatView_MPIAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr); 1598416022c9SBarry Smith } 15993a40ed3dSBarry Smith PetscFunctionReturn(0); 1600416022c9SBarry Smith } 1601416022c9SBarry Smith 16024a2ae208SSatish Balay #undef __FUNCT__ 160341f059aeSBarry Smith #define __FUNCT__ "MatSOR_MPIAIJ" 160441f059aeSBarry Smith PetscErrorCode MatSOR_MPIAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx) 16058a729477SBarry Smith { 160644a69424SLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 1607dfbe8321SBarry Smith PetscErrorCode ierr; 16086987fefcSBarry Smith Vec bb1 = 0; 1609ace3abfcSBarry Smith PetscBool hasop; 16108a729477SBarry Smith 16113a40ed3dSBarry Smith PetscFunctionBegin; 1612a2b30743SBarry Smith if (flag == SOR_APPLY_UPPER) { 161341f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 1614a2b30743SBarry Smith PetscFunctionReturn(0); 1615a2b30743SBarry Smith } 1616a2b30743SBarry Smith 16174e980039SJed Brown if (its > 1 || ~flag & SOR_ZERO_INITIAL_GUESS || flag & SOR_EISENSTAT) { 16184e980039SJed Brown ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr); 16194e980039SJed Brown } 16204e980039SJed Brown 1621c16cb8f2SBarry Smith if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) { 1622da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 162341f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 16242798e883SHong Zhang its--; 1625da3a660dSBarry Smith } 16262798e883SHong Zhang 16272798e883SHong Zhang while (its--) { 1628ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1629ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 16302798e883SHong Zhang 1631c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1632efb30889SBarry 
Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1633c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 16342798e883SHong Zhang 1635c14dc6b6SHong Zhang /* local sweep */ 163641f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr); 16372798e883SHong Zhang } 16383a40ed3dSBarry Smith } else if (flag & SOR_LOCAL_FORWARD_SWEEP) { 1639da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 164041f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 16412798e883SHong Zhang its--; 1642da3a660dSBarry Smith } 16432798e883SHong Zhang while (its--) { 1644ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1645ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 16462798e883SHong Zhang 1647c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1648efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1649c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 1650c14dc6b6SHong Zhang 1651c14dc6b6SHong Zhang /* local sweep */ 165241f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr); 16532798e883SHong Zhang } 16543a40ed3dSBarry Smith } else if (flag & SOR_LOCAL_BACKWARD_SWEEP) { 1655da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 165641f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 16572798e883SHong Zhang its--; 1658da3a660dSBarry Smith } 16592798e883SHong Zhang while (its--) { 1660ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1661ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 16622798e883SHong Zhang 
1663c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1664efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1665c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 16662798e883SHong Zhang 1667c14dc6b6SHong Zhang /* local sweep */ 166841f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr); 16692798e883SHong Zhang } 1670a7420bb7SBarry Smith } else if (flag & SOR_EISENSTAT) { 1671a7420bb7SBarry Smith Vec xx1; 1672a7420bb7SBarry Smith 1673a7420bb7SBarry Smith ierr = VecDuplicate(bb,&xx1);CHKERRQ(ierr); 167441f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_BACKWARD_SWEEP),fshift,lits,1,xx);CHKERRQ(ierr); 1675a7420bb7SBarry Smith 1676a7420bb7SBarry Smith ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1677a7420bb7SBarry Smith ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1678a7420bb7SBarry Smith if (!mat->diag) { 16790298fd71SBarry Smith ierr = MatGetVecs(matin,&mat->diag,NULL);CHKERRQ(ierr); 1680a7420bb7SBarry Smith ierr = MatGetDiagonal(matin,mat->diag);CHKERRQ(ierr); 1681a7420bb7SBarry Smith } 1682bd0c2dcbSBarry Smith ierr = MatHasOperation(matin,MATOP_MULT_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr); 1683bd0c2dcbSBarry Smith if (hasop) { 1684bd0c2dcbSBarry Smith ierr = MatMultDiagonalBlock(matin,xx,bb1);CHKERRQ(ierr); 1685bd0c2dcbSBarry Smith } else { 1686a7420bb7SBarry Smith ierr = VecPointwiseMult(bb1,mat->diag,xx);CHKERRQ(ierr); 1687bd0c2dcbSBarry Smith } 1688887ee2caSBarry Smith ierr = VecAYPX(bb1,(omega-2.0)/omega,bb);CHKERRQ(ierr); 1689887ee2caSBarry Smith 1690a7420bb7SBarry Smith ierr = MatMultAdd(mat->B,mat->lvec,bb1,bb1);CHKERRQ(ierr); 1691a7420bb7SBarry Smith 1692a7420bb7SBarry Smith /* local sweep */ 169341f059aeSBarry Smith ierr = 
(*mat->A->ops->sor)(mat->A,bb1,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_FORWARD_SWEEP),fshift,lits,1,xx1);CHKERRQ(ierr); 1694a7420bb7SBarry Smith ierr = VecAXPY(xx,1.0,xx1);CHKERRQ(ierr); 16956bf464f9SBarry Smith ierr = VecDestroy(&xx1);CHKERRQ(ierr); 1696ce94432eSBarry Smith } else SETERRQ(PetscObjectComm((PetscObject)matin),PETSC_ERR_SUP,"Parallel SOR not supported"); 1697c14dc6b6SHong Zhang 16986bf464f9SBarry Smith ierr = VecDestroy(&bb1);CHKERRQ(ierr); 16993a40ed3dSBarry Smith PetscFunctionReturn(0); 17008a729477SBarry Smith } 1701a66be287SLois Curfman McInnes 17024a2ae208SSatish Balay #undef __FUNCT__ 170342e855d1Svictor #define __FUNCT__ "MatPermute_MPIAIJ" 170442e855d1Svictor PetscErrorCode MatPermute_MPIAIJ(Mat A,IS rowp,IS colp,Mat *B) 170542e855d1Svictor { 170672e6a0cfSJed Brown Mat aA,aB,Aperm; 170772e6a0cfSJed Brown const PetscInt *rwant,*cwant,*gcols,*ai,*bi,*aj,*bj; 170872e6a0cfSJed Brown PetscScalar *aa,*ba; 170972e6a0cfSJed Brown PetscInt i,j,m,n,ng,anz,bnz,*dnnz,*onnz,*tdnnz,*tonnz,*rdest,*cdest,*work,*gcdest; 171072e6a0cfSJed Brown PetscSF rowsf,sf; 17110298fd71SBarry Smith IS parcolp = NULL; 171272e6a0cfSJed Brown PetscBool done; 171342e855d1Svictor PetscErrorCode ierr; 171442e855d1Svictor 171542e855d1Svictor PetscFunctionBegin; 171672e6a0cfSJed Brown ierr = MatGetLocalSize(A,&m,&n);CHKERRQ(ierr); 171772e6a0cfSJed Brown ierr = ISGetIndices(rowp,&rwant);CHKERRQ(ierr); 171872e6a0cfSJed Brown ierr = ISGetIndices(colp,&cwant);CHKERRQ(ierr); 171972e6a0cfSJed Brown ierr = PetscMalloc3(PetscMax(m,n),PetscInt,&work,m,PetscInt,&rdest,n,PetscInt,&cdest);CHKERRQ(ierr); 172072e6a0cfSJed Brown 172172e6a0cfSJed Brown /* Invert row permutation to find out where my rows should go */ 1722ce94432eSBarry Smith ierr = PetscSFCreate(PetscObjectComm((PetscObject)A),&rowsf);CHKERRQ(ierr); 17230298fd71SBarry Smith ierr = PetscSFSetGraphLayout(rowsf,A->rmap,A->rmap->n,NULL,PETSC_OWN_POINTER,rwant);CHKERRQ(ierr); 1724e9e74f11SJed Brown ierr = 
PetscSFSetFromOptions(rowsf);CHKERRQ(ierr); 172572e6a0cfSJed Brown for (i=0; i<m; i++) work[i] = A->rmap->rstart + i; 17268bfbc91cSJed Brown ierr = PetscSFReduceBegin(rowsf,MPIU_INT,work,rdest,MPIU_REPLACE);CHKERRQ(ierr); 17278bfbc91cSJed Brown ierr = PetscSFReduceEnd(rowsf,MPIU_INT,work,rdest,MPIU_REPLACE);CHKERRQ(ierr); 172872e6a0cfSJed Brown 172972e6a0cfSJed Brown /* Invert column permutation to find out where my columns should go */ 1730ce94432eSBarry Smith ierr = PetscSFCreate(PetscObjectComm((PetscObject)A),&sf);CHKERRQ(ierr); 17310298fd71SBarry Smith ierr = PetscSFSetGraphLayout(sf,A->cmap,A->cmap->n,NULL,PETSC_OWN_POINTER,cwant);CHKERRQ(ierr); 1732e9e74f11SJed Brown ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr); 173372e6a0cfSJed Brown for (i=0; i<n; i++) work[i] = A->cmap->rstart + i; 17348bfbc91cSJed Brown ierr = PetscSFReduceBegin(sf,MPIU_INT,work,cdest,MPIU_REPLACE);CHKERRQ(ierr); 17358bfbc91cSJed Brown ierr = PetscSFReduceEnd(sf,MPIU_INT,work,cdest,MPIU_REPLACE);CHKERRQ(ierr); 173672e6a0cfSJed Brown ierr = PetscSFDestroy(&sf);CHKERRQ(ierr); 173772e6a0cfSJed Brown 173872e6a0cfSJed Brown ierr = ISRestoreIndices(rowp,&rwant);CHKERRQ(ierr); 173972e6a0cfSJed Brown ierr = ISRestoreIndices(colp,&cwant);CHKERRQ(ierr); 174072e6a0cfSJed Brown ierr = MatMPIAIJGetSeqAIJ(A,&aA,&aB,&gcols);CHKERRQ(ierr); 174172e6a0cfSJed Brown 174272e6a0cfSJed Brown /* Find out where my gcols should go */ 17430298fd71SBarry Smith ierr = MatGetSize(aB,NULL,&ng);CHKERRQ(ierr); 174472e6a0cfSJed Brown ierr = PetscMalloc(ng*sizeof(PetscInt),&gcdest);CHKERRQ(ierr); 1745ce94432eSBarry Smith ierr = PetscSFCreate(PetscObjectComm((PetscObject)A),&sf);CHKERRQ(ierr); 17460298fd71SBarry Smith ierr = PetscSFSetGraphLayout(sf,A->cmap,ng,NULL,PETSC_OWN_POINTER,gcols);CHKERRQ(ierr); 1747e9e74f11SJed Brown ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr); 174872e6a0cfSJed Brown ierr = PetscSFBcastBegin(sf,MPIU_INT,cdest,gcdest);CHKERRQ(ierr); 174972e6a0cfSJed Brown ierr = 
PetscSFBcastEnd(sf,MPIU_INT,cdest,gcdest);CHKERRQ(ierr); 175072e6a0cfSJed Brown ierr = PetscSFDestroy(&sf);CHKERRQ(ierr); 175172e6a0cfSJed Brown 175272e6a0cfSJed Brown ierr = PetscMalloc4(m,PetscInt,&dnnz,m,PetscInt,&onnz,m,PetscInt,&tdnnz,m,PetscInt,&tonnz);CHKERRQ(ierr); 175372e6a0cfSJed Brown ierr = PetscMemzero(dnnz,m*sizeof(PetscInt));CHKERRQ(ierr); 175472e6a0cfSJed Brown ierr = PetscMemzero(onnz,m*sizeof(PetscInt));CHKERRQ(ierr); 175572e6a0cfSJed Brown ierr = MatGetRowIJ(aA,0,PETSC_FALSE,PETSC_FALSE,&anz,&ai,&aj,&done);CHKERRQ(ierr); 175672e6a0cfSJed Brown ierr = MatGetRowIJ(aB,0,PETSC_FALSE,PETSC_FALSE,&bnz,&bi,&bj,&done);CHKERRQ(ierr); 175772e6a0cfSJed Brown for (i=0; i<m; i++) { 175872e6a0cfSJed Brown PetscInt row = rdest[i],rowner; 175972e6a0cfSJed Brown ierr = PetscLayoutFindOwner(A->rmap,row,&rowner);CHKERRQ(ierr); 176072e6a0cfSJed Brown for (j=ai[i]; j<ai[i+1]; j++) { 176172e6a0cfSJed Brown PetscInt cowner,col = cdest[aj[j]]; 176272e6a0cfSJed Brown ierr = PetscLayoutFindOwner(A->cmap,col,&cowner);CHKERRQ(ierr); /* Could build an index for the columns to eliminate this search */ 176372e6a0cfSJed Brown if (rowner == cowner) dnnz[i]++; 176472e6a0cfSJed Brown else onnz[i]++; 176572e6a0cfSJed Brown } 176672e6a0cfSJed Brown for (j=bi[i]; j<bi[i+1]; j++) { 176772e6a0cfSJed Brown PetscInt cowner,col = gcdest[bj[j]]; 176872e6a0cfSJed Brown ierr = PetscLayoutFindOwner(A->cmap,col,&cowner);CHKERRQ(ierr); 176972e6a0cfSJed Brown if (rowner == cowner) dnnz[i]++; 177072e6a0cfSJed Brown else onnz[i]++; 177172e6a0cfSJed Brown } 177272e6a0cfSJed Brown } 177372e6a0cfSJed Brown ierr = PetscMemzero(tdnnz,m*sizeof(PetscInt));CHKERRQ(ierr); 177472e6a0cfSJed Brown ierr = PetscMemzero(tonnz,m*sizeof(PetscInt));CHKERRQ(ierr); 177572e6a0cfSJed Brown ierr = PetscSFBcastBegin(rowsf,MPIU_INT,dnnz,tdnnz);CHKERRQ(ierr); 177672e6a0cfSJed Brown ierr = PetscSFBcastEnd(rowsf,MPIU_INT,dnnz,tdnnz);CHKERRQ(ierr); 177772e6a0cfSJed Brown ierr = 
PetscSFBcastBegin(rowsf,MPIU_INT,onnz,tonnz);CHKERRQ(ierr); 177872e6a0cfSJed Brown ierr = PetscSFBcastEnd(rowsf,MPIU_INT,onnz,tonnz);CHKERRQ(ierr); 177972e6a0cfSJed Brown ierr = PetscSFDestroy(&rowsf);CHKERRQ(ierr); 178072e6a0cfSJed Brown 1781ce94432eSBarry Smith ierr = MatCreateAIJ(PetscObjectComm((PetscObject)A),A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N,0,tdnnz,0,tonnz,&Aperm);CHKERRQ(ierr); 178272e6a0cfSJed Brown ierr = MatSeqAIJGetArray(aA,&aa);CHKERRQ(ierr); 178372e6a0cfSJed Brown ierr = MatSeqAIJGetArray(aB,&ba);CHKERRQ(ierr); 178472e6a0cfSJed Brown for (i=0; i<m; i++) { 178572e6a0cfSJed Brown PetscInt *acols = dnnz,*bcols = onnz; /* Repurpose now-unneeded arrays */ 178672e6a0cfSJed Brown PetscInt rowlen; 178772e6a0cfSJed Brown rowlen = ai[i+1] - ai[i]; 178872e6a0cfSJed Brown for (j=0; j<rowlen; j++) acols[j] = cdest[aj[ai[i]+j]]; 178972e6a0cfSJed Brown ierr = MatSetValues(Aperm,1,&rdest[i],rowlen,acols,aa+ai[i],INSERT_VALUES);CHKERRQ(ierr); 179072e6a0cfSJed Brown rowlen = bi[i+1] - bi[i]; 179172e6a0cfSJed Brown for (j=0; j<rowlen; j++) bcols[j] = gcdest[bj[bi[i]+j]]; 179272e6a0cfSJed Brown ierr = MatSetValues(Aperm,1,&rdest[i],rowlen,bcols,ba+bi[i],INSERT_VALUES);CHKERRQ(ierr); 179372e6a0cfSJed Brown } 179472e6a0cfSJed Brown ierr = MatAssemblyBegin(Aperm,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 179572e6a0cfSJed Brown ierr = MatAssemblyEnd(Aperm,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 179672e6a0cfSJed Brown ierr = MatRestoreRowIJ(aA,0,PETSC_FALSE,PETSC_FALSE,&anz,&ai,&aj,&done);CHKERRQ(ierr); 179772e6a0cfSJed Brown ierr = MatRestoreRowIJ(aB,0,PETSC_FALSE,PETSC_FALSE,&bnz,&bi,&bj,&done);CHKERRQ(ierr); 179872e6a0cfSJed Brown ierr = MatSeqAIJRestoreArray(aA,&aa);CHKERRQ(ierr); 179972e6a0cfSJed Brown ierr = MatSeqAIJRestoreArray(aB,&ba);CHKERRQ(ierr); 180072e6a0cfSJed Brown ierr = PetscFree4(dnnz,onnz,tdnnz,tonnz);CHKERRQ(ierr); 180172e6a0cfSJed Brown ierr = PetscFree3(work,rdest,cdest);CHKERRQ(ierr); 180272e6a0cfSJed Brown ierr = PetscFree(gcdest);CHKERRQ(ierr); 
180372e6a0cfSJed Brown if (parcolp) {ierr = ISDestroy(&colp);CHKERRQ(ierr);} 180472e6a0cfSJed Brown *B = Aperm; 180542e855d1Svictor PetscFunctionReturn(0); 180642e855d1Svictor } 180742e855d1Svictor 180842e855d1Svictor #undef __FUNCT__ 18094a2ae208SSatish Balay #define __FUNCT__ "MatGetInfo_MPIAIJ" 1810dfbe8321SBarry Smith PetscErrorCode MatGetInfo_MPIAIJ(Mat matin,MatInfoType flag,MatInfo *info) 1811a66be287SLois Curfman McInnes { 1812a66be287SLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 1813a66be287SLois Curfman McInnes Mat A = mat->A,B = mat->B; 1814dfbe8321SBarry Smith PetscErrorCode ierr; 1815329f5518SBarry Smith PetscReal isend[5],irecv[5]; 1816a66be287SLois Curfman McInnes 18173a40ed3dSBarry Smith PetscFunctionBegin; 18184e220ebcSLois Curfman McInnes info->block_size = 1.0; 18194e220ebcSLois Curfman McInnes ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr); 18202205254eSKarl Rupp 18214e220ebcSLois Curfman McInnes isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded; 18224e220ebcSLois Curfman McInnes isend[3] = info->memory; isend[4] = info->mallocs; 18232205254eSKarl Rupp 18244e220ebcSLois Curfman McInnes ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr); 18252205254eSKarl Rupp 18264e220ebcSLois Curfman McInnes isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded; 18274e220ebcSLois Curfman McInnes isend[3] += info->memory; isend[4] += info->mallocs; 1828a66be287SLois Curfman McInnes if (flag == MAT_LOCAL) { 18294e220ebcSLois Curfman McInnes info->nz_used = isend[0]; 18304e220ebcSLois Curfman McInnes info->nz_allocated = isend[1]; 18314e220ebcSLois Curfman McInnes info->nz_unneeded = isend[2]; 18324e220ebcSLois Curfman McInnes info->memory = isend[3]; 18334e220ebcSLois Curfman McInnes info->mallocs = isend[4]; 1834a66be287SLois Curfman McInnes } else if (flag == MAT_GLOBAL_MAX) { 1835ce94432eSBarry Smith ierr = 
MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr); 18362205254eSKarl Rupp 18374e220ebcSLois Curfman McInnes info->nz_used = irecv[0]; 18384e220ebcSLois Curfman McInnes info->nz_allocated = irecv[1]; 18394e220ebcSLois Curfman McInnes info->nz_unneeded = irecv[2]; 18404e220ebcSLois Curfman McInnes info->memory = irecv[3]; 18414e220ebcSLois Curfman McInnes info->mallocs = irecv[4]; 1842a66be287SLois Curfman McInnes } else if (flag == MAT_GLOBAL_SUM) { 1843ce94432eSBarry Smith ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr); 18442205254eSKarl Rupp 18454e220ebcSLois Curfman McInnes info->nz_used = irecv[0]; 18464e220ebcSLois Curfman McInnes info->nz_allocated = irecv[1]; 18474e220ebcSLois Curfman McInnes info->nz_unneeded = irecv[2]; 18484e220ebcSLois Curfman McInnes info->memory = irecv[3]; 18494e220ebcSLois Curfman McInnes info->mallocs = irecv[4]; 1850a66be287SLois Curfman McInnes } 18514e220ebcSLois Curfman McInnes info->fill_ratio_given = 0; /* no parallel LU/ILU/Cholesky */ 18524e220ebcSLois Curfman McInnes info->fill_ratio_needed = 0; 18534e220ebcSLois Curfman McInnes info->factor_mallocs = 0; 18543a40ed3dSBarry Smith PetscFunctionReturn(0); 1855a66be287SLois Curfman McInnes } 1856a66be287SLois Curfman McInnes 18574a2ae208SSatish Balay #undef __FUNCT__ 18584a2ae208SSatish Balay #define __FUNCT__ "MatSetOption_MPIAIJ" 1859ace3abfcSBarry Smith PetscErrorCode MatSetOption_MPIAIJ(Mat A,MatOption op,PetscBool flg) 1860c74985f6SBarry Smith { 1861c0bbcb79SLois Curfman McInnes Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1862dfbe8321SBarry Smith PetscErrorCode ierr; 1863c74985f6SBarry Smith 18643a40ed3dSBarry Smith PetscFunctionBegin; 186512c028f9SKris Buschelman switch (op) { 1866512a5fc5SBarry Smith case MAT_NEW_NONZERO_LOCATIONS: 186712c028f9SKris Buschelman case MAT_NEW_NONZERO_ALLOCATION_ERR: 186828b2fa4aSMatthew Knepley case MAT_UNUSED_NONZERO_LOCATION_ERR: 
1869a9817697SBarry Smith case MAT_KEEP_NONZERO_PATTERN: 187012c028f9SKris Buschelman case MAT_NEW_NONZERO_LOCATION_ERR: 187112c028f9SKris Buschelman case MAT_USE_INODES: 187212c028f9SKris Buschelman case MAT_IGNORE_ZERO_ENTRIES: 1873fa1f0d2cSMatthew G Knepley MatCheckPreallocated(A,1); 18744e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 18754e0d8c25SBarry Smith ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr); 187612c028f9SKris Buschelman break; 187712c028f9SKris Buschelman case MAT_ROW_ORIENTED: 18784e0d8c25SBarry Smith a->roworiented = flg; 18792205254eSKarl Rupp 18804e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 18814e0d8c25SBarry Smith ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr); 188212c028f9SKris Buschelman break; 18834e0d8c25SBarry Smith case MAT_NEW_DIAGONALS: 1884290bbb0aSBarry Smith ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr); 188512c028f9SKris Buschelman break; 188612c028f9SKris Buschelman case MAT_IGNORE_OFF_PROC_ENTRIES: 18875c0f0b64SBarry Smith a->donotstash = flg; 188812c028f9SKris Buschelman break; 1889ffa07934SHong Zhang case MAT_SPD: 1890ffa07934SHong Zhang A->spd_set = PETSC_TRUE; 1891ffa07934SHong Zhang A->spd = flg; 1892ffa07934SHong Zhang if (flg) { 1893ffa07934SHong Zhang A->symmetric = PETSC_TRUE; 1894ffa07934SHong Zhang A->structurally_symmetric = PETSC_TRUE; 1895ffa07934SHong Zhang A->symmetric_set = PETSC_TRUE; 1896ffa07934SHong Zhang A->structurally_symmetric_set = PETSC_TRUE; 1897ffa07934SHong Zhang } 1898ffa07934SHong Zhang break; 189977e54ba9SKris Buschelman case MAT_SYMMETRIC: 19004e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 190125f421beSHong Zhang break; 190277e54ba9SKris Buschelman case MAT_STRUCTURALLY_SYMMETRIC: 1903eeffb40dSHong Zhang ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 1904eeffb40dSHong Zhang break; 1905bf108f30SBarry Smith case MAT_HERMITIAN: 1906eeffb40dSHong Zhang ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 
1907eeffb40dSHong Zhang break; 1908bf108f30SBarry Smith case MAT_SYMMETRY_ETERNAL: 19094e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 191077e54ba9SKris Buschelman break; 191112c028f9SKris Buschelman default: 1912e32f2f54SBarry Smith SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %d",op); 19133a40ed3dSBarry Smith } 19143a40ed3dSBarry Smith PetscFunctionReturn(0); 1915c74985f6SBarry Smith } 1916c74985f6SBarry Smith 19174a2ae208SSatish Balay #undef __FUNCT__ 19184a2ae208SSatish Balay #define __FUNCT__ "MatGetRow_MPIAIJ" 1919b1d57f15SBarry Smith PetscErrorCode MatGetRow_MPIAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 192039e00950SLois Curfman McInnes { 1921154123eaSLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 192287828ca2SBarry Smith PetscScalar *vworkA,*vworkB,**pvA,**pvB,*v_p; 19236849ba73SBarry Smith PetscErrorCode ierr; 1924d0f46423SBarry Smith PetscInt i,*cworkA,*cworkB,**pcA,**pcB,cstart = matin->cmap->rstart; 1925d0f46423SBarry Smith PetscInt nztot,nzA,nzB,lrow,rstart = matin->rmap->rstart,rend = matin->rmap->rend; 1926b1d57f15SBarry Smith PetscInt *cmap,*idx_p; 192739e00950SLois Curfman McInnes 19283a40ed3dSBarry Smith PetscFunctionBegin; 1929e32f2f54SBarry Smith if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active"); 19307a0afa10SBarry Smith mat->getrowactive = PETSC_TRUE; 19317a0afa10SBarry Smith 193270f0671dSBarry Smith if (!mat->rowvalues && (idx || v)) { 19337a0afa10SBarry Smith /* 19347a0afa10SBarry Smith allocate enough space to hold information from the longest row. 
19357a0afa10SBarry Smith */ 19367a0afa10SBarry Smith Mat_SeqAIJ *Aa = (Mat_SeqAIJ*)mat->A->data,*Ba = (Mat_SeqAIJ*)mat->B->data; 1937b1d57f15SBarry Smith PetscInt max = 1,tmp; 1938d0f46423SBarry Smith for (i=0; i<matin->rmap->n; i++) { 19397a0afa10SBarry Smith tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i]; 19402205254eSKarl Rupp if (max < tmp) max = tmp; 19417a0afa10SBarry Smith } 19421d79065fSBarry Smith ierr = PetscMalloc2(max,PetscScalar,&mat->rowvalues,max,PetscInt,&mat->rowindices);CHKERRQ(ierr); 19437a0afa10SBarry Smith } 19447a0afa10SBarry Smith 1945e7e72b3dSBarry Smith if (row < rstart || row >= rend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only local rows"); 1946abc0e9e4SLois Curfman McInnes lrow = row - rstart; 194739e00950SLois Curfman McInnes 1948154123eaSLois Curfman McInnes pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB; 1949154123eaSLois Curfman McInnes if (!v) {pvA = 0; pvB = 0;} 1950154123eaSLois Curfman McInnes if (!idx) {pcA = 0; if (!v) pcB = 0;} 1951f830108cSBarry Smith ierr = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr); 1952f830108cSBarry Smith ierr = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr); 1953154123eaSLois Curfman McInnes nztot = nzA + nzB; 1954154123eaSLois Curfman McInnes 195570f0671dSBarry Smith cmap = mat->garray; 1956154123eaSLois Curfman McInnes if (v || idx) { 1957154123eaSLois Curfman McInnes if (nztot) { 1958154123eaSLois Curfman McInnes /* Sort by increasing column numbers, assuming A and B already sorted */ 1959b1d57f15SBarry Smith PetscInt imark = -1; 1960154123eaSLois Curfman McInnes if (v) { 196170f0671dSBarry Smith *v = v_p = mat->rowvalues; 196239e00950SLois Curfman McInnes for (i=0; i<nzB; i++) { 196370f0671dSBarry Smith if (cmap[cworkB[i]] < cstart) v_p[i] = vworkB[i]; 1964154123eaSLois Curfman McInnes else break; 1965154123eaSLois Curfman McInnes } 1966154123eaSLois Curfman McInnes imark = i; 196770f0671dSBarry Smith for (i=0; i<nzA; i++) v_p[imark+i] = 
vworkA[i]; 196870f0671dSBarry Smith for (i=imark; i<nzB; i++) v_p[nzA+i] = vworkB[i]; 1969154123eaSLois Curfman McInnes } 1970154123eaSLois Curfman McInnes if (idx) { 197170f0671dSBarry Smith *idx = idx_p = mat->rowindices; 197270f0671dSBarry Smith if (imark > -1) { 197370f0671dSBarry Smith for (i=0; i<imark; i++) { 197470f0671dSBarry Smith idx_p[i] = cmap[cworkB[i]]; 197570f0671dSBarry Smith } 197670f0671dSBarry Smith } else { 1977154123eaSLois Curfman McInnes for (i=0; i<nzB; i++) { 197870f0671dSBarry Smith if (cmap[cworkB[i]] < cstart) idx_p[i] = cmap[cworkB[i]]; 1979154123eaSLois Curfman McInnes else break; 1980154123eaSLois Curfman McInnes } 1981154123eaSLois Curfman McInnes imark = i; 198270f0671dSBarry Smith } 198370f0671dSBarry Smith for (i=0; i<nzA; i++) idx_p[imark+i] = cstart + cworkA[i]; 198470f0671dSBarry Smith for (i=imark; i<nzB; i++) idx_p[nzA+i] = cmap[cworkB[i]]; 198539e00950SLois Curfman McInnes } 19863f97c4b0SBarry Smith } else { 19871ca473b0SSatish Balay if (idx) *idx = 0; 19881ca473b0SSatish Balay if (v) *v = 0; 19891ca473b0SSatish Balay } 1990154123eaSLois Curfman McInnes } 199139e00950SLois Curfman McInnes *nz = nztot; 1992f830108cSBarry Smith ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr); 1993f830108cSBarry Smith ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr); 19943a40ed3dSBarry Smith PetscFunctionReturn(0); 199539e00950SLois Curfman McInnes } 199639e00950SLois Curfman McInnes 19974a2ae208SSatish Balay #undef __FUNCT__ 19984a2ae208SSatish Balay #define __FUNCT__ "MatRestoreRow_MPIAIJ" 1999b1d57f15SBarry Smith PetscErrorCode MatRestoreRow_MPIAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 200039e00950SLois Curfman McInnes { 20017a0afa10SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 20023a40ed3dSBarry Smith 20033a40ed3dSBarry Smith PetscFunctionBegin; 2004e7e72b3dSBarry Smith if (!aij->getrowactive) 
SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow() must be called first"); 20057a0afa10SBarry Smith aij->getrowactive = PETSC_FALSE; 20063a40ed3dSBarry Smith PetscFunctionReturn(0); 200739e00950SLois Curfman McInnes } 200839e00950SLois Curfman McInnes 20094a2ae208SSatish Balay #undef __FUNCT__ 20104a2ae208SSatish Balay #define __FUNCT__ "MatNorm_MPIAIJ" 2011dfbe8321SBarry Smith PetscErrorCode MatNorm_MPIAIJ(Mat mat,NormType type,PetscReal *norm) 2012855ac2c5SLois Curfman McInnes { 2013855ac2c5SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 2014ec8511deSBarry Smith Mat_SeqAIJ *amat = (Mat_SeqAIJ*)aij->A->data,*bmat = (Mat_SeqAIJ*)aij->B->data; 2015dfbe8321SBarry Smith PetscErrorCode ierr; 2016d0f46423SBarry Smith PetscInt i,j,cstart = mat->cmap->rstart; 2017329f5518SBarry Smith PetscReal sum = 0.0; 2018a77337e4SBarry Smith MatScalar *v; 201904ca555eSLois Curfman McInnes 20203a40ed3dSBarry Smith PetscFunctionBegin; 202117699dbbSLois Curfman McInnes if (aij->size == 1) { 202214183eadSLois Curfman McInnes ierr = MatNorm(aij->A,type,norm);CHKERRQ(ierr); 202337fa93a5SLois Curfman McInnes } else { 202404ca555eSLois Curfman McInnes if (type == NORM_FROBENIUS) { 202504ca555eSLois Curfman McInnes v = amat->a; 202604ca555eSLois Curfman McInnes for (i=0; i<amat->nz; i++) { 2027329f5518SBarry Smith sum += PetscRealPart(PetscConj(*v)*(*v)); v++; 202804ca555eSLois Curfman McInnes } 202904ca555eSLois Curfman McInnes v = bmat->a; 203004ca555eSLois Curfman McInnes for (i=0; i<bmat->nz; i++) { 2031329f5518SBarry Smith sum += PetscRealPart(PetscConj(*v)*(*v)); v++; 203204ca555eSLois Curfman McInnes } 2033ce94432eSBarry Smith ierr = MPI_Allreduce(&sum,norm,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 20348f1a2a5eSBarry Smith *norm = PetscSqrtReal(*norm); 20353a40ed3dSBarry Smith } else if (type == NORM_1) { /* max column norm */ 2036329f5518SBarry Smith PetscReal *tmp,*tmp2; 2037b1d57f15SBarry Smith PetscInt *jj,*garray = 
aij->garray; 2038d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp);CHKERRQ(ierr); 2039d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp2);CHKERRQ(ierr); 2040d0f46423SBarry Smith ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr); 204104ca555eSLois Curfman McInnes *norm = 0.0; 204204ca555eSLois Curfman McInnes v = amat->a; jj = amat->j; 204304ca555eSLois Curfman McInnes for (j=0; j<amat->nz; j++) { 2044bfec09a0SHong Zhang tmp[cstart + *jj++] += PetscAbsScalar(*v); v++; 204504ca555eSLois Curfman McInnes } 204604ca555eSLois Curfman McInnes v = bmat->a; jj = bmat->j; 204704ca555eSLois Curfman McInnes for (j=0; j<bmat->nz; j++) { 2048bfec09a0SHong Zhang tmp[garray[*jj++]] += PetscAbsScalar(*v); v++; 204904ca555eSLois Curfman McInnes } 2050ce94432eSBarry Smith ierr = MPI_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 2051d0f46423SBarry Smith for (j=0; j<mat->cmap->N; j++) { 205204ca555eSLois Curfman McInnes if (tmp2[j] > *norm) *norm = tmp2[j]; 205304ca555eSLois Curfman McInnes } 2054606d414cSSatish Balay ierr = PetscFree(tmp);CHKERRQ(ierr); 2055606d414cSSatish Balay ierr = PetscFree(tmp2);CHKERRQ(ierr); 20563a40ed3dSBarry Smith } else if (type == NORM_INFINITY) { /* max row norm */ 2057329f5518SBarry Smith PetscReal ntemp = 0.0; 2058d0f46423SBarry Smith for (j=0; j<aij->A->rmap->n; j++) { 2059bfec09a0SHong Zhang v = amat->a + amat->i[j]; 206004ca555eSLois Curfman McInnes sum = 0.0; 206104ca555eSLois Curfman McInnes for (i=0; i<amat->i[j+1]-amat->i[j]; i++) { 2062cddf8d76SBarry Smith sum += PetscAbsScalar(*v); v++; 206304ca555eSLois Curfman McInnes } 2064bfec09a0SHong Zhang v = bmat->a + bmat->i[j]; 206504ca555eSLois Curfman McInnes for (i=0; i<bmat->i[j+1]-bmat->i[j]; i++) { 2066cddf8d76SBarry Smith sum += PetscAbsScalar(*v); v++; 206704ca555eSLois Curfman McInnes } 2068515d9167SLois Curfman McInnes if (sum > ntemp) ntemp = sum; 
206904ca555eSLois Curfman McInnes } 2070ce94432eSBarry Smith ierr = MPI_Allreduce(&ntemp,norm,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 2071ce94432eSBarry Smith } else SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No support for two norm"); 207237fa93a5SLois Curfman McInnes } 20733a40ed3dSBarry Smith PetscFunctionReturn(0); 2074855ac2c5SLois Curfman McInnes } 2075855ac2c5SLois Curfman McInnes 20764a2ae208SSatish Balay #undef __FUNCT__ 20774a2ae208SSatish Balay #define __FUNCT__ "MatTranspose_MPIAIJ" 2078fc4dec0aSBarry Smith PetscErrorCode MatTranspose_MPIAIJ(Mat A,MatReuse reuse,Mat *matout) 2079b7c46309SBarry Smith { 2080b7c46309SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2081da668accSHong Zhang Mat_SeqAIJ *Aloc=(Mat_SeqAIJ*)a->A->data,*Bloc=(Mat_SeqAIJ*)a->B->data; 2082dfbe8321SBarry Smith PetscErrorCode ierr; 208380bcc5a1SJed Brown PetscInt M = A->rmap->N,N = A->cmap->N,ma,na,mb,nb,*ai,*aj,*bi,*bj,row,*cols,*cols_tmp,i; 2084d0f46423SBarry Smith PetscInt cstart = A->cmap->rstart,ncol; 20853a40ed3dSBarry Smith Mat B; 2086a77337e4SBarry Smith MatScalar *array; 2087b7c46309SBarry Smith 20883a40ed3dSBarry Smith PetscFunctionBegin; 2089ce94432eSBarry Smith if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Square matrix only for in-place"); 2090da668accSHong Zhang 209180bcc5a1SJed Brown ma = A->rmap->n; na = A->cmap->n; mb = a->B->rmap->n; nb = a->B->cmap->n; 2092da668accSHong Zhang ai = Aloc->i; aj = Aloc->j; 2093da668accSHong Zhang bi = Bloc->i; bj = Bloc->j; 2094fc73b1b3SBarry Smith if (reuse == MAT_INITIAL_MATRIX || *matout == A) { 209580bcc5a1SJed Brown PetscInt *d_nnz,*g_nnz,*o_nnz; 209680bcc5a1SJed Brown PetscSFNode *oloc; 2097713c93b4SJed Brown PETSC_UNUSED PetscSF sf; 209880bcc5a1SJed Brown 209980bcc5a1SJed Brown ierr = PetscMalloc4(na,PetscInt,&d_nnz,na,PetscInt,&o_nnz,nb,PetscInt,&g_nnz,nb,PetscSFNode,&oloc);CHKERRQ(ierr); 210080bcc5a1SJed Brown 
/* compute d_nnz for preallocation */ 210180bcc5a1SJed Brown ierr = PetscMemzero(d_nnz,na*sizeof(PetscInt));CHKERRQ(ierr); 2102da668accSHong Zhang for (i=0; i<ai[ma]; i++) { 2103da668accSHong Zhang d_nnz[aj[i]]++; 2104da668accSHong Zhang aj[i] += cstart; /* global col index to be used by MatSetValues() */ 2105d4bb536fSBarry Smith } 210680bcc5a1SJed Brown /* compute local off-diagonal contributions */ 21070beca09bSJed Brown ierr = PetscMemzero(g_nnz,nb*sizeof(PetscInt));CHKERRQ(ierr); 210880bcc5a1SJed Brown for (i=0; i<bi[ma]; i++) g_nnz[bj[i]]++; 210980bcc5a1SJed Brown /* map those to global */ 2110ce94432eSBarry Smith ierr = PetscSFCreate(PetscObjectComm((PetscObject)A),&sf);CHKERRQ(ierr); 21110298fd71SBarry Smith ierr = PetscSFSetGraphLayout(sf,A->cmap,nb,NULL,PETSC_USE_POINTER,a->garray);CHKERRQ(ierr); 2112e9e74f11SJed Brown ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr); 211380bcc5a1SJed Brown ierr = PetscMemzero(o_nnz,na*sizeof(PetscInt));CHKERRQ(ierr); 211480bcc5a1SJed Brown ierr = PetscSFReduceBegin(sf,MPIU_INT,g_nnz,o_nnz,MPIU_SUM);CHKERRQ(ierr); 211580bcc5a1SJed Brown ierr = PetscSFReduceEnd(sf,MPIU_INT,g_nnz,o_nnz,MPIU_SUM);CHKERRQ(ierr); 211680bcc5a1SJed Brown ierr = PetscSFDestroy(&sf);CHKERRQ(ierr); 2117d4bb536fSBarry Smith 2118ce94432eSBarry Smith ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr); 2119d0f46423SBarry Smith ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr); 2120a2f3521dSMark F. 
Adams ierr = MatSetBlockSizes(B,A->cmap->bs,A->rmap->bs);CHKERRQ(ierr); 21217adad957SLisandro Dalcin ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr); 212280bcc5a1SJed Brown ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,o_nnz);CHKERRQ(ierr); 212380bcc5a1SJed Brown ierr = PetscFree4(d_nnz,o_nnz,g_nnz,oloc);CHKERRQ(ierr); 2124fc4dec0aSBarry Smith } else { 2125fc4dec0aSBarry Smith B = *matout; 21266ffab4bbSHong Zhang ierr = MatSetOption(B,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr); 21272205254eSKarl Rupp for (i=0; i<ai[ma]; i++) aj[i] += cstart; /* global col index to be used by MatSetValues() */ 2128fc4dec0aSBarry Smith } 2129b7c46309SBarry Smith 2130b7c46309SBarry Smith /* copy over the A part */ 2131da668accSHong Zhang array = Aloc->a; 2132d0f46423SBarry Smith row = A->rmap->rstart; 2133da668accSHong Zhang for (i=0; i<ma; i++) { 2134da668accSHong Zhang ncol = ai[i+1]-ai[i]; 2135da668accSHong Zhang ierr = MatSetValues(B,ncol,aj,1,&row,array,INSERT_VALUES);CHKERRQ(ierr); 21362205254eSKarl Rupp row++; 21372205254eSKarl Rupp array += ncol; aj += ncol; 2138b7c46309SBarry Smith } 2139b7c46309SBarry Smith aj = Aloc->j; 2140da668accSHong Zhang for (i=0; i<ai[ma]; i++) aj[i] -= cstart; /* resume local col index */ 2141b7c46309SBarry Smith 2142b7c46309SBarry Smith /* copy over the B part */ 2143fc73b1b3SBarry Smith ierr = PetscMalloc(bi[mb]*sizeof(PetscInt),&cols);CHKERRQ(ierr); 2144fc73b1b3SBarry Smith ierr = PetscMemzero(cols,bi[mb]*sizeof(PetscInt));CHKERRQ(ierr); 2145da668accSHong Zhang array = Bloc->a; 2146d0f46423SBarry Smith row = A->rmap->rstart; 21472205254eSKarl Rupp for (i=0; i<bi[mb]; i++) cols[i] = a->garray[bj[i]]; 214861a2fbbaSHong Zhang cols_tmp = cols; 2149da668accSHong Zhang for (i=0; i<mb; i++) { 2150da668accSHong Zhang ncol = bi[i+1]-bi[i]; 215161a2fbbaSHong Zhang ierr = MatSetValues(B,ncol,cols_tmp,1,&row,array,INSERT_VALUES);CHKERRQ(ierr); 21522205254eSKarl Rupp row++; 21532205254eSKarl Rupp array += ncol; cols_tmp += ncol; 
2154b7c46309SBarry Smith } 2155fc73b1b3SBarry Smith ierr = PetscFree(cols);CHKERRQ(ierr); 2156fc73b1b3SBarry Smith 21576d4a8577SBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 21586d4a8577SBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2159815cbec1SBarry Smith if (reuse == MAT_INITIAL_MATRIX || *matout != A) { 21600de55854SLois Curfman McInnes *matout = B; 21610de55854SLois Curfman McInnes } else { 2162eb6b5d47SBarry Smith ierr = MatHeaderMerge(A,B);CHKERRQ(ierr); 21630de55854SLois Curfman McInnes } 21643a40ed3dSBarry Smith PetscFunctionReturn(0); 2165b7c46309SBarry Smith } 2166b7c46309SBarry Smith 21674a2ae208SSatish Balay #undef __FUNCT__ 21684a2ae208SSatish Balay #define __FUNCT__ "MatDiagonalScale_MPIAIJ" 2169dfbe8321SBarry Smith PetscErrorCode MatDiagonalScale_MPIAIJ(Mat mat,Vec ll,Vec rr) 2170a008b906SSatish Balay { 21714b967eb1SSatish Balay Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 21724b967eb1SSatish Balay Mat a = aij->A,b = aij->B; 2173dfbe8321SBarry Smith PetscErrorCode ierr; 2174b1d57f15SBarry Smith PetscInt s1,s2,s3; 2175a008b906SSatish Balay 21763a40ed3dSBarry Smith PetscFunctionBegin; 21774b967eb1SSatish Balay ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr); 21784b967eb1SSatish Balay if (rr) { 2179e1311b90SBarry Smith ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr); 2180e32f2f54SBarry Smith if (s1!=s3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"right vector non-conforming local size"); 21814b967eb1SSatish Balay /* Overlap communication with computation. 
*/ 2182ca9f406cSSatish Balay ierr = VecScatterBegin(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 2183a008b906SSatish Balay } 21844b967eb1SSatish Balay if (ll) { 2185e1311b90SBarry Smith ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr); 2186e32f2f54SBarry Smith if (s1!=s2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"left vector non-conforming local size"); 2187f830108cSBarry Smith ierr = (*b->ops->diagonalscale)(b,ll,0);CHKERRQ(ierr); 21884b967eb1SSatish Balay } 21894b967eb1SSatish Balay /* scale the diagonal block */ 2190f830108cSBarry Smith ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr); 21914b967eb1SSatish Balay 21924b967eb1SSatish Balay if (rr) { 21934b967eb1SSatish Balay /* Do a scatter end and then right scale the off-diagonal block */ 2194ca9f406cSSatish Balay ierr = VecScatterEnd(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 2195f830108cSBarry Smith ierr = (*b->ops->diagonalscale)(b,0,aij->lvec);CHKERRQ(ierr); 21964b967eb1SSatish Balay } 21973a40ed3dSBarry Smith PetscFunctionReturn(0); 2198a008b906SSatish Balay } 2199a008b906SSatish Balay 22004a2ae208SSatish Balay #undef __FUNCT__ 22014a2ae208SSatish Balay #define __FUNCT__ "MatSetUnfactored_MPIAIJ" 2202dfbe8321SBarry Smith PetscErrorCode MatSetUnfactored_MPIAIJ(Mat A) 2203bb5a7306SBarry Smith { 2204bb5a7306SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2205dfbe8321SBarry Smith PetscErrorCode ierr; 22063a40ed3dSBarry Smith 22073a40ed3dSBarry Smith PetscFunctionBegin; 2208bb5a7306SBarry Smith ierr = MatSetUnfactored(a->A);CHKERRQ(ierr); 22093a40ed3dSBarry Smith PetscFunctionReturn(0); 2210bb5a7306SBarry Smith } 2211bb5a7306SBarry Smith 22124a2ae208SSatish Balay #undef __FUNCT__ 22134a2ae208SSatish Balay #define __FUNCT__ "MatEqual_MPIAIJ" 2214ace3abfcSBarry Smith PetscErrorCode MatEqual_MPIAIJ(Mat A,Mat B,PetscBool *flag) 2215d4bb536fSBarry Smith { 2216d4bb536fSBarry Smith Mat_MPIAIJ *matB = (Mat_MPIAIJ*)B->data,*matA = (Mat_MPIAIJ*)A->data; 
2217d4bb536fSBarry Smith Mat a,b,c,d; 2218ace3abfcSBarry Smith PetscBool flg; 2219dfbe8321SBarry Smith PetscErrorCode ierr; 2220d4bb536fSBarry Smith 22213a40ed3dSBarry Smith PetscFunctionBegin; 2222d4bb536fSBarry Smith a = matA->A; b = matA->B; 2223d4bb536fSBarry Smith c = matB->A; d = matB->B; 2224d4bb536fSBarry Smith 2225d4bb536fSBarry Smith ierr = MatEqual(a,c,&flg);CHKERRQ(ierr); 2226abc0a331SBarry Smith if (flg) { 2227d4bb536fSBarry Smith ierr = MatEqual(b,d,&flg);CHKERRQ(ierr); 2228d4bb536fSBarry Smith } 2229ce94432eSBarry Smith ierr = MPI_Allreduce(&flg,flag,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr); 22303a40ed3dSBarry Smith PetscFunctionReturn(0); 2231d4bb536fSBarry Smith } 2232d4bb536fSBarry Smith 22334a2ae208SSatish Balay #undef __FUNCT__ 22344a2ae208SSatish Balay #define __FUNCT__ "MatCopy_MPIAIJ" 2235dfbe8321SBarry Smith PetscErrorCode MatCopy_MPIAIJ(Mat A,Mat B,MatStructure str) 2236cb5b572fSBarry Smith { 2237dfbe8321SBarry Smith PetscErrorCode ierr; 2238cb5b572fSBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2239cb5b572fSBarry Smith Mat_MPIAIJ *b = (Mat_MPIAIJ*)B->data; 2240cb5b572fSBarry Smith 2241cb5b572fSBarry Smith PetscFunctionBegin; 224233f4a19fSKris Buschelman /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */ 224333f4a19fSKris Buschelman if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) { 2244cb5b572fSBarry Smith /* because of the column compression in the off-processor part of the matrix a->B, 2245cb5b572fSBarry Smith the number of columns in a->B and b->B may be different, hence we cannot call 2246cb5b572fSBarry Smith the MatCopy() directly on the two parts. 
If need be, we can provide a more 2247cb5b572fSBarry Smith efficient copy than the MatCopy_Basic() by first uncompressing the a->B matrices 2248cb5b572fSBarry Smith then copying the submatrices */ 2249cb5b572fSBarry Smith ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr); 2250cb5b572fSBarry Smith } else { 2251cb5b572fSBarry Smith ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr); 2252cb5b572fSBarry Smith ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr); 2253cb5b572fSBarry Smith } 2254cb5b572fSBarry Smith PetscFunctionReturn(0); 2255cb5b572fSBarry Smith } 2256cb5b572fSBarry Smith 22574a2ae208SSatish Balay #undef __FUNCT__ 22584994cf47SJed Brown #define __FUNCT__ "MatSetUp_MPIAIJ" 22594994cf47SJed Brown PetscErrorCode MatSetUp_MPIAIJ(Mat A) 2260273d9f13SBarry Smith { 2261dfbe8321SBarry Smith PetscErrorCode ierr; 2262273d9f13SBarry Smith 2263273d9f13SBarry Smith PetscFunctionBegin; 2264273d9f13SBarry Smith ierr = MatMPIAIJSetPreallocation(A,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr); 2265273d9f13SBarry Smith PetscFunctionReturn(0); 2266273d9f13SBarry Smith } 2267273d9f13SBarry Smith 2268ac90fabeSBarry Smith #undef __FUNCT__ 226995b7e79eSJed Brown #define __FUNCT__ "MatAXPYGetPreallocation_MPIAIJ" 227095b7e79eSJed Brown /* This is the same as MatAXPYGetPreallocation_SeqAIJ, except that the local-to-global map is provided */ 227195b7e79eSJed Brown static PetscErrorCode MatAXPYGetPreallocation_MPIAIJ(Mat Y,const PetscInt *yltog,Mat X,const PetscInt *xltog,PetscInt *nnz) 227295b7e79eSJed Brown { 227395b7e79eSJed Brown PetscInt i,m=Y->rmap->N; 227495b7e79eSJed Brown Mat_SeqAIJ *x = (Mat_SeqAIJ*)X->data; 227595b7e79eSJed Brown Mat_SeqAIJ *y = (Mat_SeqAIJ*)Y->data; 227695b7e79eSJed Brown const PetscInt *xi = x->i,*yi = y->i; 227795b7e79eSJed Brown 227895b7e79eSJed Brown PetscFunctionBegin; 227995b7e79eSJed Brown /* Set the number of nonzeros in the new matrix */ 228095b7e79eSJed Brown for (i=0; i<m; i++) { 228195b7e79eSJed Brown PetscInt j,k,nzx = xi[i+1] - xi[i],nzy = yi[i+1] - yi[i]; 
228295b7e79eSJed Brown const PetscInt *xj = x->j+xi[i],*yj = y->j+yi[i]; 228395b7e79eSJed Brown nnz[i] = 0; 228495b7e79eSJed Brown for (j=0,k=0; j<nzx; j++) { /* Point in X */ 228595b7e79eSJed Brown for (; k<nzy && yltog[yj[k]]<xltog[xj[j]]; k++) nnz[i]++; /* Catch up to X */ 228695b7e79eSJed Brown if (k<nzy && yltog[yj[k]]==xltog[xj[j]]) k++; /* Skip duplicate */ 228795b7e79eSJed Brown nnz[i]++; 228895b7e79eSJed Brown } 228995b7e79eSJed Brown for (; k<nzy; k++) nnz[i]++; 229095b7e79eSJed Brown } 229195b7e79eSJed Brown PetscFunctionReturn(0); 229295b7e79eSJed Brown } 229395b7e79eSJed Brown 229495b7e79eSJed Brown #undef __FUNCT__ 2295ac90fabeSBarry Smith #define __FUNCT__ "MatAXPY_MPIAIJ" 2296f4df32b1SMatthew Knepley PetscErrorCode MatAXPY_MPIAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str) 2297ac90fabeSBarry Smith { 2298dfbe8321SBarry Smith PetscErrorCode ierr; 2299b1d57f15SBarry Smith PetscInt i; 2300ac90fabeSBarry Smith Mat_MPIAIJ *xx = (Mat_MPIAIJ*)X->data,*yy = (Mat_MPIAIJ*)Y->data; 23014ce68768SBarry Smith PetscBLASInt bnz,one=1; 2302ac90fabeSBarry Smith Mat_SeqAIJ *x,*y; 2303ac90fabeSBarry Smith 2304ac90fabeSBarry Smith PetscFunctionBegin; 2305ac90fabeSBarry Smith if (str == SAME_NONZERO_PATTERN) { 2306f4df32b1SMatthew Knepley PetscScalar alpha = a; 2307ac90fabeSBarry Smith x = (Mat_SeqAIJ*)xx->A->data; 2308c5df96a5SBarry Smith ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr); 2309ac90fabeSBarry Smith y = (Mat_SeqAIJ*)yy->A->data; 23108b83055fSJed Brown PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one)); 2311ac90fabeSBarry Smith x = (Mat_SeqAIJ*)xx->B->data; 2312ac90fabeSBarry Smith y = (Mat_SeqAIJ*)yy->B->data; 2313c5df96a5SBarry Smith ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr); 23148b83055fSJed Brown PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one)); 2315a30b2313SHong Zhang } else if (str == SUBSET_NONZERO_PATTERN) { 2316f4df32b1SMatthew Knepley ierr = MatAXPY_SeqAIJ(yy->A,a,xx->A,str);CHKERRQ(ierr); 
2317c537a176SHong Zhang 2318c537a176SHong Zhang x = (Mat_SeqAIJ*)xx->B->data; 2319a30b2313SHong Zhang y = (Mat_SeqAIJ*)yy->B->data; 2320a30b2313SHong Zhang if (y->xtoy && y->XtoY != xx->B) { 2321a30b2313SHong Zhang ierr = PetscFree(y->xtoy);CHKERRQ(ierr); 23226bf464f9SBarry Smith ierr = MatDestroy(&y->XtoY);CHKERRQ(ierr); 2323c537a176SHong Zhang } 2324a30b2313SHong Zhang if (!y->xtoy) { /* get xtoy */ 2325d0f46423SBarry Smith ierr = MatAXPYGetxtoy_Private(xx->B->rmap->n,x->i,x->j,xx->garray,y->i,y->j,yy->garray,&y->xtoy);CHKERRQ(ierr); 2326a30b2313SHong Zhang y->XtoY = xx->B; 2327407f6b05SHong Zhang ierr = PetscObjectReference((PetscObject)xx->B);CHKERRQ(ierr); 2328c537a176SHong Zhang } 2329f4df32b1SMatthew Knepley for (i=0; i<x->nz; i++) y->a[y->xtoy[i]] += a*(x->a[i]); 2330ac90fabeSBarry Smith } else { 23319f5f6813SShri Abhyankar Mat B; 23329f5f6813SShri Abhyankar PetscInt *nnz_d,*nnz_o; 23339f5f6813SShri Abhyankar ierr = PetscMalloc(yy->A->rmap->N*sizeof(PetscInt),&nnz_d);CHKERRQ(ierr); 23349f5f6813SShri Abhyankar ierr = PetscMalloc(yy->B->rmap->N*sizeof(PetscInt),&nnz_o);CHKERRQ(ierr); 2335ce94432eSBarry Smith ierr = MatCreate(PetscObjectComm((PetscObject)Y),&B);CHKERRQ(ierr); 2336bc5a2726SShri Abhyankar ierr = PetscObjectSetName((PetscObject)B,((PetscObject)Y)->name);CHKERRQ(ierr); 23379f5f6813SShri Abhyankar ierr = MatSetSizes(B,Y->rmap->n,Y->cmap->n,Y->rmap->N,Y->cmap->N);CHKERRQ(ierr); 2338a2f3521dSMark F. 
Adams ierr = MatSetBlockSizes(B,Y->rmap->bs,Y->cmap->bs);CHKERRQ(ierr); 23399f5f6813SShri Abhyankar ierr = MatSetType(B,MATMPIAIJ);CHKERRQ(ierr); 23409f5f6813SShri Abhyankar ierr = MatAXPYGetPreallocation_SeqAIJ(yy->A,xx->A,nnz_d);CHKERRQ(ierr); 234195b7e79eSJed Brown ierr = MatAXPYGetPreallocation_MPIAIJ(yy->B,yy->garray,xx->B,xx->garray,nnz_o);CHKERRQ(ierr); 2342ecd8bba6SJed Brown ierr = MatMPIAIJSetPreallocation(B,0,nnz_d,0,nnz_o);CHKERRQ(ierr); 23439f5f6813SShri Abhyankar ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr); 2344a2ea699eSBarry Smith ierr = MatHeaderReplace(Y,B);CHKERRQ(ierr); 23459f5f6813SShri Abhyankar ierr = PetscFree(nnz_d);CHKERRQ(ierr); 23469f5f6813SShri Abhyankar ierr = PetscFree(nnz_o);CHKERRQ(ierr); 2347ac90fabeSBarry Smith } 2348ac90fabeSBarry Smith PetscFunctionReturn(0); 2349ac90fabeSBarry Smith } 2350ac90fabeSBarry Smith 23517087cfbeSBarry Smith extern PetscErrorCode MatConjugate_SeqAIJ(Mat); 2352354c94deSBarry Smith 2353354c94deSBarry Smith #undef __FUNCT__ 2354354c94deSBarry Smith #define __FUNCT__ "MatConjugate_MPIAIJ" 23557087cfbeSBarry Smith PetscErrorCode MatConjugate_MPIAIJ(Mat mat) 2356354c94deSBarry Smith { 2357354c94deSBarry Smith #if defined(PETSC_USE_COMPLEX) 2358354c94deSBarry Smith PetscErrorCode ierr; 2359354c94deSBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 2360354c94deSBarry Smith 2361354c94deSBarry Smith PetscFunctionBegin; 2362354c94deSBarry Smith ierr = MatConjugate_SeqAIJ(aij->A);CHKERRQ(ierr); 2363354c94deSBarry Smith ierr = MatConjugate_SeqAIJ(aij->B);CHKERRQ(ierr); 2364354c94deSBarry Smith #else 2365354c94deSBarry Smith PetscFunctionBegin; 2366354c94deSBarry Smith #endif 2367354c94deSBarry Smith PetscFunctionReturn(0); 2368354c94deSBarry Smith } 2369354c94deSBarry Smith 237099cafbc1SBarry Smith #undef __FUNCT__ 237199cafbc1SBarry Smith #define __FUNCT__ "MatRealPart_MPIAIJ" 237299cafbc1SBarry Smith PetscErrorCode MatRealPart_MPIAIJ(Mat A) 237399cafbc1SBarry Smith { 237499cafbc1SBarry 
Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 237599cafbc1SBarry Smith PetscErrorCode ierr; 237699cafbc1SBarry Smith 237799cafbc1SBarry Smith PetscFunctionBegin; 237899cafbc1SBarry Smith ierr = MatRealPart(a->A);CHKERRQ(ierr); 237999cafbc1SBarry Smith ierr = MatRealPart(a->B);CHKERRQ(ierr); 238099cafbc1SBarry Smith PetscFunctionReturn(0); 238199cafbc1SBarry Smith } 238299cafbc1SBarry Smith 238399cafbc1SBarry Smith #undef __FUNCT__ 238499cafbc1SBarry Smith #define __FUNCT__ "MatImaginaryPart_MPIAIJ" 238599cafbc1SBarry Smith PetscErrorCode MatImaginaryPart_MPIAIJ(Mat A) 238699cafbc1SBarry Smith { 238799cafbc1SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 238899cafbc1SBarry Smith PetscErrorCode ierr; 238999cafbc1SBarry Smith 239099cafbc1SBarry Smith PetscFunctionBegin; 239199cafbc1SBarry Smith ierr = MatImaginaryPart(a->A);CHKERRQ(ierr); 239299cafbc1SBarry Smith ierr = MatImaginaryPart(a->B);CHKERRQ(ierr); 239399cafbc1SBarry Smith PetscFunctionReturn(0); 239499cafbc1SBarry Smith } 239599cafbc1SBarry Smith 2396519f805aSKarl Rupp #if defined(PETSC_HAVE_PBGL) 2397103bf8bdSMatthew Knepley 2398103bf8bdSMatthew Knepley #include <boost/parallel/mpi/bsp_process_group.hpp> 2399a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_default_graph.hpp> 2400a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_0_block.hpp> 2401a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_preconditioner.hpp> 2402103bf8bdSMatthew Knepley #include <boost/graph/distributed/petsc/interface.hpp> 2403a2c909beSMatthew Knepley #include <boost/multi_array.hpp> 2404d0f46423SBarry Smith #include <boost/parallel/distributed_property_map->hpp> 2405103bf8bdSMatthew Knepley 2406103bf8bdSMatthew Knepley #undef __FUNCT__ 2407103bf8bdSMatthew Knepley #define __FUNCT__ "MatILUFactorSymbolic_MPIAIJ" 2408103bf8bdSMatthew Knepley /* 2409103bf8bdSMatthew Knepley This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu> 2410103bf8bdSMatthew Knepley */ 
24110481f469SBarry Smith PetscErrorCode MatILUFactorSymbolic_MPIAIJ(Mat fact,Mat A, IS isrow, IS iscol, const MatFactorInfo *info) 2412103bf8bdSMatthew Knepley { 2413a2c909beSMatthew Knepley namespace petsc = boost::distributed::petsc; 2414a2c909beSMatthew Knepley 2415a2c909beSMatthew Knepley namespace graph_dist = boost::graph::distributed; 2416a2c909beSMatthew Knepley using boost::graph::distributed::ilu_default::process_group_type; 2417a2c909beSMatthew Knepley using boost::graph::ilu_permuted; 2418a2c909beSMatthew Knepley 2419ace3abfcSBarry Smith PetscBool row_identity, col_identity; 2420776b82aeSLisandro Dalcin PetscContainer c; 2421103bf8bdSMatthew Knepley PetscInt m, n, M, N; 2422103bf8bdSMatthew Knepley PetscErrorCode ierr; 2423103bf8bdSMatthew Knepley 2424103bf8bdSMatthew Knepley PetscFunctionBegin; 2425e32f2f54SBarry Smith if (info->levels != 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only levels = 0 supported for parallel ilu"); 2426103bf8bdSMatthew Knepley ierr = ISIdentity(isrow, &row_identity);CHKERRQ(ierr); 2427103bf8bdSMatthew Knepley ierr = ISIdentity(iscol, &col_identity);CHKERRQ(ierr); 2428f23aa3ddSBarry Smith if (!row_identity || !col_identity) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Row and column permutations must be identity for parallel ILU"); 2429103bf8bdSMatthew Knepley 2430103bf8bdSMatthew Knepley process_group_type pg; 2431a2c909beSMatthew Knepley typedef graph_dist::ilu_default::ilu_level_graph_type lgraph_type; 2432a2c909beSMatthew Knepley lgraph_type *lgraph_p = new lgraph_type(petsc::num_global_vertices(A), pg, petsc::matrix_distribution(A, pg)); 2433a2c909beSMatthew Knepley lgraph_type& level_graph = *lgraph_p; 2434a2c909beSMatthew Knepley graph_dist::ilu_default::graph_type& graph(level_graph.graph); 2435a2c909beSMatthew Knepley 2436103bf8bdSMatthew Knepley petsc::read_matrix(A, graph, get(boost::edge_weight, graph)); 2437a2c909beSMatthew Knepley ilu_permuted(level_graph); 2438103bf8bdSMatthew Knepley 2439103bf8bdSMatthew 
Knepley /* put together the new matrix */ 2440ce94432eSBarry Smith ierr = MatCreate(PetscObjectComm((PetscObject)A), fact);CHKERRQ(ierr); 2441103bf8bdSMatthew Knepley ierr = MatGetLocalSize(A, &m, &n);CHKERRQ(ierr); 2442103bf8bdSMatthew Knepley ierr = MatGetSize(A, &M, &N);CHKERRQ(ierr); 2443719d5645SBarry Smith ierr = MatSetSizes(fact, m, n, M, N);CHKERRQ(ierr); 2444a2f3521dSMark F. Adams ierr = MatSetBlockSizes(fact,A->rmap->bs,A->cmap->bs);CHKERRQ(ierr); 2445719d5645SBarry Smith ierr = MatSetType(fact, ((PetscObject)A)->type_name);CHKERRQ(ierr); 2446719d5645SBarry Smith ierr = MatAssemblyBegin(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2447719d5645SBarry Smith ierr = MatAssemblyEnd(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2448103bf8bdSMatthew Knepley 2449ce94432eSBarry Smith ierr = PetscContainerCreate(PetscObjectComm((PetscObject)A), &c); 2450776b82aeSLisandro Dalcin ierr = PetscContainerSetPointer(c, lgraph_p); 2451719d5645SBarry Smith ierr = PetscObjectCompose((PetscObject) (fact), "graph", (PetscObject) c); 2452bf0cc555SLisandro Dalcin ierr = PetscContainerDestroy(&c); 2453103bf8bdSMatthew Knepley PetscFunctionReturn(0); 2454103bf8bdSMatthew Knepley } 2455103bf8bdSMatthew Knepley 2456103bf8bdSMatthew Knepley #undef __FUNCT__ 2457103bf8bdSMatthew Knepley #define __FUNCT__ "MatLUFactorNumeric_MPIAIJ" 24580481f469SBarry Smith PetscErrorCode MatLUFactorNumeric_MPIAIJ(Mat B,Mat A, const MatFactorInfo *info) 2459103bf8bdSMatthew Knepley { 2460103bf8bdSMatthew Knepley PetscFunctionBegin; 2461103bf8bdSMatthew Knepley PetscFunctionReturn(0); 2462103bf8bdSMatthew Knepley } 2463103bf8bdSMatthew Knepley 2464103bf8bdSMatthew Knepley #undef __FUNCT__ 2465103bf8bdSMatthew Knepley #define __FUNCT__ "MatSolve_MPIAIJ" 2466103bf8bdSMatthew Knepley /* 2467103bf8bdSMatthew Knepley This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu> 2468103bf8bdSMatthew Knepley */ 2469103bf8bdSMatthew Knepley PetscErrorCode MatSolve_MPIAIJ(Mat A, Vec b, Vec x) 
2470103bf8bdSMatthew Knepley { 2471a2c909beSMatthew Knepley namespace graph_dist = boost::graph::distributed; 2472a2c909beSMatthew Knepley 2473a2c909beSMatthew Knepley typedef graph_dist::ilu_default::ilu_level_graph_type lgraph_type; 2474a2c909beSMatthew Knepley lgraph_type *lgraph_p; 2475776b82aeSLisandro Dalcin PetscContainer c; 2476103bf8bdSMatthew Knepley PetscErrorCode ierr; 2477103bf8bdSMatthew Knepley 2478103bf8bdSMatthew Knepley PetscFunctionBegin; 2479103bf8bdSMatthew Knepley ierr = PetscObjectQuery((PetscObject) A, "graph", (PetscObject*) &c);CHKERRQ(ierr); 2480776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(c, (void**) &lgraph_p);CHKERRQ(ierr); 2481103bf8bdSMatthew Knepley ierr = VecCopy(b, x);CHKERRQ(ierr); 2482a2c909beSMatthew Knepley 2483a2c909beSMatthew Knepley PetscScalar *array_x; 2484a2c909beSMatthew Knepley ierr = VecGetArray(x, &array_x);CHKERRQ(ierr); 2485a2c909beSMatthew Knepley PetscInt sx; 2486a2c909beSMatthew Knepley ierr = VecGetSize(x, &sx);CHKERRQ(ierr); 2487a2c909beSMatthew Knepley 2488a2c909beSMatthew Knepley PetscScalar *array_b; 2489a2c909beSMatthew Knepley ierr = VecGetArray(b, &array_b);CHKERRQ(ierr); 2490a2c909beSMatthew Knepley PetscInt sb; 2491a2c909beSMatthew Knepley ierr = VecGetSize(b, &sb);CHKERRQ(ierr); 2492a2c909beSMatthew Knepley 2493a2c909beSMatthew Knepley lgraph_type& level_graph = *lgraph_p; 2494a2c909beSMatthew Knepley graph_dist::ilu_default::graph_type& graph(level_graph.graph); 2495a2c909beSMatthew Knepley 2496a2c909beSMatthew Knepley typedef boost::multi_array_ref<PetscScalar, 1> array_ref_type; 24972205254eSKarl Rupp array_ref_type ref_b(array_b, boost::extents[num_vertices(graph)]); 24982205254eSKarl Rupp array_ref_type ref_x(array_x, boost::extents[num_vertices(graph)]); 2499a2c909beSMatthew Knepley 2500a2c909beSMatthew Knepley typedef boost::iterator_property_map<array_ref_type::iterator, 2501a2c909beSMatthew Knepley boost::property_map<graph_dist::ilu_default::graph_type, 
boost::vertex_index_t>::type> gvector_type; 25022205254eSKarl Rupp gvector_type vector_b(ref_b.begin(), get(boost::vertex_index, graph)); 25032205254eSKarl Rupp gvector_type vector_x(ref_x.begin(), get(boost::vertex_index, graph)); 2504a2c909beSMatthew Knepley 2505a2c909beSMatthew Knepley ilu_set_solve(*lgraph_p, vector_b, vector_x); 2506103bf8bdSMatthew Knepley PetscFunctionReturn(0); 2507103bf8bdSMatthew Knepley } 2508103bf8bdSMatthew Knepley #endif 2509103bf8bdSMatthew Knepley 251069db28dcSHong Zhang #undef __FUNCT__ 25115cc03489SHong Zhang #define __FUNCT__ "MatDestroy_MatRedundant" 25125cc03489SHong Zhang PetscErrorCode MatDestroy_MatRedundant(Mat A) 251369db28dcSHong Zhang { 251469db28dcSHong Zhang PetscErrorCode ierr; 25155cc03489SHong Zhang Mat_Redundant *redund; 251669db28dcSHong Zhang PetscInt i; 25175cc03489SHong Zhang PetscMPIInt size; 251869db28dcSHong Zhang 251969db28dcSHong Zhang PetscFunctionBegin; 25205cc03489SHong Zhang ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr); 25215cc03489SHong Zhang if (size == 1) { 25225cc03489SHong Zhang Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data; 25235cc03489SHong Zhang redund = a->redundant; 25245cc03489SHong Zhang } else { 25255cc03489SHong Zhang Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 25265cc03489SHong Zhang redund = a->redundant; 25275cc03489SHong Zhang } 25285cc03489SHong Zhang if (redund){ 25291d79065fSBarry Smith ierr = PetscFree2(redund->send_rank,redund->recv_rank);CHKERRQ(ierr); 253069db28dcSHong Zhang ierr = PetscFree(redund->sbuf_j);CHKERRQ(ierr); 253169db28dcSHong Zhang ierr = PetscFree(redund->sbuf_a);CHKERRQ(ierr); 253269db28dcSHong Zhang for (i=0; i<redund->nrecvs; i++) { 253369db28dcSHong Zhang ierr = PetscFree(redund->rbuf_j[i]);CHKERRQ(ierr); 253469db28dcSHong Zhang ierr = PetscFree(redund->rbuf_a[i]);CHKERRQ(ierr); 253569db28dcSHong Zhang } 25361d79065fSBarry Smith ierr = PetscFree4(redund->sbuf_nz,redund->rbuf_nz,redund->rbuf_j,redund->rbuf_a);CHKERRQ(ierr); 25370b291e46SHong Zhang 
25380b291e46SHong Zhang if (redund->psubcomm) { 25390b291e46SHong Zhang ierr = PetscSubcommDestroy(&redund->psubcomm);CHKERRQ(ierr); 25400b291e46SHong Zhang } 25410b291e46SHong Zhang 25425cc03489SHong Zhang ierr = redund->Destroy(A);CHKERRQ(ierr); 254369db28dcSHong Zhang ierr = PetscFree(redund);CHKERRQ(ierr); 2544bf0cc555SLisandro Dalcin } 254569db28dcSHong Zhang PetscFunctionReturn(0); 254669db28dcSHong Zhang } 254769db28dcSHong Zhang 254869db28dcSHong Zhang #undef __FUNCT__ 2549*e37c6257SHong Zhang #define __FUNCT__ "MatGetRedundantMatrix_MPIAIJ_psubcomm" 2550*e37c6257SHong Zhang PetscErrorCode MatGetRedundantMatrix_MPIAIJ_psubcomm(Mat mat,PetscInt nsubcomm,PetscSubcomm psubcomm,MatReuse reuse,Mat *matredundant) 2551b4617e5dSHong Zhang { 2552b4617e5dSHong Zhang PetscMPIInt rank,size; 2553b4617e5dSHong Zhang MPI_Comm comm,subcomm=psubcomm->comm; 2554b4617e5dSHong Zhang PetscErrorCode ierr; 255534d19554SHong Zhang PetscInt nsends=0,nrecvs=0,i,rownz_max=0,M=mat->rmap->N,N=mat->cmap->N; 25565cc03489SHong Zhang PetscMPIInt *send_rank= NULL,*recv_rank=NULL,subrank,subsize; 2557b4617e5dSHong Zhang PetscInt *rowrange = mat->rmap->range; 2558b4617e5dSHong Zhang Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 2559b4617e5dSHong Zhang Mat A = aij->A,B=aij->B,C=*matredundant; 2560b4617e5dSHong Zhang Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data,*b=(Mat_SeqAIJ*)B->data; 2561b4617e5dSHong Zhang PetscScalar *sbuf_a; 2562b4617e5dSHong Zhang PetscInt nzlocal=a->nz+b->nz; 2563b4617e5dSHong Zhang PetscInt j,cstart=mat->cmap->rstart,cend=mat->cmap->rend,row,nzA,nzB,ncols,*cworkA,*cworkB; 256434d19554SHong Zhang PetscInt rstart=mat->rmap->rstart,rend=mat->rmap->rend,*bmap=aij->garray; 2565b4617e5dSHong Zhang PetscInt *cols,ctmp,lwrite,*rptr,l,*sbuf_j; 2566b4617e5dSHong Zhang MatScalar *aworkA,*aworkB; 2567b4617e5dSHong Zhang PetscScalar *vals; 2568b4617e5dSHong Zhang PetscMPIInt tag1,tag2,tag3,imdex; 2569b4617e5dSHong Zhang MPI_Request *s_waits1=NULL,*s_waits2=NULL,*s_waits3=NULL; 2570b4617e5dSHong 
Zhang MPI_Request *r_waits1=NULL,*r_waits2=NULL,*r_waits3=NULL; 2571b4617e5dSHong Zhang MPI_Status recv_status,*send_status; 2572b4617e5dSHong Zhang PetscInt *sbuf_nz=NULL,*rbuf_nz=NULL,count; 2573b4617e5dSHong Zhang PetscInt **rbuf_j=NULL; 2574b4617e5dSHong Zhang PetscScalar **rbuf_a=NULL; 2575b4617e5dSHong Zhang Mat_Redundant *redund =NULL; 2576b4617e5dSHong Zhang PetscInt rstart_sub,rend_sub,mloc_sub; 2577b4617e5dSHong Zhang 2578b4617e5dSHong Zhang PetscFunctionBegin; 2579b4617e5dSHong Zhang ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr); 2580b4617e5dSHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 2581b4617e5dSHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 25825cc03489SHong Zhang ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr); 25835cc03489SHong Zhang ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr); 2584d3b23db5SHong Zhang 2585b4617e5dSHong Zhang if (reuse == MAT_REUSE_MATRIX) { 2586b4617e5dSHong Zhang if (M != mat->rmap->N || N != mat->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong global size"); 25875cc03489SHong Zhang if (subsize == 1) { 25885cc03489SHong Zhang Mat_SeqAIJ *c = (Mat_SeqAIJ*)C->data; 25895cc03489SHong Zhang redund = c->redundant; 25905cc03489SHong Zhang } else { 25915cc03489SHong Zhang Mat_MPIAIJ *c = (Mat_MPIAIJ*)C->data; 25925cc03489SHong Zhang redund = c->redundant; 25935cc03489SHong Zhang } 2594b4617e5dSHong Zhang if (nzlocal != redund->nzlocal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. 
Wrong nzlocal"); 2595b4617e5dSHong Zhang 2596b4617e5dSHong Zhang nsends = redund->nsends; 2597b4617e5dSHong Zhang nrecvs = redund->nrecvs; 2598b4617e5dSHong Zhang send_rank = redund->send_rank; 2599b4617e5dSHong Zhang recv_rank = redund->recv_rank; 2600b4617e5dSHong Zhang sbuf_nz = redund->sbuf_nz; 2601b4617e5dSHong Zhang rbuf_nz = redund->rbuf_nz; 2602b4617e5dSHong Zhang sbuf_j = redund->sbuf_j; 2603b4617e5dSHong Zhang sbuf_a = redund->sbuf_a; 2604b4617e5dSHong Zhang rbuf_j = redund->rbuf_j; 2605b4617e5dSHong Zhang rbuf_a = redund->rbuf_a; 2606b4617e5dSHong Zhang } 2607b4617e5dSHong Zhang 2608b4617e5dSHong Zhang if (reuse == MAT_INITIAL_MATRIX) { 2609b4617e5dSHong Zhang PetscInt nleftover,np_subcomm; 2610b4617e5dSHong Zhang 2611b4617e5dSHong Zhang /* get local size of redundant matrix */ 2612b4617e5dSHong Zhang const PetscInt *range; 2613b4617e5dSHong Zhang ierr = MatGetOwnershipRanges(mat,&range);CHKERRQ(ierr); 2614b4617e5dSHong Zhang rstart_sub = range[nsubcomm*subrank]; 2615b4617e5dSHong Zhang if (subrank+1 < subsize) { /* not the last proc in subcomm */ 2616b4617e5dSHong Zhang rend_sub = range[nsubcomm*(subrank+1)]; 2617b4617e5dSHong Zhang } else { 2618b4617e5dSHong Zhang rend_sub = mat->rmap->N; 2619b4617e5dSHong Zhang } 2620b4617e5dSHong Zhang mloc_sub = rend_sub - rstart_sub; 2621b4617e5dSHong Zhang 2622b4617e5dSHong Zhang /* get the destination processors' id send_rank, nsends and nrecvs */ 2623b4617e5dSHong Zhang ierr = PetscMalloc2(size,PetscMPIInt,&send_rank,size,PetscMPIInt,&recv_rank);CHKERRQ(ierr); 2624b4617e5dSHong Zhang 2625b4617e5dSHong Zhang np_subcomm = size/nsubcomm; 2626b4617e5dSHong Zhang nleftover = size - nsubcomm*np_subcomm; 2627b4617e5dSHong Zhang 2628b4617e5dSHong Zhang nsends = 0; nrecvs = 0; 2629*e37c6257SHong Zhang if (psubcomm->type == PETSC_SUBCOMM_INTERLACED) { 2630*e37c6257SHong Zhang /* -------------------------------------------*/ 2631b4617e5dSHong Zhang for (i=0; i<size; i++) { 2632b4617e5dSHong Zhang if (subrank == i/nsubcomm 
&& i != rank) { /* my_subrank == other's subrank */ 2633b4617e5dSHong Zhang send_rank[nsends] = i; nsends++; 2634b4617e5dSHong Zhang recv_rank[nrecvs++] = i; 2635*e37c6257SHong Zhang /* printf("[%d] send to and recv from [%d]\n",rank,i); */ 2636b4617e5dSHong Zhang } 2637b4617e5dSHong Zhang } 2638b4617e5dSHong Zhang if (rank >= size - nleftover) { /* this proc is a leftover processor */ 2639b4617e5dSHong Zhang i = size-nleftover-1; 2640b4617e5dSHong Zhang j = 0; 2641b4617e5dSHong Zhang while (j < nsubcomm - nleftover) { 2642b4617e5dSHong Zhang send_rank[nsends++] = i; 2643b4617e5dSHong Zhang i--; j++; 2644*e37c6257SHong Zhang /* printf("[%d] send to [%d]\n",rank,i); */ 2645b4617e5dSHong Zhang } 2646b4617e5dSHong Zhang } 2647b4617e5dSHong Zhang 2648b4617e5dSHong Zhang if (nleftover && subsize == size/nsubcomm && subrank==subsize-1) { /* this proc recvs from leftover processors */ 2649b4617e5dSHong Zhang for (i=0; i<nleftover; i++) { 2650b4617e5dSHong Zhang recv_rank[nrecvs++] = size-nleftover+i; 2651*e37c6257SHong Zhang /* printf("[%d] recv from [%d]\n",rank,i); */ 2652b4617e5dSHong Zhang } 2653b4617e5dSHong Zhang } 2654*e37c6257SHong Zhang } else if (psubcomm->type == PETSC_SUBCOMM_CONTIGUOUS) { 2655*e37c6257SHong Zhang /* --------------------------------------------------*/ 2656*e37c6257SHong Zhang PetscInt color,subcommstart; 2657*e37c6257SHong Zhang subcommstart=0; 2658*e37c6257SHong Zhang for (color=0; color<nsubcomm; color++) { 2659*e37c6257SHong Zhang if (psubcomm->color != color) { 2660*e37c6257SHong Zhang for (i=0; i<psubcomm->subsize[color]; i++) { 2661*e37c6257SHong Zhang if (subrank == i) { /* my_subrank == other's subrank */ 2662*e37c6257SHong Zhang send_rank[nsends++] = subcommstart+i; 2663*e37c6257SHong Zhang recv_rank[nrecvs++] = subcommstart+i; 2664*e37c6257SHong Zhang /* printf("[%d] send to and recv from [%d]\n",rank,subcommstart+i); */ 2665*e37c6257SHong Zhang } 2666*e37c6257SHong Zhang } 2667*e37c6257SHong Zhang } 2668*e37c6257SHong Zhang 
subcommstart += psubcomm->subsize[color]; 2669*e37c6257SHong Zhang } 2670*e37c6257SHong Zhang if (nleftover && subrank == size/nsubcomm) { /* this proc is a leftover proc, send to subcomm that does not have leftover proc */ 2671*e37c6257SHong Zhang subcommstart=0; 2672*e37c6257SHong Zhang for (color=0; color<nsubcomm; color++) { 2673*e37c6257SHong Zhang subcommstart += psubcomm->subsize[color]; 2674*e37c6257SHong Zhang if (psubcomm->color == color) continue; 2675*e37c6257SHong Zhang if (psubcomm->subsize[color] == size/nsubcomm) { /* subcomm does not have leftover proc */ 2676*e37c6257SHong Zhang send_rank[nsends++] = subcommstart -1; /* send to the last proc of subcomm[color] */ 2677*e37c6257SHong Zhang /* printf("[%d] leftover send to [%d] \n",rank,subcommstart -1); */ 2678*e37c6257SHong Zhang } 2679*e37c6257SHong Zhang } 2680*e37c6257SHong Zhang } 2681*e37c6257SHong Zhang 2682*e37c6257SHong Zhang if (nleftover && subsize == size/nsubcomm && subrank==subsize-1) { /* this proc recvs from leftover processors */ 2683*e37c6257SHong Zhang subcommstart=0; 2684*e37c6257SHong Zhang for (color=0; color<nsubcomm; color++) { 2685*e37c6257SHong Zhang subcommstart += psubcomm->subsize[color]; 2686*e37c6257SHong Zhang if (psubcomm->subsize[color] > size/nsubcomm) { /* subcomm has leftover proc */ 2687*e37c6257SHong Zhang recv_rank[nrecvs++] = subcommstart -1; /* recv from the last proc of subcomm[color] */ 2688*e37c6257SHong Zhang /* printf("[%d] recv from [%d]\n",rank,subcommstart -1); */ 2689*e37c6257SHong Zhang } 2690*e37c6257SHong Zhang } 2691*e37c6257SHong Zhang } 2692*e37c6257SHong Zhang } /* endof (psubcomm->type == PETSC_SUBCOMM_INTERLACED) */ 2693b4617e5dSHong Zhang 2694b4617e5dSHong Zhang /* allocate sbuf_j, sbuf_a */ 2695b4617e5dSHong Zhang i = nzlocal + rowrange[rank+1] - rowrange[rank] + 2; 2696b4617e5dSHong Zhang ierr = PetscMalloc(i*sizeof(PetscInt),&sbuf_j);CHKERRQ(ierr); 2697b4617e5dSHong Zhang ierr = 
PetscMalloc((nzlocal+1)*sizeof(PetscScalar),&sbuf_a);CHKERRQ(ierr); 2698*e37c6257SHong Zhang /* 2699*e37c6257SHong Zhang ierr = PetscSynchronizedPrintf(comm,"[%d] nsends %d, nrecvs %d\n",rank,nsends,nrecvs);CHKERRQ(ierr); 2700*e37c6257SHong Zhang ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr); 2701*e37c6257SHong Zhang */ 2702b4617e5dSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 2703b4617e5dSHong Zhang 2704b4617e5dSHong Zhang /* copy mat's local entries into the buffers */ 2705b4617e5dSHong Zhang if (reuse == MAT_INITIAL_MATRIX) { 2706b4617e5dSHong Zhang rownz_max = 0; 2707b4617e5dSHong Zhang rptr = sbuf_j; 2708b4617e5dSHong Zhang cols = sbuf_j + rend-rstart + 1; 2709b4617e5dSHong Zhang vals = sbuf_a; 2710b4617e5dSHong Zhang rptr[0] = 0; 2711b4617e5dSHong Zhang for (i=0; i<rend-rstart; i++) { 2712b4617e5dSHong Zhang row = i + rstart; 2713b4617e5dSHong Zhang nzA = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i]; 2714b4617e5dSHong Zhang ncols = nzA + nzB; 2715b4617e5dSHong Zhang cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i]; 2716b4617e5dSHong Zhang aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i]; 2717b4617e5dSHong Zhang /* load the column indices for this row into cols */ 2718b4617e5dSHong Zhang lwrite = 0; 2719b4617e5dSHong Zhang for (l=0; l<nzB; l++) { 2720b4617e5dSHong Zhang if ((ctmp = bmap[cworkB[l]]) < cstart) { 2721b4617e5dSHong Zhang vals[lwrite] = aworkB[l]; 2722b4617e5dSHong Zhang cols[lwrite++] = ctmp; 2723b4617e5dSHong Zhang } 2724b4617e5dSHong Zhang } 2725b4617e5dSHong Zhang for (l=0; l<nzA; l++) { 2726b4617e5dSHong Zhang vals[lwrite] = aworkA[l]; 2727b4617e5dSHong Zhang cols[lwrite++] = cstart + cworkA[l]; 2728b4617e5dSHong Zhang } 2729b4617e5dSHong Zhang for (l=0; l<nzB; l++) { 2730b4617e5dSHong Zhang if ((ctmp = bmap[cworkB[l]]) >= cend) { 2731b4617e5dSHong Zhang vals[lwrite] = aworkB[l]; 2732b4617e5dSHong Zhang cols[lwrite++] = ctmp; 2733b4617e5dSHong Zhang } 2734b4617e5dSHong Zhang } 2735b4617e5dSHong Zhang vals += ncols; 
2736b4617e5dSHong Zhang cols += ncols; 2737b4617e5dSHong Zhang rptr[i+1] = rptr[i] + ncols; 2738b4617e5dSHong Zhang if (rownz_max < ncols) rownz_max = ncols; 2739b4617e5dSHong Zhang } 2740b4617e5dSHong Zhang if (rptr[rend-rstart] != a->nz + b->nz) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB, "rptr[%d] %d != %d + %d",rend-rstart,rptr[rend-rstart+1],a->nz,b->nz); 2741b4617e5dSHong Zhang } else { /* only copy matrix values into sbuf_a */ 2742b4617e5dSHong Zhang rptr = sbuf_j; 2743b4617e5dSHong Zhang vals = sbuf_a; 2744b4617e5dSHong Zhang rptr[0] = 0; 2745b4617e5dSHong Zhang for (i=0; i<rend-rstart; i++) { 2746b4617e5dSHong Zhang row = i + rstart; 2747b4617e5dSHong Zhang nzA = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i]; 2748b4617e5dSHong Zhang ncols = nzA + nzB; 2749b4617e5dSHong Zhang cworkB = b->j + b->i[i]; 2750b4617e5dSHong Zhang aworkA = a->a + a->i[i]; 2751b4617e5dSHong Zhang aworkB = b->a + b->i[i]; 2752b4617e5dSHong Zhang lwrite = 0; 2753b4617e5dSHong Zhang for (l=0; l<nzB; l++) { 2754b4617e5dSHong Zhang if ((ctmp = bmap[cworkB[l]]) < cstart) vals[lwrite++] = aworkB[l]; 2755b4617e5dSHong Zhang } 2756b4617e5dSHong Zhang for (l=0; l<nzA; l++) vals[lwrite++] = aworkA[l]; 2757b4617e5dSHong Zhang for (l=0; l<nzB; l++) { 2758b4617e5dSHong Zhang if ((ctmp = bmap[cworkB[l]]) >= cend) vals[lwrite++] = aworkB[l]; 2759b4617e5dSHong Zhang } 2760b4617e5dSHong Zhang vals += ncols; 2761b4617e5dSHong Zhang rptr[i+1] = rptr[i] + ncols; 2762b4617e5dSHong Zhang } 2763b4617e5dSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 2764b4617e5dSHong Zhang 2765b4617e5dSHong Zhang /* send nzlocal to others, and recv other's nzlocal */ 2766b4617e5dSHong Zhang /*--------------------------------------------------*/ 2767b4617e5dSHong Zhang if (reuse == MAT_INITIAL_MATRIX) { 2768b4617e5dSHong Zhang ierr = PetscMalloc2(3*(nsends + nrecvs)+1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr); 2769b4617e5dSHong Zhang 2770b4617e5dSHong Zhang s_waits2 = s_waits3 + nsends; 
2771b4617e5dSHong Zhang s_waits1 = s_waits2 + nsends; 2772b4617e5dSHong Zhang r_waits1 = s_waits1 + nsends; 2773b4617e5dSHong Zhang r_waits2 = r_waits1 + nrecvs; 2774b4617e5dSHong Zhang r_waits3 = r_waits2 + nrecvs; 2775b4617e5dSHong Zhang } else { 2776b4617e5dSHong Zhang ierr = PetscMalloc2(nsends + nrecvs +1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr); 2777b4617e5dSHong Zhang 2778b4617e5dSHong Zhang r_waits3 = s_waits3 + nsends; 2779b4617e5dSHong Zhang } 2780b4617e5dSHong Zhang 2781b4617e5dSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag3);CHKERRQ(ierr); 2782b4617e5dSHong Zhang if (reuse == MAT_INITIAL_MATRIX) { 2783b4617e5dSHong Zhang /* get new tags to keep the communication clean */ 2784b4617e5dSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag1);CHKERRQ(ierr); 2785b4617e5dSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag2);CHKERRQ(ierr); 2786b4617e5dSHong Zhang ierr = PetscMalloc4(nsends,PetscInt,&sbuf_nz,nrecvs,PetscInt,&rbuf_nz,nrecvs,PetscInt*,&rbuf_j,nrecvs,PetscScalar*,&rbuf_a);CHKERRQ(ierr); 2787b4617e5dSHong Zhang 2788b4617e5dSHong Zhang /* post receives of other's nzlocal */ 2789b4617e5dSHong Zhang for (i=0; i<nrecvs; i++) { 2790b4617e5dSHong Zhang ierr = MPI_Irecv(rbuf_nz+i,1,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,r_waits1+i);CHKERRQ(ierr); 2791b4617e5dSHong Zhang } 2792b4617e5dSHong Zhang /* send nzlocal to others */ 2793b4617e5dSHong Zhang for (i=0; i<nsends; i++) { 2794b4617e5dSHong Zhang sbuf_nz[i] = nzlocal; 2795b4617e5dSHong Zhang ierr = MPI_Isend(sbuf_nz+i,1,MPIU_INT,send_rank[i],tag1,comm,s_waits1+i);CHKERRQ(ierr); 2796b4617e5dSHong Zhang } 2797b4617e5dSHong Zhang /* wait on receives of nzlocal; allocate space for rbuf_j, rbuf_a */ 2798b4617e5dSHong Zhang count = nrecvs; 2799b4617e5dSHong Zhang while (count) { 2800b4617e5dSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits1,&imdex,&recv_status);CHKERRQ(ierr); 2801b4617e5dSHong Zhang 2802b4617e5dSHong Zhang recv_rank[imdex] = 
recv_status.MPI_SOURCE; 2803b4617e5dSHong Zhang /* allocate rbuf_a and rbuf_j; then post receives of rbuf_j */ 2804b4617e5dSHong Zhang ierr = PetscMalloc((rbuf_nz[imdex]+1)*sizeof(PetscScalar),&rbuf_a[imdex]);CHKERRQ(ierr); 2805b4617e5dSHong Zhang 2806b4617e5dSHong Zhang i = rowrange[recv_status.MPI_SOURCE+1] - rowrange[recv_status.MPI_SOURCE]; /* number of expected mat->i */ 2807b4617e5dSHong Zhang 2808b4617e5dSHong Zhang rbuf_nz[imdex] += i + 2; 2809b4617e5dSHong Zhang 2810b4617e5dSHong Zhang ierr = PetscMalloc(rbuf_nz[imdex]*sizeof(PetscInt),&rbuf_j[imdex]);CHKERRQ(ierr); 2811b4617e5dSHong Zhang ierr = MPI_Irecv(rbuf_j[imdex],rbuf_nz[imdex],MPIU_INT,recv_status.MPI_SOURCE,tag2,comm,r_waits2+imdex);CHKERRQ(ierr); 2812b4617e5dSHong Zhang count--; 2813b4617e5dSHong Zhang } 2814b4617e5dSHong Zhang /* wait on sends of nzlocal */ 2815b4617e5dSHong Zhang if (nsends) {ierr = MPI_Waitall(nsends,s_waits1,send_status);CHKERRQ(ierr);} 2816b4617e5dSHong Zhang /* send mat->i,j to others, and recv from other's */ 2817b4617e5dSHong Zhang /*------------------------------------------------*/ 2818b4617e5dSHong Zhang for (i=0; i<nsends; i++) { 2819b4617e5dSHong Zhang j = nzlocal + rowrange[rank+1] - rowrange[rank] + 1; 2820b4617e5dSHong Zhang ierr = MPI_Isend(sbuf_j,j,MPIU_INT,send_rank[i],tag2,comm,s_waits2+i);CHKERRQ(ierr); 2821b4617e5dSHong Zhang } 2822b4617e5dSHong Zhang /* wait on receives of mat->i,j */ 2823b4617e5dSHong Zhang /*------------------------------*/ 2824b4617e5dSHong Zhang count = nrecvs; 2825b4617e5dSHong Zhang while (count) { 2826b4617e5dSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits2,&imdex,&recv_status);CHKERRQ(ierr); 2827b4617e5dSHong Zhang if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(PETSC_COMM_SELF,1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE); 2828b4617e5dSHong Zhang count--; 2829b4617e5dSHong Zhang } 2830b4617e5dSHong Zhang /* wait on sends of mat->i,j */ 2831b4617e5dSHong Zhang /*---------------------------*/ 
2832b4617e5dSHong Zhang if (nsends) { 2833b4617e5dSHong Zhang ierr = MPI_Waitall(nsends,s_waits2,send_status);CHKERRQ(ierr); 2834b4617e5dSHong Zhang } 2835b4617e5dSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 2836b4617e5dSHong Zhang 2837b4617e5dSHong Zhang /* post receives, send and receive mat->a */ 2838b4617e5dSHong Zhang /*----------------------------------------*/ 2839b4617e5dSHong Zhang for (imdex=0; imdex<nrecvs; imdex++) { 2840b4617e5dSHong Zhang ierr = MPI_Irecv(rbuf_a[imdex],rbuf_nz[imdex],MPIU_SCALAR,recv_rank[imdex],tag3,comm,r_waits3+imdex);CHKERRQ(ierr); 2841b4617e5dSHong Zhang } 2842b4617e5dSHong Zhang for (i=0; i<nsends; i++) { 2843b4617e5dSHong Zhang ierr = MPI_Isend(sbuf_a,nzlocal,MPIU_SCALAR,send_rank[i],tag3,comm,s_waits3+i);CHKERRQ(ierr); 2844b4617e5dSHong Zhang } 2845b4617e5dSHong Zhang count = nrecvs; 2846b4617e5dSHong Zhang while (count) { 2847b4617e5dSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits3,&imdex,&recv_status);CHKERRQ(ierr); 2848b4617e5dSHong Zhang if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(PETSC_COMM_SELF,1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE); 2849b4617e5dSHong Zhang count--; 2850b4617e5dSHong Zhang } 2851b4617e5dSHong Zhang if (nsends) { 2852b4617e5dSHong Zhang ierr = MPI_Waitall(nsends,s_waits3,send_status);CHKERRQ(ierr); 2853b4617e5dSHong Zhang } 2854b4617e5dSHong Zhang 2855b4617e5dSHong Zhang ierr = PetscFree2(s_waits3,send_status);CHKERRQ(ierr); 2856b4617e5dSHong Zhang 2857b4617e5dSHong Zhang /* create redundant matrix */ 2858b4617e5dSHong Zhang /*-------------------------*/ 2859b4617e5dSHong Zhang if (reuse == MAT_INITIAL_MATRIX) { 2860b4617e5dSHong Zhang /* compute rownz_max for preallocation */ 2861b4617e5dSHong Zhang for (imdex=0; imdex<nrecvs; imdex++) { 2862b4617e5dSHong Zhang j = rowrange[recv_rank[imdex]+1] - rowrange[recv_rank[imdex]]; 2863b4617e5dSHong Zhang rptr = rbuf_j[imdex]; 2864b4617e5dSHong Zhang for (i=0; i<j; i++) { 2865b4617e5dSHong Zhang ncols 
= rptr[i+1] - rptr[i]; 2866b4617e5dSHong Zhang if (rownz_max < ncols) rownz_max = ncols; 2867b4617e5dSHong Zhang } 2868b4617e5dSHong Zhang } 2869b4617e5dSHong Zhang 2870b4617e5dSHong Zhang ierr = MatCreate(subcomm,&C);CHKERRQ(ierr); 287134d19554SHong Zhang if (M == N) { 2872b4617e5dSHong Zhang ierr = MatSetSizes(C,mloc_sub,mloc_sub,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr); 287334d19554SHong Zhang } else { /* non-square matrix */ 287434d19554SHong Zhang ierr = MatSetSizes(C,mloc_sub,PETSC_DECIDE,PETSC_DECIDE,mat->cmap->N);CHKERRQ(ierr); 287534d19554SHong Zhang } 2876b4617e5dSHong Zhang ierr = MatSetBlockSizes(C,mat->rmap->bs,mat->cmap->bs);CHKERRQ(ierr); 2877b4617e5dSHong Zhang ierr = MatSetFromOptions(C);CHKERRQ(ierr); 2878b4617e5dSHong Zhang ierr = MatSeqAIJSetPreallocation(C,rownz_max,NULL);CHKERRQ(ierr); 2879b4617e5dSHong Zhang ierr = MatMPIAIJSetPreallocation(C,rownz_max,NULL,rownz_max,NULL);CHKERRQ(ierr); 2880b4617e5dSHong Zhang } else { 2881b4617e5dSHong Zhang C = *matredundant; 2882b4617e5dSHong Zhang } 2883b4617e5dSHong Zhang 2884b4617e5dSHong Zhang /* insert local matrix entries */ 2885b4617e5dSHong Zhang rptr = sbuf_j; 2886b4617e5dSHong Zhang cols = sbuf_j + rend-rstart + 1; 2887b4617e5dSHong Zhang vals = sbuf_a; 2888b4617e5dSHong Zhang for (i=0; i<rend-rstart; i++) { 2889b4617e5dSHong Zhang row = i + rstart; 2890b4617e5dSHong Zhang ncols = rptr[i+1] - rptr[i]; 2891b4617e5dSHong Zhang ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr); 2892b4617e5dSHong Zhang vals += ncols; 2893b4617e5dSHong Zhang cols += ncols; 2894b4617e5dSHong Zhang } 2895b4617e5dSHong Zhang /* insert received matrix entries */ 2896b4617e5dSHong Zhang for (imdex=0; imdex<nrecvs; imdex++) { 2897b4617e5dSHong Zhang rstart = rowrange[recv_rank[imdex]]; 2898b4617e5dSHong Zhang rend = rowrange[recv_rank[imdex]+1]; 2899*e37c6257SHong Zhang /* printf("[%d] insert rows %d - %d\n",rank,rstart,rend-1); */ 2900b4617e5dSHong Zhang rptr = rbuf_j[imdex]; 2901b4617e5dSHong 
Zhang cols = rbuf_j[imdex] + rend-rstart + 1; 2902b4617e5dSHong Zhang vals = rbuf_a[imdex]; 2903b4617e5dSHong Zhang for (i=0; i<rend-rstart; i++) { 2904b4617e5dSHong Zhang row = i + rstart; 2905b4617e5dSHong Zhang ncols = rptr[i+1] - rptr[i]; 2906b4617e5dSHong Zhang ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr); 2907b4617e5dSHong Zhang vals += ncols; 2908b4617e5dSHong Zhang cols += ncols; 2909b4617e5dSHong Zhang } 2910b4617e5dSHong Zhang } 2911b4617e5dSHong Zhang ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2912b4617e5dSHong Zhang ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2913b4617e5dSHong Zhang 2914b4617e5dSHong Zhang if (reuse == MAT_INITIAL_MATRIX) { 2915b4617e5dSHong Zhang *matredundant = C; 29165cc03489SHong Zhang 2917b4617e5dSHong Zhang /* create a supporting struct and attach it to C for reuse */ 2918b4617e5dSHong Zhang ierr = PetscNewLog(C,Mat_Redundant,&redund);CHKERRQ(ierr); 29195cc03489SHong Zhang if (subsize == 1) { 29205cc03489SHong Zhang Mat_SeqAIJ *c = (Mat_SeqAIJ*)C->data; 29215cc03489SHong Zhang c->redundant = redund; 29225cc03489SHong Zhang } else { 29235cc03489SHong Zhang Mat_MPIAIJ *c = (Mat_MPIAIJ*)C->data; 29245cc03489SHong Zhang c->redundant = redund; 29255cc03489SHong Zhang } 2926b4617e5dSHong Zhang 2927b4617e5dSHong Zhang redund->nzlocal = nzlocal; 2928b4617e5dSHong Zhang redund->nsends = nsends; 2929b4617e5dSHong Zhang redund->nrecvs = nrecvs; 2930b4617e5dSHong Zhang redund->send_rank = send_rank; 2931b4617e5dSHong Zhang redund->recv_rank = recv_rank; 2932b4617e5dSHong Zhang redund->sbuf_nz = sbuf_nz; 2933b4617e5dSHong Zhang redund->rbuf_nz = rbuf_nz; 2934b4617e5dSHong Zhang redund->sbuf_j = sbuf_j; 2935b4617e5dSHong Zhang redund->sbuf_a = sbuf_a; 2936b4617e5dSHong Zhang redund->rbuf_j = rbuf_j; 2937b4617e5dSHong Zhang redund->rbuf_a = rbuf_a; 29380b291e46SHong Zhang redund->psubcomm = NULL; 2939b4617e5dSHong Zhang 2940b4617e5dSHong Zhang redund->Destroy = C->ops->destroy; 
2941b4617e5dSHong Zhang C->ops->destroy = MatDestroy_MatRedundant; 2942b4617e5dSHong Zhang } 2943b4617e5dSHong Zhang PetscFunctionReturn(0); 2944b4617e5dSHong Zhang } 2945b4617e5dSHong Zhang 2946b4617e5dSHong Zhang #undef __FUNCT__ 294769db28dcSHong Zhang #define __FUNCT__ "MatGetRedundantMatrix_MPIAIJ" 2948d3b23db5SHong Zhang PetscErrorCode MatGetRedundantMatrix_MPIAIJ(Mat mat,PetscInt nsubcomm,MPI_Comm subcomm,MatReuse reuse,Mat *matredundant) 294969db28dcSHong Zhang { 2950f38d543fSHong Zhang PetscErrorCode ierr; 295169db28dcSHong Zhang 295269db28dcSHong Zhang PetscFunctionBegin; 29530b291e46SHong Zhang /* Only MatGetRedundantMatrix_MPIAIJ_interlaced() is written now */ 295434d19554SHong Zhang if (subcomm == MPI_COMM_NULL || subcomm == PETSC_COMM_SELF) { /* create psubcomm */ 29550b291e46SHong Zhang MPI_Comm comm; 29560b291e46SHong Zhang PetscSubcomm psubcomm; 29570b291e46SHong Zhang PetscMPIInt size,subsize; 2958*e37c6257SHong Zhang PetscInt type=2; 2959*e37c6257SHong Zhang PetscBool flg=PETSC_FALSE; 2960ce94432eSBarry Smith ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr); 296169db28dcSHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 2962d3b23db5SHong Zhang ierr = PetscSubcommCreate(comm,&psubcomm);CHKERRQ(ierr); 2963d3b23db5SHong Zhang ierr = PetscSubcommSetNumber(psubcomm,nsubcomm);CHKERRQ(ierr); 2964*e37c6257SHong Zhang 2965*e37c6257SHong Zhang ierr = PetscOptionsGetInt(NULL,"-subcomm_type",&type,NULL);CHKERRQ(ierr); 2966*e37c6257SHong Zhang if (type == 2) { 2967d3b23db5SHong Zhang ierr = PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_INTERLACED);CHKERRQ(ierr); 2968*e37c6257SHong Zhang } else { 2969*e37c6257SHong Zhang ierr = PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr); 2970*e37c6257SHong Zhang } 2971*e37c6257SHong Zhang ierr = PetscOptionsHasName(NULL, "-psubcomm_view", &flg);CHKERRQ(ierr); 2972*e37c6257SHong Zhang if (flg) { 2973*e37c6257SHong Zhang ierr = 
PetscSubcommView(psubcomm,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 2974*e37c6257SHong Zhang } 2975*e37c6257SHong Zhang 2976*e37c6257SHong Zhang ierr = MatGetRedundantMatrix_MPIAIJ_psubcomm(mat,nsubcomm,psubcomm,reuse,matredundant);CHKERRQ(ierr); 29770b291e46SHong Zhang 29780b291e46SHong Zhang /* free psubcomm in MatDestroy_MatRedundant() */ 29790b291e46SHong Zhang ierr = MPI_Comm_size(psubcomm->comm,&subsize);CHKERRQ(ierr); 29800b291e46SHong Zhang Mat C = *matredundant; 29810b291e46SHong Zhang if (subsize == 1) { 29820b291e46SHong Zhang Mat_SeqAIJ *c = (Mat_SeqAIJ*)C->data; 29830b291e46SHong Zhang c->redundant->psubcomm = psubcomm; 29840b291e46SHong Zhang } else { 29850b291e46SHong Zhang Mat_MPIAIJ *c = (Mat_MPIAIJ*)C->data; 29860b291e46SHong Zhang c->redundant->psubcomm = psubcomm ; 29870b291e46SHong Zhang } 298869db28dcSHong Zhang } else { 2989d3b23db5SHong Zhang SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support yet"); 299069db28dcSHong Zhang } 299169db28dcSHong Zhang PetscFunctionReturn(0); 299269db28dcSHong Zhang } 299369db28dcSHong Zhang 299403bc72f1SMatthew Knepley #undef __FUNCT__ 2995c91732d9SHong Zhang #define __FUNCT__ "MatGetRowMaxAbs_MPIAIJ" 2996c91732d9SHong Zhang PetscErrorCode MatGetRowMaxAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2997c91732d9SHong Zhang { 2998c91732d9SHong Zhang Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2999c91732d9SHong Zhang PetscErrorCode ierr; 3000c91732d9SHong Zhang PetscInt i,*idxb = 0; 3001c91732d9SHong Zhang PetscScalar *va,*vb; 3002c91732d9SHong Zhang Vec vtmp; 3003c91732d9SHong Zhang 3004c91732d9SHong Zhang PetscFunctionBegin; 3005c91732d9SHong Zhang ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr); 3006c91732d9SHong Zhang ierr = VecGetArray(v,&va);CHKERRQ(ierr); 3007c91732d9SHong Zhang if (idx) { 3008192daf7cSBarry Smith for (i=0; i<A->rmap->n; i++) { 3009d0f46423SBarry Smith if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart; 3010c91732d9SHong Zhang } 3011c91732d9SHong Zhang } 3012c91732d9SHong Zhang 3013d0f46423SBarry 
Smith ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr); 3014c91732d9SHong Zhang if (idx) { 3015d0f46423SBarry Smith ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr); 3016c91732d9SHong Zhang } 3017c91732d9SHong Zhang ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr); 3018c91732d9SHong Zhang ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr); 3019c91732d9SHong Zhang 3020d0f46423SBarry Smith for (i=0; i<A->rmap->n; i++) { 3021c91732d9SHong Zhang if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) { 3022c91732d9SHong Zhang va[i] = vb[i]; 3023c91732d9SHong Zhang if (idx) idx[i] = a->garray[idxb[i]]; 3024c91732d9SHong Zhang } 3025c91732d9SHong Zhang } 3026c91732d9SHong Zhang 3027c91732d9SHong Zhang ierr = VecRestoreArray(v,&va);CHKERRQ(ierr); 3028c91732d9SHong Zhang ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr); 3029c91732d9SHong Zhang ierr = PetscFree(idxb);CHKERRQ(ierr); 30306bf464f9SBarry Smith ierr = VecDestroy(&vtmp);CHKERRQ(ierr); 3031c91732d9SHong Zhang PetscFunctionReturn(0); 3032c91732d9SHong Zhang } 3033c91732d9SHong Zhang 3034c91732d9SHong Zhang #undef __FUNCT__ 3035c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMinAbs_MPIAIJ" 3036c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMinAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 3037c87e5d42SMatthew Knepley { 3038c87e5d42SMatthew Knepley Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 3039c87e5d42SMatthew Knepley PetscErrorCode ierr; 3040c87e5d42SMatthew Knepley PetscInt i,*idxb = 0; 3041c87e5d42SMatthew Knepley PetscScalar *va,*vb; 3042c87e5d42SMatthew Knepley Vec vtmp; 3043c87e5d42SMatthew Knepley 3044c87e5d42SMatthew Knepley PetscFunctionBegin; 3045c87e5d42SMatthew Knepley ierr = MatGetRowMinAbs(a->A,v,idx);CHKERRQ(ierr); 3046c87e5d42SMatthew Knepley ierr = VecGetArray(v,&va);CHKERRQ(ierr); 3047c87e5d42SMatthew Knepley if (idx) { 3048c87e5d42SMatthew Knepley for (i=0; i<A->cmap->n; i++) { 3049c87e5d42SMatthew Knepley if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart; 
3050c87e5d42SMatthew Knepley } 3051c87e5d42SMatthew Knepley } 3052c87e5d42SMatthew Knepley 3053c87e5d42SMatthew Knepley ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr); 3054c87e5d42SMatthew Knepley if (idx) { 3055c87e5d42SMatthew Knepley ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr); 3056c87e5d42SMatthew Knepley } 3057c87e5d42SMatthew Knepley ierr = MatGetRowMinAbs(a->B,vtmp,idxb);CHKERRQ(ierr); 3058c87e5d42SMatthew Knepley ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr); 3059c87e5d42SMatthew Knepley 3060c87e5d42SMatthew Knepley for (i=0; i<A->rmap->n; i++) { 3061c87e5d42SMatthew Knepley if (PetscAbsScalar(va[i]) > PetscAbsScalar(vb[i])) { 3062c87e5d42SMatthew Knepley va[i] = vb[i]; 3063c87e5d42SMatthew Knepley if (idx) idx[i] = a->garray[idxb[i]]; 3064c87e5d42SMatthew Knepley } 3065c87e5d42SMatthew Knepley } 3066c87e5d42SMatthew Knepley 3067c87e5d42SMatthew Knepley ierr = VecRestoreArray(v,&va);CHKERRQ(ierr); 3068c87e5d42SMatthew Knepley ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr); 3069c87e5d42SMatthew Knepley ierr = PetscFree(idxb);CHKERRQ(ierr); 30706bf464f9SBarry Smith ierr = VecDestroy(&vtmp);CHKERRQ(ierr); 3071c87e5d42SMatthew Knepley PetscFunctionReturn(0); 3072c87e5d42SMatthew Knepley } 3073c87e5d42SMatthew Knepley 3074c87e5d42SMatthew Knepley #undef __FUNCT__ 307503bc72f1SMatthew Knepley #define __FUNCT__ "MatGetRowMin_MPIAIJ" 307603bc72f1SMatthew Knepley PetscErrorCode MatGetRowMin_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 307703bc72f1SMatthew Knepley { 307803bc72f1SMatthew Knepley Mat_MPIAIJ *mat = (Mat_MPIAIJ*) A->data; 3079d0f46423SBarry Smith PetscInt n = A->rmap->n; 3080d0f46423SBarry Smith PetscInt cstart = A->cmap->rstart; 308103bc72f1SMatthew Knepley PetscInt *cmap = mat->garray; 308203bc72f1SMatthew Knepley PetscInt *diagIdx, *offdiagIdx; 308303bc72f1SMatthew Knepley Vec diagV, offdiagV; 308403bc72f1SMatthew Knepley PetscScalar *a, *diagA, *offdiagA; 308503bc72f1SMatthew Knepley PetscInt r; 308603bc72f1SMatthew 
Knepley PetscErrorCode ierr; 308703bc72f1SMatthew Knepley 308803bc72f1SMatthew Knepley PetscFunctionBegin; 308903bc72f1SMatthew Knepley ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr); 3090ce94432eSBarry Smith ierr = VecCreateSeq(PetscObjectComm((PetscObject)A), n, &diagV);CHKERRQ(ierr); 3091ce94432eSBarry Smith ierr = VecCreateSeq(PetscObjectComm((PetscObject)A), n, &offdiagV);CHKERRQ(ierr); 309203bc72f1SMatthew Knepley ierr = MatGetRowMin(mat->A, diagV, diagIdx);CHKERRQ(ierr); 309303bc72f1SMatthew Knepley ierr = MatGetRowMin(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr); 309403bc72f1SMatthew Knepley ierr = VecGetArray(v, &a);CHKERRQ(ierr); 309503bc72f1SMatthew Knepley ierr = VecGetArray(diagV, &diagA);CHKERRQ(ierr); 309603bc72f1SMatthew Knepley ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr); 309703bc72f1SMatthew Knepley for (r = 0; r < n; ++r) { 3098028cd4eaSSatish Balay if (PetscAbsScalar(diagA[r]) <= PetscAbsScalar(offdiagA[r])) { 309903bc72f1SMatthew Knepley a[r] = diagA[r]; 310003bc72f1SMatthew Knepley idx[r] = cstart + diagIdx[r]; 310103bc72f1SMatthew Knepley } else { 310203bc72f1SMatthew Knepley a[r] = offdiagA[r]; 310303bc72f1SMatthew Knepley idx[r] = cmap[offdiagIdx[r]]; 310403bc72f1SMatthew Knepley } 310503bc72f1SMatthew Knepley } 310603bc72f1SMatthew Knepley ierr = VecRestoreArray(v, &a);CHKERRQ(ierr); 310703bc72f1SMatthew Knepley ierr = VecRestoreArray(diagV, &diagA);CHKERRQ(ierr); 310803bc72f1SMatthew Knepley ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr); 31096bf464f9SBarry Smith ierr = VecDestroy(&diagV);CHKERRQ(ierr); 31106bf464f9SBarry Smith ierr = VecDestroy(&offdiagV);CHKERRQ(ierr); 311103bc72f1SMatthew Knepley ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr); 311203bc72f1SMatthew Knepley PetscFunctionReturn(0); 311303bc72f1SMatthew Knepley } 311403bc72f1SMatthew Knepley 31155494a064SHong Zhang #undef __FUNCT__ 3116c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMax_MPIAIJ" 
3117c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMax_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 3118c87e5d42SMatthew Knepley { 3119c87e5d42SMatthew Knepley Mat_MPIAIJ *mat = (Mat_MPIAIJ*) A->data; 3120c87e5d42SMatthew Knepley PetscInt n = A->rmap->n; 3121c87e5d42SMatthew Knepley PetscInt cstart = A->cmap->rstart; 3122c87e5d42SMatthew Knepley PetscInt *cmap = mat->garray; 3123c87e5d42SMatthew Knepley PetscInt *diagIdx, *offdiagIdx; 3124c87e5d42SMatthew Knepley Vec diagV, offdiagV; 3125c87e5d42SMatthew Knepley PetscScalar *a, *diagA, *offdiagA; 3126c87e5d42SMatthew Knepley PetscInt r; 3127c87e5d42SMatthew Knepley PetscErrorCode ierr; 3128c87e5d42SMatthew Knepley 3129c87e5d42SMatthew Knepley PetscFunctionBegin; 3130c87e5d42SMatthew Knepley ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr); 3131d11e49fbSSatish Balay ierr = VecCreateSeq(PETSC_COMM_SELF, n, &diagV);CHKERRQ(ierr); 3132d11e49fbSSatish Balay ierr = VecCreateSeq(PETSC_COMM_SELF, n, &offdiagV);CHKERRQ(ierr); 3133c87e5d42SMatthew Knepley ierr = MatGetRowMax(mat->A, diagV, diagIdx);CHKERRQ(ierr); 3134c87e5d42SMatthew Knepley ierr = MatGetRowMax(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr); 3135c87e5d42SMatthew Knepley ierr = VecGetArray(v, &a);CHKERRQ(ierr); 3136c87e5d42SMatthew Knepley ierr = VecGetArray(diagV, &diagA);CHKERRQ(ierr); 3137c87e5d42SMatthew Knepley ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr); 3138c87e5d42SMatthew Knepley for (r = 0; r < n; ++r) { 3139c87e5d42SMatthew Knepley if (PetscAbsScalar(diagA[r]) >= PetscAbsScalar(offdiagA[r])) { 3140c87e5d42SMatthew Knepley a[r] = diagA[r]; 3141c87e5d42SMatthew Knepley idx[r] = cstart + diagIdx[r]; 3142c87e5d42SMatthew Knepley } else { 3143c87e5d42SMatthew Knepley a[r] = offdiagA[r]; 3144c87e5d42SMatthew Knepley idx[r] = cmap[offdiagIdx[r]]; 3145c87e5d42SMatthew Knepley } 3146c87e5d42SMatthew Knepley } 3147c87e5d42SMatthew Knepley ierr = VecRestoreArray(v, &a);CHKERRQ(ierr); 3148c87e5d42SMatthew Knepley ierr = 
VecRestoreArray(diagV, &diagA);CHKERRQ(ierr); 3149c87e5d42SMatthew Knepley ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr); 31506bf464f9SBarry Smith ierr = VecDestroy(&diagV);CHKERRQ(ierr); 31516bf464f9SBarry Smith ierr = VecDestroy(&offdiagV);CHKERRQ(ierr); 3152c87e5d42SMatthew Knepley ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr); 3153c87e5d42SMatthew Knepley PetscFunctionReturn(0); 3154c87e5d42SMatthew Knepley } 3155c87e5d42SMatthew Knepley 3156c87e5d42SMatthew Knepley #undef __FUNCT__ 3157d1adec66SJed Brown #define __FUNCT__ "MatGetSeqNonzeroStructure_MPIAIJ" 3158d1adec66SJed Brown PetscErrorCode MatGetSeqNonzeroStructure_MPIAIJ(Mat mat,Mat *newmat) 31595494a064SHong Zhang { 31605494a064SHong Zhang PetscErrorCode ierr; 3161f6d58c54SBarry Smith Mat *dummy; 31625494a064SHong Zhang 31635494a064SHong Zhang PetscFunctionBegin; 3164f6d58c54SBarry Smith ierr = MatGetSubMatrix_MPIAIJ_All(mat,MAT_DO_NOT_GET_VALUES,MAT_INITIAL_MATRIX,&dummy);CHKERRQ(ierr); 3165f6d58c54SBarry Smith *newmat = *dummy; 3166f6d58c54SBarry Smith ierr = PetscFree(dummy);CHKERRQ(ierr); 31675494a064SHong Zhang PetscFunctionReturn(0); 31685494a064SHong Zhang } 31695494a064SHong Zhang 31707087cfbeSBarry Smith extern PetscErrorCode MatFDColoringApply_AIJ(Mat,MatFDColoring,Vec,MatStructure*,void*); 3171bbead8a2SBarry Smith 3172bbead8a2SBarry Smith #undef __FUNCT__ 3173bbead8a2SBarry Smith #define __FUNCT__ "MatInvertBlockDiagonal_MPIAIJ" 3174713ccfa9SJed Brown PetscErrorCode MatInvertBlockDiagonal_MPIAIJ(Mat A,const PetscScalar **values) 3175bbead8a2SBarry Smith { 3176bbead8a2SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*) A->data; 3177bbead8a2SBarry Smith PetscErrorCode ierr; 3178bbead8a2SBarry Smith 3179bbead8a2SBarry Smith PetscFunctionBegin; 3180bbead8a2SBarry Smith ierr = MatInvertBlockDiagonal(a->A,values);CHKERRQ(ierr); 3181bbead8a2SBarry Smith PetscFunctionReturn(0); 3182bbead8a2SBarry Smith } 3183bbead8a2SBarry Smith 318473a71a0fSBarry Smith #undef __FUNCT__ 318573a71a0fSBarry 
Smith #define __FUNCT__ "MatSetRandom_MPIAIJ" 318673a71a0fSBarry Smith static PetscErrorCode MatSetRandom_MPIAIJ(Mat x,PetscRandom rctx) 318773a71a0fSBarry Smith { 318873a71a0fSBarry Smith PetscErrorCode ierr; 318973a71a0fSBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)x->data; 319073a71a0fSBarry Smith 319173a71a0fSBarry Smith PetscFunctionBegin; 319273a71a0fSBarry Smith ierr = MatSetRandom(aij->A,rctx);CHKERRQ(ierr); 319373a71a0fSBarry Smith ierr = MatSetRandom(aij->B,rctx);CHKERRQ(ierr); 319473a71a0fSBarry Smith ierr = MatAssemblyBegin(x,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 319573a71a0fSBarry Smith ierr = MatAssemblyEnd(x,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 319673a71a0fSBarry Smith PetscFunctionReturn(0); 319773a71a0fSBarry Smith } 3198bbead8a2SBarry Smith 31998a729477SBarry Smith /* -------------------------------------------------------------------*/ 3200cda55fadSBarry Smith static struct _MatOps MatOps_Values = {MatSetValues_MPIAIJ, 3201cda55fadSBarry Smith MatGetRow_MPIAIJ, 3202cda55fadSBarry Smith MatRestoreRow_MPIAIJ, 3203cda55fadSBarry Smith MatMult_MPIAIJ, 320497304618SKris Buschelman /* 4*/ MatMultAdd_MPIAIJ, 32057c922b88SBarry Smith MatMultTranspose_MPIAIJ, 32067c922b88SBarry Smith MatMultTransposeAdd_MPIAIJ, 3207519f805aSKarl Rupp #if defined(PETSC_HAVE_PBGL) 3208103bf8bdSMatthew Knepley MatSolve_MPIAIJ, 3209103bf8bdSMatthew Knepley #else 3210cda55fadSBarry Smith 0, 3211103bf8bdSMatthew Knepley #endif 3212cda55fadSBarry Smith 0, 3213cda55fadSBarry Smith 0, 321497304618SKris Buschelman /*10*/ 0, 3215cda55fadSBarry Smith 0, 3216cda55fadSBarry Smith 0, 321741f059aeSBarry Smith MatSOR_MPIAIJ, 3218b7c46309SBarry Smith MatTranspose_MPIAIJ, 321997304618SKris Buschelman /*15*/ MatGetInfo_MPIAIJ, 3220cda55fadSBarry Smith MatEqual_MPIAIJ, 3221cda55fadSBarry Smith MatGetDiagonal_MPIAIJ, 3222cda55fadSBarry Smith MatDiagonalScale_MPIAIJ, 3223cda55fadSBarry Smith MatNorm_MPIAIJ, 322497304618SKris Buschelman /*20*/ MatAssemblyBegin_MPIAIJ, 3225cda55fadSBarry Smith 
MatAssemblyEnd_MPIAIJ, 3226cda55fadSBarry Smith MatSetOption_MPIAIJ, 3227cda55fadSBarry Smith MatZeroEntries_MPIAIJ, 3228d519adbfSMatthew Knepley /*24*/ MatZeroRows_MPIAIJ, 3229cda55fadSBarry Smith 0, 3230519f805aSKarl Rupp #if defined(PETSC_HAVE_PBGL) 3231719d5645SBarry Smith 0, 3232103bf8bdSMatthew Knepley #else 3233cda55fadSBarry Smith 0, 3234103bf8bdSMatthew Knepley #endif 3235cda55fadSBarry Smith 0, 3236cda55fadSBarry Smith 0, 32374994cf47SJed Brown /*29*/ MatSetUp_MPIAIJ, 3238519f805aSKarl Rupp #if defined(PETSC_HAVE_PBGL) 3239719d5645SBarry Smith 0, 3240103bf8bdSMatthew Knepley #else 3241cda55fadSBarry Smith 0, 3242103bf8bdSMatthew Knepley #endif 3243cda55fadSBarry Smith 0, 3244cda55fadSBarry Smith 0, 3245cda55fadSBarry Smith 0, 3246d519adbfSMatthew Knepley /*34*/ MatDuplicate_MPIAIJ, 3247cda55fadSBarry Smith 0, 3248cda55fadSBarry Smith 0, 3249cda55fadSBarry Smith 0, 3250cda55fadSBarry Smith 0, 3251d519adbfSMatthew Knepley /*39*/ MatAXPY_MPIAIJ, 3252cda55fadSBarry Smith MatGetSubMatrices_MPIAIJ, 3253cda55fadSBarry Smith MatIncreaseOverlap_MPIAIJ, 3254cda55fadSBarry Smith MatGetValues_MPIAIJ, 3255cb5b572fSBarry Smith MatCopy_MPIAIJ, 3256d519adbfSMatthew Knepley /*44*/ MatGetRowMax_MPIAIJ, 3257cda55fadSBarry Smith MatScale_MPIAIJ, 3258cda55fadSBarry Smith 0, 3259cda55fadSBarry Smith 0, 3260564f14d6SBarry Smith MatZeroRowsColumns_MPIAIJ, 326173a71a0fSBarry Smith /*49*/ MatSetRandom_MPIAIJ, 3262cda55fadSBarry Smith 0, 3263cda55fadSBarry Smith 0, 3264cda55fadSBarry Smith 0, 3265cda55fadSBarry Smith 0, 3266d519adbfSMatthew Knepley /*54*/ MatFDColoringCreate_MPIAIJ, 3267cda55fadSBarry Smith 0, 3268cda55fadSBarry Smith MatSetUnfactored_MPIAIJ, 326972e6a0cfSJed Brown MatPermute_MPIAIJ, 3270cda55fadSBarry Smith 0, 3271d519adbfSMatthew Knepley /*59*/ MatGetSubMatrix_MPIAIJ, 3272e03a110bSBarry Smith MatDestroy_MPIAIJ, 3273e03a110bSBarry Smith MatView_MPIAIJ, 3274357abbc8SBarry Smith 0, 3275f996eeb8SHong Zhang MatMatMatMult_MPIAIJ_MPIAIJ_MPIAIJ, 3276f996eeb8SHong Zhang 
/*64*/ MatMatMatMultSymbolic_MPIAIJ_MPIAIJ_MPIAIJ, 3277f996eeb8SHong Zhang MatMatMatMultNumeric_MPIAIJ_MPIAIJ_MPIAIJ, 3278a2243be0SBarry Smith 0, 3279a2243be0SBarry Smith 0, 3280a2243be0SBarry Smith 0, 3281d519adbfSMatthew Knepley /*69*/ MatGetRowMaxAbs_MPIAIJ, 3282c87e5d42SMatthew Knepley MatGetRowMinAbs_MPIAIJ, 3283a2243be0SBarry Smith 0, 3284a2243be0SBarry Smith MatSetColoring_MPIAIJ, 3285dcf5cc72SBarry Smith 0, 328697304618SKris Buschelman MatSetValuesAdifor_MPIAIJ, 32873acb8795SBarry Smith /*75*/ MatFDColoringApply_AIJ, 328897304618SKris Buschelman 0, 328997304618SKris Buschelman 0, 329097304618SKris Buschelman 0, 3291f1f41ecbSJed Brown MatFindZeroDiagonals_MPIAIJ, 329297304618SKris Buschelman /*80*/ 0, 329397304618SKris Buschelman 0, 329497304618SKris Buschelman 0, 32955bba2384SShri Abhyankar /*83*/ MatLoad_MPIAIJ, 32966284ec50SHong Zhang 0, 32976284ec50SHong Zhang 0, 32986284ec50SHong Zhang 0, 32996284ec50SHong Zhang 0, 3300865e5f61SKris Buschelman 0, 3301d519adbfSMatthew Knepley /*89*/ MatMatMult_MPIAIJ_MPIAIJ, 330226be0446SHong Zhang MatMatMultSymbolic_MPIAIJ_MPIAIJ, 330326be0446SHong Zhang MatMatMultNumeric_MPIAIJ_MPIAIJ, 3304cf3ca8ceSHong Zhang MatPtAP_MPIAIJ_MPIAIJ, 3305cf3ca8ceSHong Zhang MatPtAPSymbolic_MPIAIJ_MPIAIJ, 3306cf3ca8ceSHong Zhang /*94*/ MatPtAPNumeric_MPIAIJ_MPIAIJ, 33077a7894deSKris Buschelman 0, 33087a7894deSKris Buschelman 0, 33097a7894deSKris Buschelman 0, 33107a7894deSKris Buschelman 0, 3311d519adbfSMatthew Knepley /*99*/ 0, 3312d2b207f1SPeter Brune 0, 3313d2b207f1SPeter Brune 0, 33142fd7e33dSBarry Smith MatConjugate_MPIAIJ, 33152fd7e33dSBarry Smith 0, 3316d519adbfSMatthew Knepley /*104*/MatSetValuesRow_MPIAIJ, 331799cafbc1SBarry Smith MatRealPart_MPIAIJ, 331869db28dcSHong Zhang MatImaginaryPart_MPIAIJ, 331969db28dcSHong Zhang 0, 332069db28dcSHong Zhang 0, 3321d519adbfSMatthew Knepley /*109*/0, 332203bc72f1SMatthew Knepley MatGetRedundantMatrix_MPIAIJ, 33235494a064SHong Zhang MatGetRowMin_MPIAIJ, 33245494a064SHong Zhang 0, 
33255494a064SHong Zhang 0, 3326d1adec66SJed Brown /*114*/MatGetSeqNonzeroStructure_MPIAIJ, 3327bd0c2dcbSBarry Smith 0, 3328bd0c2dcbSBarry Smith 0, 3329bd0c2dcbSBarry Smith 0, 3330bd0c2dcbSBarry Smith 0, 33318fb81238SShri Abhyankar /*119*/0, 33328fb81238SShri Abhyankar 0, 33338fb81238SShri Abhyankar 0, 3334d6037b41SHong Zhang 0, 3335b9614d88SDmitry Karpeev MatGetMultiProcBlock_MPIAIJ, 3336f2c98031SJed Brown /*124*/MatFindNonzeroRows_MPIAIJ, 33370716a85fSBarry Smith MatGetColumnNorms_MPIAIJ, 3338bbead8a2SBarry Smith MatInvertBlockDiagonal_MPIAIJ, 3339b9614d88SDmitry Karpeev 0, 334037868618SMatthew G Knepley MatGetSubMatricesParallel_MPIAIJ, 3341187b3c17SHong Zhang /*129*/0, 3342187b3c17SHong Zhang MatTransposeMatMult_MPIAIJ_MPIAIJ, 3343187b3c17SHong Zhang MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ, 3344187b3c17SHong Zhang MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ, 3345187b3c17SHong Zhang 0, 3346187b3c17SHong Zhang /*134*/0, 3347187b3c17SHong Zhang 0, 3348187b3c17SHong Zhang 0, 3349187b3c17SHong Zhang 0, 33503964eb88SJed Brown 0, 33513964eb88SJed Brown /*139*/0, 3352187b3c17SHong Zhang 0 3353bd0c2dcbSBarry Smith }; 335436ce4990SBarry Smith 33552e8a6d31SBarry Smith /* ----------------------------------------------------------------------------------------*/ 33562e8a6d31SBarry Smith 33574a2ae208SSatish Balay #undef __FUNCT__ 33584a2ae208SSatish Balay #define __FUNCT__ "MatStoreValues_MPIAIJ" 33597087cfbeSBarry Smith PetscErrorCode MatStoreValues_MPIAIJ(Mat mat) 33602e8a6d31SBarry Smith { 33612e8a6d31SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 3362dfbe8321SBarry Smith PetscErrorCode ierr; 33632e8a6d31SBarry Smith 33642e8a6d31SBarry Smith PetscFunctionBegin; 33652e8a6d31SBarry Smith ierr = MatStoreValues(aij->A);CHKERRQ(ierr); 33662e8a6d31SBarry Smith ierr = MatStoreValues(aij->B);CHKERRQ(ierr); 33672e8a6d31SBarry Smith PetscFunctionReturn(0); 33682e8a6d31SBarry Smith } 33692e8a6d31SBarry Smith 33704a2ae208SSatish Balay #undef __FUNCT__ 33714a2ae208SSatish Balay 
#define __FUNCT__ "MatRetrieveValues_MPIAIJ"
/*
   MatRetrieveValues_MPIAIJ - restores the numerical values previously saved with
   MatStoreValues_MPIAIJ() into the diagonal (A) and off-diagonal (B) blocks.
*/
PetscErrorCode MatRetrieveValues_MPIAIJ(Mat mat)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
  ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAIJSetPreallocation_MPIAIJ"
/*
   MatMPIAIJSetPreallocation_MPIAIJ - MATMPIAIJ implementation of
   MatMPIAIJSetPreallocation(): (on first call) creates the two local MATSEQAIJ
   blocks and preallocates them with d_nz/d_nnz (diagonal block) and
   o_nz/o_nnz (off-diagonal block).
*/
PetscErrorCode MatMPIAIJSetPreallocation_MPIAIJ(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_MPIAIJ     *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
  b = (Mat_MPIAIJ*)B->data;

  if (!B->preallocated) {
    /* Explicitly create 2 MATSEQAIJ matrices: A holds the local (diagonal)
       columns, B holds every other column (its column space is the full
       global width N until it is compressed at assembly time). */
    ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
    ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
    ierr = MatSetBlockSizes(b->A,B->rmap->bs,B->cmap->bs);CHKERRQ(ierr);
    ierr = MatSetType(b->A,MATSEQAIJ);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(B,b->A);CHKERRQ(ierr);
    ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
    ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
    ierr = MatSetBlockSizes(b->B,B->rmap->bs,B->cmap->bs);CHKERRQ(ierr);
    ierr = MatSetType(b->B,MATSEQAIJ);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(B,b->B);CHKERRQ(ierr);
  }

  ierr = MatSeqAIJSetPreallocation(b->A,d_nz,d_nnz);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(b->B,o_nz,o_nnz);CHKERRQ(ierr);
  B->preallocated = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_MPIAIJ"
/*
   MatDuplicate_MPIAIJ - duplicates an MPIAIJ matrix: creates a new Mat of the
   same type/sizes, copies the ops table and the parallel bookkeeping
   (colmap, garray, lvec, Mvctx), and duplicates the A and B blocks
   (copying values or only the nonzero pattern according to cpvalues).
*/
PetscErrorCode MatDuplicate_MPIAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
{
  Mat            mat;
  Mat_MPIAIJ     *a,*oldmat = (Mat_MPIAIJ*)matin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  *newmat = 0;
  ierr    = MatCreate(PetscObjectComm((PetscObject)matin),&mat);CHKERRQ(ierr);
  ierr    = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
  ierr    = MatSetBlockSizes(mat,matin->rmap->bs,matin->cmap->bs);CHKERRQ(ierr);
  ierr    = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
  /* copy the whole function table so subtype overrides survive duplication */
  ierr    = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  a       = (Mat_MPIAIJ*)mat->data;

  mat->factortype   = matin->factortype;
  mat->rmap->bs     = matin->rmap->bs;
  mat->cmap->bs     = matin->cmap->bs;
  mat->assembled    = PETSC_TRUE;           /* the duplicate is born assembled */
  mat->insertmode   = NOT_SET_VALUES;
  mat->preallocated = PETSC_TRUE;

  a->size         = oldmat->size;
  a->rank         = oldmat->rank;
  a->donotstash   = oldmat->donotstash;
  a->roworiented  = oldmat->roworiented;
  /* per-matrix MatGetRow() work buffers are not shared; start fresh */
  a->rowindices   = 0;
  a->rowvalues    = 0;
  a->getrowactive = PETSC_FALSE;

  ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr);

  /* colmap: global column -> local column of B; stored either as a hash
     table (PETSC_USE_CTABLE) or as a dense length-N array */
  if (oldmat->colmap) {
#if defined(PETSC_USE_CTABLE)
    ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
#else
    ierr = PetscMalloc((mat->cmap->N)*sizeof(PetscInt),&a->colmap);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(mat,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(a->colmap,oldmat->colmap,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr);
#endif
  } else a->colmap = 0;
  /* garray: local column of B -> global column */
  if (oldmat->garray) {
    PetscInt len;
    len  = oldmat->B->cmap->n;
    ierr = PetscMalloc((len+1)*sizeof(PetscInt),&a->garray);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(mat,len*sizeof(PetscInt));CHKERRQ(ierr);
    if (len) { ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr); }
  } else a->garray = 0;

  /* duplicate the off-process gather vector and its scatter context */
  ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->lvec);CHKERRQ(ierr);
  ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->Mvctx);CHKERRQ(ierr);
  ierr = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->A);CHKERRQ(ierr);
  ierr = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->B);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
  *newmat = mat;
  PetscFunctionReturn(0);
}



#undef __FUNCT__
#define __FUNCT__ "MatLoad_MPIAIJ"
/*
   MatLoad_MPIAIJ - loads a matrix in PETSc binary format into an MPIAIJ matrix.
   Rank 0 reads the file and ships each process its rows; see the body below.
*/
PetscErrorCode MatLoad_MPIAIJ(Mat newMat, PetscViewer viewer)
{
  PetscScalar    *vals,*svals;
  MPI_Comm       comm;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
  PetscInt       i,nz,j,rstart,rend,mmax,maxnz = 0,grows,gcols;
  PetscInt       header[4],*rowlengths = 0,M,N,m,*cols;
  PetscInt       *ourlens = NULL,*procsnz = NULL,*offlens = NULL,jj,*mycols,*smycols;
  PetscInt       cend,cstart,n,*rowners,sizesset=1;
  int            fd;
  PetscInt       bs = 1;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* only rank 0 touches the file; header = {classid, M, N, total nz} */
  if (!rank) {
    ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
    ierr = PetscBinaryRead(fd,(char*)header,4,PETSC_INT);CHKERRQ(ierr);
    if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
  }

  ierr = PetscOptionsBegin(comm,NULL,"Options for loading SEQAIJ matrix","Mat");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);

  /* sizesset records whether the caller fixed any local/global sizes before MatLoad() */
  if (newMat->rmap->n < 0 && newMat->rmap->N < 0 && newMat->cmap->n < 0 && newMat->cmap->N < 0) sizesset = 0;

  ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
  M    = header[1]; N = header[2];
  /* If global rows/cols are set to PETSC_DECIDE, set it to the sizes given in the file */
  if (sizesset && newMat->rmap->N < 0) newMat->rmap->N = M;
  if (sizesset && newMat->cmap->N < 0) newMat->cmap->N = N;

  /* If global sizes are set, check if they are consistent with that given in the file */
  if (sizesset) {
    ierr = MatGetSize(newMat,&grows,&gcols);CHKERRQ(ierr);
  }
  if (sizesset && newMat->rmap->N != grows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of rows:Matrix in file has (%d) and input matrix has (%d)",M,grows);
  if (sizesset && newMat->cmap->N != gcols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of cols:Matrix in file has (%d) and input matrix has (%d)",N,gcols);

  /* determine ownership of all (block) rows */
  if (M%bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of rows (%d) and block size (%d)",M,bs);
  if (newMat->rmap->n < 0) m = bs*((M/bs)/size + (((M/bs) % size) > rank)); /* PETSC_DECIDE */
  else m = newMat->rmap->n; /* Set by user */

  /* rowners[] becomes the prefix-sum of the local row counts: row range of
     process p is [rowners[p], rowners[p+1]) */
  ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr);
  ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);

  /* First process needs enough room for process with most rows */
  if (!rank) {
    mmax = rowners[1];
    for (i=2; i<=size; i++) {
      mmax = PetscMax(mmax, rowners[i]);
    }
  } else mmax = -1; /* unused, but compilers complain */

  rowners[0] = 0;
  for (i=2; i<=size; i++) {
    rowners[i] += rowners[i-1];
  }
  rstart = rowners[rank];
  rend   = rowners[rank+1];

  /* distribute row lengths to all processors */
  ierr = PetscMalloc2(m,PetscInt,&ourlens,m,PetscInt,&offlens);CHKERRQ(ierr);
  if (!rank) {
    ierr = PetscBinaryRead(fd,ourlens,m,PETSC_INT);CHKERRQ(ierr);
    ierr = PetscMalloc(mmax*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr);
    ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr);
    ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr);
    for (j=0; j<m; j++) {
      procsnz[0] += ourlens[j];
    }
    for (i=1; i<size; i++) {
      ierr = PetscBinaryRead(fd,rowlengths,rowners[i+1]-rowners[i],PETSC_INT);CHKERRQ(ierr);
      /* calculate the number of nonzeros on each processor */
      for (j=0; j<rowners[i+1]-rowners[i]; j++) {
        procsnz[i] += rowlengths[j];
      }
      ierr = MPIULong_Send(rowlengths,rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
    }
    ierr = PetscFree(rowlengths);CHKERRQ(ierr);
  } else {
    /* matches the MPIULong_Send() issued by rank 0 above */
    ierr = MPIULong_Recv(ourlens,m,MPIU_INT,0,tag,comm);CHKERRQ(ierr);
  }

  if (!rank) {
    /* determine max buffer needed and allocate it */
    maxnz = 0;
    for (i=0; i<size; i++) {
      maxnz = PetscMax(maxnz,procsnz[i]);
    }
    ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr);

    /* read in my part of the matrix column indices */
    nz   = procsnz[0];
    ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr);
    ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);

    /* read in every one elses and ship off */
    for (i=1; i<size; i++) {
      nz   = procsnz[i];
      ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
      ierr = MPIULong_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
    }
    ierr = PetscFree(cols);CHKERRQ(ierr);
  } else {
    /* determine buffer space needed for message */
    nz = 0;
    for (i=0; i<m; i++) {
      nz += ourlens[i];
    }
    ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr);

    /* receive message of column indices*/
    ierr = MPIULong_Recv(mycols,nz,MPIU_INT,0,tag,comm);CHKERRQ(ierr);
  }

  /* determine column ownership if matrix is not square */
  if (N != M) {
    if (newMat->cmap->n < 0) n = N/size + ((N % size) > rank);
    else n = newMat->cmap->n;
    ierr   = MPI_Scan(&n,&cend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    cstart = cend - n;
  } else {
    cstart = rstart;
    cend   = rend;
    n      = cend - cstart;
  }

  /* loop over local rows, determining number of off diagonal entries */
  ierr = PetscMemzero(offlens,m*sizeof(PetscInt));CHKERRQ(ierr);
  jj   = 0;
  for (i=0; i<m; i++) {
    for (j=0; j<ourlens[i]; j++) {
      if (mycols[jj] < cstart || mycols[jj] >= cend) offlens[i]++;
      jj++;
    }
  }

  /* temporarily shrink ourlens to diagonal-block counts for preallocation ... */
  for (i=0; i<m; i++) {
    ourlens[i] -= offlens[i];
  }
  if (!sizesset) {
    ierr = MatSetSizes(newMat,m,n,M,N);CHKERRQ(ierr);
  }

  if (bs > 1) {ierr = MatSetBlockSize(newMat,bs);CHKERRQ(ierr);}

  ierr = MatMPIAIJSetPreallocation(newMat,0,ourlens,0,offlens);CHKERRQ(ierr);

  /* ... then restore the full per-row lengths for the insertion loops below */
  for (i=0; i<m; i++) {
    ourlens[i] += offlens[i];
  }

  if (!rank) {
    ierr = PetscMalloc((maxnz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr);

    /* read in my part of the matrix numerical values */
    nz   = procsnz[0];
    ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);

    /* insert into matrix */
    jj      = rstart;
    smycols = mycols;
    svals   = vals;
    for (i=0; i<m; i++) {
      ierr     = MatSetValues_MPIAIJ(newMat,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr);
      smycols += ourlens[i];
      svals   += ourlens[i];
      jj++;
    }

    /* read in other processors and ship out */
    for (i=1; i<size; i++) {
      nz   = procsnz[i];
      ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
      ierr = MPIULong_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newMat)->tag,comm);CHKERRQ(ierr);
    }
    ierr = PetscFree(procsnz);CHKERRQ(ierr);
  } else {
    /* receive numeric values */
    ierr = PetscMalloc((nz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr);

    /* receive message of values*/
    ierr = MPIULong_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newMat)->tag,comm);CHKERRQ(ierr);

    /* insert into matrix */
    jj      = rstart;
    smycols = mycols;
    svals   = vals;
    for (i=0; i<m; i++) {
      ierr     = MatSetValues_MPIAIJ(newMat,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr);
      smycols += ourlens[i];
      svals   += ourlens[i];
      jj++;
    }
  }
  ierr = PetscFree2(ourlens,offlens);CHKERRQ(ierr);
  ierr = PetscFree(vals);CHKERRQ(ierr);
  ierr = PetscFree(mycols);CHKERRQ(ierr);
  ierr = PetscFree(rowners);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(newMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(newMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_MPIAIJ"
/*
   MatGetSubMatrix_MPIAIJ - extracts a parallel submatrix; gathers the column IS
   onto every process (caching it on the new matrix for MAT_REUSE_MATRIX) and
   defers the work to MatGetSubMatrix_MPIAIJ_Private().
*/
PetscErrorCode MatGetSubMatrix_MPIAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat)
{
  PetscErrorCode ierr;
  IS             iscol_local;
  PetscInt       csize;

  PetscFunctionBegin;
  ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr);
  if (call ==
MAT_REUSE_MATRIX) {
    /* the gathered column IS was composed on the matrix by the MAT_INITIAL_MATRIX call */
    ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr);
    if (!iscol_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
  } else {
    PetscInt cbs;
    ierr = ISGetBlockSize(iscol,&cbs);CHKERRQ(ierr);
    ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr);
    ierr = ISSetBlockSize(iscol_local,cbs);CHKERRQ(ierr);
  }
  ierr = MatGetSubMatrix_MPIAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr);
  if (call == MAT_INITIAL_MATRIX) {
    /* stash the gathered IS on the new matrix so a later MAT_REUSE_MATRIX call can find it */
    ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr);
    ierr = ISDestroy(&iscol_local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

extern PetscErrorCode MatGetSubMatrices_MPIAIJ_Local(Mat,PetscInt,const IS[],const IS[],MatReuse,PetscBool*,Mat*);
#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_MPIAIJ_Private"
/*
    Not great since it makes two copies of the submatrix, first an SeqAIJ
  in local and then by concatenating the local matrices the end result.
  Writing it directly would be much like MatGetSubMatrices_MPIAIJ()

  Note: This requires a sequential iscol with all indices.
*/
PetscErrorCode MatGetSubMatrix_MPIAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j,bs,cbs;
  PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal,ncol;
  PetscBool      allcolumns, colflag;
  Mat            M,Mreuse;
  MatScalar      *vwork,*aa;
  MPI_Comm       comm;
  Mat_SeqAIJ     *aij;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* fast path flag: iscol selects every column in order */
  ierr = ISIdentity(iscol,&colflag);CHKERRQ(ierr);
  ierr = ISGetLocalSize(iscol,&ncol);CHKERRQ(ierr);
  if (colflag && ncol == mat->cmap->N) {
    allcolumns = PETSC_TRUE;
  } else {
    allcolumns = PETSC_FALSE;
  }
  if (call == MAT_REUSE_MATRIX) {
    ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject*)&Mreuse);CHKERRQ(ierr);
    if (!Mreuse) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
    ierr = MatGetSubMatrices_MPIAIJ_Local(mat,1,&isrow,&iscol,MAT_REUSE_MATRIX,&allcolumns,&Mreuse);CHKERRQ(ierr);
  } else {
    ierr = MatGetSubMatrices_MPIAIJ_Local(mat,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&allcolumns,&Mreuse);CHKERRQ(ierr);
  }

  /*
      m - number of local rows
      n - number of columns (same on all processors)
      rstart - first row in new global matrix generated
  */
  ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
  ierr = MatGetBlockSizes(Mreuse,&bs,&cbs);CHKERRQ(ierr);
  if (call == MAT_INITIAL_MATRIX) {
    aij = (Mat_SeqAIJ*)(Mreuse)->data;
    ii  = aij->i;
    jj  = aij->j;

    /*
        Determine the number of non-zeros in the diagonal and off-diagonal
        portions of the matrix in order to do correct preallocation
    */

    /* first get start and end of "diagonal" columns */
    if (csize == PETSC_DECIDE) {
      ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
      if (mglobal == n) { /* square matrix */
        nlocal = m;
      } else {
        nlocal = n/size + ((n % size) > rank);
      }
    } else {
      nlocal = csize;
    }
    ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    rstart = rend - nlocal;
    if (rank == size - 1 && rend != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);

    /* next, compute all the lengths */
    ierr  = PetscMalloc((2*m+1)*sizeof(PetscInt),&dlens);CHKERRQ(ierr);
    olens = dlens + m;       /* single allocation split into two arrays */
    for (i=0; i<m; i++) {
      jend = ii[i+1] - ii[i];
      olen = 0;
      dlen = 0;
      for (j=0; j<jend; j++) {
        if (*jj < rstart || *jj >= rend) olen++;
        else dlen++;
        jj++;
      }
      olens[i] = olen;
      dlens[i] = dlen;
    }
    ierr = MatCreate(comm,&M);CHKERRQ(ierr);
    ierr = MatSetSizes(M,m,nlocal,PETSC_DECIDE,n);CHKERRQ(ierr);
    ierr = MatSetBlockSizes(M,bs,cbs);CHKERRQ(ierr);
    ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(M,0,dlens,0,olens);CHKERRQ(ierr);
    ierr = PetscFree(dlens);CHKERRQ(ierr);
  } else {
    PetscInt ml,nl;

    M    = *newmat;
    ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
    if (ml != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
    ierr = MatZeroEntries(M);CHKERRQ(ierr);
    /*
       The next two lines are needed so we may call MatSetValues_MPIAIJ() below directly,
       rather than the slower MatSetValues().
3825c48de900SBarry Smith */ 3826c48de900SBarry Smith M->was_assembled = PETSC_TRUE; 3827c48de900SBarry Smith M->assembled = PETSC_FALSE; 3828a0ff6018SBarry Smith } 3829a0ff6018SBarry Smith ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr); 3830fee21e36SBarry Smith aij = (Mat_SeqAIJ*)(Mreuse)->data; 383100e6dbe6SBarry Smith ii = aij->i; 383200e6dbe6SBarry Smith jj = aij->j; 383300e6dbe6SBarry Smith aa = aij->a; 3834a0ff6018SBarry Smith for (i=0; i<m; i++) { 3835a0ff6018SBarry Smith row = rstart + i; 383600e6dbe6SBarry Smith nz = ii[i+1] - ii[i]; 383700e6dbe6SBarry Smith cwork = jj; jj += nz; 383800e6dbe6SBarry Smith vwork = aa; aa += nz; 38398c638d02SBarry Smith ierr = MatSetValues_MPIAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr); 3840a0ff6018SBarry Smith } 3841a0ff6018SBarry Smith 3842a0ff6018SBarry Smith ierr = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3843a0ff6018SBarry Smith ierr = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3844a0ff6018SBarry Smith *newmat = M; 3845fee21e36SBarry Smith 3846fee21e36SBarry Smith /* save submatrix used in processor for next request */ 3847fee21e36SBarry Smith if (call == MAT_INITIAL_MATRIX) { 3848fee21e36SBarry Smith ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr); 3849bf0cc555SLisandro Dalcin ierr = MatDestroy(&Mreuse);CHKERRQ(ierr); 3850fee21e36SBarry Smith } 3851a0ff6018SBarry Smith PetscFunctionReturn(0); 3852a0ff6018SBarry Smith } 3853273d9f13SBarry Smith 38544a2ae208SSatish Balay #undef __FUNCT__ 3855ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR_MPIAIJ" 38567087cfbeSBarry Smith PetscErrorCode MatMPIAIJSetPreallocationCSR_MPIAIJ(Mat B,const PetscInt Ii[],const PetscInt J[],const PetscScalar v[]) 3857ccd8e176SBarry Smith { 3858899cda47SBarry Smith PetscInt m,cstart, cend,j,nnz,i,d; 3859899cda47SBarry Smith PetscInt *d_nnz,*o_nnz,nnz_max = 0,rstart,ii; 3860ccd8e176SBarry Smith const PetscInt *JJ; 3861ccd8e176SBarry Smith 
PetscScalar *values; 3862ccd8e176SBarry Smith PetscErrorCode ierr; 3863ccd8e176SBarry Smith 3864ccd8e176SBarry Smith PetscFunctionBegin; 3865e32f2f54SBarry Smith if (Ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ii[0] must be 0 it is %D",Ii[0]); 3866899cda47SBarry Smith 386726283091SBarry Smith ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr); 386826283091SBarry Smith ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr); 3869d0f46423SBarry Smith m = B->rmap->n; 3870d0f46423SBarry Smith cstart = B->cmap->rstart; 3871d0f46423SBarry Smith cend = B->cmap->rend; 3872d0f46423SBarry Smith rstart = B->rmap->rstart; 3873899cda47SBarry Smith 38741d79065fSBarry Smith ierr = PetscMalloc2(m,PetscInt,&d_nnz,m,PetscInt,&o_nnz);CHKERRQ(ierr); 3875ccd8e176SBarry Smith 3876ecc77c7aSBarry Smith #if defined(PETSC_USE_DEBUGGING) 3877ecc77c7aSBarry Smith for (i=0; i<m; i++) { 3878ecc77c7aSBarry Smith nnz = Ii[i+1]- Ii[i]; 3879ecc77c7aSBarry Smith JJ = J + Ii[i]; 3880e32f2f54SBarry Smith if (nnz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative %D number of columns",i,nnz); 3881ecc77c7aSBarry Smith if (nnz && (JJ[0] < 0)) SETERRRQ1(PETSC_ERR_ARG_WRONGSTATE,"Row %D starts with negative column index",i,j); 3882d0f46423SBarry Smith if (nnz && (JJ[nnz-1] >= B->cmap->N) SETERRRQ3(PETSC_ERR_ARG_WRONGSTATE,"Row %D ends with too large a column index %D (max allowed %D)",i,JJ[nnz-1],B->cmap->N); 3883ecc77c7aSBarry Smith } 3884ecc77c7aSBarry Smith #endif 3885ecc77c7aSBarry Smith 3886ccd8e176SBarry Smith for (i=0; i<m; i++) { 3887b7940d39SSatish Balay nnz = Ii[i+1]- Ii[i]; 3888b7940d39SSatish Balay JJ = J + Ii[i]; 3889ccd8e176SBarry Smith nnz_max = PetscMax(nnz_max,nnz); 3890ccd8e176SBarry Smith d = 0; 38910daa03b5SJed Brown for (j=0; j<nnz; j++) { 38920daa03b5SJed Brown if (cstart <= JJ[j] && JJ[j] < cend) d++; 3893ccd8e176SBarry Smith } 3894ccd8e176SBarry Smith d_nnz[i] = d; 3895ccd8e176SBarry Smith o_nnz[i] = nnz - d; 3896ccd8e176SBarry Smith } 
  ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
  ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);

  /* if no values were supplied, insert explicit zeros so the nonzero pattern is set */
  if (v) values = (PetscScalar*)v;
  else {
    ierr = PetscMalloc((nnz_max+1)*sizeof(PetscScalar),&values);CHKERRQ(ierr);
    ierr = PetscMemzero(values,nnz_max*sizeof(PetscScalar));CHKERRQ(ierr);
  }

  for (i=0; i<m; i++) {
    ii   = i + rstart;                   /* global row number */
    nnz  = Ii[i+1]- Ii[i];
    ierr = MatSetValues_MPIAIJ(B,1,&ii,nnz,J+Ii[i],values+(v ? Ii[i] : 0),INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  if (!v) {
    ierr = PetscFree(values);CHKERRQ(ierr);
  }
  /* the pattern is now fixed; error on any insertion outside it */
  ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAIJSetPreallocationCSR"
/*@
   MatMPIAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in AIJ format
   (the default parallel PETSc format).

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each local row (starts with zero)
.  j - the column indices for each local row (starts with zero)
-  v - optional values in the matrix

   Level: developer

   Notes:
       The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
     thus you CANNOT change the matrix entries by changing the values of a[] after you have
     called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.

       The i and j indices are 0 based, and i indices are indices corresponding to the local j array.

       The format which is used for the sparse matrix input, is equivalent to a
    row-major ordering, i.e. for the following matrix, the input data expected is
    as shown (sizes are per process; each process passes only its own rows):

        1 0 0
        2 0 3     P0
       -------
        4 5 6     P1

     Process0 [P0]: rows_owned=[0,1]
        i =  {0,1,3}  [size = nrow+1 = 2+1]
        j =  {0,0,2}  [size = local nz = 3]
        v =  {1,2,3}  [size = local nz = 3]

     Process1 [P1]: rows_owned=[2]
        i =  {0,3}    [size = nrow+1 = 1+1]
        j =  {0,1,2}  [size = local nz = 3]
        v =  {4,5,6}  [size = local nz = 3]

.keywords: matrix, aij, compressed row, sparse, parallel

.seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatCreateAIJ(), MPIAIJ,
          MatCreateSeqAIJWithArrays(), MatCreateMPIAIJWithSplitArrays()
@*/
PetscErrorCode MatMPIAIJSetPreallocationCSR(Mat B,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* dispatch to the type-specific implementation (MatMPIAIJSetPreallocationCSR_MPIAIJ) if registered */
  ierr = PetscTryMethod(B,"MatMPIAIJSetPreallocationCSR_C",(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,i,j,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAIJSetPreallocation"
/*@C
   MatMPIAIJSetPreallocation - Preallocates memory for a sparse parallel matrix in AIJ format
   (the default parallel PETSc format).  For good matrix assembly performance
   the user should preallocate the matrix storage by setting the parameters
   d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
   performance can be increased by more than a factor of 50.

   Collective on MPI_Comm

   Input Parameters:
+  A - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
39963287b5eaSJed Brown For matrices that will be factored, you must leave room for (and set) 39973287b5eaSJed Brown the diagonal entry even if it is zero. 3998273d9f13SBarry Smith . o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local 3999273d9f13SBarry Smith submatrix (same value is used for all local rows). 4000273d9f13SBarry Smith - o_nnz - array containing the number of nonzeros in the various rows of the 4001273d9f13SBarry Smith OFF-DIAGONAL portion of the local submatrix (possibly different for 40020298fd71SBarry Smith each row) or NULL, if o_nz is used to specify the nonzero 4003273d9f13SBarry Smith structure. The size of this array is equal to the number 4004273d9f13SBarry Smith of local rows, i.e 'm'. 4005273d9f13SBarry Smith 400649a6f317SBarry Smith If the *_nnz parameter is given then the *_nz parameter is ignored 400749a6f317SBarry Smith 4008273d9f13SBarry Smith The AIJ format (also called the Yale sparse matrix format or 4009ccd8e176SBarry Smith compressed row storage (CSR)), is fully compatible with standard Fortran 77 40100598bfebSBarry Smith storage. The stored row and column indices begin with zero. 40110598bfebSBarry Smith See the <A href="../../docs/manual.pdf#nameddest=ch_mat">Mat chapter of the users manual</A> for details. 4012273d9f13SBarry Smith 4013273d9f13SBarry Smith The parallel matrix is partitioned such that the first m0 rows belong to 4014273d9f13SBarry Smith process 0, the next m1 rows belong to process 1, the next m2 rows belong 4015273d9f13SBarry Smith to process 2 etc.. where m0,m1,m2... are the input parameter 'm'. 
4016273d9f13SBarry Smith 4017273d9f13SBarry Smith The DIAGONAL portion of the local submatrix of a processor can be defined 4018a05b864aSJed Brown as the submatrix which is obtained by extraction the part corresponding to 4019a05b864aSJed Brown the rows r1-r2 and columns c1-c2 of the global matrix, where r1 is the 4020a05b864aSJed Brown first row that belongs to the processor, r2 is the last row belonging to 4021a05b864aSJed Brown the this processor, and c1-c2 is range of indices of the local part of a 4022a05b864aSJed Brown vector suitable for applying the matrix to. This is an mxn matrix. In the 4023a05b864aSJed Brown common case of a square matrix, the row and column ranges are the same and 4024a05b864aSJed Brown the DIAGONAL part is also square. The remaining portion of the local 4025a05b864aSJed Brown submatrix (mxN) constitute the OFF-DIAGONAL portion. 4026273d9f13SBarry Smith 4027273d9f13SBarry Smith If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored. 4028273d9f13SBarry Smith 4029aa95bbe8SBarry Smith You can call MatGetInfo() to get information on how effective the preallocation was; 4030aa95bbe8SBarry Smith for example the fields mallocs,nz_allocated,nz_used,nz_unneeded; 4031aa95bbe8SBarry Smith You can also run with the option -info and look for messages with the string 4032aa95bbe8SBarry Smith malloc in them to see if additional memory allocation was needed. 4033aa95bbe8SBarry Smith 4034273d9f13SBarry Smith Example usage: 4035273d9f13SBarry Smith 4036273d9f13SBarry Smith Consider the following 8x8 matrix with 34 non-zero values, that is 4037273d9f13SBarry Smith assembled across 3 processors. Lets assume that proc0 owns 3 rows, 4038273d9f13SBarry Smith proc1 owns 3 rows, proc2 owns 2 rows. 
This division can be shown 4039273d9f13SBarry Smith as follows: 4040273d9f13SBarry Smith 4041273d9f13SBarry Smith .vb 4042273d9f13SBarry Smith 1 2 0 | 0 3 0 | 0 4 4043273d9f13SBarry Smith Proc0 0 5 6 | 7 0 0 | 8 0 4044273d9f13SBarry Smith 9 0 10 | 11 0 0 | 12 0 4045273d9f13SBarry Smith ------------------------------------- 4046273d9f13SBarry Smith 13 0 14 | 15 16 17 | 0 0 4047273d9f13SBarry Smith Proc1 0 18 0 | 19 20 21 | 0 0 4048273d9f13SBarry Smith 0 0 0 | 22 23 0 | 24 0 4049273d9f13SBarry Smith ------------------------------------- 4050273d9f13SBarry Smith Proc2 25 26 27 | 0 0 28 | 29 0 4051273d9f13SBarry Smith 30 0 0 | 31 32 33 | 0 34 4052273d9f13SBarry Smith .ve 4053273d9f13SBarry Smith 4054273d9f13SBarry Smith This can be represented as a collection of submatrices as: 4055273d9f13SBarry Smith 4056273d9f13SBarry Smith .vb 4057273d9f13SBarry Smith A B C 4058273d9f13SBarry Smith D E F 4059273d9f13SBarry Smith G H I 4060273d9f13SBarry Smith .ve 4061273d9f13SBarry Smith 4062273d9f13SBarry Smith Where the submatrices A,B,C are owned by proc0, D,E,F are 4063273d9f13SBarry Smith owned by proc1, G,H,I are owned by proc2. 4064273d9f13SBarry Smith 4065273d9f13SBarry Smith The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 4066273d9f13SBarry Smith The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 4067273d9f13SBarry Smith The 'M','N' parameters are 8,8, and have the same values on all procs. 4068273d9f13SBarry Smith 4069273d9f13SBarry Smith The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are 4070273d9f13SBarry Smith submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices 4071273d9f13SBarry Smith corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively. 4072273d9f13SBarry Smith Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL 4073273d9f13SBarry Smith part as SeqAIJ matrices. for eg: proc1 will store [E] as a SeqAIJ 4074273d9f13SBarry Smith matrix, ans [DF] as another SeqAIJ matrix. 
4075273d9f13SBarry Smith 4076273d9f13SBarry Smith When d_nz, o_nz parameters are specified, d_nz storage elements are 4077273d9f13SBarry Smith allocated for every row of the local diagonal submatrix, and o_nz 4078273d9f13SBarry Smith storage locations are allocated for every row of the OFF-DIAGONAL submat. 4079273d9f13SBarry Smith One way to choose d_nz and o_nz is to use the max nonzerors per local 4080273d9f13SBarry Smith rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices. 4081273d9f13SBarry Smith In this case, the values of d_nz,o_nz are: 4082273d9f13SBarry Smith .vb 4083273d9f13SBarry Smith proc0 : dnz = 2, o_nz = 2 4084273d9f13SBarry Smith proc1 : dnz = 3, o_nz = 2 4085273d9f13SBarry Smith proc2 : dnz = 1, o_nz = 4 4086273d9f13SBarry Smith .ve 4087273d9f13SBarry Smith We are allocating m*(d_nz+o_nz) storage locations for every proc. This 4088273d9f13SBarry Smith translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10 4089273d9f13SBarry Smith for proc3. i.e we are using 12+15+10=37 storage locations to store 4090273d9f13SBarry Smith 34 values. 4091273d9f13SBarry Smith 4092273d9f13SBarry Smith When d_nnz, o_nnz parameters are specified, the storage is specified 4093273d9f13SBarry Smith for every row, coresponding to both DIAGONAL and OFF-DIAGONAL submatrices. 4094273d9f13SBarry Smith In the above case the values for d_nnz,o_nnz are: 4095273d9f13SBarry Smith .vb 4096273d9f13SBarry Smith proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2] 4097273d9f13SBarry Smith proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1] 4098273d9f13SBarry Smith proc2: d_nnz = [1,1] and o_nnz = [4,4] 4099273d9f13SBarry Smith .ve 4100273d9f13SBarry Smith Here the space allocated is sum of all the above values i.e 34, and 4101273d9f13SBarry Smith hence pre-allocation is perfect. 
4102273d9f13SBarry Smith 4103273d9f13SBarry Smith Level: intermediate 4104273d9f13SBarry Smith 4105273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 4106273d9f13SBarry Smith 410769b1f4b7SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatCreateAIJ(), MatMPIAIJSetPreallocationCSR(), 4108ab978733SBarry Smith MPIAIJ, MatGetInfo(), PetscSplitOwnership() 4109273d9f13SBarry Smith @*/ 41107087cfbeSBarry Smith PetscErrorCode MatMPIAIJSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 4111273d9f13SBarry Smith { 41124ac538c5SBarry Smith PetscErrorCode ierr; 4113273d9f13SBarry Smith 4114273d9f13SBarry Smith PetscFunctionBegin; 41156ba663aaSJed Brown PetscValidHeaderSpecific(B,MAT_CLASSID,1); 41166ba663aaSJed Brown PetscValidType(B,1); 41174ac538c5SBarry Smith ierr = PetscTryMethod(B,"MatMPIAIJSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr); 4118273d9f13SBarry Smith PetscFunctionReturn(0); 4119273d9f13SBarry Smith } 4120273d9f13SBarry Smith 41214a2ae208SSatish Balay #undef __FUNCT__ 41222fb0ec9aSBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithArrays" 412358d36128SBarry Smith /*@ 41242fb0ec9aSBarry Smith MatCreateMPIAIJWithArrays - creates a MPI AIJ matrix using arrays that contain in standard 41252fb0ec9aSBarry Smith CSR format the local rows. 41262fb0ec9aSBarry Smith 41272fb0ec9aSBarry Smith Collective on MPI_Comm 41282fb0ec9aSBarry Smith 41292fb0ec9aSBarry Smith Input Parameters: 41302fb0ec9aSBarry Smith + comm - MPI communicator 41312fb0ec9aSBarry Smith . m - number of local rows (Cannot be PETSC_DECIDE) 41322fb0ec9aSBarry Smith . n - This value should be the same as the local size used in creating the 41332fb0ec9aSBarry Smith x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have 41342fb0ec9aSBarry Smith calculated if N is given) For square matrices n is almost always m. 
41352fb0ec9aSBarry Smith . M - number of global rows (or PETSC_DETERMINE to have calculated if m is given) 41362fb0ec9aSBarry Smith . N - number of global columns (or PETSC_DETERMINE to have calculated if n is given) 41372fb0ec9aSBarry Smith . i - row indices 41382fb0ec9aSBarry Smith . j - column indices 41392fb0ec9aSBarry Smith - a - matrix values 41402fb0ec9aSBarry Smith 41412fb0ec9aSBarry Smith Output Parameter: 41422fb0ec9aSBarry Smith . mat - the matrix 414303bfb495SBarry Smith 41442fb0ec9aSBarry Smith Level: intermediate 41452fb0ec9aSBarry Smith 41462fb0ec9aSBarry Smith Notes: 41472fb0ec9aSBarry Smith The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc; 41482fb0ec9aSBarry Smith thus you CANNOT change the matrix entries by changing the values of a[] after you have 41498d7a6e47SBarry Smith called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays. 41502fb0ec9aSBarry Smith 415112251496SSatish Balay The i and j indices are 0 based, and i indices are indices corresponding to the local j array. 415212251496SSatish Balay 415312251496SSatish Balay The format which is used for the sparse matrix input, is equivalent to a 415412251496SSatish Balay row-major ordering.. 
i.e for the following matrix, the input data expected is 415512251496SSatish Balay as shown: 415612251496SSatish Balay 415712251496SSatish Balay 1 0 0 415812251496SSatish Balay 2 0 3 P0 415912251496SSatish Balay ------- 416012251496SSatish Balay 4 5 6 P1 416112251496SSatish Balay 416212251496SSatish Balay Process0 [P0]: rows_owned=[0,1] 416312251496SSatish Balay i = {0,1,3} [size = nrow+1 = 2+1] 416412251496SSatish Balay j = {0,0,2} [size = nz = 6] 416512251496SSatish Balay v = {1,2,3} [size = nz = 6] 416612251496SSatish Balay 416712251496SSatish Balay Process1 [P1]: rows_owned=[2] 416812251496SSatish Balay i = {0,3} [size = nrow+1 = 1+1] 416912251496SSatish Balay j = {0,1,2} [size = nz = 6] 417012251496SSatish Balay v = {4,5,6} [size = nz = 6] 41712fb0ec9aSBarry Smith 41722fb0ec9aSBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 41732fb0ec9aSBarry Smith 41742fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 417569b1f4b7SBarry Smith MPIAIJ, MatCreateAIJ(), MatCreateMPIAIJWithSplitArrays() 41762fb0ec9aSBarry Smith @*/ 41777087cfbeSBarry Smith PetscErrorCode MatCreateMPIAIJWithArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat) 41782fb0ec9aSBarry Smith { 41792fb0ec9aSBarry Smith PetscErrorCode ierr; 41802fb0ec9aSBarry Smith 41812fb0ec9aSBarry Smith PetscFunctionBegin; 418269b1f4b7SBarry Smith if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0"); 4183e32f2f54SBarry Smith if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative"); 41842fb0ec9aSBarry Smith ierr = MatCreate(comm,mat);CHKERRQ(ierr); 4185d4146a68SBarry Smith ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr); 4186a2f3521dSMark F. 
Adams /* ierr = MatSetBlockSizes(M,bs,cbs);CHKERRQ(ierr); */ 41872fb0ec9aSBarry Smith ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr); 41882fb0ec9aSBarry Smith ierr = MatMPIAIJSetPreallocationCSR(*mat,i,j,a);CHKERRQ(ierr); 41892fb0ec9aSBarry Smith PetscFunctionReturn(0); 41902fb0ec9aSBarry Smith } 41912fb0ec9aSBarry Smith 41922fb0ec9aSBarry Smith #undef __FUNCT__ 419369b1f4b7SBarry Smith #define __FUNCT__ "MatCreateAIJ" 4194273d9f13SBarry Smith /*@C 419569b1f4b7SBarry Smith MatCreateAIJ - Creates a sparse parallel matrix in AIJ format 4196273d9f13SBarry Smith (the default parallel PETSc format). For good matrix assembly performance 4197273d9f13SBarry Smith the user should preallocate the matrix storage by setting the parameters 4198273d9f13SBarry Smith d_nz (or d_nnz) and o_nz (or o_nnz). By setting these parameters accurately, 4199273d9f13SBarry Smith performance can be increased by more than a factor of 50. 4200273d9f13SBarry Smith 4201273d9f13SBarry Smith Collective on MPI_Comm 4202273d9f13SBarry Smith 4203273d9f13SBarry Smith Input Parameters: 4204273d9f13SBarry Smith + comm - MPI communicator 4205273d9f13SBarry Smith . m - number of local rows (or PETSC_DECIDE to have calculated if M is given) 4206273d9f13SBarry Smith This value should be the same as the local size used in creating the 4207273d9f13SBarry Smith y vector for the matrix-vector product y = Ax. 4208273d9f13SBarry Smith . n - This value should be the same as the local size used in creating the 4209273d9f13SBarry Smith x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have 4210273d9f13SBarry Smith calculated if N is given) For square matrices n is almost always m. 4211273d9f13SBarry Smith . M - number of global rows (or PETSC_DETERMINE to have calculated if m is given) 4212273d9f13SBarry Smith . N - number of global columns (or PETSC_DETERMINE to have calculated if n is given) 4213273d9f13SBarry Smith . 
d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix 4214273d9f13SBarry Smith (same value is used for all local rows) 4215273d9f13SBarry Smith . d_nnz - array containing the number of nonzeros in the various rows of the 4216273d9f13SBarry Smith DIAGONAL portion of the local submatrix (possibly different for each row) 42170298fd71SBarry Smith or NULL, if d_nz is used to specify the nonzero structure. 4218273d9f13SBarry Smith The size of this array is equal to the number of local rows, i.e 'm'. 4219273d9f13SBarry Smith . o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local 4220273d9f13SBarry Smith submatrix (same value is used for all local rows). 4221273d9f13SBarry Smith - o_nnz - array containing the number of nonzeros in the various rows of the 4222273d9f13SBarry Smith OFF-DIAGONAL portion of the local submatrix (possibly different for 42230298fd71SBarry Smith each row) or NULL, if o_nz is used to specify the nonzero 4224273d9f13SBarry Smith structure. The size of this array is equal to the number 4225273d9f13SBarry Smith of local rows, i.e 'm'. 4226273d9f13SBarry Smith 4227273d9f13SBarry Smith Output Parameter: 4228273d9f13SBarry Smith . A - the matrix 4229273d9f13SBarry Smith 4230175b88e8SBarry Smith It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(), 4231ae1d86c5SBarry Smith MatXXXXSetPreallocation() paradgm instead of this routine directly. 4232175b88e8SBarry Smith [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation] 4233175b88e8SBarry Smith 4234273d9f13SBarry Smith Notes: 423549a6f317SBarry Smith If the *_nnz parameter is given then the *_nz parameter is ignored 423649a6f317SBarry Smith 4237273d9f13SBarry Smith m,n,M,N parameters specify the size of the matrix, and its partitioning across 4238273d9f13SBarry Smith processors, while d_nz,d_nnz,o_nz,o_nnz parameters specify the approximate 4239273d9f13SBarry Smith storage requirements for this matrix. 
4240273d9f13SBarry Smith 4241273d9f13SBarry Smith If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one 4242273d9f13SBarry Smith processor than it must be used on all processors that share the object for 4243273d9f13SBarry Smith that argument. 4244273d9f13SBarry Smith 4245273d9f13SBarry Smith The user MUST specify either the local or global matrix dimensions 4246273d9f13SBarry Smith (possibly both). 4247273d9f13SBarry Smith 424833a7c187SSatish Balay The parallel matrix is partitioned across processors such that the 424933a7c187SSatish Balay first m0 rows belong to process 0, the next m1 rows belong to 425033a7c187SSatish Balay process 1, the next m2 rows belong to process 2 etc.. where 425133a7c187SSatish Balay m0,m1,m2,.. are the input parameter 'm'. i.e each processor stores 425233a7c187SSatish Balay values corresponding to [m x N] submatrix. 4253273d9f13SBarry Smith 425433a7c187SSatish Balay The columns are logically partitioned with the n0 columns belonging 425533a7c187SSatish Balay to 0th partition, the next n1 columns belonging to the next 425633a7c187SSatish Balay partition etc.. where n0,n1,n2... are the the input parameter 'n'. 425733a7c187SSatish Balay 425833a7c187SSatish Balay The DIAGONAL portion of the local submatrix on any given processor 425933a7c187SSatish Balay is the submatrix corresponding to the rows and columns m,n 426033a7c187SSatish Balay corresponding to the given processor. i.e diagonal matrix on 426133a7c187SSatish Balay process 0 is [m0 x n0], diagonal matrix on process 1 is [m1 x n1] 426233a7c187SSatish Balay etc. The remaining portion of the local submatrix [m x (N-n)] 426333a7c187SSatish Balay constitute the OFF-DIAGONAL portion. The example below better 426433a7c187SSatish Balay illustrates this concept. 
426533a7c187SSatish Balay 426633a7c187SSatish Balay For a square global matrix we define each processor's diagonal portion 426733a7c187SSatish Balay to be its local rows and the corresponding columns (a square submatrix); 426833a7c187SSatish Balay each processor's off-diagonal portion encompasses the remainder of the 426933a7c187SSatish Balay local matrix (a rectangular submatrix). 4270273d9f13SBarry Smith 4271273d9f13SBarry Smith If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored. 4272273d9f13SBarry Smith 427397d05335SKris Buschelman When calling this routine with a single process communicator, a matrix of 427497d05335SKris Buschelman type SEQAIJ is returned. If a matrix of type MPIAIJ is desired for this 427597d05335SKris Buschelman type of communicator, use the construction mechanism: 427678102f6cSMatthew Knepley MatCreate(...,&A); MatSetType(A,MATMPIAIJ); MatSetSizes(A, m,n,M,N); MatMPIAIJSetPreallocation(A,...); 427797d05335SKris Buschelman 4278273d9f13SBarry Smith By default, this format uses inodes (identical nodes) when possible. 4279273d9f13SBarry Smith We search for consecutive rows with the same nonzero structure, thereby 4280273d9f13SBarry Smith reusing matrix information to achieve increased efficiency. 4281273d9f13SBarry Smith 4282273d9f13SBarry Smith Options Database Keys: 4283923f20ffSKris Buschelman + -mat_no_inode - Do not use inodes 4284923f20ffSKris Buschelman . -mat_inode_limit <limit> - Sets inode limit (max limit=5) 4285273d9f13SBarry Smith - -mat_aij_oneindex - Internally use indexing starting at 1 4286273d9f13SBarry Smith rather than 0. Note that when calling MatSetValues(), 4287273d9f13SBarry Smith the user still MUST index entries starting at 0! 4288273d9f13SBarry Smith 4289273d9f13SBarry Smith 4290273d9f13SBarry Smith Example usage: 4291273d9f13SBarry Smith 4292273d9f13SBarry Smith Consider the following 8x8 matrix with 34 non-zero values, that is 4293273d9f13SBarry Smith assembled across 3 processors. 
Lets assume that proc0 owns 3 rows, 4294273d9f13SBarry Smith proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown 4295273d9f13SBarry Smith as follows: 4296273d9f13SBarry Smith 4297273d9f13SBarry Smith .vb 4298273d9f13SBarry Smith 1 2 0 | 0 3 0 | 0 4 4299273d9f13SBarry Smith Proc0 0 5 6 | 7 0 0 | 8 0 4300273d9f13SBarry Smith 9 0 10 | 11 0 0 | 12 0 4301273d9f13SBarry Smith ------------------------------------- 4302273d9f13SBarry Smith 13 0 14 | 15 16 17 | 0 0 4303273d9f13SBarry Smith Proc1 0 18 0 | 19 20 21 | 0 0 4304273d9f13SBarry Smith 0 0 0 | 22 23 0 | 24 0 4305273d9f13SBarry Smith ------------------------------------- 4306273d9f13SBarry Smith Proc2 25 26 27 | 0 0 28 | 29 0 4307273d9f13SBarry Smith 30 0 0 | 31 32 33 | 0 34 4308273d9f13SBarry Smith .ve 4309273d9f13SBarry Smith 4310273d9f13SBarry Smith This can be represented as a collection of submatrices as: 4311273d9f13SBarry Smith 4312273d9f13SBarry Smith .vb 4313273d9f13SBarry Smith A B C 4314273d9f13SBarry Smith D E F 4315273d9f13SBarry Smith G H I 4316273d9f13SBarry Smith .ve 4317273d9f13SBarry Smith 4318273d9f13SBarry Smith Where the submatrices A,B,C are owned by proc0, D,E,F are 4319273d9f13SBarry Smith owned by proc1, G,H,I are owned by proc2. 4320273d9f13SBarry Smith 4321273d9f13SBarry Smith The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 4322273d9f13SBarry Smith The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 4323273d9f13SBarry Smith The 'M','N' parameters are 8,8, and have the same values on all procs. 4324273d9f13SBarry Smith 4325273d9f13SBarry Smith The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are 4326273d9f13SBarry Smith submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices 4327273d9f13SBarry Smith corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively. 4328273d9f13SBarry Smith Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL 4329273d9f13SBarry Smith part as SeqAIJ matrices. 
for eg: proc1 will store [E] as a SeqAIJ 4330273d9f13SBarry Smith matrix, ans [DF] as another SeqAIJ matrix. 4331273d9f13SBarry Smith 4332273d9f13SBarry Smith When d_nz, o_nz parameters are specified, d_nz storage elements are 4333273d9f13SBarry Smith allocated for every row of the local diagonal submatrix, and o_nz 4334273d9f13SBarry Smith storage locations are allocated for every row of the OFF-DIAGONAL submat. 4335273d9f13SBarry Smith One way to choose d_nz and o_nz is to use the max nonzerors per local 4336273d9f13SBarry Smith rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices. 4337273d9f13SBarry Smith In this case, the values of d_nz,o_nz are: 4338273d9f13SBarry Smith .vb 4339273d9f13SBarry Smith proc0 : dnz = 2, o_nz = 2 4340273d9f13SBarry Smith proc1 : dnz = 3, o_nz = 2 4341273d9f13SBarry Smith proc2 : dnz = 1, o_nz = 4 4342273d9f13SBarry Smith .ve 4343273d9f13SBarry Smith We are allocating m*(d_nz+o_nz) storage locations for every proc. This 4344273d9f13SBarry Smith translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10 4345273d9f13SBarry Smith for proc3. i.e we are using 12+15+10=37 storage locations to store 4346273d9f13SBarry Smith 34 values. 4347273d9f13SBarry Smith 4348273d9f13SBarry Smith When d_nnz, o_nnz parameters are specified, the storage is specified 4349273d9f13SBarry Smith for every row, coresponding to both DIAGONAL and OFF-DIAGONAL submatrices. 4350273d9f13SBarry Smith In the above case the values for d_nnz,o_nnz are: 4351273d9f13SBarry Smith .vb 4352273d9f13SBarry Smith proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2] 4353273d9f13SBarry Smith proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1] 4354273d9f13SBarry Smith proc2: d_nnz = [1,1] and o_nnz = [4,4] 4355273d9f13SBarry Smith .ve 4356273d9f13SBarry Smith Here the space allocated is sum of all the above values i.e 34, and 4357273d9f13SBarry Smith hence pre-allocation is perfect. 
4358273d9f13SBarry Smith 4359273d9f13SBarry Smith Level: intermediate 4360273d9f13SBarry Smith 4361273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 4362273d9f13SBarry Smith 4363ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 43642fb0ec9aSBarry Smith MPIAIJ, MatCreateMPIAIJWithArrays() 4365273d9f13SBarry Smith @*/ 436669b1f4b7SBarry Smith PetscErrorCode MatCreateAIJ(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A) 4367273d9f13SBarry Smith { 43686849ba73SBarry Smith PetscErrorCode ierr; 4369b1d57f15SBarry Smith PetscMPIInt size; 4370273d9f13SBarry Smith 4371273d9f13SBarry Smith PetscFunctionBegin; 4372f69a0ea3SMatthew Knepley ierr = MatCreate(comm,A);CHKERRQ(ierr); 4373f69a0ea3SMatthew Knepley ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr); 4374273d9f13SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 4375273d9f13SBarry Smith if (size > 1) { 4376273d9f13SBarry Smith ierr = MatSetType(*A,MATMPIAIJ);CHKERRQ(ierr); 4377273d9f13SBarry Smith ierr = MatMPIAIJSetPreallocation(*A,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr); 4378273d9f13SBarry Smith } else { 4379273d9f13SBarry Smith ierr = MatSetType(*A,MATSEQAIJ);CHKERRQ(ierr); 4380273d9f13SBarry Smith ierr = MatSeqAIJSetPreallocation(*A,d_nz,d_nnz);CHKERRQ(ierr); 4381273d9f13SBarry Smith } 4382273d9f13SBarry Smith PetscFunctionReturn(0); 4383273d9f13SBarry Smith } 4384195d93cdSBarry Smith 43854a2ae208SSatish Balay #undef __FUNCT__ 43864a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJGetSeqAIJ" 43879230625dSJed Brown PetscErrorCode MatMPIAIJGetSeqAIJ(Mat A,Mat *Ad,Mat *Ao,const PetscInt *colmap[]) 4388195d93cdSBarry Smith { 4389195d93cdSBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 4390b1d57f15SBarry Smith 4391195d93cdSBarry Smith PetscFunctionBegin; 4392195d93cdSBarry Smith *Ad = a->A; 4393195d93cdSBarry 
  *Ao     = a->B;
  *colmap = a->garray;   /* maps B's compacted local column indices to global column numbers */
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetColoring_MPIAIJ"
/* Pushes a coloring onto both the diagonal (a->A) and off-diagonal (a->B) SeqAIJ blocks.
   IS_COLORING_GLOBAL colorings are gathered so off-process colors are available;
   IS_COLORING_GHOSTED colorings are translated through the column mapping. */
PetscErrorCode MatSetColoring_MPIAIJ(Mat A,ISColoring coloring)
{
  PetscErrorCode ierr;
  PetscInt       i;
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;

  PetscFunctionBegin;
  if (coloring->ctype == IS_COLORING_GLOBAL) {
    ISColoringValue *allcolors,*colors;
    ISColoring      ocoloring;

    /* set coloring for diagonal portion */
    ierr = MatSetColoring_SeqAIJ(a->A,coloring);CHKERRQ(ierr);

    /* set coloring for off-diagonal portion: gather the full global color array,
       then select the entries for the columns this process actually stores (garray) */
    ierr = ISAllGatherColors(PetscObjectComm((PetscObject)A),coloring->n,coloring->colors,NULL,&allcolors);CHKERRQ(ierr);
    ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
    for (i=0; i<a->B->cmap->n; i++) {
      colors[i] = allcolors[a->garray[i]];
    }
    ierr = PetscFree(allcolors);CHKERRQ(ierr);
    /* ownership of colors[] transfers to the ISColoring here; do not free it */
    ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr);
    ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&ocoloring);CHKERRQ(ierr);
  } else if (coloring->ctype == IS_COLORING_GHOSTED) {
    ISColoringValue *colors;
    PetscInt        *larray;
    ISColoring      ocoloring;

    /* set coloring for diagonal portion: map the owned global columns to local
       (ghosted) indices, then look up each column's color */
    ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr);
    for (i=0; i<a->A->cmap->n; i++) {
      larray[i] = i + A->cmap->rstart;   /* global index of local column i */
    }
    /* in-place translation: larray holds global indices on input, local on output */
    ierr = ISGlobalToLocalMappingApply(A->cmap->mapping,IS_GTOLM_MASK,a->A->cmap->n,larray,NULL,larray);CHKERRQ(ierr);
    ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
    for (i=0; i<a->A->cmap->n; i++) {
      colors[i] = coloring->colors[larray[i]];
    }
    ierr = PetscFree(larray);CHKERRQ(ierr);
    ierr = ISColoringCreate(PETSC_COMM_SELF,coloring->n,a->A->cmap->n,colors,&ocoloring);CHKERRQ(ierr);
    ierr = MatSetColoring_SeqAIJ(a->A,ocoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&ocoloring);CHKERRQ(ierr);

    /* set coloring for off-diagonal portion: garray already holds the global
       column indices of B, so only the global-to-local translation is needed */
    ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr);
    ierr = ISGlobalToLocalMappingApply(A->cmap->mapping,IS_GTOLM_MASK,a->B->cmap->n,a->garray,NULL,larray);CHKERRQ(ierr);
    ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
    for (i=0; i<a->B->cmap->n; i++) {
      colors[i] = coloring->colors[larray[i]];
    }
    ierr = PetscFree(larray);CHKERRQ(ierr);
    ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr);
    ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&ocoloring);CHKERRQ(ierr);
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support ISColoringType %d",(int)coloring->ctype);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesAdifor_MPIAIJ"
/* Forwards ADIFOR-computed derivative values to both sequential blocks of the MPIAIJ matrix. */
PetscErrorCode MatSetValuesAdifor_MPIAIJ(Mat A,PetscInt nl,void *advalues)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetValuesAdifor_SeqAIJ(a->A,nl,advalues);CHKERRQ(ierr);
  ierr = MatSetValuesAdifor_SeqAIJ(a->B,nl,advalues);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAIJConcatenateSeqAIJSymbolic"
PetscErrorCode MatCreateMPIAIJConcatenateSeqAIJSymbolic(MPI_Comm comm,Mat inmat,PetscInt n,Mat *outmat)
{
  PetscErrorCode ierr;
  PetscInt       m,N,i,rstart,nnz,*dnz,*onz,sum,bs,cbs;
  PetscInt       *indx;

  PetscFunctionBegin;
  /* This routine will ONLY return MPIAIJ type matrix */
  ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
Adams ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr); 44849b8102ccSHong Zhang if (n == PETSC_DECIDE) { 44859b8102ccSHong Zhang ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr); 44869b8102ccSHong Zhang } 4487a22543b6SHong Zhang /* Check sum(n) = N */ 4488a95133b1SBarry Smith ierr = MPI_Allreduce(&n,&sum,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 4489a22543b6SHong Zhang if (sum != N) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Sum of local columns != global columns %d",N); 4490a22543b6SHong Zhang 44919b8102ccSHong Zhang ierr = MPI_Scan(&m, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 44929b8102ccSHong Zhang rstart -= m; 44939b8102ccSHong Zhang 44949b8102ccSHong Zhang ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr); 44959b8102ccSHong Zhang for (i=0; i<m; i++) { 44960298fd71SBarry Smith ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,NULL);CHKERRQ(ierr); 44979b8102ccSHong Zhang ierr = MatPreallocateSet(i+rstart,nnz,indx,dnz,onz);CHKERRQ(ierr); 44980298fd71SBarry Smith ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,NULL);CHKERRQ(ierr); 44999b8102ccSHong Zhang } 45009b8102ccSHong Zhang 45019b8102ccSHong Zhang ierr = MatCreate(comm,outmat);CHKERRQ(ierr); 45029b8102ccSHong Zhang ierr = MatSetSizes(*outmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 4503a2f3521dSMark F. 
Adams ierr = MatSetBlockSizes(*outmat,bs,cbs);CHKERRQ(ierr); 45049b8102ccSHong Zhang ierr = MatSetType(*outmat,MATMPIAIJ);CHKERRQ(ierr); 45059b8102ccSHong Zhang ierr = MatMPIAIJSetPreallocation(*outmat,0,dnz,0,onz);CHKERRQ(ierr); 45069b8102ccSHong Zhang ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 45079b8102ccSHong Zhang PetscFunctionReturn(0); 45089b8102ccSHong Zhang } 45099b8102ccSHong Zhang 45109b8102ccSHong Zhang #undef __FUNCT__ 451190431a8fSHong Zhang #define __FUNCT__ "MatCreateMPIAIJConcatenateSeqAIJNumeric" 451290431a8fSHong Zhang PetscErrorCode MatCreateMPIAIJConcatenateSeqAIJNumeric(MPI_Comm comm,Mat inmat,PetscInt n,Mat outmat) 45139b8102ccSHong Zhang { 45149b8102ccSHong Zhang PetscErrorCode ierr; 45159b8102ccSHong Zhang PetscInt m,N,i,rstart,nnz,Ii; 45169b8102ccSHong Zhang PetscInt *indx; 45179b8102ccSHong Zhang PetscScalar *values; 45189b8102ccSHong Zhang 45199b8102ccSHong Zhang PetscFunctionBegin; 45209b8102ccSHong Zhang ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr); 45210298fd71SBarry Smith ierr = MatGetOwnershipRange(outmat,&rstart,NULL);CHKERRQ(ierr); 45229b8102ccSHong Zhang for (i=0; i<m; i++) { 45239b8102ccSHong Zhang ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr); 45249b8102ccSHong Zhang Ii = i + rstart; 4525a22543b6SHong Zhang ierr = MatSetValues_MPIAIJ(outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr); 45269b8102ccSHong Zhang ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr); 45279b8102ccSHong Zhang } 45289b8102ccSHong Zhang ierr = MatAssemblyBegin(outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 45299b8102ccSHong Zhang ierr = MatAssemblyEnd(outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 45309b8102ccSHong Zhang PetscFunctionReturn(0); 45319b8102ccSHong Zhang } 45329b8102ccSHong Zhang 45339b8102ccSHong Zhang #undef __FUNCT__ 453490431a8fSHong Zhang #define __FUNCT__ "MatCreateMPIAIJConcatenateSeqAIJ" 4535bc08b0f1SBarry Smith /*@ 453690431a8fSHong Zhang MatCreateMPIAIJConcatenateSeqAIJ - Creates a 
single large PETSc matrix by concatenating sequential 453751dd7536SBarry Smith matrices from each processor 4538c5d6d63eSBarry Smith 4539c5d6d63eSBarry Smith Collective on MPI_Comm 4540c5d6d63eSBarry Smith 4541c5d6d63eSBarry Smith Input Parameters: 454251dd7536SBarry Smith + comm - the communicators the parallel matrix will live on 4543d6bb3c2dSHong Zhang . inmat - the input sequential matrices 45440e36024fSHong Zhang . n - number of local columns (or PETSC_DECIDE) 4545d6bb3c2dSHong Zhang - scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 454651dd7536SBarry Smith 454751dd7536SBarry Smith Output Parameter: 454851dd7536SBarry Smith . outmat - the parallel matrix generated 4549c5d6d63eSBarry Smith 45507e25d530SSatish Balay Level: advanced 45517e25d530SSatish Balay 4552f08fae4eSHong Zhang Notes: The number of columns of the matrix in EACH processor MUST be the same. 4553c5d6d63eSBarry Smith 4554c5d6d63eSBarry Smith @*/ 455590431a8fSHong Zhang PetscErrorCode MatCreateMPIAIJConcatenateSeqAIJ(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat) 4556c5d6d63eSBarry Smith { 4557dfbe8321SBarry Smith PetscErrorCode ierr; 4558c5d6d63eSBarry Smith 4559c5d6d63eSBarry Smith PetscFunctionBegin; 45609b8102ccSHong Zhang ierr = PetscLogEventBegin(MAT_Merge,inmat,0,0,0);CHKERRQ(ierr); 4561d6bb3c2dSHong Zhang if (scall == MAT_INITIAL_MATRIX) { 456290431a8fSHong Zhang ierr = MatCreateMPIAIJConcatenateSeqAIJSymbolic(comm,inmat,n,outmat);CHKERRQ(ierr); 45630e36024fSHong Zhang } 456490431a8fSHong Zhang ierr = MatCreateMPIAIJConcatenateSeqAIJNumeric(comm,inmat,n,*outmat);CHKERRQ(ierr); 45659b8102ccSHong Zhang ierr = PetscLogEventEnd(MAT_Merge,inmat,0,0,0);CHKERRQ(ierr); 4566c5d6d63eSBarry Smith PetscFunctionReturn(0); 4567c5d6d63eSBarry Smith } 4568c5d6d63eSBarry Smith 4569c5d6d63eSBarry Smith #undef __FUNCT__ 4570c5d6d63eSBarry Smith #define __FUNCT__ "MatFileSplit" 4571dfbe8321SBarry Smith PetscErrorCode MatFileSplit(Mat A,char *outfile) 4572c5d6d63eSBarry Smith { 
4573dfbe8321SBarry Smith PetscErrorCode ierr; 457432dcc486SBarry Smith PetscMPIInt rank; 4575b1d57f15SBarry Smith PetscInt m,N,i,rstart,nnz; 4576de4209c5SBarry Smith size_t len; 4577b1d57f15SBarry Smith const PetscInt *indx; 4578c5d6d63eSBarry Smith PetscViewer out; 4579c5d6d63eSBarry Smith char *name; 4580c5d6d63eSBarry Smith Mat B; 4581b3cc6726SBarry Smith const PetscScalar *values; 4582c5d6d63eSBarry Smith 4583c5d6d63eSBarry Smith PetscFunctionBegin; 4584c5d6d63eSBarry Smith ierr = MatGetLocalSize(A,&m,0);CHKERRQ(ierr); 4585c5d6d63eSBarry Smith ierr = MatGetSize(A,0,&N);CHKERRQ(ierr); 4586f204ca49SKris Buschelman /* Should this be the type of the diagonal block of A? */ 4587f69a0ea3SMatthew Knepley ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr); 4588f69a0ea3SMatthew Knepley ierr = MatSetSizes(B,m,N,m,N);CHKERRQ(ierr); 4589a2f3521dSMark F. Adams ierr = MatSetBlockSizes(B,A->rmap->bs,A->cmap->bs);CHKERRQ(ierr); 4590f204ca49SKris Buschelman ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr); 45910298fd71SBarry Smith ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr); 4592c5d6d63eSBarry Smith ierr = MatGetOwnershipRange(A,&rstart,0);CHKERRQ(ierr); 4593c5d6d63eSBarry Smith for (i=0; i<m; i++) { 4594c5d6d63eSBarry Smith ierr = MatGetRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr); 4595c5d6d63eSBarry Smith ierr = MatSetValues(B,1,&i,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr); 4596c5d6d63eSBarry Smith ierr = MatRestoreRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr); 4597c5d6d63eSBarry Smith } 4598c5d6d63eSBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 4599c5d6d63eSBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 4600c5d6d63eSBarry Smith 4601ce94432eSBarry Smith ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);CHKERRQ(ierr); 4602c5d6d63eSBarry Smith ierr = PetscStrlen(outfile,&len);CHKERRQ(ierr); 4603c5d6d63eSBarry Smith ierr = PetscMalloc((len+5)*sizeof(char),&name);CHKERRQ(ierr); 4604c5d6d63eSBarry 
Smith sprintf(name,"%s.%d",outfile,rank); 4605852598b0SBarry Smith ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_APPEND,&out);CHKERRQ(ierr); 4606a2ea699eSBarry Smith ierr = PetscFree(name);CHKERRQ(ierr); 4607c5d6d63eSBarry Smith ierr = MatView(B,out);CHKERRQ(ierr); 46086bf464f9SBarry Smith ierr = PetscViewerDestroy(&out);CHKERRQ(ierr); 46096bf464f9SBarry Smith ierr = MatDestroy(&B);CHKERRQ(ierr); 4610c5d6d63eSBarry Smith PetscFunctionReturn(0); 4611c5d6d63eSBarry Smith } 4612e5f2cdd8SHong Zhang 461309573ac7SBarry Smith extern PetscErrorCode MatDestroy_MPIAIJ(Mat); 461451a7d1a8SHong Zhang #undef __FUNCT__ 461551a7d1a8SHong Zhang #define __FUNCT__ "MatDestroy_MPIAIJ_SeqsToMPI" 46167087cfbeSBarry Smith PetscErrorCode MatDestroy_MPIAIJ_SeqsToMPI(Mat A) 461751a7d1a8SHong Zhang { 461851a7d1a8SHong Zhang PetscErrorCode ierr; 4619671beff6SHong Zhang Mat_Merge_SeqsToMPI *merge; 4620776b82aeSLisandro Dalcin PetscContainer container; 462151a7d1a8SHong Zhang 462251a7d1a8SHong Zhang PetscFunctionBegin; 4623671beff6SHong Zhang ierr = PetscObjectQuery((PetscObject)A,"MatMergeSeqsToMPI",(PetscObject*)&container);CHKERRQ(ierr); 4624671beff6SHong Zhang if (container) { 4625776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(container,(void**)&merge);CHKERRQ(ierr); 462651a7d1a8SHong Zhang ierr = PetscFree(merge->id_r);CHKERRQ(ierr); 46273e06a4e6SHong Zhang ierr = PetscFree(merge->len_s);CHKERRQ(ierr); 46283e06a4e6SHong Zhang ierr = PetscFree(merge->len_r);CHKERRQ(ierr); 462951a7d1a8SHong Zhang ierr = PetscFree(merge->bi);CHKERRQ(ierr); 463051a7d1a8SHong Zhang ierr = PetscFree(merge->bj);CHKERRQ(ierr); 4631533163c2SBarry Smith ierr = PetscFree(merge->buf_ri[0]);CHKERRQ(ierr); 463202c68681SHong Zhang ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr); 4633533163c2SBarry Smith ierr = PetscFree(merge->buf_rj[0]);CHKERRQ(ierr); 463402c68681SHong Zhang ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr); 463505b42c5fSBarry Smith ierr = PetscFree(merge->coi);CHKERRQ(ierr); 
    ierr = PetscFree(merge->coj);CHKERRQ(ierr);
    ierr = PetscFree(merge->owners_co);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(&merge->rowmap);CHKERRQ(ierr);
    ierr = PetscFree(merge);CHKERRQ(ierr);
    /* detach the (now dangling) reference so it cannot be queried again */
    ierr = PetscObjectCompose((PetscObject)A,"MatMergeSeqsToMPI",0);CHKERRQ(ierr);
  }
  /* delegate to the regular MPIAIJ destructor */
  ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#include <../src/mat/utils/freespace.h>
#include <petscbt.h>

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAIJSumSeqAIJNumeric"
/*
   MatCreateMPIAIJSumSeqAIJNumeric - Numeric phase of MatCreateMPIAIJSumSeqAIJ():
   each rank sends the values of the seqmat rows it does not own to the owning
   rank, then every owned row of mpimat is accumulated from the local seqmat
   contribution plus all received contributions.

   NOTE(review): relies on the symbolic phase having attached the
   "MatMergeSeqsToMPI" container to mpimat; container is not checked for NULL
   before PetscContainerGetPointer() - confirm callers always run the symbolic
   phase first.
*/
PetscErrorCode MatCreateMPIAIJSumSeqAIJNumeric(Mat seqmat,Mat mpimat)
{
  PetscErrorCode      ierr;
  MPI_Comm            comm;
  Mat_SeqAIJ          *a =(Mat_SeqAIJ*)seqmat->data;
  PetscMPIInt         size,rank,taga,*len_s;
  PetscInt            N=mpimat->cmap->N,i,j,*owners,*ai=a->i,*aj;
  PetscInt            proc,m;
  PetscInt            **buf_ri,**buf_rj;
  PetscInt            k,anzi,*bj_i,*bi,*bj,arow,bnzi,nextaj;
  PetscInt            nrows,**buf_ri_k,**nextrow,**nextai;
  MPI_Request         *s_waits,*r_waits;
  MPI_Status          *status;
  MatScalar           *aa=a->a;
  MatScalar           **abuf_r,*ba_i;
  Mat_Merge_SeqsToMPI *merge;
  PetscContainer      container;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mpimat,&comm);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr);

  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  /* retrieve the communication layout computed by the symbolic phase */
  ierr = PetscObjectQuery((PetscObject)mpimat,"MatMergeSeqsToMPI",(PetscObject*)&container);CHKERRQ(ierr);
  ierr = PetscContainerGetPointer(container,(void**)&merge);CHKERRQ(ierr);

  bi     = merge->bi;
  bj     = merge->bj;
  buf_ri = merge->buf_ri;
  buf_rj = merge->buf_rj;

  ierr   = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr);
  owners = merge->rowmap->range;
  len_s  = merge->len_s;

  /* send and recv matrix values */
  /*-----------------------------*/
  ierr = PetscObjectGetNewTag((PetscObject)mpimat,&taga);CHKERRQ(ierr);
  ierr = PetscPostIrecvScalar(comm,taga,merge->nrecv,merge->id_r,merge->len_r,&abuf_r,&r_waits);CHKERRQ(ierr);

  ierr = PetscMalloc((merge->nsend+1)*sizeof(MPI_Request),&s_waits);CHKERRQ(ierr);
  for (proc=0,k=0; proc<size; proc++) {
    if (!len_s[proc]) continue;
    /* rows owned by [proc] are contiguous in aa because ai is the CSR row-start array */
    i    = owners[proc];
    ierr = MPI_Isend(aa+ai[i],len_s[proc],MPIU_MATSCALAR,proc,taga,comm,s_waits+k);CHKERRQ(ierr);
    k++;
  }

  if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,r_waits,status);CHKERRQ(ierr);}
  if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,s_waits,status);CHKERRQ(ierr);}
  ierr = PetscFree(status);CHKERRQ(ierr);

  ierr = PetscFree(s_waits);CHKERRQ(ierr);
  ierr = PetscFree(r_waits);CHKERRQ(ierr);

  /* insert mat values of mpimat */
  /*----------------------------*/
  /* ba_i is the value accumulator for one row; N (global column count) bounds any row length */
  ierr = PetscMalloc(N*sizeof(PetscScalar),&ba_i);CHKERRQ(ierr);
  ierr = PetscMalloc3(merge->nrecv,PetscInt*,&buf_ri_k,merge->nrecv,PetscInt*,&nextrow,merge->nrecv,PetscInt*,&nextai);CHKERRQ(ierr);

  for (k=0; k<merge->nrecv; k++) {
    buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
    nrows       = *(buf_ri_k[k]);
    nextrow[k]  = buf_ri_k[k]+1;  /* next row number of k-th recved i-structure */
    nextai[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of k-th recved i-structure */
  }

  /* set values of ba */
  m = merge->rowmap->n;
  for (i=0; i<m; i++) {
    arow = owners[rank] + i;  /* global row number of local row i */
    bj_i = bj+bi[i];  /* col indices of the i-th row of mpimat */
    bnzi = bi[i+1] - bi[i];
    ierr = PetscMemzero(ba_i,bnzi*sizeof(PetscScalar));CHKERRQ(ierr);

    /* add local non-zero vals of this proc's seqmat into ba;
       aj and bj_i are both sorted and bj_i was built (symbolically) as a
       superset of aj, so a single forward scan matches every aj entry */
    anzi   = ai[arow+1] - ai[arow];
    aj     = a->j + ai[arow];
    aa     = a->a + ai[arow];
    nextaj = 0;
    for (j=0; nextaj<anzi; j++) {
      if (*(bj_i + j) == aj[nextaj]) { /* bcol == acol */
        ba_i[j] += aa[nextaj++];
      }
    }

    /* add received vals into ba */
    for (k=0; k<merge->nrecv; k++) { /* k-th received message */
      /* i-th row */
      if (i == *nextrow[k]) {
        anzi   = *(nextai[k]+1) - *nextai[k];
        aj     = buf_rj[k] + *(nextai[k]);
        aa     = abuf_r[k] + *(nextai[k]);
        nextaj = 0;
        for (j=0; nextaj<anzi; j++) {
          if (*(bj_i + j) == aj[nextaj]) { /* bcol == acol */
            ba_i[j] += aa[nextaj++];
          }
        }
        nextrow[k]++; nextai[k]++;
      }
    }
    ierr = MatSetValues(mpimat,1,&arow,bnzi,bj_i,ba_i,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* abuf_r[0] is the single contiguous buffer PetscPostIrecvScalar() allocated */
  ierr = PetscFree(abuf_r[0]);CHKERRQ(ierr);
  ierr = PetscFree(abuf_r);CHKERRQ(ierr);
  ierr = PetscFree(ba_i);CHKERRQ(ierr);
  ierr = PetscFree3(buf_ri_k,nextrow,nextai);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

extern PetscErrorCode MatDestroy_MPIAIJ_SeqsToMPI(Mat);

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAIJSumSeqAIJSymbolic"
/*
   MatCreateMPIAIJSumSeqAIJSymbolic - Symbolic phase of MatCreateMPIAIJSumSeqAIJ():
   determines the communication pattern and the nonzero structure of the sum,
   preallocates the parallel matrix, and attaches the Mat_Merge_SeqsToMPI
   support structure for reuse by the numeric phase.
*/
PetscErrorCode MatCreateMPIAIJSumSeqAIJSymbolic(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,Mat *mpimat)
Zhang { 4774f08fae4eSHong Zhang PetscErrorCode ierr; 477555a3bba9SHong Zhang Mat B_mpi; 4776c2234fe3SHong Zhang Mat_SeqAIJ *a=(Mat_SeqAIJ*)seqmat->data; 4777b1d57f15SBarry Smith PetscMPIInt size,rank,tagi,tagj,*len_s,*len_si,*len_ri; 4778b1d57f15SBarry Smith PetscInt **buf_rj,**buf_ri,**buf_ri_k; 4779d0f46423SBarry Smith PetscInt M=seqmat->rmap->n,N=seqmat->cmap->n,i,*owners,*ai=a->i,*aj=a->j; 4780a2f3521dSMark F. Adams PetscInt len,proc,*dnz,*onz,bs,cbs; 4781b1d57f15SBarry Smith PetscInt k,anzi,*bi,*bj,*lnk,nlnk,arow,bnzi,nspacedouble=0; 4782b1d57f15SBarry Smith PetscInt nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextai; 478355d1abb9SHong Zhang MPI_Request *si_waits,*sj_waits,*ri_waits,*rj_waits; 478458cb9c82SHong Zhang MPI_Status *status; 47850298fd71SBarry Smith PetscFreeSpaceList free_space=NULL,current_space=NULL; 4786be0fcf8dSHong Zhang PetscBT lnkbt; 478751a7d1a8SHong Zhang Mat_Merge_SeqsToMPI *merge; 4788776b82aeSLisandro Dalcin PetscContainer container; 478902c68681SHong Zhang 4790e5f2cdd8SHong Zhang PetscFunctionBegin; 47914ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr); 47923c2c1871SHong Zhang 479338f152feSBarry Smith /* make sure it is a PETSc comm */ 47940298fd71SBarry Smith ierr = PetscCommDuplicate(comm,&comm,NULL);CHKERRQ(ierr); 4795e5f2cdd8SHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 4796e5f2cdd8SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 479755d1abb9SHong Zhang 479851a7d1a8SHong Zhang ierr = PetscNew(Mat_Merge_SeqsToMPI,&merge);CHKERRQ(ierr); 4799c2234fe3SHong Zhang ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr); 4800e5f2cdd8SHong Zhang 48016abd8857SHong Zhang /* determine row ownership */ 4802f08fae4eSHong Zhang /*---------------------------------------------------------*/ 480326283091SBarry Smith ierr = PetscLayoutCreate(comm,&merge->rowmap);CHKERRQ(ierr); 480426283091SBarry Smith ierr = PetscLayoutSetLocalSize(merge->rowmap,m);CHKERRQ(ierr); 
480526283091SBarry Smith ierr = PetscLayoutSetSize(merge->rowmap,M);CHKERRQ(ierr); 480626283091SBarry Smith ierr = PetscLayoutSetBlockSize(merge->rowmap,1);CHKERRQ(ierr); 480726283091SBarry Smith ierr = PetscLayoutSetUp(merge->rowmap);CHKERRQ(ierr); 4808b1d57f15SBarry Smith ierr = PetscMalloc(size*sizeof(PetscMPIInt),&len_si);CHKERRQ(ierr); 4809b1d57f15SBarry Smith ierr = PetscMalloc(size*sizeof(PetscMPIInt),&merge->len_s);CHKERRQ(ierr); 481055d1abb9SHong Zhang 48117a2fc3feSBarry Smith m = merge->rowmap->n; 48127a2fc3feSBarry Smith owners = merge->rowmap->range; 48136abd8857SHong Zhang 48146abd8857SHong Zhang /* determine the number of messages to send, their lengths */ 48156abd8857SHong Zhang /*---------------------------------------------------------*/ 48163e06a4e6SHong Zhang len_s = merge->len_s; 481751a7d1a8SHong Zhang 48182257cef7SHong Zhang len = 0; /* length of buf_si[] */ 4819c2234fe3SHong Zhang merge->nsend = 0; 4820409913e3SHong Zhang for (proc=0; proc<size; proc++) { 48212257cef7SHong Zhang len_si[proc] = 0; 48223e06a4e6SHong Zhang if (proc == rank) { 48236abd8857SHong Zhang len_s[proc] = 0; 48243e06a4e6SHong Zhang } else { 482502c68681SHong Zhang len_si[proc] = owners[proc+1] - owners[proc] + 1; 48263e06a4e6SHong Zhang len_s[proc] = ai[owners[proc+1]] - ai[owners[proc]]; /* num of rows to be sent to [proc] */ 48273e06a4e6SHong Zhang } 48283e06a4e6SHong Zhang if (len_s[proc]) { 4829c2234fe3SHong Zhang merge->nsend++; 48302257cef7SHong Zhang nrows = 0; 48312257cef7SHong Zhang for (i=owners[proc]; i<owners[proc+1]; i++) { 48322257cef7SHong Zhang if (ai[i+1] > ai[i]) nrows++; 48332257cef7SHong Zhang } 48342257cef7SHong Zhang len_si[proc] = 2*(nrows+1); 48352257cef7SHong Zhang len += len_si[proc]; 4836409913e3SHong Zhang } 483758cb9c82SHong Zhang } 4838409913e3SHong Zhang 48392257cef7SHong Zhang /* determine the number and length of messages to receive for ij-structure */ 48402257cef7SHong Zhang 
/*-------------------------------------------------------------------------*/ 48410298fd71SBarry Smith ierr = PetscGatherNumberOfMessages(comm,NULL,len_s,&merge->nrecv);CHKERRQ(ierr); 484255d1abb9SHong Zhang ierr = PetscGatherMessageLengths2(comm,merge->nsend,merge->nrecv,len_s,len_si,&merge->id_r,&merge->len_r,&len_ri);CHKERRQ(ierr); 4843671beff6SHong Zhang 48443e06a4e6SHong Zhang /* post the Irecv of j-structure */ 48453e06a4e6SHong Zhang /*-------------------------------*/ 48462c72b5baSSatish Balay ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr); 48473e06a4e6SHong Zhang ierr = PetscPostIrecvInt(comm,tagj,merge->nrecv,merge->id_r,merge->len_r,&buf_rj,&rj_waits);CHKERRQ(ierr); 484802c68681SHong Zhang 48493e06a4e6SHong Zhang /* post the Isend of j-structure */ 4850affca5deSHong Zhang /*--------------------------------*/ 48511d79065fSBarry Smith ierr = PetscMalloc2(merge->nsend,MPI_Request,&si_waits,merge->nsend,MPI_Request,&sj_waits);CHKERRQ(ierr); 48523e06a4e6SHong Zhang 48532257cef7SHong Zhang for (proc=0, k=0; proc<size; proc++) { 4854409913e3SHong Zhang if (!len_s[proc]) continue; 485502c68681SHong Zhang i = owners[proc]; 4856b1d57f15SBarry Smith ierr = MPI_Isend(aj+ai[i],len_s[proc],MPIU_INT,proc,tagj,comm,sj_waits+k);CHKERRQ(ierr); 485751a7d1a8SHong Zhang k++; 485851a7d1a8SHong Zhang } 485951a7d1a8SHong Zhang 48603e06a4e6SHong Zhang /* receives and sends of j-structure are complete */ 48613e06a4e6SHong Zhang /*------------------------------------------------*/ 48620c468ba9SBarry Smith if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,rj_waits,status);CHKERRQ(ierr);} 48630c468ba9SBarry Smith if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,sj_waits,status);CHKERRQ(ierr);} 486402c68681SHong Zhang 486502c68681SHong Zhang /* send and recv i-structure */ 486602c68681SHong Zhang /*---------------------------*/ 48672c72b5baSSatish Balay ierr = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr); 486802c68681SHong Zhang ierr = 
PetscPostIrecvInt(comm,tagi,merge->nrecv,merge->id_r,len_ri,&buf_ri,&ri_waits);CHKERRQ(ierr); 486902c68681SHong Zhang 4870b1d57f15SBarry Smith ierr = PetscMalloc((len+1)*sizeof(PetscInt),&buf_s);CHKERRQ(ierr); 48713e06a4e6SHong Zhang buf_si = buf_s; /* points to the beginning of k-th msg to be sent */ 48722257cef7SHong Zhang for (proc=0,k=0; proc<size; proc++) { 487302c68681SHong Zhang if (!len_s[proc]) continue; 48743e06a4e6SHong Zhang /* form outgoing message for i-structure: 48753e06a4e6SHong Zhang buf_si[0]: nrows to be sent 48763e06a4e6SHong Zhang [1:nrows]: row index (global) 48773e06a4e6SHong Zhang [nrows+1:2*nrows+1]: i-structure index 48783e06a4e6SHong Zhang */ 48793e06a4e6SHong Zhang /*-------------------------------------------*/ 48802257cef7SHong Zhang nrows = len_si[proc]/2 - 1; 48813e06a4e6SHong Zhang buf_si_i = buf_si + nrows+1; 48823e06a4e6SHong Zhang buf_si[0] = nrows; 48833e06a4e6SHong Zhang buf_si_i[0] = 0; 48843e06a4e6SHong Zhang nrows = 0; 48853e06a4e6SHong Zhang for (i=owners[proc]; i<owners[proc+1]; i++) { 48863e06a4e6SHong Zhang anzi = ai[i+1] - ai[i]; 48873e06a4e6SHong Zhang if (anzi) { 48883e06a4e6SHong Zhang buf_si_i[nrows+1] = buf_si_i[nrows] + anzi; /* i-structure */ 48893e06a4e6SHong Zhang buf_si[nrows+1] = i-owners[proc]; /* local row index */ 48903e06a4e6SHong Zhang nrows++; 48913e06a4e6SHong Zhang } 48923e06a4e6SHong Zhang } 4893b1d57f15SBarry Smith ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,si_waits+k);CHKERRQ(ierr); 489402c68681SHong Zhang k++; 48952257cef7SHong Zhang buf_si += len_si[proc]; 489602c68681SHong Zhang } 48972257cef7SHong Zhang 48980c468ba9SBarry Smith if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,ri_waits,status);CHKERRQ(ierr);} 48990c468ba9SBarry Smith if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,si_waits,status);CHKERRQ(ierr);} 490002c68681SHong Zhang 4901ae15b995SBarry Smith ierr = PetscInfo2(seqmat,"nsend: %D, nrecv: %D\n",merge->nsend,merge->nrecv);CHKERRQ(ierr); 49023e06a4e6SHong 
Zhang for (i=0; i<merge->nrecv; i++) { 4903ae15b995SBarry Smith ierr = PetscInfo3(seqmat,"recv len_ri=%D, len_rj=%D from [%D]\n",len_ri[i],merge->len_r[i],merge->id_r[i]);CHKERRQ(ierr); 49043e06a4e6SHong Zhang } 49053e06a4e6SHong Zhang 49063e06a4e6SHong Zhang ierr = PetscFree(len_si);CHKERRQ(ierr); 490702c68681SHong Zhang ierr = PetscFree(len_ri);CHKERRQ(ierr); 490802c68681SHong Zhang ierr = PetscFree(rj_waits);CHKERRQ(ierr); 49091d79065fSBarry Smith ierr = PetscFree2(si_waits,sj_waits);CHKERRQ(ierr); 49102257cef7SHong Zhang ierr = PetscFree(ri_waits);CHKERRQ(ierr); 49113e06a4e6SHong Zhang ierr = PetscFree(buf_s);CHKERRQ(ierr); 4912bcc1bcd5SHong Zhang ierr = PetscFree(status);CHKERRQ(ierr); 491358cb9c82SHong Zhang 4914bcc1bcd5SHong Zhang /* compute a local seq matrix in each processor */ 4915bcc1bcd5SHong Zhang /*----------------------------------------------*/ 491658cb9c82SHong Zhang /* allocate bi array and free space for accumulating nonzero column info */ 4917b1d57f15SBarry Smith ierr = PetscMalloc((m+1)*sizeof(PetscInt),&bi);CHKERRQ(ierr); 491858cb9c82SHong Zhang bi[0] = 0; 491958cb9c82SHong Zhang 4920be0fcf8dSHong Zhang /* create and initialize a linked list */ 4921be0fcf8dSHong Zhang nlnk = N+1; 4922be0fcf8dSHong Zhang ierr = PetscLLCreate(N,N,nlnk,lnk,lnkbt);CHKERRQ(ierr); 492358cb9c82SHong Zhang 4924bcc1bcd5SHong Zhang /* initial FreeSpace size is 2*(num of local nnz(seqmat)) */ 4925bcc1bcd5SHong Zhang len = ai[owners[rank+1]] - ai[owners[rank]]; 4926a1a86e44SBarry Smith ierr = PetscFreeSpaceGet((PetscInt)(2*len+1),&free_space);CHKERRQ(ierr); 49272205254eSKarl Rupp 492858cb9c82SHong Zhang current_space = free_space; 492958cb9c82SHong Zhang 4930bcc1bcd5SHong Zhang /* determine symbolic info for each local row */ 49310572522cSBarry Smith ierr = PetscMalloc3(merge->nrecv,PetscInt*,&buf_ri_k,merge->nrecv,PetscInt*,&nextrow,merge->nrecv,PetscInt*,&nextai);CHKERRQ(ierr); 49321d79065fSBarry Smith 49333e06a4e6SHong Zhang for (k=0; k<merge->nrecv; k++) { 
49342257cef7SHong Zhang buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */ 49353e06a4e6SHong Zhang nrows = *buf_ri_k[k]; 49363e06a4e6SHong Zhang nextrow[k] = buf_ri_k[k] + 1; /* next row number of k-th recved i-structure */ 49372257cef7SHong Zhang nextai[k] = buf_ri_k[k] + (nrows + 1); /* poins to the next i-structure of k-th recved i-structure */ 49383e06a4e6SHong Zhang } 49392257cef7SHong Zhang 4940bcc1bcd5SHong Zhang ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr); 4941bcc1bcd5SHong Zhang len = 0; 494258cb9c82SHong Zhang for (i=0; i<m; i++) { 494358cb9c82SHong Zhang bnzi = 0; 494458cb9c82SHong Zhang /* add local non-zero cols of this proc's seqmat into lnk */ 494558cb9c82SHong Zhang arow = owners[rank] + i; 494658cb9c82SHong Zhang anzi = ai[arow+1] - ai[arow]; 494758cb9c82SHong Zhang aj = a->j + ai[arow]; 4948dadf0e6bSHong Zhang ierr = PetscLLAddSorted(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr); 494958cb9c82SHong Zhang bnzi += nlnk; 495058cb9c82SHong Zhang /* add received col data into lnk */ 495151a7d1a8SHong Zhang for (k=0; k<merge->nrecv; k++) { /* k-th received message */ 495255d1abb9SHong Zhang if (i == *nextrow[k]) { /* i-th row */ 49533e06a4e6SHong Zhang anzi = *(nextai[k]+1) - *nextai[k]; 49543e06a4e6SHong Zhang aj = buf_rj[k] + *nextai[k]; 4955dadf0e6bSHong Zhang ierr = PetscLLAddSorted(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr); 49563e06a4e6SHong Zhang bnzi += nlnk; 49573e06a4e6SHong Zhang nextrow[k]++; nextai[k]++; 49583e06a4e6SHong Zhang } 495958cb9c82SHong Zhang } 4960bcc1bcd5SHong Zhang if (len < bnzi) len = bnzi; /* =max(bnzi) */ 496158cb9c82SHong Zhang 496258cb9c82SHong Zhang /* if free space is not available, make more free space */ 496358cb9c82SHong Zhang if (current_space->local_remaining<bnzi) { 49644238b7adSHong Zhang ierr = PetscFreeSpaceGet(bnzi+current_space->total_array_size,¤t_space);CHKERRQ(ierr); 496558cb9c82SHong Zhang nspacedouble++; 496658cb9c82SHong Zhang } 496758cb9c82SHong Zhang /* copy data into free 
space, then initialize lnk */ 4968be0fcf8dSHong Zhang ierr = PetscLLClean(N,N,bnzi,lnk,current_space->array,lnkbt);CHKERRQ(ierr); 4969bcc1bcd5SHong Zhang ierr = MatPreallocateSet(i+owners[rank],bnzi,current_space->array,dnz,onz);CHKERRQ(ierr); 4970bcc1bcd5SHong Zhang 497158cb9c82SHong Zhang current_space->array += bnzi; 497258cb9c82SHong Zhang current_space->local_used += bnzi; 497358cb9c82SHong Zhang current_space->local_remaining -= bnzi; 497458cb9c82SHong Zhang 497558cb9c82SHong Zhang bi[i+1] = bi[i] + bnzi; 497658cb9c82SHong Zhang } 4977bcc1bcd5SHong Zhang 49781d79065fSBarry Smith ierr = PetscFree3(buf_ri_k,nextrow,nextai);CHKERRQ(ierr); 4979bcc1bcd5SHong Zhang 4980b1d57f15SBarry Smith ierr = PetscMalloc((bi[m]+1)*sizeof(PetscInt),&bj);CHKERRQ(ierr); 4981a1a86e44SBarry Smith ierr = PetscFreeSpaceContiguous(&free_space,bj);CHKERRQ(ierr); 4982be0fcf8dSHong Zhang ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr); 4983409913e3SHong Zhang 4984bcc1bcd5SHong Zhang /* create symbolic parallel matrix B_mpi */ 4985bcc1bcd5SHong Zhang /*---------------------------------------*/ 4986a2f3521dSMark F. Adams ierr = MatGetBlockSizes(seqmat,&bs,&cbs);CHKERRQ(ierr); 4987f69a0ea3SMatthew Knepley ierr = MatCreate(comm,&B_mpi);CHKERRQ(ierr); 498854b84b50SHong Zhang if (n==PETSC_DECIDE) { 4989f69a0ea3SMatthew Knepley ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,N);CHKERRQ(ierr); 499054b84b50SHong Zhang } else { 4991f69a0ea3SMatthew Knepley ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 499254b84b50SHong Zhang } 4993a2f3521dSMark F. 
Adams ierr = MatSetBlockSizes(B_mpi,bs,cbs);CHKERRQ(ierr); 4994bcc1bcd5SHong Zhang ierr = MatSetType(B_mpi,MATMPIAIJ);CHKERRQ(ierr); 4995bcc1bcd5SHong Zhang ierr = MatMPIAIJSetPreallocation(B_mpi,0,dnz,0,onz);CHKERRQ(ierr); 4996bcc1bcd5SHong Zhang ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 49977e63b356SHong Zhang ierr = MatSetOption(B_mpi,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr); 499858cb9c82SHong Zhang 499990431a8fSHong Zhang /* B_mpi is not ready for use - assembly will be done by MatCreateMPIAIJSumSeqAIJNumeric() */ 50006abd8857SHong Zhang B_mpi->assembled = PETSC_FALSE; 5001affca5deSHong Zhang B_mpi->ops->destroy = MatDestroy_MPIAIJ_SeqsToMPI; 5002affca5deSHong Zhang merge->bi = bi; 5003affca5deSHong Zhang merge->bj = bj; 500402c68681SHong Zhang merge->buf_ri = buf_ri; 500502c68681SHong Zhang merge->buf_rj = buf_rj; 50060298fd71SBarry Smith merge->coi = NULL; 50070298fd71SBarry Smith merge->coj = NULL; 50080298fd71SBarry Smith merge->owners_co = NULL; 5009affca5deSHong Zhang 5010bf0cc555SLisandro Dalcin ierr = PetscCommDestroy(&comm);CHKERRQ(ierr); 5011bf0cc555SLisandro Dalcin 5012affca5deSHong Zhang /* attach the supporting struct to B_mpi for reuse */ 5013776b82aeSLisandro Dalcin ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr); 5014776b82aeSLisandro Dalcin ierr = PetscContainerSetPointer(container,merge);CHKERRQ(ierr); 5015affca5deSHong Zhang ierr = PetscObjectCompose((PetscObject)B_mpi,"MatMergeSeqsToMPI",(PetscObject)container);CHKERRQ(ierr); 5016bf0cc555SLisandro Dalcin ierr = PetscContainerDestroy(&container);CHKERRQ(ierr); 5017affca5deSHong Zhang *mpimat = B_mpi; 501838f152feSBarry Smith 50194ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr); 5020e5f2cdd8SHong Zhang PetscFunctionReturn(0); 5021e5f2cdd8SHong Zhang } 502225616d81SHong Zhang 502338f152feSBarry Smith #undef __FUNCT__ 502490431a8fSHong Zhang #define __FUNCT__ "MatCreateMPIAIJSumSeqAIJ" 5025d4036a1aSHong 
Zhang /*@C 502690431a8fSHong Zhang MatCreateMPIAIJSumSeqAIJ - Creates a MPIAIJ matrix by adding sequential 5027d4036a1aSHong Zhang matrices from each processor 5028d4036a1aSHong Zhang 5029d4036a1aSHong Zhang Collective on MPI_Comm 5030d4036a1aSHong Zhang 5031d4036a1aSHong Zhang Input Parameters: 5032d4036a1aSHong Zhang + comm - the communicators the parallel matrix will live on 5033d4036a1aSHong Zhang . seqmat - the input sequential matrices 5034d4036a1aSHong Zhang . m - number of local rows (or PETSC_DECIDE) 5035d4036a1aSHong Zhang . n - number of local columns (or PETSC_DECIDE) 5036d4036a1aSHong Zhang - scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 5037d4036a1aSHong Zhang 5038d4036a1aSHong Zhang Output Parameter: 5039d4036a1aSHong Zhang . mpimat - the parallel matrix generated 5040d4036a1aSHong Zhang 5041d4036a1aSHong Zhang Level: advanced 5042d4036a1aSHong Zhang 5043d4036a1aSHong Zhang Notes: 5044d4036a1aSHong Zhang The dimensions of the sequential matrix in each processor MUST be the same. 5045d4036a1aSHong Zhang The input seqmat is included into the container "Mat_Merge_SeqsToMPI", and will be 5046d4036a1aSHong Zhang destroyed when mpimat is destroyed. Call PetscObjectQuery() to access seqmat. 
5047d4036a1aSHong Zhang @*/ 504890431a8fSHong Zhang PetscErrorCode MatCreateMPIAIJSumSeqAIJ(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,MatReuse scall,Mat *mpimat) 504955d1abb9SHong Zhang { 505055d1abb9SHong Zhang PetscErrorCode ierr; 50517e63b356SHong Zhang PetscMPIInt size; 505255d1abb9SHong Zhang 505355d1abb9SHong Zhang PetscFunctionBegin; 50547e63b356SHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 50557e63b356SHong Zhang if (size == 1) { 50567e63b356SHong Zhang ierr = PetscLogEventBegin(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr); 50577e63b356SHong Zhang if (scall == MAT_INITIAL_MATRIX) { 50587e63b356SHong Zhang ierr = MatDuplicate(seqmat,MAT_COPY_VALUES,mpimat);CHKERRQ(ierr); 50597e63b356SHong Zhang } else { 50607e63b356SHong Zhang ierr = MatCopy(seqmat,*mpimat,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 50617e63b356SHong Zhang } 50627e63b356SHong Zhang ierr = PetscLogEventEnd(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr); 50637e63b356SHong Zhang PetscFunctionReturn(0); 50647e63b356SHong Zhang } 50654ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr); 506655d1abb9SHong Zhang if (scall == MAT_INITIAL_MATRIX) { 506790431a8fSHong Zhang ierr = MatCreateMPIAIJSumSeqAIJSymbolic(comm,seqmat,m,n,mpimat);CHKERRQ(ierr); 506855d1abb9SHong Zhang } 506990431a8fSHong Zhang ierr = MatCreateMPIAIJSumSeqAIJNumeric(seqmat,*mpimat);CHKERRQ(ierr); 50704ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr); 507155d1abb9SHong Zhang PetscFunctionReturn(0); 507255d1abb9SHong Zhang } 50734ebed01fSBarry Smith 507425616d81SHong Zhang #undef __FUNCT__ 50754a2b5492SBarry Smith #define __FUNCT__ "MatMPIAIJGetLocalMat" 5076bc08b0f1SBarry Smith /*@ 50774a2b5492SBarry Smith MatMPIAIJGetLocalMat - Creates a SeqAIJ from a MPIAIJ matrix by taking all its local rows and putting them into a sequential vector with 50788661ff28SBarry Smith mlocal rows and n columns. 
    Where mlocal is the row count obtained with MatGetLocalSize() and n is the global column count obtained
        with MatGetSize()

    Not Collective

   Input Parameters:
+    A - the matrix
.    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

   Output Parameter:
.    A_loc - the local sequential matrix generated

    Level: developer

.seealso: MatGetOwnershipRange(), MatMPIAIJGetLocalMatCondensed()

@*/
PetscErrorCode MatMPIAIJGetLocalMat(Mat A,MatReuse scall,Mat *A_loc)
{
  PetscErrorCode ierr;
  Mat_MPIAIJ     *mpimat=(Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ     *mat,*a=(Mat_SeqAIJ*)(mpimat->A)->data,*b=(Mat_SeqAIJ*)(mpimat->B)->data;
  PetscInt       *ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j,*cmap=mpimat->garray;
  MatScalar      *aa=a->a,*ba=b->a,*cam;
  PetscScalar    *ca;
  PetscInt       am=A->rmap->n,i,j,k,cstart=A->cmap->rstart;
  PetscInt       *ci,*cj,col,ncols_d,ncols_o,jo;
  PetscBool      match;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&match);CHKERRQ(ierr);
  if (!match) SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP,"Requires MPIAIJ matrix as input");
  ierr = PetscLogEventBegin(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    /* Row i of the result merges row i of the diagonal block (mpimat->A) with
       row i of the off-diagonal block (mpimat->B); first count combined
       nonzeros per row to size the CSR arrays */
    ierr = PetscMalloc((1+am)*sizeof(PetscInt),&ci);CHKERRQ(ierr);
    ci[0] = 0;
    for (i=0; i<am; i++) {
      ci[i+1] = ci[i] + (ai[i+1] - ai[i]) + (bi[i+1] - bi[i]);
    }
    ierr = PetscMalloc((1+ci[am])*sizeof(PetscInt),&cj);CHKERRQ(ierr);
    ierr = PetscMalloc((1+ci[am])*sizeof(PetscScalar),&ca);CHKERRQ(ierr);
    k    = 0;
    for (i=0; i<am; i++) {
      ncols_o = bi[i+1] - bi[i];
      ncols_d = ai[i+1] - ai[i];
      /* off-diagonal portion of A: global columns below cstart come first so
         the merged row stays in ascending global column order */
      for (jo=0; jo<ncols_o; jo++) {
        col = cmap[*bj];
        if (col >= cstart) break;
        cj[k]   = col; bj++;
        ca[k++] = *ba++;
      }
      /* diagonal portion of A: local columns mapped to global via cstart offset */
      for (j=0; j<ncols_d; j++) {
        cj[k]   = cstart + *aj++;
        ca[k++] = *aa++;
      }
      /* off-diagonal portion of A: remaining global columns (>= cstart + local width) */
      for (j=jo; j<ncols_o; j++) {
        cj[k]   = cmap[*bj++];
        ca[k++] = *ba++;
      }
    }
    /* put together the new matrix */
    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,A->cmap->N,ci,cj,ca,A_loc);CHKERRQ(ierr);
    /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */
    /* Since these are PETSc arrays, change flags to free them as necessary. */
    mat = (Mat_SeqAIJ*)(*A_loc)->data;
    mat->free_a  = PETSC_TRUE;
    mat->free_ij = PETSC_TRUE;
    mat->nonew   = 0;
  } else if (scall == MAT_REUSE_MATRIX) {
    /* structure already built: only refresh the values, walking the three
       ranges per row in the same order as the MAT_INITIAL_MATRIX pass */
    mat=(Mat_SeqAIJ*)(*A_loc)->data;
    ci = mat->i; cj = mat->j; cam = mat->a;
    for (i=0; i<am; i++) {
      /* off-diagonal portion of A (columns below cstart) */
      ncols_o = bi[i+1] - bi[i];
      for (jo=0; jo<ncols_o; jo++) {
        col = cmap[*bj];
        if (col >= cstart) break;
        *cam++ = *ba++; bj++;
      }
      /* diagonal portion of A */
      ncols_d = ai[i+1] - ai[i];
      for (j=0; j<ncols_d; j++) *cam++ = *aa++;
      /* off-diagonal portion of A (remaining columns) */
      for (j=jo; j<ncols_o; j++) {
        *cam++ = *ba++; bj++;
      }
    }
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall);
  ierr = PetscLogEventEnd(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAIJGetLocalMatCondensed"
/*@C
     MatMPIAIJGetLocalMatCondensed - Creates a SeqAIJ matrix from an MPIAIJ matrix by taking all its local rows and NON-ZERO columns

    Not Collective

   Input Parameters:
+    A - the matrix
scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 51830298fd71SBarry Smith - row, col - index sets of rows and columns to extract (or NULL) 518432fba14fSHong Zhang 518532fba14fSHong Zhang Output Parameter: 518632fba14fSHong Zhang . A_loc - the local sequential matrix generated 518732fba14fSHong Zhang 518832fba14fSHong Zhang Level: developer 518932fba14fSHong Zhang 5190ba264940SBarry Smith .seealso: MatGetOwnershipRange(), MatMPIAIJGetLocalMat() 5191ba264940SBarry Smith 519232fba14fSHong Zhang @*/ 51934a2b5492SBarry Smith PetscErrorCode MatMPIAIJGetLocalMatCondensed(Mat A,MatReuse scall,IS *row,IS *col,Mat *A_loc) 519432fba14fSHong Zhang { 519532fba14fSHong Zhang Mat_MPIAIJ *a=(Mat_MPIAIJ*)A->data; 519632fba14fSHong Zhang PetscErrorCode ierr; 519732fba14fSHong Zhang PetscInt i,start,end,ncols,nzA,nzB,*cmap,imark,*idx; 519832fba14fSHong Zhang IS isrowa,iscola; 519932fba14fSHong Zhang Mat *aloc; 52004a2b5492SBarry Smith PetscBool match; 520132fba14fSHong Zhang 520232fba14fSHong Zhang PetscFunctionBegin; 5203251f4c67SDmitry Karpeev ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&match);CHKERRQ(ierr); 5204ce94432eSBarry Smith if (!match) SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP,"Requires MPIAIJ matrix as input"); 52054ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr); 520632fba14fSHong Zhang if (!row) { 5207d0f46423SBarry Smith start = A->rmap->rstart; end = A->rmap->rend; 520832fba14fSHong Zhang ierr = ISCreateStride(PETSC_COMM_SELF,end-start,start,1,&isrowa);CHKERRQ(ierr); 520932fba14fSHong Zhang } else { 521032fba14fSHong Zhang isrowa = *row; 521132fba14fSHong Zhang } 521232fba14fSHong Zhang if (!col) { 5213d0f46423SBarry Smith start = A->cmap->rstart; 521432fba14fSHong Zhang cmap = a->garray; 5215d0f46423SBarry Smith nzA = a->A->cmap->n; 5216d0f46423SBarry Smith nzB = a->B->cmap->n; 521732fba14fSHong Zhang ierr = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr); 521832fba14fSHong Zhang 
ncols = 0; 521932fba14fSHong Zhang for (i=0; i<nzB; i++) { 522032fba14fSHong Zhang if (cmap[i] < start) idx[ncols++] = cmap[i]; 522132fba14fSHong Zhang else break; 522232fba14fSHong Zhang } 522332fba14fSHong Zhang imark = i; 522432fba14fSHong Zhang for (i=0; i<nzA; i++) idx[ncols++] = start + i; 522532fba14fSHong Zhang for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i]; 5226d67e408aSBarry Smith ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,PETSC_OWN_POINTER,&iscola);CHKERRQ(ierr); 522732fba14fSHong Zhang } else { 522832fba14fSHong Zhang iscola = *col; 522932fba14fSHong Zhang } 523032fba14fSHong Zhang if (scall != MAT_INITIAL_MATRIX) { 523132fba14fSHong Zhang ierr = PetscMalloc(sizeof(Mat),&aloc);CHKERRQ(ierr); 523232fba14fSHong Zhang aloc[0] = *A_loc; 523332fba14fSHong Zhang } 523432fba14fSHong Zhang ierr = MatGetSubMatrices(A,1,&isrowa,&iscola,scall,&aloc);CHKERRQ(ierr); 523532fba14fSHong Zhang *A_loc = aloc[0]; 523632fba14fSHong Zhang ierr = PetscFree(aloc);CHKERRQ(ierr); 523732fba14fSHong Zhang if (!row) { 52386bf464f9SBarry Smith ierr = ISDestroy(&isrowa);CHKERRQ(ierr); 523932fba14fSHong Zhang } 524032fba14fSHong Zhang if (!col) { 52416bf464f9SBarry Smith ierr = ISDestroy(&iscola);CHKERRQ(ierr); 524232fba14fSHong Zhang } 52434ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr); 524432fba14fSHong Zhang PetscFunctionReturn(0); 524532fba14fSHong Zhang } 524632fba14fSHong Zhang 524725616d81SHong Zhang #undef __FUNCT__ 524825616d81SHong Zhang #define __FUNCT__ "MatGetBrowsOfAcols" 524925616d81SHong Zhang /*@C 525032fba14fSHong Zhang MatGetBrowsOfAcols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns of local A 525125616d81SHong Zhang 525225616d81SHong Zhang Collective on Mat 525325616d81SHong Zhang 525425616d81SHong Zhang Input Parameters: 5255e240928fSHong Zhang + A,B - the matrices in mpiaij format 525625616d81SHong Zhang . 
scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 52570298fd71SBarry Smith - rowb, colb - index sets of rows and columns of B to extract (or NULL) 525825616d81SHong Zhang 525925616d81SHong Zhang Output Parameter: 526025616d81SHong Zhang + rowb, colb - index sets of rows and columns of B to extract 526125616d81SHong Zhang - B_seq - the sequential matrix generated 526225616d81SHong Zhang 526325616d81SHong Zhang Level: developer 526425616d81SHong Zhang 526525616d81SHong Zhang @*/ 526666bfb163SHong Zhang PetscErrorCode MatGetBrowsOfAcols(Mat A,Mat B,MatReuse scall,IS *rowb,IS *colb,Mat *B_seq) 526725616d81SHong Zhang { 5268899cda47SBarry Smith Mat_MPIAIJ *a=(Mat_MPIAIJ*)A->data; 526925616d81SHong Zhang PetscErrorCode ierr; 5270b1d57f15SBarry Smith PetscInt *idx,i,start,ncols,nzA,nzB,*cmap,imark; 527125616d81SHong Zhang IS isrowb,iscolb; 52720298fd71SBarry Smith Mat *bseq=NULL; 527325616d81SHong Zhang 527425616d81SHong Zhang PetscFunctionBegin; 5275d0f46423SBarry Smith if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend) { 5276e32f2f54SBarry Smith SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%D, %D) != (%D,%D)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend); 527725616d81SHong Zhang } 52784ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr); 527925616d81SHong Zhang 528025616d81SHong Zhang if (scall == MAT_INITIAL_MATRIX) { 5281d0f46423SBarry Smith start = A->cmap->rstart; 528225616d81SHong Zhang cmap = a->garray; 5283d0f46423SBarry Smith nzA = a->A->cmap->n; 5284d0f46423SBarry Smith nzB = a->B->cmap->n; 5285b1d57f15SBarry Smith ierr = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr); 528625616d81SHong Zhang ncols = 0; 52870390132cSHong Zhang for (i=0; i<nzB; i++) { /* row < local row index */ 528825616d81SHong Zhang if (cmap[i] < start) idx[ncols++] = cmap[i]; 528925616d81SHong Zhang else break; 529025616d81SHong Zhang } 529125616d81SHong Zhang 
imark = i; 52920390132cSHong Zhang for (i=0; i<nzA; i++) idx[ncols++] = start + i; /* local rows */ 52930390132cSHong Zhang for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i]; /* row > local row index */ 5294d67e408aSBarry Smith ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,PETSC_OWN_POINTER,&isrowb);CHKERRQ(ierr); 5295d0f46423SBarry Smith ierr = ISCreateStride(PETSC_COMM_SELF,B->cmap->N,0,1,&iscolb);CHKERRQ(ierr); 529625616d81SHong Zhang } else { 5297e32f2f54SBarry Smith if (!rowb || !colb) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"IS rowb and colb must be provided for MAT_REUSE_MATRIX"); 529825616d81SHong Zhang isrowb = *rowb; iscolb = *colb; 529925616d81SHong Zhang ierr = PetscMalloc(sizeof(Mat),&bseq);CHKERRQ(ierr); 530025616d81SHong Zhang bseq[0] = *B_seq; 530125616d81SHong Zhang } 530225616d81SHong Zhang ierr = MatGetSubMatrices(B,1,&isrowb,&iscolb,scall,&bseq);CHKERRQ(ierr); 530325616d81SHong Zhang *B_seq = bseq[0]; 530425616d81SHong Zhang ierr = PetscFree(bseq);CHKERRQ(ierr); 530525616d81SHong Zhang if (!rowb) { 53066bf464f9SBarry Smith ierr = ISDestroy(&isrowb);CHKERRQ(ierr); 530725616d81SHong Zhang } else { 530825616d81SHong Zhang *rowb = isrowb; 530925616d81SHong Zhang } 531025616d81SHong Zhang if (!colb) { 53116bf464f9SBarry Smith ierr = ISDestroy(&iscolb);CHKERRQ(ierr); 531225616d81SHong Zhang } else { 531325616d81SHong Zhang *colb = iscolb; 531425616d81SHong Zhang } 53154ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr); 531625616d81SHong Zhang PetscFunctionReturn(0); 531725616d81SHong Zhang } 5318429d309bSHong Zhang 5319a61c8c0fSHong Zhang #undef __FUNCT__ 5320f8487c73SHong Zhang #define __FUNCT__ "MatGetBrowsOfAoCols_MPIAIJ" 5321f8487c73SHong Zhang /* 5322f8487c73SHong Zhang MatGetBrowsOfAoCols_MPIAIJ - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns 532301b7ae99SHong Zhang of the OFF-DIAGONAL portion of local A 5324429d309bSHong Zhang 5325429d309bSHong Zhang Collective on Mat 
5326429d309bSHong Zhang 5327429d309bSHong Zhang Input Parameters: 5328429d309bSHong Zhang + A,B - the matrices in mpiaij format 5329598bc09dSHong Zhang - scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 5330429d309bSHong Zhang 5331429d309bSHong Zhang Output Parameter: 53320298fd71SBarry Smith + startsj_s - starting point in B's sending j-arrays, saved for MAT_REUSE (or NULL) 53330298fd71SBarry Smith . startsj_r - starting point in B's receiving j-arrays, saved for MAT_REUSE (or NULL) 53340298fd71SBarry Smith . bufa_ptr - array for sending matrix values, saved for MAT_REUSE (or NULL) 5335598bc09dSHong Zhang - B_oth - the sequential matrix generated with size aBn=a->B->cmap->n by B->cmap->N 5336429d309bSHong Zhang 5337429d309bSHong Zhang Level: developer 5338429d309bSHong Zhang 5339f8487c73SHong Zhang */ 5340b7f45c76SHong Zhang PetscErrorCode MatGetBrowsOfAoCols_MPIAIJ(Mat A,Mat B,MatReuse scall,PetscInt **startsj_s,PetscInt **startsj_r,MatScalar **bufa_ptr,Mat *B_oth) 5341429d309bSHong Zhang { 5342a6b2eed2SHong Zhang VecScatter_MPI_General *gen_to,*gen_from; 5343429d309bSHong Zhang PetscErrorCode ierr; 5344899cda47SBarry Smith Mat_MPIAIJ *a=(Mat_MPIAIJ*)A->data; 534587025532SHong Zhang Mat_SeqAIJ *b_oth; 5346a6b2eed2SHong Zhang VecScatter ctx =a->Mvctx; 5347ce94432eSBarry Smith MPI_Comm comm; 53487adad957SLisandro Dalcin PetscMPIInt *rprocs,*sprocs,tag=((PetscObject)ctx)->tag,rank; 5349d0f46423SBarry Smith PetscInt *rowlen,*bufj,*bufJ,ncols,aBn=a->B->cmap->n,row,*b_othi,*b_othj; 5350dd6ea824SBarry Smith PetscScalar *rvalues,*svalues; 5351dd6ea824SBarry Smith MatScalar *b_otha,*bufa,*bufA; 5352e42f35eeSHong Zhang PetscInt i,j,k,l,ll,nrecvs,nsends,nrows,*srow,*rstarts,*rstartsj = 0,*sstarts,*sstartsj,len; 53530298fd71SBarry Smith MPI_Request *rwaits = NULL,*swaits = NULL; 535487025532SHong Zhang MPI_Status *sstatus,rstatus; 5355aa5bb8c0SSatish Balay PetscMPIInt jj; 5356e42f35eeSHong Zhang PetscInt *cols,sbs,rbs; 5357ba8c8a56SBarry Smith PetscScalar *vals; 
5358429d309bSHong Zhang 5359429d309bSHong Zhang PetscFunctionBegin; 5360ce94432eSBarry Smith ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr); 5361d0f46423SBarry Smith if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend) { 5362e32f2f54SBarry Smith SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%d, %d) != (%d,%d)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend); 5363429d309bSHong Zhang } 53644ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr); 5365a6b2eed2SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 5366a6b2eed2SHong Zhang 5367a6b2eed2SHong Zhang gen_to = (VecScatter_MPI_General*)ctx->todata; 5368a6b2eed2SHong Zhang gen_from = (VecScatter_MPI_General*)ctx->fromdata; 5369e42f35eeSHong Zhang rvalues = gen_from->values; /* holds the length of receiving row */ 5370e42f35eeSHong Zhang svalues = gen_to->values; /* holds the length of sending row */ 5371a6b2eed2SHong Zhang nrecvs = gen_from->n; 5372a6b2eed2SHong Zhang nsends = gen_to->n; 5373d7ee0231SBarry Smith 5374d7ee0231SBarry Smith ierr = PetscMalloc2(nrecvs,MPI_Request,&rwaits,nsends,MPI_Request,&swaits);CHKERRQ(ierr); 5375a6b2eed2SHong Zhang srow = gen_to->indices; /* local row index to be sent */ 5376a6b2eed2SHong Zhang sstarts = gen_to->starts; 5377a6b2eed2SHong Zhang sprocs = gen_to->procs; 5378a6b2eed2SHong Zhang sstatus = gen_to->sstatus; 5379e42f35eeSHong Zhang sbs = gen_to->bs; 5380e42f35eeSHong Zhang rstarts = gen_from->starts; 5381e42f35eeSHong Zhang rprocs = gen_from->procs; 5382e42f35eeSHong Zhang rbs = gen_from->bs; 5383429d309bSHong Zhang 5384b7f45c76SHong Zhang if (!startsj_s || !bufa_ptr) scall = MAT_INITIAL_MATRIX; 5385429d309bSHong Zhang if (scall == MAT_INITIAL_MATRIX) { 5386a6b2eed2SHong Zhang /* i-array */ 5387a6b2eed2SHong Zhang /*---------*/ 5388a6b2eed2SHong Zhang /* post receives */ 5389a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++) { 5390e42f35eeSHong 
Zhang rowlen = (PetscInt*)rvalues + rstarts[i]*rbs; 5391e42f35eeSHong Zhang nrows = (rstarts[i+1]-rstarts[i])*rbs; /* num of indices to be received */ 539287025532SHong Zhang ierr = MPI_Irecv(rowlen,nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 5393429d309bSHong Zhang } 5394a6b2eed2SHong Zhang 5395a6b2eed2SHong Zhang /* pack the outgoing message */ 53961d79065fSBarry Smith ierr = PetscMalloc2(nsends+1,PetscInt,&sstartsj,nrecvs+1,PetscInt,&rstartsj);CHKERRQ(ierr); 53972205254eSKarl Rupp 53982205254eSKarl Rupp sstartsj[0] = 0; 53992205254eSKarl Rupp rstartsj[0] = 0; 5400a6b2eed2SHong Zhang len = 0; /* total length of j or a array to be sent */ 5401a6b2eed2SHong Zhang k = 0; 5402a6b2eed2SHong Zhang for (i=0; i<nsends; i++) { 5403e42f35eeSHong Zhang rowlen = (PetscInt*)svalues + sstarts[i]*sbs; 5404e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 540587025532SHong Zhang for (j=0; j<nrows; j++) { 5406d0f46423SBarry Smith row = srow[k] + B->rmap->range[rank]; /* global row idx */ 5407e42f35eeSHong Zhang for (l=0; l<sbs; l++) { 54080298fd71SBarry Smith ierr = MatGetRow_MPIAIJ(B,row+l,&ncols,NULL,NULL);CHKERRQ(ierr); /* rowlength */ 54092205254eSKarl Rupp 5410e42f35eeSHong Zhang rowlen[j*sbs+l] = ncols; 54112205254eSKarl Rupp 5412e42f35eeSHong Zhang len += ncols; 54130298fd71SBarry Smith ierr = MatRestoreRow_MPIAIJ(B,row+l,&ncols,NULL,NULL);CHKERRQ(ierr); 5414e42f35eeSHong Zhang } 5415a6b2eed2SHong Zhang k++; 5416429d309bSHong Zhang } 5417e42f35eeSHong Zhang ierr = MPI_Isend(rowlen,nrows*sbs,MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 54182205254eSKarl Rupp 5419dea91ad1SHong Zhang sstartsj[i+1] = len; /* starting point of (i+1)-th outgoing msg in bufj and bufa */ 5420429d309bSHong Zhang } 542187025532SHong Zhang /* recvs and sends of i-array are completed */ 542287025532SHong Zhang i = nrecvs; 542387025532SHong Zhang while (i--) { 5424aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 
542587025532SHong Zhang } 54260c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 5427e42f35eeSHong Zhang 5428a6b2eed2SHong Zhang /* allocate buffers for sending j and a arrays */ 5429a6b2eed2SHong Zhang ierr = PetscMalloc((len+1)*sizeof(PetscInt),&bufj);CHKERRQ(ierr); 5430a6b2eed2SHong Zhang ierr = PetscMalloc((len+1)*sizeof(PetscScalar),&bufa);CHKERRQ(ierr); 5431a6b2eed2SHong Zhang 543287025532SHong Zhang /* create i-array of B_oth */ 543387025532SHong Zhang ierr = PetscMalloc((aBn+2)*sizeof(PetscInt),&b_othi);CHKERRQ(ierr); 54342205254eSKarl Rupp 543587025532SHong Zhang b_othi[0] = 0; 5436a6b2eed2SHong Zhang len = 0; /* total length of j or a array to be received */ 5437a6b2eed2SHong Zhang k = 0; 5438a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++) { 5439fd0ff01cSHong Zhang rowlen = (PetscInt*)rvalues + rstarts[i]*rbs; 5440e42f35eeSHong Zhang nrows = rbs*(rstarts[i+1]-rstarts[i]); /* num of rows to be recieved */ 544187025532SHong Zhang for (j=0; j<nrows; j++) { 544287025532SHong Zhang b_othi[k+1] = b_othi[k] + rowlen[j]; 5443a6b2eed2SHong Zhang len += rowlen[j]; k++; 5444a6b2eed2SHong Zhang } 5445dea91ad1SHong Zhang rstartsj[i+1] = len; /* starting point of (i+1)-th incoming msg in bufj and bufa */ 5446a6b2eed2SHong Zhang } 5447a6b2eed2SHong Zhang 544887025532SHong Zhang /* allocate space for j and a arrrays of B_oth */ 544987025532SHong Zhang ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(PetscInt),&b_othj);CHKERRQ(ierr); 5450dd6ea824SBarry Smith ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(MatScalar),&b_otha);CHKERRQ(ierr); 5451a6b2eed2SHong Zhang 545287025532SHong Zhang /* j-array */ 545387025532SHong Zhang /*---------*/ 5454a6b2eed2SHong Zhang /* post receives of j-array */ 5455a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++) { 545687025532SHong Zhang nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */ 545787025532SHong Zhang ierr = 
MPI_Irecv(b_othj+rstartsj[i],nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 5458a6b2eed2SHong Zhang } 5459e42f35eeSHong Zhang 5460e42f35eeSHong Zhang /* pack the outgoing message j-array */ 5461a6b2eed2SHong Zhang k = 0; 5462a6b2eed2SHong Zhang for (i=0; i<nsends; i++) { 5463e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 5464a6b2eed2SHong Zhang bufJ = bufj+sstartsj[i]; 546587025532SHong Zhang for (j=0; j<nrows; j++) { 5466d0f46423SBarry Smith row = srow[k++] + B->rmap->range[rank]; /* global row idx */ 5467e42f35eeSHong Zhang for (ll=0; ll<sbs; ll++) { 54680298fd71SBarry Smith ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,&cols,NULL);CHKERRQ(ierr); 5469a6b2eed2SHong Zhang for (l=0; l<ncols; l++) { 5470a6b2eed2SHong Zhang *bufJ++ = cols[l]; 547187025532SHong Zhang } 54720298fd71SBarry Smith ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,&cols,NULL);CHKERRQ(ierr); 5473e42f35eeSHong Zhang } 547487025532SHong Zhang } 547587025532SHong Zhang ierr = MPI_Isend(bufj+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 547687025532SHong Zhang } 547787025532SHong Zhang 547887025532SHong Zhang /* recvs and sends of j-array are completed */ 547987025532SHong Zhang i = nrecvs; 548087025532SHong Zhang while (i--) { 5481aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 548287025532SHong Zhang } 54830c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 548487025532SHong Zhang } else if (scall == MAT_REUSE_MATRIX) { 5485b7f45c76SHong Zhang sstartsj = *startsj_s; 54861d79065fSBarry Smith rstartsj = *startsj_r; 548787025532SHong Zhang bufa = *bufa_ptr; 548887025532SHong Zhang b_oth = (Mat_SeqAIJ*)(*B_oth)->data; 548987025532SHong Zhang b_otha = b_oth->a; 5490f23aa3ddSBarry Smith } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Matrix P does not posses an object container"); 549187025532SHong Zhang 549287025532SHong Zhang /* a-array */ 
549387025532SHong Zhang /*---------*/ 549487025532SHong Zhang /* post receives of a-array */ 549587025532SHong Zhang for (i=0; i<nrecvs; i++) { 549687025532SHong Zhang nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */ 549787025532SHong Zhang ierr = MPI_Irecv(b_otha+rstartsj[i],nrows,MPIU_SCALAR,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 549887025532SHong Zhang } 5499e42f35eeSHong Zhang 5500e42f35eeSHong Zhang /* pack the outgoing message a-array */ 550187025532SHong Zhang k = 0; 550287025532SHong Zhang for (i=0; i<nsends; i++) { 5503e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 550487025532SHong Zhang bufA = bufa+sstartsj[i]; 550587025532SHong Zhang for (j=0; j<nrows; j++) { 5506d0f46423SBarry Smith row = srow[k++] + B->rmap->range[rank]; /* global row idx */ 5507e42f35eeSHong Zhang for (ll=0; ll<sbs; ll++) { 55080298fd71SBarry Smith ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,NULL,&vals);CHKERRQ(ierr); 550987025532SHong Zhang for (l=0; l<ncols; l++) { 5510a6b2eed2SHong Zhang *bufA++ = vals[l]; 5511a6b2eed2SHong Zhang } 55120298fd71SBarry Smith ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,NULL,&vals);CHKERRQ(ierr); 5513e42f35eeSHong Zhang } 5514a6b2eed2SHong Zhang } 551587025532SHong Zhang ierr = MPI_Isend(bufa+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_SCALAR,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 5516a6b2eed2SHong Zhang } 551787025532SHong Zhang /* recvs and sends of a-array are completed */ 551887025532SHong Zhang i = nrecvs; 551987025532SHong Zhang while (i--) { 5520aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 552187025532SHong Zhang } 55220c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 5523d7ee0231SBarry Smith ierr = PetscFree2(rwaits,swaits);CHKERRQ(ierr); 5524a6b2eed2SHong Zhang 552587025532SHong Zhang if (scall == MAT_INITIAL_MATRIX) { 5526a6b2eed2SHong Zhang /* put together the new matrix */ 5527d0f46423SBarry Smith ierr = 
MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,aBn,B->cmap->N,b_othi,b_othj,b_otha,B_oth);CHKERRQ(ierr); 5528a6b2eed2SHong Zhang 5529a6b2eed2SHong Zhang /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */ 5530a6b2eed2SHong Zhang /* Since these are PETSc arrays, change flags to free them as necessary. */ 553187025532SHong Zhang b_oth = (Mat_SeqAIJ*)(*B_oth)->data; 5532e6b907acSBarry Smith b_oth->free_a = PETSC_TRUE; 5533e6b907acSBarry Smith b_oth->free_ij = PETSC_TRUE; 553487025532SHong Zhang b_oth->nonew = 0; 5535a6b2eed2SHong Zhang 5536a6b2eed2SHong Zhang ierr = PetscFree(bufj);CHKERRQ(ierr); 5537b7f45c76SHong Zhang if (!startsj_s || !bufa_ptr) { 55381d79065fSBarry Smith ierr = PetscFree2(sstartsj,rstartsj);CHKERRQ(ierr); 5539dea91ad1SHong Zhang ierr = PetscFree(bufa_ptr);CHKERRQ(ierr); 5540dea91ad1SHong Zhang } else { 5541b7f45c76SHong Zhang *startsj_s = sstartsj; 55421d79065fSBarry Smith *startsj_r = rstartsj; 554387025532SHong Zhang *bufa_ptr = bufa; 554487025532SHong Zhang } 5545dea91ad1SHong Zhang } 55464ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr); 5547429d309bSHong Zhang PetscFunctionReturn(0); 5548429d309bSHong Zhang } 5549ccd8e176SBarry Smith 555043eb5e2fSMatthew Knepley #undef __FUNCT__ 555143eb5e2fSMatthew Knepley #define __FUNCT__ "MatGetCommunicationStructs" 555243eb5e2fSMatthew Knepley /*@C 555343eb5e2fSMatthew Knepley MatGetCommunicationStructs - Provides access to the communication structures used in matrix-vector multiplication. 555443eb5e2fSMatthew Knepley 555543eb5e2fSMatthew Knepley Not Collective 555643eb5e2fSMatthew Knepley 555743eb5e2fSMatthew Knepley Input Parameters: 555843eb5e2fSMatthew Knepley . A - The matrix in mpiaij format 555943eb5e2fSMatthew Knepley 556043eb5e2fSMatthew Knepley Output Parameter: 556143eb5e2fSMatthew Knepley + lvec - The local vector holding off-process values from the argument to a matrix-vector product 556243eb5e2fSMatthew Knepley . 
colmap - A map from global column index to local index into lvec 556343eb5e2fSMatthew Knepley - multScatter - A scatter from the argument of a matrix-vector product to lvec 556443eb5e2fSMatthew Knepley 556543eb5e2fSMatthew Knepley Level: developer 556643eb5e2fSMatthew Knepley 556743eb5e2fSMatthew Knepley @*/ 556843eb5e2fSMatthew Knepley #if defined(PETSC_USE_CTABLE) 55697087cfbeSBarry Smith PetscErrorCode MatGetCommunicationStructs(Mat A, Vec *lvec, PetscTable *colmap, VecScatter *multScatter) 557043eb5e2fSMatthew Knepley #else 55717087cfbeSBarry Smith PetscErrorCode MatGetCommunicationStructs(Mat A, Vec *lvec, PetscInt *colmap[], VecScatter *multScatter) 557243eb5e2fSMatthew Knepley #endif 557343eb5e2fSMatthew Knepley { 557443eb5e2fSMatthew Knepley Mat_MPIAIJ *a; 557543eb5e2fSMatthew Knepley 557643eb5e2fSMatthew Knepley PetscFunctionBegin; 55770700a824SBarry Smith PetscValidHeaderSpecific(A, MAT_CLASSID, 1); 5578e414b56bSJed Brown PetscValidPointer(lvec, 2); 5579e414b56bSJed Brown PetscValidPointer(colmap, 3); 5580e414b56bSJed Brown PetscValidPointer(multScatter, 4); 558143eb5e2fSMatthew Knepley a = (Mat_MPIAIJ*) A->data; 558243eb5e2fSMatthew Knepley if (lvec) *lvec = a->lvec; 558343eb5e2fSMatthew Knepley if (colmap) *colmap = a->colmap; 558443eb5e2fSMatthew Knepley if (multScatter) *multScatter = a->Mvctx; 558543eb5e2fSMatthew Knepley PetscFunctionReturn(0); 558643eb5e2fSMatthew Knepley } 558743eb5e2fSMatthew Knepley 55888cc058d9SJed Brown PETSC_EXTERN PetscErrorCode MatConvert_MPIAIJ_MPIAIJCRL(Mat,MatType,MatReuse,Mat*); 55898cc058d9SJed Brown PETSC_EXTERN PetscErrorCode MatConvert_MPIAIJ_MPIAIJPERM(Mat,MatType,MatReuse,Mat*); 55908cc058d9SJed Brown PETSC_EXTERN PetscErrorCode MatConvert_MPIAIJ_MPISBAIJ(Mat,MatType,MatReuse,Mat*); 559117667f90SBarry Smith 5592fc4dec0aSBarry Smith #undef __FUNCT__ 5593fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultNumeric_MPIDense_MPIAIJ" 5594fc4dec0aSBarry Smith /* 5595fc4dec0aSBarry Smith Computes (B'*A')' since computing 
B*A directly is untenable 5596fc4dec0aSBarry Smith 5597fc4dec0aSBarry Smith n p p 5598fc4dec0aSBarry Smith ( ) ( ) ( ) 5599fc4dec0aSBarry Smith m ( A ) * n ( B ) = m ( C ) 5600fc4dec0aSBarry Smith ( ) ( ) ( ) 5601fc4dec0aSBarry Smith 5602fc4dec0aSBarry Smith */ 5603fc4dec0aSBarry Smith PetscErrorCode MatMatMultNumeric_MPIDense_MPIAIJ(Mat A,Mat B,Mat C) 5604fc4dec0aSBarry Smith { 5605fc4dec0aSBarry Smith PetscErrorCode ierr; 5606fc4dec0aSBarry Smith Mat At,Bt,Ct; 5607fc4dec0aSBarry Smith 5608fc4dec0aSBarry Smith PetscFunctionBegin; 5609fc4dec0aSBarry Smith ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr); 5610fc4dec0aSBarry Smith ierr = MatTranspose(B,MAT_INITIAL_MATRIX,&Bt);CHKERRQ(ierr); 5611fc4dec0aSBarry Smith ierr = MatMatMult(Bt,At,MAT_INITIAL_MATRIX,1.0,&Ct);CHKERRQ(ierr); 56126bf464f9SBarry Smith ierr = MatDestroy(&At);CHKERRQ(ierr); 56136bf464f9SBarry Smith ierr = MatDestroy(&Bt);CHKERRQ(ierr); 5614fc4dec0aSBarry Smith ierr = MatTranspose(Ct,MAT_REUSE_MATRIX,&C);CHKERRQ(ierr); 56156bf464f9SBarry Smith ierr = MatDestroy(&Ct);CHKERRQ(ierr); 5616fc4dec0aSBarry Smith PetscFunctionReturn(0); 5617fc4dec0aSBarry Smith } 5618fc4dec0aSBarry Smith 5619fc4dec0aSBarry Smith #undef __FUNCT__ 5620fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultSymbolic_MPIDense_MPIAIJ" 5621fc4dec0aSBarry Smith PetscErrorCode MatMatMultSymbolic_MPIDense_MPIAIJ(Mat A,Mat B,PetscReal fill,Mat *C) 5622fc4dec0aSBarry Smith { 5623fc4dec0aSBarry Smith PetscErrorCode ierr; 5624d0f46423SBarry Smith PetscInt m=A->rmap->n,n=B->cmap->n; 5625fc4dec0aSBarry Smith Mat Cmat; 5626fc4dec0aSBarry Smith 5627fc4dec0aSBarry Smith PetscFunctionBegin; 5628e32f2f54SBarry Smith if (A->cmap->n != B->rmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"A->cmap->n %d != B->rmap->n %d\n",A->cmap->n,B->rmap->n); 5629ce94432eSBarry Smith ierr = MatCreate(PetscObjectComm((PetscObject)A),&Cmat);CHKERRQ(ierr); 5630fc4dec0aSBarry Smith ierr = MatSetSizes(Cmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 
5631a2f3521dSMark F. Adams ierr = MatSetBlockSizes(Cmat,A->rmap->bs,B->cmap->bs);CHKERRQ(ierr); 5632fc4dec0aSBarry Smith ierr = MatSetType(Cmat,MATMPIDENSE);CHKERRQ(ierr); 56330298fd71SBarry Smith ierr = MatMPIDenseSetPreallocation(Cmat,NULL);CHKERRQ(ierr); 563438556019SBarry Smith ierr = MatAssemblyBegin(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 563538556019SBarry Smith ierr = MatAssemblyEnd(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 5636f75ecaa4SHong Zhang 5637f75ecaa4SHong Zhang Cmat->ops->matmultnumeric = MatMatMultNumeric_MPIDense_MPIAIJ; 56382205254eSKarl Rupp 5639fc4dec0aSBarry Smith *C = Cmat; 5640fc4dec0aSBarry Smith PetscFunctionReturn(0); 5641fc4dec0aSBarry Smith } 5642fc4dec0aSBarry Smith 5643fc4dec0aSBarry Smith /* ----------------------------------------------------------------*/ 5644fc4dec0aSBarry Smith #undef __FUNCT__ 5645fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMult_MPIDense_MPIAIJ" 5646fc4dec0aSBarry Smith PetscErrorCode MatMatMult_MPIDense_MPIAIJ(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C) 5647fc4dec0aSBarry Smith { 5648fc4dec0aSBarry Smith PetscErrorCode ierr; 5649fc4dec0aSBarry Smith 5650fc4dec0aSBarry Smith PetscFunctionBegin; 5651fc4dec0aSBarry Smith if (scall == MAT_INITIAL_MATRIX) { 56523ff4c91cSHong Zhang ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr); 5653fc4dec0aSBarry Smith ierr = MatMatMultSymbolic_MPIDense_MPIAIJ(A,B,fill,C);CHKERRQ(ierr); 56543ff4c91cSHong Zhang ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr); 5655fc4dec0aSBarry Smith } 56563ff4c91cSHong Zhang ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr); 5657fc4dec0aSBarry Smith ierr = MatMatMultNumeric_MPIDense_MPIAIJ(A,B,*C);CHKERRQ(ierr); 56583ff4c91cSHong Zhang ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr); 5659fc4dec0aSBarry Smith PetscFunctionReturn(0); 5660fc4dec0aSBarry Smith } 5661fc4dec0aSBarry Smith 5662611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS) 56638cc058d9SJed Brown 
PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mumps(Mat,MatFactorType,Mat*);
#endif
#if defined(PETSC_HAVE_PASTIX)
PETSC_EXTERN PetscErrorCode MatGetFactor_mpiaij_pastix(Mat,MatFactorType,Mat*);
#endif
#if defined(PETSC_HAVE_SUPERLU_DIST)
PETSC_EXTERN PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat,MatFactorType,Mat*);
#endif
#if defined(PETSC_HAVE_CLIQUE)
PETSC_EXTERN PetscErrorCode MatGetFactor_aij_clique(Mat,MatFactorType,Mat*);
#endif

/*MC
   MATMPIAIJ - MATMPIAIJ = "mpiaij" - A matrix type to be used for parallel sparse matrices.

   Options Database Keys:
. -mat_type mpiaij - sets the matrix type to "mpiaij" during a call to MatSetFromOptions()

  Level: beginner

.seealso: MatCreateAIJ()
M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAIJ"
/*
   MatCreate_MPIAIJ - Type constructor for MATMPIAIJ: allocates the Mat_MPIAIJ
   implementation struct, installs the function table, initializes all cached
   state, and composes the type-specific and external-package hooks on B.
   Invoked by MatSetType(); never called directly by users.
*/
PETSC_EXTERN PetscErrorCode MatCreate_MPIAIJ(Mat B)
{
  Mat_MPIAIJ     *b;
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)B),&size);CHKERRQ(ierr);

  /* Allocate the implementation data and install the MPIAIJ operations table */
  ierr    = PetscNewLog(B,Mat_MPIAIJ,&b);CHKERRQ(ierr);
  B->data = (void*)b;
  ierr = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->assembled  = PETSC_FALSE;
  B->insertmode = NOT_SET_VALUES;
  b->size       = size;

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)B),&b->rank);CHKERRQ(ierr);

  /* build cache for off array entries formed */
  ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);

  b->donotstash  = PETSC_FALSE;
  b->colmap      = 0;      /* built lazily by MatCreateColmap_MPIAIJ_Private() */
  b->garray      = 0;
  b->roworiented = PETSC_TRUE;

  /* stuff used for matrix vector multiply */
  b->lvec  = NULL;
  b->Mvctx = NULL;

  /* stuff for MatGetRow() */
  b->rowindices   = 0;
  b->rowvalues    = 0;
  b->getrowactive = PETSC_FALSE;

  /* flexible pointer used in CUSP/CUSPARSE classes */
  b->spptr = NULL;

  /* Register external-package factorization hooks only when the package was configured in */
#if defined(PETSC_HAVE_MUMPS)
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetFactor_mumps_C",MatGetFactor_aij_mumps);CHKERRQ(ierr);
#endif
#if defined(PETSC_HAVE_PASTIX)
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetFactor_pastix_C",MatGetFactor_mpiaij_pastix);CHKERRQ(ierr);
#endif
#if defined(PETSC_HAVE_SUPERLU_DIST)
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetFactor_superlu_dist_C",MatGetFactor_mpiaij_superlu_dist);CHKERRQ(ierr);
#endif
#if defined(PETSC_HAVE_CLIQUE)
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetFactor_clique_C",MatGetFactor_aij_clique);CHKERRQ(ierr);
#endif
  /* Compose type-specific operations looked up by name elsewhere in PETSc */
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatStoreValues_C",MatStoreValues_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatRetrieveValues_C",MatRetrieveValues_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetDiagonalBlock_C",MatGetDiagonalBlock_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatIsTranspose_C",MatIsTranspose_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAIJSetPreallocation_C",MatMPIAIJSetPreallocation_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C",MatMPIAIJSetPreallocationCSR_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatDiagonalScaleLocal_C",MatDiagonalScaleLocal_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpiaij_mpiaijperm_C",MatConvert_MPIAIJ_MPIAIJPERM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpiaij_mpiaijcrl_C",MatConvert_MPIAIJ_MPIAIJCRL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpiaij_mpisbaij_C",MatConvert_MPIAIJ_MPISBAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMatMult_mpidense_mpiaij_C",MatMatMult_MPIDense_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMatMultSymbolic_mpidense_mpiaij_C",MatMatMultSymbolic_MPIDense_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMatMultNumeric_mpidense_mpiaij_C",MatMatMultNumeric_MPIDense_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAIJWithSplitArrays"
/*@
   MatCreateMPIAIJWithSplitArrays - creates a MPI AIJ matrix using arrays that contain the "diagonal"
   and "off-diagonal" part of the matrix in CSR format.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows (Cannot be PETSC_DECIDE)
.  n - This value should be the same as the local size used in creating the
       x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
       calculated if N is given) For square matrices n is almost always m.
.  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
.  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
.  i - row indices for "diagonal" portion of matrix
.  j - column indices
.  a - matrix values
.  oi - row indices for "off-diagonal" portion of matrix
.  oj - column indices
-  oa - matrix values

   Output Parameter:
.
mat - the matrix 578003bfb495SBarry Smith 578103bfb495SBarry Smith Level: advanced 578203bfb495SBarry Smith 578303bfb495SBarry Smith Notes: 5784292fb18eSBarry Smith The i, j, and a arrays ARE NOT copied by this routine into the internal format used by PETSc. The user 5785292fb18eSBarry Smith must free the arrays once the matrix has been destroyed and not before. 578603bfb495SBarry Smith 578703bfb495SBarry Smith The i and j indices are 0 based 578803bfb495SBarry Smith 578969b1f4b7SBarry Smith See MatCreateAIJ() for the definition of "diagonal" and "off-diagonal" portion of the matrix 579003bfb495SBarry Smith 57917b55108eSBarry Smith This sets local rows and cannot be used to set off-processor values. 57927b55108eSBarry Smith 5793dca341c0SJed Brown Use of this routine is discouraged because it is inflexible and cumbersome to use. It is extremely rare that a 5794dca341c0SJed Brown legacy application natively assembles into exactly this split format. The code to do so is nontrivial and does 5795dca341c0SJed Brown not easily support in-place reassembly. It is recommended to use MatSetValues() (or a variant thereof) because 5796dca341c0SJed Brown the resulting assembly is easier to implement, will work with any matrix format, and the user does not have to 5797dca341c0SJed Brown keep track of the underlying array. Use MatSetOption(A,MAT_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE) to disable all 5798dca341c0SJed Brown communication if it is known that only local entries will be set. 
579903bfb495SBarry Smith 580003bfb495SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 580103bfb495SBarry Smith 580203bfb495SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 580369b1f4b7SBarry Smith MPIAIJ, MatCreateAIJ(), MatCreateMPIAIJWithArrays() 580403bfb495SBarry Smith @*/ 58052205254eSKarl Rupp PetscErrorCode MatCreateMPIAIJWithSplitArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt i[],PetscInt j[],PetscScalar a[],PetscInt oi[], PetscInt oj[],PetscScalar oa[],Mat *mat) 580603bfb495SBarry Smith { 580703bfb495SBarry Smith PetscErrorCode ierr; 580803bfb495SBarry Smith Mat_MPIAIJ *maij; 580903bfb495SBarry Smith 581003bfb495SBarry Smith PetscFunctionBegin; 5811e32f2f54SBarry Smith if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative"); 5812ea345e14SBarry Smith if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0"); 5813ea345e14SBarry Smith if (oi[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"oi (row indices) must start with 0"); 581403bfb495SBarry Smith ierr = MatCreate(comm,mat);CHKERRQ(ierr); 581503bfb495SBarry Smith ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr); 581603bfb495SBarry Smith ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr); 581703bfb495SBarry Smith maij = (Mat_MPIAIJ*) (*mat)->data; 58182205254eSKarl Rupp 58198d7a6e47SBarry Smith (*mat)->preallocated = PETSC_TRUE; 582003bfb495SBarry Smith 582126283091SBarry Smith ierr = PetscLayoutSetUp((*mat)->rmap);CHKERRQ(ierr); 582226283091SBarry Smith ierr = PetscLayoutSetUp((*mat)->cmap);CHKERRQ(ierr); 582303bfb495SBarry Smith 582403bfb495SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,n,i,j,a,&maij->A);CHKERRQ(ierr); 5825d0f46423SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,(*mat)->cmap->N,oi,oj,oa,&maij->B);CHKERRQ(ierr); 
582603bfb495SBarry Smith 58278d7a6e47SBarry Smith ierr = MatAssemblyBegin(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 58288d7a6e47SBarry Smith ierr = MatAssemblyEnd(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 58298d7a6e47SBarry Smith ierr = MatAssemblyBegin(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 58308d7a6e47SBarry Smith ierr = MatAssemblyEnd(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 58318d7a6e47SBarry Smith 583203bfb495SBarry Smith ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 583303bfb495SBarry Smith ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 5834dca341c0SJed Brown ierr = MatSetOption(*mat,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr); 583503bfb495SBarry Smith PetscFunctionReturn(0); 583603bfb495SBarry Smith } 583703bfb495SBarry Smith 583881824310SBarry Smith /* 583981824310SBarry Smith Special version for direct calls from Fortran 584081824310SBarry Smith */ 5841b45d2f2cSJed Brown #include <petsc-private/fortranimpl.h> 58427087cfbeSBarry Smith 584381824310SBarry Smith #if defined(PETSC_HAVE_FORTRAN_CAPS) 584481824310SBarry Smith #define matsetvaluesmpiaij_ MATSETVALUESMPIAIJ 584581824310SBarry Smith #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) 584681824310SBarry Smith #define matsetvaluesmpiaij_ matsetvaluesmpiaij 584781824310SBarry Smith #endif 584881824310SBarry Smith 584981824310SBarry Smith /* Change these macros so can be used in void function */ 585081824310SBarry Smith #undef CHKERRQ 5851e32f2f54SBarry Smith #define CHKERRQ(ierr) CHKERRABORT(PETSC_COMM_WORLD,ierr) 585281824310SBarry Smith #undef SETERRQ2 5853e32f2f54SBarry Smith #define SETERRQ2(comm,ierr,b,c,d) CHKERRABORT(comm,ierr) 58544994cf47SJed Brown #undef SETERRQ3 58554994cf47SJed Brown #define SETERRQ3(comm,ierr,b,c,d,e) CHKERRABORT(comm,ierr) 585681824310SBarry Smith #undef SETERRQ 5857e32f2f54SBarry Smith #define SETERRQ(c,ierr,b) CHKERRABORT(c,ierr) 585881824310SBarry Smith 585981824310SBarry Smith #undef __FUNCT__ 586081824310SBarry Smith 
#define __FUNCT__ "matsetvaluesmpiaij_" 58618cc058d9SJed Brown PETSC_EXTERN void PETSC_STDCALL matsetvaluesmpiaij_(Mat *mmat,PetscInt *mm,const PetscInt im[],PetscInt *mn,const PetscInt in[],const PetscScalar v[],InsertMode *maddv,PetscErrorCode *_ierr) 586281824310SBarry Smith { 586381824310SBarry Smith Mat mat = *mmat; 586481824310SBarry Smith PetscInt m = *mm, n = *mn; 586581824310SBarry Smith InsertMode addv = *maddv; 586681824310SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 586781824310SBarry Smith PetscScalar value; 586881824310SBarry Smith PetscErrorCode ierr; 5869899cda47SBarry Smith 58704994cf47SJed Brown MatCheckPreallocated(mat,1); 58712205254eSKarl Rupp if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv; 58722205254eSKarl Rupp 587381824310SBarry Smith #if defined(PETSC_USE_DEBUG) 5874f23aa3ddSBarry Smith else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values"); 587581824310SBarry Smith #endif 587681824310SBarry Smith { 5877d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend; 5878d0f46423SBarry Smith PetscInt cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col; 5879ace3abfcSBarry Smith PetscBool roworiented = aij->roworiented; 588081824310SBarry Smith 588181824310SBarry Smith /* Some Variables required in the macro */ 588281824310SBarry Smith Mat A = aij->A; 588381824310SBarry Smith Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data; 588481824310SBarry Smith PetscInt *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j; 5885dd6ea824SBarry Smith MatScalar *aa = a->a; 5886ace3abfcSBarry Smith PetscBool ignorezeroentries = (((a->ignorezeroentries)&&(addv==ADD_VALUES)) ? 
PETSC_TRUE : PETSC_FALSE); 588781824310SBarry Smith Mat B = aij->B; 588881824310SBarry Smith Mat_SeqAIJ *b = (Mat_SeqAIJ*)B->data; 5889d0f46423SBarry Smith PetscInt *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n; 5890dd6ea824SBarry Smith MatScalar *ba = b->a; 589181824310SBarry Smith 589281824310SBarry Smith PetscInt *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2; 589381824310SBarry Smith PetscInt nonew = a->nonew; 5894dd6ea824SBarry Smith MatScalar *ap1,*ap2; 589581824310SBarry Smith 589681824310SBarry Smith PetscFunctionBegin; 589781824310SBarry Smith for (i=0; i<m; i++) { 589881824310SBarry Smith if (im[i] < 0) continue; 589981824310SBarry Smith #if defined(PETSC_USE_DEBUG) 5900e32f2f54SBarry Smith if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1); 590181824310SBarry Smith #endif 590281824310SBarry Smith if (im[i] >= rstart && im[i] < rend) { 590381824310SBarry Smith row = im[i] - rstart; 590481824310SBarry Smith lastcol1 = -1; 590581824310SBarry Smith rp1 = aj + ai[row]; 590681824310SBarry Smith ap1 = aa + ai[row]; 590781824310SBarry Smith rmax1 = aimax[row]; 590881824310SBarry Smith nrow1 = ailen[row]; 590981824310SBarry Smith low1 = 0; 591081824310SBarry Smith high1 = nrow1; 591181824310SBarry Smith lastcol2 = -1; 591281824310SBarry Smith rp2 = bj + bi[row]; 591381824310SBarry Smith ap2 = ba + bi[row]; 591481824310SBarry Smith rmax2 = bimax[row]; 591581824310SBarry Smith nrow2 = bilen[row]; 591681824310SBarry Smith low2 = 0; 591781824310SBarry Smith high2 = nrow2; 591881824310SBarry Smith 591981824310SBarry Smith for (j=0; j<n; j++) { 59202205254eSKarl Rupp if (roworiented) value = v[i*n+j]; 59212205254eSKarl Rupp else value = v[i+j*m]; 592281824310SBarry Smith if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue; 592381824310SBarry Smith if (in[j] >= cstart && in[j] < cend) 
{ 592481824310SBarry Smith col = in[j] - cstart; 592581824310SBarry Smith MatSetValues_SeqAIJ_A_Private(row,col,value,addv); 592681824310SBarry Smith } else if (in[j] < 0) continue; 592781824310SBarry Smith #if defined(PETSC_USE_DEBUG) 5928cb9801acSJed Brown else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1); 592981824310SBarry Smith #endif 593081824310SBarry Smith else { 593181824310SBarry Smith if (mat->was_assembled) { 593281824310SBarry Smith if (!aij->colmap) { 5933ab9863d7SBarry Smith ierr = MatCreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 593481824310SBarry Smith } 593581824310SBarry Smith #if defined(PETSC_USE_CTABLE) 593681824310SBarry Smith ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr); 593781824310SBarry Smith col--; 593881824310SBarry Smith #else 593981824310SBarry Smith col = aij->colmap[in[j]] - 1; 594081824310SBarry Smith #endif 594181824310SBarry Smith if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) { 5942ab9863d7SBarry Smith ierr = MatDisAssemble_MPIAIJ(mat);CHKERRQ(ierr); 594381824310SBarry Smith col = in[j]; 594481824310SBarry Smith /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */ 594581824310SBarry Smith B = aij->B; 594681824310SBarry Smith b = (Mat_SeqAIJ*)B->data; 594781824310SBarry Smith bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; 594881824310SBarry Smith rp2 = bj + bi[row]; 594981824310SBarry Smith ap2 = ba + bi[row]; 595081824310SBarry Smith rmax2 = bimax[row]; 595181824310SBarry Smith nrow2 = bilen[row]; 595281824310SBarry Smith low2 = 0; 595381824310SBarry Smith high2 = nrow2; 5954d0f46423SBarry Smith bm = aij->B->rmap->n; 595581824310SBarry Smith ba = b->a; 595681824310SBarry Smith } 595781824310SBarry Smith } else col = in[j]; 595881824310SBarry Smith MatSetValues_SeqAIJ_B_Private(row,col,value,addv); 595981824310SBarry Smith } 596081824310SBarry Smith } 59612205254eSKarl Rupp } else if 
(!aij->donotstash) { 596281824310SBarry Smith if (roworiented) { 5963ace3abfcSBarry Smith ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 596481824310SBarry Smith } else { 5965ace3abfcSBarry Smith ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 596681824310SBarry Smith } 596781824310SBarry Smith } 596881824310SBarry Smith } 59692205254eSKarl Rupp } 597081824310SBarry Smith PetscFunctionReturnVoid(); 597181824310SBarry Smith } 597203bfb495SBarry Smith 5973