18a729477SBarry Smith 2c6db04a5SJed Brown #include <../src/mat/impls/aij/mpi/mpiaij.h> /*I "petscmat.h" I*/ 39a6d0b0bSJed Brown #include <petsc-private/vecimpl.h> 4c6db04a5SJed Brown #include <petscblaslapack.h> 50c312b8eSJed Brown #include <petscsf.h> 68a729477SBarry Smith 701bebe75SBarry Smith /*MC 801bebe75SBarry Smith MATAIJ - MATAIJ = "aij" - A matrix type to be used for sparse matrices. 901bebe75SBarry Smith 1001bebe75SBarry Smith This matrix type is identical to MATSEQAIJ when constructed with a single process communicator, 1101bebe75SBarry Smith and MATMPIAIJ otherwise. As a result, for single process communicators, 1201bebe75SBarry Smith MatSeqAIJSetPreallocation is supported, and similarly MatMPIAIJSetPreallocation is supported 1301bebe75SBarry Smith for communicators controlling multiple processes. It is recommended that you call both of 1401bebe75SBarry Smith the above preallocation routines for simplicity. 1501bebe75SBarry Smith 1601bebe75SBarry Smith Options Database Keys: 1701bebe75SBarry Smith . -mat_type aij - sets the matrix type to "aij" during a call to MatSetFromOptions() 1801bebe75SBarry Smith 199ae82921SPaul Mullowney Developer Notes: Subclasses include MATAIJCUSP, MATAIJCUSPARSE, MATAIJPERM, MATAIJCRL, and also automatically switches over to use inodes when 2001bebe75SBarry Smith enough exist. 2101bebe75SBarry Smith 2201bebe75SBarry Smith Level: beginner 2301bebe75SBarry Smith 2469b1f4b7SBarry Smith .seealso: MatCreateAIJ(), MatCreateSeqAIJ(), MATSEQAIJ,MATMPIAIJ 2501bebe75SBarry Smith M*/ 2601bebe75SBarry Smith 2701bebe75SBarry Smith /*MC 2801bebe75SBarry Smith MATAIJCRL - MATAIJCRL = "aijcrl" - A matrix type to be used for sparse matrices. 2901bebe75SBarry Smith 3001bebe75SBarry Smith This matrix type is identical to MATSEQAIJCRL when constructed with a single process communicator, 3101bebe75SBarry Smith and MATMPIAIJCRL otherwise. 
As a result, for single process communicators, 3201bebe75SBarry Smith MatSeqAIJSetPreallocation() is supported, and similarly MatMPIAIJSetPreallocation() is supported 3301bebe75SBarry Smith for communicators controlling multiple processes. It is recommended that you call both of 3401bebe75SBarry Smith the above preallocation routines for simplicity. 3501bebe75SBarry Smith 3601bebe75SBarry Smith Options Database Keys: 3701bebe75SBarry Smith . -mat_type aijcrl - sets the matrix type to "aijcrl" during a call to MatSetFromOptions() 3801bebe75SBarry Smith 3901bebe75SBarry Smith Level: beginner 4001bebe75SBarry Smith 4101bebe75SBarry Smith .seealso: MatCreateMPIAIJCRL,MATSEQAIJCRL,MATMPIAIJCRL, MATSEQAIJCRL, MATMPIAIJCRL 4201bebe75SBarry Smith M*/ 4301bebe75SBarry Smith 44dd6ea824SBarry Smith #undef __FUNCT__ 45f2c98031SJed Brown #define __FUNCT__ "MatFindNonzeroRows_MPIAIJ" 46f2c98031SJed Brown PetscErrorCode MatFindNonzeroRows_MPIAIJ(Mat M,IS *keptrows) 4727d4218bSShri Abhyankar { 4827d4218bSShri Abhyankar PetscErrorCode ierr; 4927d4218bSShri Abhyankar Mat_MPIAIJ *mat = (Mat_MPIAIJ*)M->data; 5027d4218bSShri Abhyankar Mat_SeqAIJ *a = (Mat_SeqAIJ*)mat->A->data; 5127d4218bSShri Abhyankar Mat_SeqAIJ *b = (Mat_SeqAIJ*)mat->B->data; 5227d4218bSShri Abhyankar const PetscInt *ia,*ib; 5327d4218bSShri Abhyankar const MatScalar *aa,*bb; 5427d4218bSShri Abhyankar PetscInt na,nb,i,j,*rows,cnt=0,n0rows; 5527d4218bSShri Abhyankar PetscInt m = M->rmap->n,rstart = M->rmap->rstart; 5627d4218bSShri Abhyankar 5727d4218bSShri Abhyankar PetscFunctionBegin; 5827d4218bSShri Abhyankar *keptrows = 0; 5927d4218bSShri Abhyankar ia = a->i; 6027d4218bSShri Abhyankar ib = b->i; 6127d4218bSShri Abhyankar for (i=0; i<m; i++) { 6227d4218bSShri Abhyankar na = ia[i+1] - ia[i]; 6327d4218bSShri Abhyankar nb = ib[i+1] - ib[i]; 6427d4218bSShri Abhyankar if (!na && !nb) { 6527d4218bSShri Abhyankar cnt++; 6627d4218bSShri Abhyankar goto ok1; 6727d4218bSShri Abhyankar } 6827d4218bSShri Abhyankar aa = a->a + 
ia[i]; 6927d4218bSShri Abhyankar for (j=0; j<na; j++) { 7027d4218bSShri Abhyankar if (aa[j] != 0.0) goto ok1; 7127d4218bSShri Abhyankar } 7227d4218bSShri Abhyankar bb = b->a + ib[i]; 7327d4218bSShri Abhyankar for (j=0; j <nb; j++) { 7427d4218bSShri Abhyankar if (bb[j] != 0.0) goto ok1; 7527d4218bSShri Abhyankar } 7627d4218bSShri Abhyankar cnt++; 7727d4218bSShri Abhyankar ok1:; 7827d4218bSShri Abhyankar } 79ce94432eSBarry Smith ierr = MPI_Allreduce(&cnt,&n0rows,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)M));CHKERRQ(ierr); 8027d4218bSShri Abhyankar if (!n0rows) PetscFunctionReturn(0); 8127d4218bSShri Abhyankar ierr = PetscMalloc((M->rmap->n-cnt)*sizeof(PetscInt),&rows);CHKERRQ(ierr); 8227d4218bSShri Abhyankar cnt = 0; 8327d4218bSShri Abhyankar for (i=0; i<m; i++) { 8427d4218bSShri Abhyankar na = ia[i+1] - ia[i]; 8527d4218bSShri Abhyankar nb = ib[i+1] - ib[i]; 8627d4218bSShri Abhyankar if (!na && !nb) continue; 8727d4218bSShri Abhyankar aa = a->a + ia[i]; 8827d4218bSShri Abhyankar for (j=0; j<na;j++) { 8927d4218bSShri Abhyankar if (aa[j] != 0.0) { 9027d4218bSShri Abhyankar rows[cnt++] = rstart + i; 9127d4218bSShri Abhyankar goto ok2; 9227d4218bSShri Abhyankar } 9327d4218bSShri Abhyankar } 9427d4218bSShri Abhyankar bb = b->a + ib[i]; 9527d4218bSShri Abhyankar for (j=0; j<nb; j++) { 9627d4218bSShri Abhyankar if (bb[j] != 0.0) { 9727d4218bSShri Abhyankar rows[cnt++] = rstart + i; 9827d4218bSShri Abhyankar goto ok2; 9927d4218bSShri Abhyankar } 10027d4218bSShri Abhyankar } 10127d4218bSShri Abhyankar ok2:; 10227d4218bSShri Abhyankar } 103ce94432eSBarry Smith ierr = ISCreateGeneral(PetscObjectComm((PetscObject)M),cnt,rows,PETSC_OWN_POINTER,keptrows);CHKERRQ(ierr); 10427d4218bSShri Abhyankar PetscFunctionReturn(0); 10527d4218bSShri Abhyankar } 10627d4218bSShri Abhyankar 10727d4218bSShri Abhyankar #undef __FUNCT__ 108f1f41ecbSJed Brown #define __FUNCT__ "MatFindZeroDiagonals_MPIAIJ" 109f1f41ecbSJed Brown PetscErrorCode MatFindZeroDiagonals_MPIAIJ(Mat M,IS *zrows) 
110f1f41ecbSJed Brown { 111f1f41ecbSJed Brown Mat_MPIAIJ *aij = (Mat_MPIAIJ*)M->data; 112f1f41ecbSJed Brown PetscErrorCode ierr; 113f1f41ecbSJed Brown PetscInt i,rstart,nrows,*rows; 114f1f41ecbSJed Brown 115f1f41ecbSJed Brown PetscFunctionBegin; 1160298fd71SBarry Smith *zrows = NULL; 117f1f41ecbSJed Brown ierr = MatFindZeroDiagonals_SeqAIJ_Private(aij->A,&nrows,&rows);CHKERRQ(ierr); 1180298fd71SBarry Smith ierr = MatGetOwnershipRange(M,&rstart,NULL);CHKERRQ(ierr); 119f1f41ecbSJed Brown for (i=0; i<nrows; i++) rows[i] += rstart; 120ce94432eSBarry Smith ierr = ISCreateGeneral(PetscObjectComm((PetscObject)M),nrows,rows,PETSC_OWN_POINTER,zrows);CHKERRQ(ierr); 121f1f41ecbSJed Brown PetscFunctionReturn(0); 122f1f41ecbSJed Brown } 123f1f41ecbSJed Brown 124f1f41ecbSJed Brown #undef __FUNCT__ 1250716a85fSBarry Smith #define __FUNCT__ "MatGetColumnNorms_MPIAIJ" 1260716a85fSBarry Smith PetscErrorCode MatGetColumnNorms_MPIAIJ(Mat A,NormType type,PetscReal *norms) 1270716a85fSBarry Smith { 1280716a85fSBarry Smith PetscErrorCode ierr; 1290716a85fSBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)A->data; 1300716a85fSBarry Smith PetscInt i,n,*garray = aij->garray; 1310716a85fSBarry Smith Mat_SeqAIJ *a_aij = (Mat_SeqAIJ*) aij->A->data; 1320716a85fSBarry Smith Mat_SeqAIJ *b_aij = (Mat_SeqAIJ*) aij->B->data; 1330716a85fSBarry Smith PetscReal *work; 1340716a85fSBarry Smith 1350716a85fSBarry Smith PetscFunctionBegin; 1360298fd71SBarry Smith ierr = MatGetSize(A,NULL,&n);CHKERRQ(ierr); 1370716a85fSBarry Smith ierr = PetscMalloc(n*sizeof(PetscReal),&work);CHKERRQ(ierr); 1380716a85fSBarry Smith ierr = PetscMemzero(work,n*sizeof(PetscReal));CHKERRQ(ierr); 1390716a85fSBarry Smith if (type == NORM_2) { 1400716a85fSBarry Smith for (i=0; i<a_aij->i[aij->A->rmap->n]; i++) { 1410716a85fSBarry Smith work[A->cmap->rstart + a_aij->j[i]] += PetscAbsScalar(a_aij->a[i]*a_aij->a[i]); 1420716a85fSBarry Smith } 1430716a85fSBarry Smith for (i=0; i<b_aij->i[aij->B->rmap->n]; i++) { 1440716a85fSBarry Smith 
work[garray[b_aij->j[i]]] += PetscAbsScalar(b_aij->a[i]*b_aij->a[i]); 1450716a85fSBarry Smith } 1460716a85fSBarry Smith } else if (type == NORM_1) { 1470716a85fSBarry Smith for (i=0; i<a_aij->i[aij->A->rmap->n]; i++) { 1480716a85fSBarry Smith work[A->cmap->rstart + a_aij->j[i]] += PetscAbsScalar(a_aij->a[i]); 1490716a85fSBarry Smith } 1500716a85fSBarry Smith for (i=0; i<b_aij->i[aij->B->rmap->n]; i++) { 1510716a85fSBarry Smith work[garray[b_aij->j[i]]] += PetscAbsScalar(b_aij->a[i]); 1520716a85fSBarry Smith } 1530716a85fSBarry Smith } else if (type == NORM_INFINITY) { 1540716a85fSBarry Smith for (i=0; i<a_aij->i[aij->A->rmap->n]; i++) { 1550716a85fSBarry Smith work[A->cmap->rstart + a_aij->j[i]] = PetscMax(PetscAbsScalar(a_aij->a[i]), work[A->cmap->rstart + a_aij->j[i]]); 1560716a85fSBarry Smith } 1570716a85fSBarry Smith for (i=0; i<b_aij->i[aij->B->rmap->n]; i++) { 1580716a85fSBarry Smith work[garray[b_aij->j[i]]] = PetscMax(PetscAbsScalar(b_aij->a[i]),work[garray[b_aij->j[i]]]); 1590716a85fSBarry Smith } 1600716a85fSBarry Smith 161ce94432eSBarry Smith } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Unknown NormType"); 1620716a85fSBarry Smith if (type == NORM_INFINITY) { 1630716a85fSBarry Smith ierr = MPI_Allreduce(work,norms,n,MPIU_REAL,MPIU_MAX,A->hdr.comm);CHKERRQ(ierr); 1640716a85fSBarry Smith } else { 1650716a85fSBarry Smith ierr = MPI_Allreduce(work,norms,n,MPIU_REAL,MPIU_SUM,A->hdr.comm);CHKERRQ(ierr); 1660716a85fSBarry Smith } 1670716a85fSBarry Smith ierr = PetscFree(work);CHKERRQ(ierr); 1680716a85fSBarry Smith if (type == NORM_2) { 1698f1a2a5eSBarry Smith for (i=0; i<n; i++) norms[i] = PetscSqrtReal(norms[i]); 1700716a85fSBarry Smith } 1710716a85fSBarry Smith PetscFunctionReturn(0); 1720716a85fSBarry Smith } 1730716a85fSBarry Smith 1740716a85fSBarry Smith #undef __FUNCT__ 175dd6ea824SBarry Smith #define __FUNCT__ "MatDistribute_MPIAIJ" 176dd6ea824SBarry Smith /* 177dd6ea824SBarry Smith Distributes a SeqAIJ matrix across a set of 
processes. Code stolen from 178dd6ea824SBarry Smith MatLoad_MPIAIJ(). Horrible lack of reuse. Should be a routine for each matrix type. 179dd6ea824SBarry Smith 180dd6ea824SBarry Smith Only for square matrices 181b30237c6SBarry Smith 182b30237c6SBarry Smith Used by a preconditioner, hence PETSC_EXTERN 183dd6ea824SBarry Smith */ 1845a576424SJed Brown PETSC_EXTERN PetscErrorCode MatDistribute_MPIAIJ(MPI_Comm comm,Mat gmat,PetscInt m,MatReuse reuse,Mat *inmat) 185dd6ea824SBarry Smith { 186dd6ea824SBarry Smith PetscMPIInt rank,size; 187efcf75d5SBarry Smith PetscInt *rowners,*dlens,*olens,i,rstart,rend,j,jj,nz,*gmataj,cnt,row,*ld,bses[2]; 188dd6ea824SBarry Smith PetscErrorCode ierr; 189dd6ea824SBarry Smith Mat mat; 190dd6ea824SBarry Smith Mat_SeqAIJ *gmata; 191dd6ea824SBarry Smith PetscMPIInt tag; 192dd6ea824SBarry Smith MPI_Status status; 193ace3abfcSBarry Smith PetscBool aij; 194dd6ea824SBarry Smith MatScalar *gmataa,*ao,*ad,*gmataarestore=0; 195dd6ea824SBarry Smith 196dd6ea824SBarry Smith PetscFunctionBegin; 197dd6ea824SBarry Smith ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 198dd6ea824SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 199dd6ea824SBarry Smith if (!rank) { 200251f4c67SDmitry Karpeev ierr = PetscObjectTypeCompare((PetscObject)gmat,MATSEQAIJ,&aij);CHKERRQ(ierr); 201ce94432eSBarry Smith if (!aij) SETERRQ1(PetscObjectComm((PetscObject)gmat),PETSC_ERR_SUP,"Currently no support for input matrix of type %s\n",((PetscObject)gmat)->type_name); 202dd6ea824SBarry Smith } 203dd6ea824SBarry Smith if (reuse == MAT_INITIAL_MATRIX) { 204dd6ea824SBarry Smith ierr = MatCreate(comm,&mat);CHKERRQ(ierr); 205dd6ea824SBarry Smith ierr = MatSetSizes(mat,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 206efcf75d5SBarry Smith if (!rank) { 207efcf75d5SBarry Smith bses[0] = gmat->rmap->bs; 208efcf75d5SBarry Smith bses[1] = gmat->cmap->bs; 209efcf75d5SBarry Smith } 210efcf75d5SBarry Smith ierr = MPI_Bcast(bses,2,MPIU_INT,0,comm);CHKERRQ(ierr); 211efcf75d5SBarry 
Smith ierr = MatSetBlockSizes(mat,bses[0],bses[1]);CHKERRQ(ierr); 212dd6ea824SBarry Smith ierr = MatSetType(mat,MATAIJ);CHKERRQ(ierr); 213dd6ea824SBarry Smith ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr); 214dd6ea824SBarry Smith ierr = PetscMalloc2(m,PetscInt,&dlens,m,PetscInt,&olens);CHKERRQ(ierr); 215dd6ea824SBarry Smith ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr); 2162205254eSKarl Rupp 217dd6ea824SBarry Smith rowners[0] = 0; 2182205254eSKarl Rupp for (i=2; i<=size; i++) rowners[i] += rowners[i-1]; 219dd6ea824SBarry Smith rstart = rowners[rank]; 220dd6ea824SBarry Smith rend = rowners[rank+1]; 221dd6ea824SBarry Smith ierr = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr); 222dd6ea824SBarry Smith if (!rank) { 223dd6ea824SBarry Smith gmata = (Mat_SeqAIJ*) gmat->data; 224dd6ea824SBarry Smith /* send row lengths to all processors */ 225dd6ea824SBarry Smith for (i=0; i<m; i++) dlens[i] = gmata->ilen[i]; 226dd6ea824SBarry Smith for (i=1; i<size; i++) { 227dd6ea824SBarry Smith ierr = MPI_Send(gmata->ilen + rowners[i],rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr); 228dd6ea824SBarry Smith } 229dd6ea824SBarry Smith /* determine number diagonal and off-diagonal counts */ 230dd6ea824SBarry Smith ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr); 231dd6ea824SBarry Smith ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr); 232dd6ea824SBarry Smith ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr); 233dd6ea824SBarry Smith jj = 0; 234dd6ea824SBarry Smith for (i=0; i<m; i++) { 235dd6ea824SBarry Smith for (j=0; j<dlens[i]; j++) { 236dd6ea824SBarry Smith if (gmata->j[jj] < rstart) ld[i]++; 237dd6ea824SBarry Smith if (gmata->j[jj] < rstart || gmata->j[jj] >= rend) olens[i]++; 238dd6ea824SBarry Smith jj++; 239dd6ea824SBarry Smith } 240dd6ea824SBarry Smith } 241dd6ea824SBarry Smith /* send column indices to other processes */ 242dd6ea824SBarry Smith for (i=1; i<size; i++) { 
243dd6ea824SBarry Smith nz = gmata->i[rowners[i+1]]-gmata->i[rowners[i]]; 244dd6ea824SBarry Smith ierr = MPI_Send(&nz,1,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 245dd6ea824SBarry Smith ierr = MPI_Send(gmata->j + gmata->i[rowners[i]],nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 246dd6ea824SBarry Smith } 247dd6ea824SBarry Smith 248dd6ea824SBarry Smith /* send numerical values to other processes */ 249dd6ea824SBarry Smith for (i=1; i<size; i++) { 250dd6ea824SBarry Smith nz = gmata->i[rowners[i+1]]-gmata->i[rowners[i]]; 251dd6ea824SBarry Smith ierr = MPI_Send(gmata->a + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr); 252dd6ea824SBarry Smith } 253dd6ea824SBarry Smith gmataa = gmata->a; 254dd6ea824SBarry Smith gmataj = gmata->j; 255dd6ea824SBarry Smith 256dd6ea824SBarry Smith } else { 257dd6ea824SBarry Smith /* receive row lengths */ 258dd6ea824SBarry Smith ierr = MPI_Recv(dlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 259dd6ea824SBarry Smith /* receive column indices */ 260dd6ea824SBarry Smith ierr = MPI_Recv(&nz,1,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 261dd6ea824SBarry Smith ierr = PetscMalloc2(nz,PetscScalar,&gmataa,nz,PetscInt,&gmataj);CHKERRQ(ierr); 262dd6ea824SBarry Smith ierr = MPI_Recv(gmataj,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 263dd6ea824SBarry Smith /* determine number diagonal and off-diagonal counts */ 264dd6ea824SBarry Smith ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr); 265dd6ea824SBarry Smith ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr); 266dd6ea824SBarry Smith ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr); 267dd6ea824SBarry Smith jj = 0; 268dd6ea824SBarry Smith for (i=0; i<m; i++) { 269dd6ea824SBarry Smith for (j=0; j<dlens[i]; j++) { 270dd6ea824SBarry Smith if (gmataj[jj] < rstart) ld[i]++; 271dd6ea824SBarry Smith if (gmataj[jj] < rstart || gmataj[jj] >= rend) olens[i]++; 272dd6ea824SBarry Smith jj++; 273dd6ea824SBarry Smith } 274dd6ea824SBarry Smith } 275dd6ea824SBarry Smith /* receive 
numerical values */ 276dd6ea824SBarry Smith ierr = PetscMemzero(gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); 277dd6ea824SBarry Smith ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr); 278dd6ea824SBarry Smith } 279dd6ea824SBarry Smith /* set preallocation */ 280dd6ea824SBarry Smith for (i=0; i<m; i++) { 281dd6ea824SBarry Smith dlens[i] -= olens[i]; 282dd6ea824SBarry Smith } 283dd6ea824SBarry Smith ierr = MatSeqAIJSetPreallocation(mat,0,dlens);CHKERRQ(ierr); 284dd6ea824SBarry Smith ierr = MatMPIAIJSetPreallocation(mat,0,dlens,0,olens);CHKERRQ(ierr); 285dd6ea824SBarry Smith 286dd6ea824SBarry Smith for (i=0; i<m; i++) { 287dd6ea824SBarry Smith dlens[i] += olens[i]; 288dd6ea824SBarry Smith } 289dd6ea824SBarry Smith cnt = 0; 290dd6ea824SBarry Smith for (i=0; i<m; i++) { 291dd6ea824SBarry Smith row = rstart + i; 292dd6ea824SBarry Smith ierr = MatSetValues(mat,1,&row,dlens[i],gmataj+cnt,gmataa+cnt,INSERT_VALUES);CHKERRQ(ierr); 293dd6ea824SBarry Smith cnt += dlens[i]; 294dd6ea824SBarry Smith } 295dd6ea824SBarry Smith if (rank) { 296dd6ea824SBarry Smith ierr = PetscFree2(gmataa,gmataj);CHKERRQ(ierr); 297dd6ea824SBarry Smith } 298dd6ea824SBarry Smith ierr = PetscFree2(dlens,olens);CHKERRQ(ierr); 299dd6ea824SBarry Smith ierr = PetscFree(rowners);CHKERRQ(ierr); 3002205254eSKarl Rupp 301dd6ea824SBarry Smith ((Mat_MPIAIJ*)(mat->data))->ld = ld; 3022205254eSKarl Rupp 303dd6ea824SBarry Smith *inmat = mat; 304dd6ea824SBarry Smith } else { /* column indices are already set; only need to move over numerical values from process 0 */ 305dd6ea824SBarry Smith Mat_SeqAIJ *Ad = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->A->data; 306dd6ea824SBarry Smith Mat_SeqAIJ *Ao = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->B->data; 307dd6ea824SBarry Smith mat = *inmat; 308dd6ea824SBarry Smith ierr = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr); 309dd6ea824SBarry Smith if (!rank) { 310dd6ea824SBarry Smith /* send numerical values to other processes */ 
311dd6ea824SBarry Smith gmata = (Mat_SeqAIJ*) gmat->data; 312dd6ea824SBarry Smith ierr = MatGetOwnershipRanges(mat,(const PetscInt**)&rowners);CHKERRQ(ierr); 313dd6ea824SBarry Smith gmataa = gmata->a; 314dd6ea824SBarry Smith for (i=1; i<size; i++) { 315dd6ea824SBarry Smith nz = gmata->i[rowners[i+1]]-gmata->i[rowners[i]]; 316dd6ea824SBarry Smith ierr = MPI_Send(gmataa + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr); 317dd6ea824SBarry Smith } 318dd6ea824SBarry Smith nz = gmata->i[rowners[1]]-gmata->i[rowners[0]]; 319dd6ea824SBarry Smith } else { 320dd6ea824SBarry Smith /* receive numerical values from process 0*/ 321dd6ea824SBarry Smith nz = Ad->nz + Ao->nz; 322dd6ea824SBarry Smith ierr = PetscMalloc(nz*sizeof(PetscScalar),&gmataa);CHKERRQ(ierr); gmataarestore = gmataa; 323dd6ea824SBarry Smith ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr); 324dd6ea824SBarry Smith } 325dd6ea824SBarry Smith /* transfer numerical values into the diagonal A and off diagonal B parts of mat */ 326dd6ea824SBarry Smith ld = ((Mat_MPIAIJ*)(mat->data))->ld; 327dd6ea824SBarry Smith ad = Ad->a; 328dd6ea824SBarry Smith ao = Ao->a; 329d0f46423SBarry Smith if (mat->rmap->n) { 330dd6ea824SBarry Smith i = 0; 331dd6ea824SBarry Smith nz = ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz; 332dd6ea824SBarry Smith nz = Ad->i[i+1] - Ad->i[i]; ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz; 333dd6ea824SBarry Smith } 334d0f46423SBarry Smith for (i=1; i<mat->rmap->n; i++) { 335dd6ea824SBarry Smith nz = Ao->i[i] - Ao->i[i-1] - ld[i-1] + ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz; 336dd6ea824SBarry Smith nz = Ad->i[i+1] - Ad->i[i]; ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz; 337dd6ea824SBarry Smith } 338dd6ea824SBarry Smith i--; 339d0f46423SBarry Smith if (mat->rmap->n) { 
34022d28d08SBarry Smith nz = Ao->i[i+1] - Ao->i[i] - ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); 341dd6ea824SBarry Smith } 342dd6ea824SBarry Smith if (rank) { 343dd6ea824SBarry Smith ierr = PetscFree(gmataarestore);CHKERRQ(ierr); 344dd6ea824SBarry Smith } 345dd6ea824SBarry Smith } 346dd6ea824SBarry Smith ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 347dd6ea824SBarry Smith ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 348dd6ea824SBarry Smith PetscFunctionReturn(0); 349dd6ea824SBarry Smith } 350dd6ea824SBarry Smith 3510f5bd95cSBarry Smith /* 3520f5bd95cSBarry Smith Local utility routine that creates a mapping from the global column 3539e25ed09SBarry Smith number to the local number in the off-diagonal part of the local 3540f5bd95cSBarry Smith storage of the matrix. When PETSC_USE_CTABLE is used this is scalable at 3550f5bd95cSBarry Smith a slightly higher hash table cost; without it it is not scalable (each processor 3560f5bd95cSBarry Smith has an order N integer array but is fast to acess. 
3579e25ed09SBarry Smith */ 3584a2ae208SSatish Balay #undef __FUNCT__ 359ab9863d7SBarry Smith #define __FUNCT__ "MatCreateColmap_MPIAIJ_Private" 360ab9863d7SBarry Smith PetscErrorCode MatCreateColmap_MPIAIJ_Private(Mat mat) 3619e25ed09SBarry Smith { 36244a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 3636849ba73SBarry Smith PetscErrorCode ierr; 364d0f46423SBarry Smith PetscInt n = aij->B->cmap->n,i; 365dbb450caSBarry Smith 3663a40ed3dSBarry Smith PetscFunctionBegin; 3675e1f6667SBarry Smith if (!aij->garray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MPIAIJ Matrix was assembled but is missing garray"); 368aa482453SBarry Smith #if defined(PETSC_USE_CTABLE) 369e23dfa41SBarry Smith ierr = PetscTableCreate(n,mat->cmap->N+1,&aij->colmap);CHKERRQ(ierr); 370b1fc9764SSatish Balay for (i=0; i<n; i++) { 3713861aac3SJed Brown ierr = PetscTableAdd(aij->colmap,aij->garray[i]+1,i+1,INSERT_VALUES);CHKERRQ(ierr); 372b1fc9764SSatish Balay } 373b1fc9764SSatish Balay #else 374d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscInt),&aij->colmap);CHKERRQ(ierr); 375d0f46423SBarry Smith ierr = PetscLogObjectMemory(mat,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr); 376d0f46423SBarry Smith ierr = PetscMemzero(aij->colmap,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr); 377905e6a2fSBarry Smith for (i=0; i<n; i++) aij->colmap[aij->garray[i]] = i+1; 378b1fc9764SSatish Balay #endif 3793a40ed3dSBarry Smith PetscFunctionReturn(0); 3809e25ed09SBarry Smith } 3819e25ed09SBarry Smith 38230770e4dSSatish Balay #define MatSetValues_SeqAIJ_A_Private(row,col,value,addv) \ 3830520107fSSatish Balay { \ 384db4deed7SKarl Rupp if (col <= lastcol1) low1 = 0; \ 385db4deed7SKarl Rupp else high1 = nrow1; \ 386fd3458f5SBarry Smith lastcol1 = col;\ 387fd3458f5SBarry Smith while (high1-low1 > 5) { \ 388fd3458f5SBarry Smith t = (low1+high1)/2; \ 389fd3458f5SBarry Smith if (rp1[t] > col) high1 = t; \ 390fd3458f5SBarry Smith else low1 = t; \ 391ba4e3ef2SSatish Balay } \ 392fd3458f5SBarry 
Smith for (_i=low1; _i<high1; _i++) { \ 393fd3458f5SBarry Smith if (rp1[_i] > col) break; \ 394fd3458f5SBarry Smith if (rp1[_i] == col) { \ 395fd3458f5SBarry Smith if (addv == ADD_VALUES) ap1[_i] += value; \ 396fd3458f5SBarry Smith else ap1[_i] = value; \ 39730770e4dSSatish Balay goto a_noinsert; \ 3980520107fSSatish Balay } \ 3990520107fSSatish Balay } \ 400e44c0bd4SBarry Smith if (value == 0.0 && ignorezeroentries) {low1 = 0; high1 = nrow1;goto a_noinsert;} \ 401e44c0bd4SBarry Smith if (nonew == 1) {low1 = 0; high1 = nrow1; goto a_noinsert;} \ 402e32f2f54SBarry Smith if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \ 403fef13f97SBarry Smith MatSeqXAIJReallocateAIJ(A,am,1,nrow1,row,col,rmax1,aa,ai,aj,rp1,ap1,aimax,nonew,MatScalar); \ 404669a8dbcSSatish Balay N = nrow1++ - 1; a->nz++; high1++; \ 4050520107fSSatish Balay /* shift up all the later entries in this row */ \ 4060520107fSSatish Balay for (ii=N; ii>=_i; ii--) { \ 407fd3458f5SBarry Smith rp1[ii+1] = rp1[ii]; \ 408fd3458f5SBarry Smith ap1[ii+1] = ap1[ii]; \ 4090520107fSSatish Balay } \ 410fd3458f5SBarry Smith rp1[_i] = col; \ 411fd3458f5SBarry Smith ap1[_i] = value; \ 41230770e4dSSatish Balay a_noinsert: ; \ 413fd3458f5SBarry Smith ailen[row] = nrow1; \ 4140520107fSSatish Balay } 4150a198c4cSBarry Smith 416085a36d4SBarry Smith 41730770e4dSSatish Balay #define MatSetValues_SeqAIJ_B_Private(row,col,value,addv) \ 41830770e4dSSatish Balay { \ 419db4deed7SKarl Rupp if (col <= lastcol2) low2 = 0; \ 420db4deed7SKarl Rupp else high2 = nrow2; \ 421fd3458f5SBarry Smith lastcol2 = col; \ 422fd3458f5SBarry Smith while (high2-low2 > 5) { \ 423fd3458f5SBarry Smith t = (low2+high2)/2; \ 424fd3458f5SBarry Smith if (rp2[t] > col) high2 = t; \ 425fd3458f5SBarry Smith else low2 = t; \ 426ba4e3ef2SSatish Balay } \ 427fd3458f5SBarry Smith for (_i=low2; _i<high2; _i++) { \ 428fd3458f5SBarry Smith if (rp2[_i] > col) break; \ 429fd3458f5SBarry Smith if 
(rp2[_i] == col) { \ 430fd3458f5SBarry Smith if (addv == ADD_VALUES) ap2[_i] += value; \ 431fd3458f5SBarry Smith else ap2[_i] = value; \ 43230770e4dSSatish Balay goto b_noinsert; \ 43330770e4dSSatish Balay } \ 43430770e4dSSatish Balay } \ 435e44c0bd4SBarry Smith if (value == 0.0 && ignorezeroentries) {low2 = 0; high2 = nrow2; goto b_noinsert;} \ 436e44c0bd4SBarry Smith if (nonew == 1) {low2 = 0; high2 = nrow2; goto b_noinsert;} \ 437e32f2f54SBarry Smith if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \ 438fef13f97SBarry Smith MatSeqXAIJReallocateAIJ(B,bm,1,nrow2,row,col,rmax2,ba,bi,bj,rp2,ap2,bimax,nonew,MatScalar); \ 439669a8dbcSSatish Balay N = nrow2++ - 1; b->nz++; high2++; \ 44030770e4dSSatish Balay /* shift up all the later entries in this row */ \ 44130770e4dSSatish Balay for (ii=N; ii>=_i; ii--) { \ 442fd3458f5SBarry Smith rp2[ii+1] = rp2[ii]; \ 443fd3458f5SBarry Smith ap2[ii+1] = ap2[ii]; \ 44430770e4dSSatish Balay } \ 445fd3458f5SBarry Smith rp2[_i] = col; \ 446fd3458f5SBarry Smith ap2[_i] = value; \ 44730770e4dSSatish Balay b_noinsert: ; \ 448fd3458f5SBarry Smith bilen[row] = nrow2; \ 44930770e4dSSatish Balay } 45030770e4dSSatish Balay 4514a2ae208SSatish Balay #undef __FUNCT__ 4522fd7e33dSBarry Smith #define __FUNCT__ "MatSetValuesRow_MPIAIJ" 4532fd7e33dSBarry Smith PetscErrorCode MatSetValuesRow_MPIAIJ(Mat A,PetscInt row,const PetscScalar v[]) 4542fd7e33dSBarry Smith { 4552fd7e33dSBarry Smith Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data; 4562fd7e33dSBarry Smith Mat_SeqAIJ *a = (Mat_SeqAIJ*)mat->A->data,*b = (Mat_SeqAIJ*)mat->B->data; 4572fd7e33dSBarry Smith PetscErrorCode ierr; 4582fd7e33dSBarry Smith PetscInt l,*garray = mat->garray,diag; 4592fd7e33dSBarry Smith 4602fd7e33dSBarry Smith PetscFunctionBegin; 4612fd7e33dSBarry Smith /* code only works for square matrices A */ 4622fd7e33dSBarry Smith 4632fd7e33dSBarry Smith /* find size of row to the left of the diagonal part */ 
4642fd7e33dSBarry Smith ierr = MatGetOwnershipRange(A,&diag,0);CHKERRQ(ierr); 4652fd7e33dSBarry Smith row = row - diag; 4662fd7e33dSBarry Smith for (l=0; l<b->i[row+1]-b->i[row]; l++) { 4672fd7e33dSBarry Smith if (garray[b->j[b->i[row]+l]] > diag) break; 4682fd7e33dSBarry Smith } 4692fd7e33dSBarry Smith ierr = PetscMemcpy(b->a+b->i[row],v,l*sizeof(PetscScalar));CHKERRQ(ierr); 4702fd7e33dSBarry Smith 4712fd7e33dSBarry Smith /* diagonal part */ 4722fd7e33dSBarry Smith ierr = PetscMemcpy(a->a+a->i[row],v+l,(a->i[row+1]-a->i[row])*sizeof(PetscScalar));CHKERRQ(ierr); 4732fd7e33dSBarry Smith 4742fd7e33dSBarry Smith /* right of diagonal part */ 4752fd7e33dSBarry Smith ierr = PetscMemcpy(b->a+b->i[row]+l,v+l+a->i[row+1]-a->i[row],(b->i[row+1]-b->i[row]-l)*sizeof(PetscScalar));CHKERRQ(ierr); 4762fd7e33dSBarry Smith PetscFunctionReturn(0); 4772fd7e33dSBarry Smith } 4782fd7e33dSBarry Smith 4792fd7e33dSBarry Smith #undef __FUNCT__ 4804a2ae208SSatish Balay #define __FUNCT__ "MatSetValues_MPIAIJ" 481b1d57f15SBarry Smith PetscErrorCode MatSetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv) 4828a729477SBarry Smith { 48344a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 48487828ca2SBarry Smith PetscScalar value; 485dfbe8321SBarry Smith PetscErrorCode ierr; 486d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend; 487d0f46423SBarry Smith PetscInt cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col; 488ace3abfcSBarry Smith PetscBool roworiented = aij->roworiented; 4898a729477SBarry Smith 4900520107fSSatish Balay /* Some Variables required in the macro */ 4914ee7247eSSatish Balay Mat A = aij->A; 4924ee7247eSSatish Balay Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data; 49357809a77SBarry Smith PetscInt *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j; 494a77337e4SBarry Smith MatScalar *aa = a->a; 495ace3abfcSBarry Smith PetscBool ignorezeroentries = 
a->ignorezeroentries; 49630770e4dSSatish Balay Mat B = aij->B; 49730770e4dSSatish Balay Mat_SeqAIJ *b = (Mat_SeqAIJ*)B->data; 498d0f46423SBarry Smith PetscInt *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n; 499a77337e4SBarry Smith MatScalar *ba = b->a; 50030770e4dSSatish Balay 501fd3458f5SBarry Smith PetscInt *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2; 5028d76821aSHong Zhang PetscInt nonew; 503a77337e4SBarry Smith MatScalar *ap1,*ap2; 5044ee7247eSSatish Balay 5053a40ed3dSBarry Smith PetscFunctionBegin; 50671fd2e92SBarry Smith if (v) PetscValidScalarPointer(v,6); 5078a729477SBarry Smith for (i=0; i<m; i++) { 5085ef9f2a5SBarry Smith if (im[i] < 0) continue; 5092515c552SBarry Smith #if defined(PETSC_USE_DEBUG) 510e32f2f54SBarry Smith if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1); 5110a198c4cSBarry Smith #endif 5124b0e389bSBarry Smith if (im[i] >= rstart && im[i] < rend) { 5134b0e389bSBarry Smith row = im[i] - rstart; 514fd3458f5SBarry Smith lastcol1 = -1; 515fd3458f5SBarry Smith rp1 = aj + ai[row]; 516fd3458f5SBarry Smith ap1 = aa + ai[row]; 517fd3458f5SBarry Smith rmax1 = aimax[row]; 518fd3458f5SBarry Smith nrow1 = ailen[row]; 519fd3458f5SBarry Smith low1 = 0; 520fd3458f5SBarry Smith high1 = nrow1; 521fd3458f5SBarry Smith lastcol2 = -1; 522fd3458f5SBarry Smith rp2 = bj + bi[row]; 523d498b1e9SBarry Smith ap2 = ba + bi[row]; 524fd3458f5SBarry Smith rmax2 = bimax[row]; 525d498b1e9SBarry Smith nrow2 = bilen[row]; 526fd3458f5SBarry Smith low2 = 0; 527fd3458f5SBarry Smith high2 = nrow2; 528fd3458f5SBarry Smith 5291eb62cbbSBarry Smith for (j=0; j<n; j++) { 530db4deed7SKarl Rupp if (v) { 531db4deed7SKarl Rupp if (roworiented) value = v[i*n+j]; 532db4deed7SKarl Rupp else value = v[i+j*m]; 533db4deed7SKarl Rupp } else value = 0.0; 534abc0a331SBarry Smith if (ignorezeroentries && value == 0.0 && (addv == 
ADD_VALUES)) continue; 535fd3458f5SBarry Smith if (in[j] >= cstart && in[j] < cend) { 536fd3458f5SBarry Smith col = in[j] - cstart; 5378d76821aSHong Zhang nonew = a->nonew; 53830770e4dSSatish Balay MatSetValues_SeqAIJ_A_Private(row,col,value,addv); 539273d9f13SBarry Smith } else if (in[j] < 0) continue; 5402515c552SBarry Smith #if defined(PETSC_USE_DEBUG) 541cb9801acSJed Brown else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1); 5420a198c4cSBarry Smith #endif 5431eb62cbbSBarry Smith else { 544227d817aSBarry Smith if (mat->was_assembled) { 545905e6a2fSBarry Smith if (!aij->colmap) { 546ab9863d7SBarry Smith ierr = MatCreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 547905e6a2fSBarry Smith } 548aa482453SBarry Smith #if defined(PETSC_USE_CTABLE) 5490f5bd95cSBarry Smith ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr); 550fa46199cSSatish Balay col--; 551b1fc9764SSatish Balay #else 552905e6a2fSBarry Smith col = aij->colmap[in[j]] - 1; 553b1fc9764SSatish Balay #endif 5540e9bae81SBarry Smith if (col < 0 && !((Mat_SeqAIJ*)(aij->B->data))->nonew) { 555ab9863d7SBarry Smith ierr = MatDisAssemble_MPIAIJ(mat);CHKERRQ(ierr); 5564b0e389bSBarry Smith col = in[j]; 5579bf004c3SSatish Balay /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */ 558f9508a3cSSatish Balay B = aij->B; 559f9508a3cSSatish Balay b = (Mat_SeqAIJ*)B->data; 560e44c0bd4SBarry Smith bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; ba = b->a; 561d498b1e9SBarry Smith rp2 = bj + bi[row]; 562d498b1e9SBarry Smith ap2 = ba + bi[row]; 563d498b1e9SBarry Smith rmax2 = bimax[row]; 564d498b1e9SBarry Smith nrow2 = bilen[row]; 565d498b1e9SBarry Smith low2 = 0; 566d498b1e9SBarry Smith high2 = nrow2; 567d0f46423SBarry Smith bm = aij->B->rmap->n; 568f9508a3cSSatish Balay ba = b->a; 5690e9bae81SBarry Smith } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, 
%D) into matrix", im[i], in[j]); 570c48de900SBarry Smith } else col = in[j]; 5718d76821aSHong Zhang nonew = b->nonew; 57230770e4dSSatish Balay MatSetValues_SeqAIJ_B_Private(row,col,value,addv); 5731eb62cbbSBarry Smith } 5741eb62cbbSBarry Smith } 5755ef9f2a5SBarry Smith } else { 5764cb17eb5SBarry Smith if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]); 57790f02eecSBarry Smith if (!aij->donotstash) { 5785080c13bSMatthew G Knepley mat->assembled = PETSC_FALSE; 579d36fbae8SSatish Balay if (roworiented) { 580ace3abfcSBarry Smith ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 581d36fbae8SSatish Balay } else { 582ace3abfcSBarry Smith ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 5834b0e389bSBarry Smith } 5841eb62cbbSBarry Smith } 5858a729477SBarry Smith } 58690f02eecSBarry Smith } 5873a40ed3dSBarry Smith PetscFunctionReturn(0); 5888a729477SBarry Smith } 5898a729477SBarry Smith 5904a2ae208SSatish Balay #undef __FUNCT__ 5914a2ae208SSatish Balay #define __FUNCT__ "MatGetValues_MPIAIJ" 592b1d57f15SBarry Smith PetscErrorCode MatGetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[]) 593b49de8d1SLois Curfman McInnes { 594b49de8d1SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 595dfbe8321SBarry Smith PetscErrorCode ierr; 596d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend; 597d0f46423SBarry Smith PetscInt cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col; 598b49de8d1SLois Curfman McInnes 5993a40ed3dSBarry Smith PetscFunctionBegin; 600b49de8d1SLois Curfman McInnes for (i=0; i<m; i++) { 601e32f2f54SBarry Smith if (idxm[i] < 0) continue; /* 
SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/ 602e32f2f54SBarry Smith if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1); 603b49de8d1SLois Curfman McInnes if (idxm[i] >= rstart && idxm[i] < rend) { 604b49de8d1SLois Curfman McInnes row = idxm[i] - rstart; 605b49de8d1SLois Curfman McInnes for (j=0; j<n; j++) { 606e32f2f54SBarry Smith if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */ 607e32f2f54SBarry Smith if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1); 608b49de8d1SLois Curfman McInnes if (idxn[j] >= cstart && idxn[j] < cend) { 609b49de8d1SLois Curfman McInnes col = idxn[j] - cstart; 610b49de8d1SLois Curfman McInnes ierr = MatGetValues(aij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr); 611fa852ad4SSatish Balay } else { 612905e6a2fSBarry Smith if (!aij->colmap) { 613ab9863d7SBarry Smith ierr = MatCreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 614905e6a2fSBarry Smith } 615aa482453SBarry Smith #if defined(PETSC_USE_CTABLE) 6160f5bd95cSBarry Smith ierr = PetscTableFind(aij->colmap,idxn[j]+1,&col);CHKERRQ(ierr); 617fa46199cSSatish Balay col--; 618b1fc9764SSatish Balay #else 619905e6a2fSBarry Smith col = aij->colmap[idxn[j]] - 1; 620b1fc9764SSatish Balay #endif 621e60e1c95SSatish Balay if ((col < 0) || (aij->garray[col] != idxn[j])) *(v+i*n+j) = 0.0; 622d9d09a02SSatish Balay else { 623b49de8d1SLois Curfman McInnes ierr = MatGetValues(aij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr); 624b49de8d1SLois Curfman McInnes } 625b49de8d1SLois Curfman McInnes } 626b49de8d1SLois Curfman McInnes } 627f23aa3ddSBarry Smith } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported"); 628b49de8d1SLois Curfman McInnes } 6293a40ed3dSBarry Smith PetscFunctionReturn(0); 630b49de8d1SLois Curfman McInnes } 

extern PetscErrorCode MatMultDiagonalBlock_MPIAIJ(Mat,Vec,Vec);

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_MPIAIJ"
/*
   MatAssemblyBegin_MPIAIJ - begins assembly of a parallel AIJ matrix by
   launching the communication of any off-process values cached in the stash.

   Collective on the matrix communicator: all ranks must agree on whether
   values were INSERTed or ADDed; mixing the two is an error.
*/
PetscErrorCode MatAssemblyBegin_MPIAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       nstash,reallocs;
  InsertMode     addv;

  PetscFunctionBegin;
  /* nothing was stashed, so there is nothing to communicate */
  if (aij->donotstash || mat->nooffprocentries) PetscFunctionReturn(0);

  /* make sure all processors are either in INSERTMODE or ADDMODE */
  /* MPI_BOR of the enum values detects a mix: ADD_VALUES|INSERT_VALUES is
     only produced when at least one rank inserted and another added */
  ierr = MPI_Allreduce((PetscEnum*)&mat->insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added");
  mat->insertmode = addv; /* in case this processor had no cache */

  /* start sending stashed (row,col,value) triples to their owning ranks */
  ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(aij->A,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_MPIAIJ"
/*
   MatAssemblyEnd_MPIAIJ - completes assembly: drains the incoming stash
   messages into local MatSetValues() calls, assembles the diagonal (A) and
   off-diagonal (B) sequential blocks, and reconciles disassembly state
   across ranks.
*/
PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat_SeqAIJ     *a   = (Mat_SeqAIJ*)aij->A->data;
  PetscErrorCode ierr;
  PetscMPIInt    n;
  PetscInt       i,j,rstart,ncols,flg;
  PetscInt       *row,*col;
  PetscBool      other_disassembled;
  PetscScalar    *val;
  InsertMode     addv = mat->insertmode;

  /* do not use 'b = (Mat_SeqAIJ*)aij->B->data' as B can be reset in disassembly */

  PetscFunctionBegin;
  if (!aij->donotstash && !mat->nooffprocentries) {
    /* receive stash messages until none remain; each message holds triples
       destined for rows this rank owns */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n; ) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_MPIAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr);

        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(aij->A,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(aij->A,mode);CHKERRQ(ierr);

  /* determine if any processor has disassembled, if so we must
     also disassemble ourselfs, in order that we may reassemble. */
  /*
     if nonzero structure of submatrix B cannot change then we know that
     no processor disassembled thus we can skip this stuff
  */
  if (!((Mat_SeqAIJ*)aij->B->data)->nonew) {
    /* MPI_PROD of the was_assembled booleans: result is false iff any rank
       has disassembled */
    ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPIU_BOOL,MPI_PROD,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (mat->was_assembled && !other_disassembled) {
      ierr = MatDisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
    }
  }
  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
    /* first final assembly: build the scatter used by MatMult et al. */
    ierr = MatSetUpMultiply_MPIAIJ(mat);CHKERRQ(ierr);
  }
  ierr = MatSetOption(aij->B,MAT_USE_INODES,PETSC_FALSE);CHKERRQ(ierr);
  ierr = MatSetOption(aij->B,MAT_CHECK_COMPRESSED_ROW,PETSC_FALSE);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(aij->B,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(aij->B,mode);CHKERRQ(ierr);

  /* cached row workspace is invalidated by assembly */
  ierr = PetscFree2(aij->rowvalues,aij->rowindices);CHKERRQ(ierr);

  aij->rowvalues = 0;

  /* used by MatAXPY() */
  a->xtoy = 0; ((Mat_SeqAIJ*)aij->B->data)->xtoy = 0;   /* b->xtoy = 0 */
  a->XtoY = 0; ((Mat_SeqAIJ*)aij->B->data)->XtoY = 0;   /* b->XtoY = 0 */

  /* cached diagonal vector is stale after new values were assembled */
  ierr = VecDestroy(&aij->diag);CHKERRQ(ierr);
  if (a->inode.size) mat->ops->multdiagonalblock = MatMultDiagonalBlock_MPIAIJ;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_MPIAIJ"
/*
   MatZeroEntries_MPIAIJ - zeroes all stored values (structure is kept) by
   zeroing both the diagonal (A) and off-diagonal (B) sequential blocks.
*/
PetscErrorCode MatZeroEntries_MPIAIJ(Mat A)
{
  Mat_MPIAIJ     *l = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
  ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_MPIAIJ"
/*
   MatZeroRows_MPIAIJ - zeroes the given global rows, optionally placing diag
   on the diagonal and fixing the right-hand side b so that the solution in
   those rows equals x.

   The rows[] list may name rows owned by other ranks; a hand-rolled MPI
   rendezvous (Irecv/Isend/Waitany) routes each row index to its owner
   before the local zeroing is performed.
*/
PetscErrorCode MatZeroRows_MPIAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_MPIAIJ        *l = (Mat_MPIAIJ*)A->data;
  PetscErrorCode    ierr;
  PetscMPIInt       size = l->size,imdex,n,rank = l->rank,tag = ((PetscObject)A)->tag,lastidx = -1;
  PetscInt          i,*owners = A->rmap->range;
  PetscInt          *nprocs,j,idx,nsends,row;
  PetscInt          nmax,*svalues,*starts,*owner,nrecvs;
  PetscInt          *rvalues,count,base,slen,*source;
  PetscInt          *lens,*lrows,*values,rstart=A->rmap->rstart;
  MPI_Comm          comm;
  MPI_Request       *send_waits,*recv_waits;
  MPI_Status        recv_status,*send_status;
  const PetscScalar *xx;
  PetscScalar       *bb;
#if defined(PETSC_DEBUG)
  PetscBool         found = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  /* first count number of contributors to each processor */
  /* nprocs[2*j] = #rows destined for rank j, nprocs[2*j+1] = 1 iff any */
  ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/
  /* the owner search resumes from the previous rank (j persists) because
     rows[] is typically sorted; j is reset whenever the sequence decreases */
  j = 0;
  for (i=0; i<N; i++) {
    if (lastidx > (idx = rows[i])) j = 0;
    lastidx = idx;
    for (; j<size; j++) {
      if (idx >= owners[j] && idx < owners[j+1]) {
        nprocs[2*j]++;
        nprocs[2*j+1] = 1;
        owner[i]      = j;
#if defined(PETSC_DEBUG)
        found = PETSC_TRUE;
#endif
        break;
      }
    }
#if defined(PETSC_DEBUG)
    if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index out of range");
    found = PETSC_FALSE;
#endif
  }
  nsends = 0;
  for (i=0; i<size; i++) nsends += nprocs[2*i+1];

  if (A->nooffproczerorows) {
    /* user promised all rows are local, so skip the global PetscMaxSum() */
    if (nsends > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"You called MatSetOption(,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) but set an off process zero row");
    nrecvs = nsends;
    nmax   = N;
  } else {
    /* inform other processors of number of messages and max length*/
    ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  }

  /* post receives: */
  ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr);
  ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* do sends:
     1) starts[i] gives the starting index in svalues for stuff going to
     the ith processor
  */
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr);
  ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);

  /* bucket the row indices by owning rank (counting-sort style) */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + nprocs[2*i-2];
  for (i=0; i<N; i++) svalues[starts[owner[i]]++] = rows[i];

  /* rebuild starts[] (the fill above advanced it) before posting sends */
  starts[0] = 0;
  for (i=1; i<size+1; i++) starts[i] = starts[i-1] + nprocs[2*i-2];
  count = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i+1]) {
      ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  base = owners[rank];

  /* wait on receives */
  ierr  = PetscMalloc2(nrecvs,PetscInt,&lens,nrecvs,PetscInt,&source);CHKERRQ(ierr);
  count = nrecvs; slen = 0;
  while (count) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr);

    source[imdex] = recv_status.MPI_SOURCE;
    lens[imdex]   = n;
    slen += n;
    count--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* move the data into the send scatter */
  /* convert the received global row indices to local indices in lrows[] */
  ierr  = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr);
  count = 0;
  for (i=0; i<nrecvs; i++) {
    values = rvalues + i*nmax;
    for (j=0; j<lens[i]; j++) lrows[count++] = values[j] - base;
  }
  ierr = PetscFree(rvalues);CHKERRQ(ierr);
  ierr = PetscFree2(lens,source);CHKERRQ(ierr);
  ierr = PetscFree(owner);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* fix right hand side if needed */
  if (x && b) {
    ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr);
    ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
    for (i=0; i<slen; i++) bb[lrows[i]] = diag*xx[lrows[i]];
    ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
  }
  /*
     Zero the required rows. If the "diagonal block" of the matrix
     is square and the user wishes to set the diagonal we use separate
     code so that MatSetValues() is not called for each diagonal allocating
     new memory, thus calling lots of mallocs and slowing things down.

  */
  /* must zero l->B before l->A because the (diag) case below may put values into l->B*/
  ierr = MatZeroRows(l->B,slen,lrows,0.0,0,0);CHKERRQ(ierr);
  if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) {
    /* square diagonal block: the sequential MatZeroRows() sets diag cheaply */
    ierr = MatZeroRows(l->A,slen,lrows,diag,0,0);CHKERRQ(ierr);
  } else if (diag != 0.0) {
    /* rectangular diagonal block: must insert the diagonal entries one by
       one, which requires that new nonzero locations be allowed */
    ierr = MatZeroRows(l->A,slen,lrows,0.0,0,0);CHKERRQ(ierr);
    if (((Mat_SeqAIJ*)l->A->data)->nonew) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options\nMAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
    for (i = 0; i < slen; i++) {
      row  = lrows[i] + rstart;
      ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else {
    ierr = MatZeroRows(l->A,slen,lrows,0.0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscFree(lrows);CHKERRQ(ierr);

  /* wait on sends */
  if (nsends) {
    ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(svalues);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsColumns_MPIAIJ"
/*
   MatZeroRowsColumns_MPIAIJ - zeroes the given global rows AND the matching
   columns, optionally placing diag on the diagonal and adjusting b so the
   solution in the zeroed rows equals x.

   The index-exchange prologue is the same rendezvous as MatZeroRows_MPIAIJ.
   Columns of the off-diagonal block are removed with a scattered 0/1 mask.

   NOTE(review): the b-update reads xx/bb that are only fetched when x is
   non-NULL; looks like callers must pass x and b together or neither -
   confirm against callers.
*/
PetscErrorCode MatZeroRowsColumns_MPIAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_MPIAIJ        *l = (Mat_MPIAIJ*)A->data;
  PetscErrorCode    ierr;
  PetscMPIInt       size = l->size,imdex,n,rank = l->rank,tag = ((PetscObject)A)->tag,lastidx = -1;
  PetscInt          i,*owners = A->rmap->range;
  PetscInt          *nprocs,j,idx,nsends;
  PetscInt          nmax,*svalues,*starts,*owner,nrecvs;
  PetscInt          *rvalues,count,base,slen,*source;
  PetscInt          *lens,*lrows,*values,m;
  MPI_Comm          comm;
  MPI_Request       *send_waits,*recv_waits;
  MPI_Status        recv_status,*send_status;
  const PetscScalar *xx;
  PetscScalar       *bb,*mask;
  Vec               xmask,lmask;
  Mat_SeqAIJ        *aij = (Mat_SeqAIJ*)l->B->data;
  const PetscInt    *aj, *ii,*ridx;
  PetscScalar       *aa;
#if defined(PETSC_DEBUG)
  PetscBool         found = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  /* first count number of contributors to each processor */
  ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/
  j = 0;
  for (i=0; i<N; i++) {
    if (lastidx > (idx = rows[i])) j = 0;
    lastidx = idx;
    for (; j<size; j++) {
      if (idx >= owners[j] && idx < owners[j+1]) {
        nprocs[2*j]++;
        nprocs[2*j+1] = 1;
        owner[i]      = j;
#if defined(PETSC_DEBUG)
        found = PETSC_TRUE;
#endif
        break;
      }
    }
#if defined(PETSC_DEBUG)
    if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index out of range");
    found = PETSC_FALSE;
#endif
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);

  /* post receives: */
  ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr);
  ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* do sends:
     1) starts[i] gives the starting index in svalues for stuff going to
     the ith processor
  */
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr);
  ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);

  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + nprocs[2*i-2];
  for (i=0; i<N; i++) svalues[starts[owner[i]]++] = rows[i];

  starts[0] = 0;
  for (i=1; i<size+1; i++) starts[i] = starts[i-1] + nprocs[2*i-2];
  count = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i+1]) {
      ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  base = owners[rank];

  /* wait on receives */
  ierr  = PetscMalloc2(nrecvs,PetscInt,&lens,nrecvs,PetscInt,&source);CHKERRQ(ierr);
  count = nrecvs; slen = 0;
  while (count) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr);

    source[imdex] = recv_status.MPI_SOURCE;
    lens[imdex]   = n;
    slen += n;
    count--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* move the data into the send scatter */
  ierr  = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr);
  count = 0;
  for (i=0; i<nrecvs; i++) {
    values = rvalues + i*nmax;
    for (j=0; j<lens[i]; j++) lrows[count++] = values[j] - base;
  }
  ierr = PetscFree(rvalues);CHKERRQ(ierr);
  ierr = PetscFree2(lens,source);CHKERRQ(ierr);
  ierr = PetscFree(owner);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  /* lrows are the local rows to be zeroed, slen is the number of local rows */

  /* zero diagonal part of matrix */
  ierr = MatZeroRowsColumns(l->A,slen,lrows,diag,x,b);CHKERRQ(ierr);

  /* handle off diagonal part of matrix */
  /* build a 0/1 mask over the ghost columns: mask[c] != 0 iff ghost column
     c corresponds to a zeroed global row/column on some rank */
  ierr = MatGetVecs(A,&xmask,NULL);CHKERRQ(ierr);
  ierr = VecDuplicate(l->lvec,&lmask);CHKERRQ(ierr);
  ierr = VecGetArray(xmask,&bb);CHKERRQ(ierr);
  for (i=0; i<slen; i++) bb[lrows[i]] = 1;
  ierr = VecRestoreArray(xmask,&bb);CHKERRQ(ierr);
  ierr = VecScatterBegin(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecDestroy(&xmask);CHKERRQ(ierr);
  if (x) {
    /* bring the ghost values of x local so b can be corrected below */
    ierr = VecScatterBegin(l->Mvctx,x,l->lvec,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(l->Mvctx,x,l->lvec,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecGetArrayRead(l->lvec,&xx);CHKERRQ(ierr);
    ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
  }
  ierr = VecGetArray(lmask,&mask);CHKERRQ(ierr);

  /* remove zeroed rows of off diagonal matrix */
  ii = aij->i;
  for (i=0; i<slen; i++) {
    ierr = PetscMemzero(aij->a + ii[lrows[i]],(ii[lrows[i]+1] - ii[lrows[i]])*sizeof(PetscScalar));CHKERRQ(ierr);
  }

  /* loop over all elements of off process part of matrix zeroing removed columns*/
  if (aij->compressedrow.use) {
    m    = aij->compressedrow.nrows;
    ii   = aij->compressedrow.i;
    ridx = aij->compressedrow.rindex;
    for (i=0; i<m; i++) {
      n  = ii[i+1] - ii[i];
      aj = aij->j + ii[i];
      aa = aij->a + ii[i];

      for (j=0; j<n; j++) {
        if (PetscAbsScalar(mask[*aj])) {
          /* column is being removed: move its contribution to the rhs */
          if (b) bb[*ridx] -= *aa*xx[*aj];
          *aa = 0.0;
        }
        aa++;
        aj++;
      }
      ridx++;
    }
  } else { /* do not use compressed row format */
    m = l->B->rmap->n;
    for (i=0; i<m; i++) {
      n  = ii[i+1] - ii[i];
      aj = aij->j + ii[i];
      aa = aij->a + ii[i];
      for (j=0; j<n; j++) {
        if (PetscAbsScalar(mask[*aj])) {
          if (b) bb[i] -= *aa*xx[*aj];
          *aa = 0.0;
        }
        aa++;
        aj++;
      }
    }
  }
  if (x) {
    ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
    ierr = VecRestoreArrayRead(l->lvec,&xx);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(lmask,&mask);CHKERRQ(ierr);
  ierr = VecDestroy(&lmask);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);

  /* wait on sends */
  if (nsends) {
    ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(svalues);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_MPIAIJ"
/*
   MatMult_MPIAIJ - yy = A*xx. Overlaps the ghost-value scatter with the
   local diagonal-block product, then adds the off-diagonal contribution.
*/
PetscErrorCode MatMult_MPIAIJ(Mat A,Vec xx,Vec yy)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       nt;

  PetscFunctionBegin;
  ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
  if (nt != A->cmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A (%D) and xx (%D)",A->cmap->n,nt);
  /* start moving ghost values of xx while computing the local product */
  ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
  ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultDiagonalBlock_MPIAIJ"
/*
   MatMultDiagonalBlock_MPIAIJ - xx = D*bb where D is this rank's diagonal
   block; purely local, no communication.
*/
PetscErrorCode MatMultDiagonalBlock_MPIAIJ(Mat A,Vec bb,Vec xx)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatMultDiagonalBlock(a->A,bb,xx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_MPIAIJ"
/*
   MatMultAdd_MPIAIJ - zz = yy + A*xx, with the same communication/compute
   overlap as MatMult_MPIAIJ.
*/
PetscErrorCode MatMultAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
  ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

11454a2ae208SSatish Balay #undef __FUNCT__ 11464a2ae208SSatish Balay #define __FUNCT__ "MatMultTranspose_MPIAIJ" 1147dfbe8321SBarry Smith PetscErrorCode MatMultTranspose_MPIAIJ(Mat A,Vec xx,Vec yy) 1148da3a660dSBarry Smith { 1149416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1150dfbe8321SBarry Smith PetscErrorCode ierr; 1151ace3abfcSBarry Smith PetscBool merged; 1152da3a660dSBarry Smith 11533a40ed3dSBarry Smith PetscFunctionBegin; 1154a5ff213dSBarry Smith ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr); 1155da3a660dSBarry Smith /* do nondiagonal part */ 11567c922b88SBarry Smith ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr); 1157a5ff213dSBarry Smith if (!merged) { 1158da3a660dSBarry Smith /* send it on its way */ 1159ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1160da3a660dSBarry Smith /* do local part */ 11617c922b88SBarry Smith ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr); 1162da3a660dSBarry Smith /* receive remote parts: note this assumes the values are not actually */ 1163a5ff213dSBarry Smith /* added in yy until the next line, */ 1164ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1165a5ff213dSBarry Smith } else { 1166a5ff213dSBarry Smith /* do local part */ 1167a5ff213dSBarry Smith ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr); 1168a5ff213dSBarry Smith /* send it on its way */ 1169ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1170a5ff213dSBarry Smith /* values actually were received in the Begin() but we need to call this nop */ 1171ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1172a5ff213dSBarry Smith } 11733a40ed3dSBarry Smith PetscFunctionReturn(0); 1174da3a660dSBarry Smith } 1175da3a660dSBarry Smith 1176cd0d46ebSvictorle #undef __FUNCT__ 
11775fbd3699SBarry Smith #define __FUNCT__ "MatIsTranspose_MPIAIJ" 11787087cfbeSBarry Smith PetscErrorCode MatIsTranspose_MPIAIJ(Mat Amat,Mat Bmat,PetscReal tol,PetscBool *f) 1179cd0d46ebSvictorle { 11804f423910Svictorle MPI_Comm comm; 1181cd0d46ebSvictorle Mat_MPIAIJ *Aij = (Mat_MPIAIJ*) Amat->data, *Bij; 118266501d38Svictorle Mat Adia = Aij->A, Bdia, Aoff,Boff,*Aoffs,*Boffs; 1183cd0d46ebSvictorle IS Me,Notme; 11846849ba73SBarry Smith PetscErrorCode ierr; 1185b1d57f15SBarry Smith PetscInt M,N,first,last,*notme,i; 1186b1d57f15SBarry Smith PetscMPIInt size; 1187cd0d46ebSvictorle 1188cd0d46ebSvictorle PetscFunctionBegin; 118942e5f5b4Svictorle /* Easy test: symmetric diagonal block */ 119066501d38Svictorle Bij = (Mat_MPIAIJ*) Bmat->data; Bdia = Bij->A; 11915485867bSBarry Smith ierr = MatIsTranspose(Adia,Bdia,tol,f);CHKERRQ(ierr); 1192cd0d46ebSvictorle if (!*f) PetscFunctionReturn(0); 11934f423910Svictorle ierr = PetscObjectGetComm((PetscObject)Amat,&comm);CHKERRQ(ierr); 1194b1d57f15SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1195b1d57f15SBarry Smith if (size == 1) PetscFunctionReturn(0); 119642e5f5b4Svictorle 119742e5f5b4Svictorle /* Hard test: off-diagonal block. This takes a MatGetSubMatrix. 
*/ 1198cd0d46ebSvictorle ierr = MatGetSize(Amat,&M,&N);CHKERRQ(ierr); 1199cd0d46ebSvictorle ierr = MatGetOwnershipRange(Amat,&first,&last);CHKERRQ(ierr); 1200b1d57f15SBarry Smith ierr = PetscMalloc((N-last+first)*sizeof(PetscInt),¬me);CHKERRQ(ierr); 1201cd0d46ebSvictorle for (i=0; i<first; i++) notme[i] = i; 1202cd0d46ebSvictorle for (i=last; i<M; i++) notme[i-last+first] = i; 120370b3c8c7SBarry Smith ierr = ISCreateGeneral(MPI_COMM_SELF,N-last+first,notme,PETSC_COPY_VALUES,&Notme);CHKERRQ(ierr); 1204268466fbSBarry Smith ierr = ISCreateStride(MPI_COMM_SELF,last-first,first,1,&Me);CHKERRQ(ierr); 1205268466fbSBarry Smith ierr = MatGetSubMatrices(Amat,1,&Me,&Notme,MAT_INITIAL_MATRIX,&Aoffs);CHKERRQ(ierr); 120666501d38Svictorle Aoff = Aoffs[0]; 1207268466fbSBarry Smith ierr = MatGetSubMatrices(Bmat,1,&Notme,&Me,MAT_INITIAL_MATRIX,&Boffs);CHKERRQ(ierr); 120866501d38Svictorle Boff = Boffs[0]; 12095485867bSBarry Smith ierr = MatIsTranspose(Aoff,Boff,tol,f);CHKERRQ(ierr); 121066501d38Svictorle ierr = MatDestroyMatrices(1,&Aoffs);CHKERRQ(ierr); 121166501d38Svictorle ierr = MatDestroyMatrices(1,&Boffs);CHKERRQ(ierr); 12126bf464f9SBarry Smith ierr = ISDestroy(&Me);CHKERRQ(ierr); 12136bf464f9SBarry Smith ierr = ISDestroy(&Notme);CHKERRQ(ierr); 12143e0d0d19SHong Zhang ierr = PetscFree(notme);CHKERRQ(ierr); 1215cd0d46ebSvictorle PetscFunctionReturn(0); 1216cd0d46ebSvictorle } 1217cd0d46ebSvictorle 12184a2ae208SSatish Balay #undef __FUNCT__ 12194a2ae208SSatish Balay #define __FUNCT__ "MatMultTransposeAdd_MPIAIJ" 1220dfbe8321SBarry Smith PetscErrorCode MatMultTransposeAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz) 1221da3a660dSBarry Smith { 1222416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1223dfbe8321SBarry Smith PetscErrorCode ierr; 1224da3a660dSBarry Smith 12253a40ed3dSBarry Smith PetscFunctionBegin; 1226da3a660dSBarry Smith /* do nondiagonal part */ 12277c922b88SBarry Smith ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr); 1228da3a660dSBarry Smith /* send 
it on its way */ 1229ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1230da3a660dSBarry Smith /* do local part */ 12317c922b88SBarry Smith ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr); 1232a5ff213dSBarry Smith /* receive remote parts */ 1233ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 12343a40ed3dSBarry Smith PetscFunctionReturn(0); 1235da3a660dSBarry Smith } 1236da3a660dSBarry Smith 12371eb62cbbSBarry Smith /* 12381eb62cbbSBarry Smith This only works correctly for square matrices where the subblock A->A is the 12391eb62cbbSBarry Smith diagonal block 12401eb62cbbSBarry Smith */ 12414a2ae208SSatish Balay #undef __FUNCT__ 12424a2ae208SSatish Balay #define __FUNCT__ "MatGetDiagonal_MPIAIJ" 1243dfbe8321SBarry Smith PetscErrorCode MatGetDiagonal_MPIAIJ(Mat A,Vec v) 12441eb62cbbSBarry Smith { 1245dfbe8321SBarry Smith PetscErrorCode ierr; 1246416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 12473a40ed3dSBarry Smith 12483a40ed3dSBarry Smith PetscFunctionBegin; 1249ce94432eSBarry Smith if (A->rmap->N != A->cmap->N) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block"); 1250e7e72b3dSBarry Smith if (A->rmap->rstart != A->cmap->rstart || A->rmap->rend != A->cmap->rend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"row partition must equal col partition"); 12513a40ed3dSBarry Smith ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr); 12523a40ed3dSBarry Smith PetscFunctionReturn(0); 12531eb62cbbSBarry Smith } 12541eb62cbbSBarry Smith 12554a2ae208SSatish Balay #undef __FUNCT__ 12564a2ae208SSatish Balay #define __FUNCT__ "MatScale_MPIAIJ" 1257f4df32b1SMatthew Knepley PetscErrorCode MatScale_MPIAIJ(Mat A,PetscScalar aa) 1258052efed2SBarry Smith { 1259052efed2SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1260dfbe8321SBarry Smith PetscErrorCode ierr; 12613a40ed3dSBarry Smith 
12623a40ed3dSBarry Smith PetscFunctionBegin; 1263f4df32b1SMatthew Knepley ierr = MatScale(a->A,aa);CHKERRQ(ierr); 1264f4df32b1SMatthew Knepley ierr = MatScale(a->B,aa);CHKERRQ(ierr); 12653a40ed3dSBarry Smith PetscFunctionReturn(0); 1266052efed2SBarry Smith } 1267052efed2SBarry Smith 12684a2ae208SSatish Balay #undef __FUNCT__ 12694a2ae208SSatish Balay #define __FUNCT__ "MatDestroy_MPIAIJ" 1270dfbe8321SBarry Smith PetscErrorCode MatDestroy_MPIAIJ(Mat mat) 12711eb62cbbSBarry Smith { 127244a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 1273dfbe8321SBarry Smith PetscErrorCode ierr; 127483e2fdc7SBarry Smith 12753a40ed3dSBarry Smith PetscFunctionBegin; 1276aa482453SBarry Smith #if defined(PETSC_USE_LOG) 1277d0f46423SBarry Smith PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D",mat->rmap->N,mat->cmap->N); 1278a5a9c739SBarry Smith #endif 12798798bf22SSatish Balay ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr); 12806bf464f9SBarry Smith ierr = VecDestroy(&aij->diag);CHKERRQ(ierr); 12816bf464f9SBarry Smith ierr = MatDestroy(&aij->A);CHKERRQ(ierr); 12826bf464f9SBarry Smith ierr = MatDestroy(&aij->B);CHKERRQ(ierr); 1283aa482453SBarry Smith #if defined(PETSC_USE_CTABLE) 12846bc0bbbfSBarry Smith ierr = PetscTableDestroy(&aij->colmap);CHKERRQ(ierr); 1285b1fc9764SSatish Balay #else 128605b42c5fSBarry Smith ierr = PetscFree(aij->colmap);CHKERRQ(ierr); 1287b1fc9764SSatish Balay #endif 128805b42c5fSBarry Smith ierr = PetscFree(aij->garray);CHKERRQ(ierr); 12896bf464f9SBarry Smith ierr = VecDestroy(&aij->lvec);CHKERRQ(ierr); 12906bf464f9SBarry Smith ierr = VecScatterDestroy(&aij->Mvctx);CHKERRQ(ierr); 129103095fedSBarry Smith ierr = PetscFree2(aij->rowvalues,aij->rowindices);CHKERRQ(ierr); 12928aa348c1SBarry Smith ierr = PetscFree(aij->ld);CHKERRQ(ierr); 1293bf0cc555SLisandro Dalcin ierr = PetscFree(mat->data);CHKERRQ(ierr); 1294901853e0SKris Buschelman 1295dbd8c25aSHong Zhang ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr); 
1296bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C",NULL);CHKERRQ(ierr); 1297bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C",NULL);CHKERRQ(ierr); 1298bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C",NULL);CHKERRQ(ierr); 1299bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)mat,"MatIsTranspose_C",NULL);CHKERRQ(ierr); 1300bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocation_C",NULL);CHKERRQ(ierr); 1301bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocationCSR_C",NULL);CHKERRQ(ierr); 1302bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C",NULL);CHKERRQ(ierr); 1303bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpiaij_mpisbaij_C",NULL);CHKERRQ(ierr); 13043a40ed3dSBarry Smith PetscFunctionReturn(0); 13051eb62cbbSBarry Smith } 1306ee50ffe9SBarry Smith 13074a2ae208SSatish Balay #undef __FUNCT__ 13088e2fed03SBarry Smith #define __FUNCT__ "MatView_MPIAIJ_Binary" 1309dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_Binary(Mat mat,PetscViewer viewer) 13108e2fed03SBarry Smith { 13118e2fed03SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 13128e2fed03SBarry Smith Mat_SeqAIJ *A = (Mat_SeqAIJ*)aij->A->data; 13138e2fed03SBarry Smith Mat_SeqAIJ *B = (Mat_SeqAIJ*)aij->B->data; 13146849ba73SBarry Smith PetscErrorCode ierr; 131532dcc486SBarry Smith PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag; 13166f69ff64SBarry Smith int fd; 1317a788621eSSatish Balay PetscInt nz,header[4],*row_lengths,*range=0,rlen,i; 1318d0f46423SBarry Smith PetscInt nzmax,*column_indices,j,k,col,*garray = aij->garray,cnt,cstart = mat->cmap->rstart,rnz; 13198e2fed03SBarry Smith PetscScalar *column_values; 132085ebf7a4SBarry Smith PetscInt message_count,flowcontrolcount; 
1321b37d52dbSMark F. Adams FILE *file; 13228e2fed03SBarry Smith 13238e2fed03SBarry Smith PetscFunctionBegin; 1324ce94432eSBarry Smith ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr); 1325ce94432eSBarry Smith ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr); 13268e2fed03SBarry Smith nz = A->nz + B->nz; 1327958c9bccSBarry Smith if (!rank) { 13280700a824SBarry Smith header[0] = MAT_FILE_CLASSID; 1329d0f46423SBarry Smith header[1] = mat->rmap->N; 1330d0f46423SBarry Smith header[2] = mat->cmap->N; 13312205254eSKarl Rupp 1332ce94432eSBarry Smith ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 13338e2fed03SBarry Smith ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr); 13346f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 13358e2fed03SBarry Smith /* get largest number of rows any processor has */ 1336d0f46423SBarry Smith rlen = mat->rmap->n; 1337d0f46423SBarry Smith range = mat->rmap->range; 13382205254eSKarl Rupp for (i=1; i<size; i++) rlen = PetscMax(rlen,range[i+1] - range[i]); 13398e2fed03SBarry Smith } else { 1340ce94432eSBarry Smith ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 1341d0f46423SBarry Smith rlen = mat->rmap->n; 13428e2fed03SBarry Smith } 13438e2fed03SBarry Smith 13448e2fed03SBarry Smith /* load up the local row counts */ 1345b1d57f15SBarry Smith ierr = PetscMalloc((rlen+1)*sizeof(PetscInt),&row_lengths);CHKERRQ(ierr); 13462205254eSKarl Rupp for (i=0; i<mat->rmap->n; i++) row_lengths[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i]; 13478e2fed03SBarry Smith 13488e2fed03SBarry Smith /* store the row lengths to the file */ 134985ebf7a4SBarry Smith ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr); 1350958c9bccSBarry Smith if (!rank) { 1351d0f46423SBarry Smith ierr = 
PetscBinaryWrite(fd,row_lengths,mat->rmap->n,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 13528e2fed03SBarry Smith for (i=1; i<size; i++) { 1353639ff905SBarry Smith ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr); 13548e2fed03SBarry Smith rlen = range[i+1] - range[i]; 1355ce94432eSBarry Smith ierr = MPIULong_Recv(row_lengths,rlen,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 13566f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,row_lengths,rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 13578e2fed03SBarry Smith } 1358639ff905SBarry Smith ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr); 13598e2fed03SBarry Smith } else { 1360639ff905SBarry Smith ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr); 1361ce94432eSBarry Smith ierr = MPIULong_Send(row_lengths,mat->rmap->n,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 1362639ff905SBarry Smith ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr); 13638e2fed03SBarry Smith } 13648e2fed03SBarry Smith ierr = PetscFree(row_lengths);CHKERRQ(ierr); 13658e2fed03SBarry Smith 13668e2fed03SBarry Smith /* load up the local column indices */ 13671147fc2aSKarl Rupp nzmax = nz; /* th processor needs space a largest processor needs */ 1368ce94432eSBarry Smith ierr = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 1369b1d57f15SBarry Smith ierr = PetscMalloc((nzmax+1)*sizeof(PetscInt),&column_indices);CHKERRQ(ierr); 13708e2fed03SBarry Smith cnt = 0; 1371d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 13728e2fed03SBarry Smith for (j=B->i[i]; j<B->i[i+1]; j++) { 13738e2fed03SBarry Smith if ((col = garray[B->j[j]]) > cstart) break; 13748e2fed03SBarry Smith column_indices[cnt++] = col; 13758e2fed03SBarry Smith } 13762205254eSKarl Rupp for (k=A->i[i]; k<A->i[i+1]; k++) column_indices[cnt++] = A->j[k] + cstart; 13772205254eSKarl Rupp for (; 
j<B->i[i+1]; j++) column_indices[cnt++] = garray[B->j[j]]; 13788e2fed03SBarry Smith } 1379e32f2f54SBarry Smith if (cnt != A->nz + B->nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz); 13808e2fed03SBarry Smith 13818e2fed03SBarry Smith /* store the column indices to the file */ 138285ebf7a4SBarry Smith ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr); 1383958c9bccSBarry Smith if (!rank) { 13848e2fed03SBarry Smith MPI_Status status; 13856f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 13868e2fed03SBarry Smith for (i=1; i<size; i++) { 1387639ff905SBarry Smith ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr); 1388ce94432eSBarry Smith ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr); 1389e32f2f54SBarry Smith if (rnz > nzmax) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax); 1390ce94432eSBarry Smith ierr = MPIULong_Recv(column_indices,rnz,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 13916f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_indices,rnz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 13928e2fed03SBarry Smith } 1393639ff905SBarry Smith ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr); 13948e2fed03SBarry Smith } else { 1395639ff905SBarry Smith ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr); 1396ce94432eSBarry Smith ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 1397ce94432eSBarry Smith ierr = MPIULong_Send(column_indices,nz,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 1398639ff905SBarry Smith ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr); 13998e2fed03SBarry Smith } 14008e2fed03SBarry Smith ierr = 
PetscFree(column_indices);CHKERRQ(ierr); 14018e2fed03SBarry Smith 14028e2fed03SBarry Smith /* load up the local column values */ 14038e2fed03SBarry Smith ierr = PetscMalloc((nzmax+1)*sizeof(PetscScalar),&column_values);CHKERRQ(ierr); 14048e2fed03SBarry Smith cnt = 0; 1405d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 14068e2fed03SBarry Smith for (j=B->i[i]; j<B->i[i+1]; j++) { 14078e2fed03SBarry Smith if (garray[B->j[j]] > cstart) break; 14088e2fed03SBarry Smith column_values[cnt++] = B->a[j]; 14098e2fed03SBarry Smith } 14102205254eSKarl Rupp for (k=A->i[i]; k<A->i[i+1]; k++) column_values[cnt++] = A->a[k]; 14112205254eSKarl Rupp for (; j<B->i[i+1]; j++) column_values[cnt++] = B->a[j]; 14128e2fed03SBarry Smith } 1413e32f2f54SBarry Smith if (cnt != A->nz + B->nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz); 14148e2fed03SBarry Smith 14158e2fed03SBarry Smith /* store the column values to the file */ 141685ebf7a4SBarry Smith ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr); 1417958c9bccSBarry Smith if (!rank) { 14188e2fed03SBarry Smith MPI_Status status; 14196f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr); 14208e2fed03SBarry Smith for (i=1; i<size; i++) { 1421639ff905SBarry Smith ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr); 1422ce94432eSBarry Smith ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr); 1423e32f2f54SBarry Smith if (rnz > nzmax) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax); 1424ce94432eSBarry Smith ierr = MPIULong_Recv(column_values,rnz,MPIU_SCALAR,i,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 14256f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_values,rnz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr); 14268e2fed03SBarry Smith } 1427639ff905SBarry Smith 
ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr); 14288e2fed03SBarry Smith } else { 1429639ff905SBarry Smith ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr); 1430ce94432eSBarry Smith ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 1431ce94432eSBarry Smith ierr = MPIULong_Send(column_values,nz,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 1432639ff905SBarry Smith ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr); 14338e2fed03SBarry Smith } 14348e2fed03SBarry Smith ierr = PetscFree(column_values);CHKERRQ(ierr); 1435b37d52dbSMark F. Adams 1436b37d52dbSMark F. Adams ierr = PetscViewerBinaryGetInfoPointer(viewer,&file);CHKERRQ(ierr); 14372205254eSKarl Rupp if (file) fprintf(file,"-matload_block_size %d\n",(int)mat->rmap->bs); 14388e2fed03SBarry Smith PetscFunctionReturn(0); 14398e2fed03SBarry Smith } 14408e2fed03SBarry Smith 14419804daf3SBarry Smith #include <petscdraw.h> 14428e2fed03SBarry Smith #undef __FUNCT__ 14434a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ_ASCIIorDraworSocket" 1444dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer) 1445416022c9SBarry Smith { 144644a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 1447dfbe8321SBarry Smith PetscErrorCode ierr; 144832dcc486SBarry Smith PetscMPIInt rank = aij->rank,size = aij->size; 1449ace3abfcSBarry Smith PetscBool isdraw,iascii,isbinary; 1450b0a32e0cSBarry Smith PetscViewer sviewer; 1451f3ef73ceSBarry Smith PetscViewerFormat format; 1452416022c9SBarry Smith 14533a40ed3dSBarry Smith PetscFunctionBegin; 1454251f4c67SDmitry Karpeev ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 1455251f4c67SDmitry Karpeev ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 1456251f4c67SDmitry Karpeev ierr = 
PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 145732077d6dSBarry Smith if (iascii) { 1458b0a32e0cSBarry Smith ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 1459456192e2SBarry Smith if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) { 14604e220ebcSLois Curfman McInnes MatInfo info; 1461ace3abfcSBarry Smith PetscBool inodes; 1462923f20ffSKris Buschelman 1463ce94432eSBarry Smith ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr); 1464888f2ed8SSatish Balay ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr); 14650298fd71SBarry Smith ierr = MatInodeGetInodeSizes(aij->A,NULL,(PetscInt**)&inodes,NULL);CHKERRQ(ierr); 14667b23a99aSBarry Smith ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr); 1467923f20ffSKris Buschelman if (!inodes) { 146877431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, not using I-node routines\n", 1469d0f46423SBarry Smith rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr); 14706831982aSBarry Smith } else { 147177431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, using I-node routines\n", 1472d0f46423SBarry Smith rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr); 14736831982aSBarry Smith } 1474888f2ed8SSatish Balay ierr = MatGetInfo(aij->A,MAT_LOCAL,&info);CHKERRQ(ierr); 147577431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr); 1476888f2ed8SSatish Balay ierr = MatGetInfo(aij->B,MAT_LOCAL,&info);CHKERRQ(ierr); 147777431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr); 1478b0a32e0cSBarry Smith ierr = 
PetscViewerFlush(viewer);CHKERRQ(ierr); 14797b23a99aSBarry Smith ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr); 148007d81ca4SBarry Smith ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr); 1481a40aa06bSLois Curfman McInnes ierr = VecScatterView(aij->Mvctx,viewer);CHKERRQ(ierr); 14823a40ed3dSBarry Smith PetscFunctionReturn(0); 1483fb9695e5SSatish Balay } else if (format == PETSC_VIEWER_ASCII_INFO) { 1484923f20ffSKris Buschelman PetscInt inodecount,inodelimit,*inodes; 1485923f20ffSKris Buschelman ierr = MatInodeGetInodeSizes(aij->A,&inodecount,&inodes,&inodelimit);CHKERRQ(ierr); 1486923f20ffSKris Buschelman if (inodes) { 1487923f20ffSKris Buschelman ierr = PetscViewerASCIIPrintf(viewer,"using I-node (on process 0) routines: found %D nodes, limit used is %D\n",inodecount,inodelimit);CHKERRQ(ierr); 1488d38fa0fbSBarry Smith } else { 1489d38fa0fbSBarry Smith ierr = PetscViewerASCIIPrintf(viewer,"not using I-node (on process 0) routines\n");CHKERRQ(ierr); 1490d38fa0fbSBarry Smith } 14913a40ed3dSBarry Smith PetscFunctionReturn(0); 14924aedb280SBarry Smith } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) { 14934aedb280SBarry Smith PetscFunctionReturn(0); 149408480c60SBarry Smith } 14958e2fed03SBarry Smith } else if (isbinary) { 14968e2fed03SBarry Smith if (size == 1) { 14977adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr); 14988e2fed03SBarry Smith ierr = MatView(aij->A,viewer);CHKERRQ(ierr); 14998e2fed03SBarry Smith } else { 15008e2fed03SBarry Smith ierr = MatView_MPIAIJ_Binary(mat,viewer);CHKERRQ(ierr); 15018e2fed03SBarry Smith } 15028e2fed03SBarry Smith PetscFunctionReturn(0); 15030f5bd95cSBarry Smith } else if (isdraw) { 1504b0a32e0cSBarry Smith PetscDraw draw; 1505ace3abfcSBarry Smith PetscBool isnull; 1506b0a32e0cSBarry Smith ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr); 1507b0a32e0cSBarry Smith ierr 
= PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0); 150819bcc07fSBarry Smith } 150919bcc07fSBarry Smith 151017699dbbSLois Curfman McInnes if (size == 1) { 15117adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr); 151278b31e54SBarry Smith ierr = MatView(aij->A,viewer);CHKERRQ(ierr); 15133a40ed3dSBarry Smith } else { 151495373324SBarry Smith /* assemble the entire matrix onto first processor. */ 151595373324SBarry Smith Mat A; 1516ec8511deSBarry Smith Mat_SeqAIJ *Aloc; 1517d0f46423SBarry Smith PetscInt M = mat->rmap->N,N = mat->cmap->N,m,*ai,*aj,row,*cols,i,*ct; 1518dd6ea824SBarry Smith MatScalar *a; 15192ee70a88SLois Curfman McInnes 152032a366e4SMatthew Knepley if (mat->rmap->N > 1024) { 1521ace3abfcSBarry Smith PetscBool flg = PETSC_FALSE; 152232a366e4SMatthew Knepley 15230298fd71SBarry Smith ierr = PetscOptionsGetBool(((PetscObject) mat)->prefix, "-mat_ascii_output_large", &flg,NULL);CHKERRQ(ierr); 1524ce94432eSBarry Smith if (!flg) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"ASCII matrix output not allowed for matrices with more than 1024 rows, use binary format instead.\nYou can override this restriction using -mat_ascii_output_large."); 152532a366e4SMatthew Knepley } 15260805154bSBarry Smith 1527ce94432eSBarry Smith ierr = MatCreate(PetscObjectComm((PetscObject)mat),&A);CHKERRQ(ierr); 152817699dbbSLois Curfman McInnes if (!rank) { 1529f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr); 15303a40ed3dSBarry Smith } else { 1531f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr); 153295373324SBarry Smith } 1533f204ca49SKris Buschelman /* This is just a temporary matrix, so explicitly using MATMPIAIJ is probably best */ 1534f204ca49SKris Buschelman ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr); 15350298fd71SBarry Smith ierr = MatMPIAIJSetPreallocation(A,0,NULL,0,NULL);CHKERRQ(ierr); 15362b82e772SSatish Balay ierr = 
MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr); 153752e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,A);CHKERRQ(ierr); 1538416022c9SBarry Smith 153995373324SBarry Smith /* copy over the A part */ 1540ec8511deSBarry Smith Aloc = (Mat_SeqAIJ*)aij->A->data; 1541d0f46423SBarry Smith m = aij->A->rmap->n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a; 1542d0f46423SBarry Smith row = mat->rmap->rstart; 15432205254eSKarl Rupp for (i=0; i<ai[m]; i++) aj[i] += mat->cmap->rstart; 154495373324SBarry Smith for (i=0; i<m; i++) { 1545416022c9SBarry Smith ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],aj,a,INSERT_VALUES);CHKERRQ(ierr); 154626fbe8dcSKarl Rupp row++; 154726fbe8dcSKarl Rupp a += ai[i+1]-ai[i]; aj += ai[i+1]-ai[i]; 154895373324SBarry Smith } 15492ee70a88SLois Curfman McInnes aj = Aloc->j; 15502205254eSKarl Rupp for (i=0; i<ai[m]; i++) aj[i] -= mat->cmap->rstart; 155195373324SBarry Smith 155295373324SBarry Smith /* copy over the B part */ 1553ec8511deSBarry Smith Aloc = (Mat_SeqAIJ*)aij->B->data; 1554d0f46423SBarry Smith m = aij->B->rmap->n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a; 1555d0f46423SBarry Smith row = mat->rmap->rstart; 1556b1d57f15SBarry Smith ierr = PetscMalloc((ai[m]+1)*sizeof(PetscInt),&cols);CHKERRQ(ierr); 1557b0a32e0cSBarry Smith ct = cols; 15582205254eSKarl Rupp for (i=0; i<ai[m]; i++) cols[i] = aij->garray[aj[i]]; 155995373324SBarry Smith for (i=0; i<m; i++) { 1560416022c9SBarry Smith ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],cols,a,INSERT_VALUES);CHKERRQ(ierr); 15612205254eSKarl Rupp row++; 15622205254eSKarl Rupp a += ai[i+1]-ai[i]; cols += ai[i+1]-ai[i]; 156395373324SBarry Smith } 1564606d414cSSatish Balay ierr = PetscFree(ct);CHKERRQ(ierr); 15656d4a8577SBarry Smith ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 15666d4a8577SBarry Smith ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 156755843e3eSBarry Smith /* 156855843e3eSBarry Smith Everyone has to call to draw the matrix since the graphics waits are 
1569b0a32e0cSBarry Smith synchronized across all processors that share the PetscDraw object 157055843e3eSBarry Smith */ 1571b0a32e0cSBarry Smith ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr); 1572e03a110bSBarry Smith if (!rank) { 15737adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)((Mat_MPIAIJ*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr); 15747566de4bSShri Abhyankar /* Set the type name to MATMPIAIJ so that the correct type can be printed out by PetscObjectPrintClassNamePrefixType() in MatView_SeqAIJ_ASCII()*/ 15757566de4bSShri Abhyankar PetscStrcpy(((PetscObject)((Mat_MPIAIJ*)(A->data))->A)->type_name,MATMPIAIJ); 15766831982aSBarry Smith ierr = MatView(((Mat_MPIAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr); 157795373324SBarry Smith } 1578b0a32e0cSBarry Smith ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr); 15796bf464f9SBarry Smith ierr = MatDestroy(&A);CHKERRQ(ierr); 158095373324SBarry Smith } 15813a40ed3dSBarry Smith PetscFunctionReturn(0); 15821eb62cbbSBarry Smith } 15831eb62cbbSBarry Smith 15844a2ae208SSatish Balay #undef __FUNCT__ 15854a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ" 1586dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ(Mat mat,PetscViewer viewer) 1587416022c9SBarry Smith { 1588dfbe8321SBarry Smith PetscErrorCode ierr; 1589ace3abfcSBarry Smith PetscBool iascii,isdraw,issocket,isbinary; 1590416022c9SBarry Smith 15913a40ed3dSBarry Smith PetscFunctionBegin; 1592251f4c67SDmitry Karpeev ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 1593251f4c67SDmitry Karpeev ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 1594251f4c67SDmitry Karpeev ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 1595251f4c67SDmitry Karpeev ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr); 159632077d6dSBarry Smith if (iascii || isdraw || 
isbinary || issocket) { 15977b2a1423SBarry Smith ierr = MatView_MPIAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr); 1598416022c9SBarry Smith } 15993a40ed3dSBarry Smith PetscFunctionReturn(0); 1600416022c9SBarry Smith } 1601416022c9SBarry Smith 16024a2ae208SSatish Balay #undef __FUNCT__ 160341f059aeSBarry Smith #define __FUNCT__ "MatSOR_MPIAIJ" 160441f059aeSBarry Smith PetscErrorCode MatSOR_MPIAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx) 16058a729477SBarry Smith { 160644a69424SLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 1607dfbe8321SBarry Smith PetscErrorCode ierr; 16086987fefcSBarry Smith Vec bb1 = 0; 1609ace3abfcSBarry Smith PetscBool hasop; 16108a729477SBarry Smith 16113a40ed3dSBarry Smith PetscFunctionBegin; 1612a2b30743SBarry Smith if (flag == SOR_APPLY_UPPER) { 161341f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 1614a2b30743SBarry Smith PetscFunctionReturn(0); 1615a2b30743SBarry Smith } 1616a2b30743SBarry Smith 16174e980039SJed Brown if (its > 1 || ~flag & SOR_ZERO_INITIAL_GUESS || flag & SOR_EISENSTAT) { 16184e980039SJed Brown ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr); 16194e980039SJed Brown } 16204e980039SJed Brown 1621c16cb8f2SBarry Smith if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) { 1622da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 162341f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 16242798e883SHong Zhang its--; 1625da3a660dSBarry Smith } 16262798e883SHong Zhang 16272798e883SHong Zhang while (its--) { 1628ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1629ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 16302798e883SHong Zhang 1631c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1632efb30889SBarry 
Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1633c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 16342798e883SHong Zhang 1635c14dc6b6SHong Zhang /* local sweep */ 163641f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr); 16372798e883SHong Zhang } 16383a40ed3dSBarry Smith } else if (flag & SOR_LOCAL_FORWARD_SWEEP) { 1639da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 164041f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 16412798e883SHong Zhang its--; 1642da3a660dSBarry Smith } 16432798e883SHong Zhang while (its--) { 1644ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1645ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 16462798e883SHong Zhang 1647c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1648efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1649c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 1650c14dc6b6SHong Zhang 1651c14dc6b6SHong Zhang /* local sweep */ 165241f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr); 16532798e883SHong Zhang } 16543a40ed3dSBarry Smith } else if (flag & SOR_LOCAL_BACKWARD_SWEEP) { 1655da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 165641f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 16572798e883SHong Zhang its--; 1658da3a660dSBarry Smith } 16592798e883SHong Zhang while (its--) { 1660ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1661ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 16622798e883SHong Zhang 
1663c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1664efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1665c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 16662798e883SHong Zhang 1667c14dc6b6SHong Zhang /* local sweep */ 166841f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr); 16692798e883SHong Zhang } 1670a7420bb7SBarry Smith } else if (flag & SOR_EISENSTAT) { 1671a7420bb7SBarry Smith Vec xx1; 1672a7420bb7SBarry Smith 1673a7420bb7SBarry Smith ierr = VecDuplicate(bb,&xx1);CHKERRQ(ierr); 167441f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_BACKWARD_SWEEP),fshift,lits,1,xx);CHKERRQ(ierr); 1675a7420bb7SBarry Smith 1676a7420bb7SBarry Smith ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1677a7420bb7SBarry Smith ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1678a7420bb7SBarry Smith if (!mat->diag) { 16790298fd71SBarry Smith ierr = MatGetVecs(matin,&mat->diag,NULL);CHKERRQ(ierr); 1680a7420bb7SBarry Smith ierr = MatGetDiagonal(matin,mat->diag);CHKERRQ(ierr); 1681a7420bb7SBarry Smith } 1682bd0c2dcbSBarry Smith ierr = MatHasOperation(matin,MATOP_MULT_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr); 1683bd0c2dcbSBarry Smith if (hasop) { 1684bd0c2dcbSBarry Smith ierr = MatMultDiagonalBlock(matin,xx,bb1);CHKERRQ(ierr); 1685bd0c2dcbSBarry Smith } else { 1686a7420bb7SBarry Smith ierr = VecPointwiseMult(bb1,mat->diag,xx);CHKERRQ(ierr); 1687bd0c2dcbSBarry Smith } 1688887ee2caSBarry Smith ierr = VecAYPX(bb1,(omega-2.0)/omega,bb);CHKERRQ(ierr); 1689887ee2caSBarry Smith 1690a7420bb7SBarry Smith ierr = MatMultAdd(mat->B,mat->lvec,bb1,bb1);CHKERRQ(ierr); 1691a7420bb7SBarry Smith 1692a7420bb7SBarry Smith /* local sweep */ 169341f059aeSBarry Smith ierr = 
(*mat->A->ops->sor)(mat->A,bb1,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_FORWARD_SWEEP),fshift,lits,1,xx1);CHKERRQ(ierr); 1694a7420bb7SBarry Smith ierr = VecAXPY(xx,1.0,xx1);CHKERRQ(ierr); 16956bf464f9SBarry Smith ierr = VecDestroy(&xx1);CHKERRQ(ierr); 1696ce94432eSBarry Smith } else SETERRQ(PetscObjectComm((PetscObject)matin),PETSC_ERR_SUP,"Parallel SOR not supported"); 1697c14dc6b6SHong Zhang 16986bf464f9SBarry Smith ierr = VecDestroy(&bb1);CHKERRQ(ierr); 16993a40ed3dSBarry Smith PetscFunctionReturn(0); 17008a729477SBarry Smith } 1701a66be287SLois Curfman McInnes 17024a2ae208SSatish Balay #undef __FUNCT__ 170342e855d1Svictor #define __FUNCT__ "MatPermute_MPIAIJ" 170442e855d1Svictor PetscErrorCode MatPermute_MPIAIJ(Mat A,IS rowp,IS colp,Mat *B) 170542e855d1Svictor { 170672e6a0cfSJed Brown Mat aA,aB,Aperm; 170772e6a0cfSJed Brown const PetscInt *rwant,*cwant,*gcols,*ai,*bi,*aj,*bj; 170872e6a0cfSJed Brown PetscScalar *aa,*ba; 170972e6a0cfSJed Brown PetscInt i,j,m,n,ng,anz,bnz,*dnnz,*onnz,*tdnnz,*tonnz,*rdest,*cdest,*work,*gcdest; 171072e6a0cfSJed Brown PetscSF rowsf,sf; 17110298fd71SBarry Smith IS parcolp = NULL; 171272e6a0cfSJed Brown PetscBool done; 171342e855d1Svictor PetscErrorCode ierr; 171442e855d1Svictor 171542e855d1Svictor PetscFunctionBegin; 171672e6a0cfSJed Brown ierr = MatGetLocalSize(A,&m,&n);CHKERRQ(ierr); 171772e6a0cfSJed Brown ierr = ISGetIndices(rowp,&rwant);CHKERRQ(ierr); 171872e6a0cfSJed Brown ierr = ISGetIndices(colp,&cwant);CHKERRQ(ierr); 171972e6a0cfSJed Brown ierr = PetscMalloc3(PetscMax(m,n),PetscInt,&work,m,PetscInt,&rdest,n,PetscInt,&cdest);CHKERRQ(ierr); 172072e6a0cfSJed Brown 172172e6a0cfSJed Brown /* Invert row permutation to find out where my rows should go */ 1722ce94432eSBarry Smith ierr = PetscSFCreate(PetscObjectComm((PetscObject)A),&rowsf);CHKERRQ(ierr); 17230298fd71SBarry Smith ierr = PetscSFSetGraphLayout(rowsf,A->rmap,A->rmap->n,NULL,PETSC_OWN_POINTER,rwant);CHKERRQ(ierr); 1724e9e74f11SJed Brown ierr = 
PetscSFSetFromOptions(rowsf);CHKERRQ(ierr); 172572e6a0cfSJed Brown for (i=0; i<m; i++) work[i] = A->rmap->rstart + i; 17268bfbc91cSJed Brown ierr = PetscSFReduceBegin(rowsf,MPIU_INT,work,rdest,MPIU_REPLACE);CHKERRQ(ierr); 17278bfbc91cSJed Brown ierr = PetscSFReduceEnd(rowsf,MPIU_INT,work,rdest,MPIU_REPLACE);CHKERRQ(ierr); 172872e6a0cfSJed Brown 172972e6a0cfSJed Brown /* Invert column permutation to find out where my columns should go */ 1730ce94432eSBarry Smith ierr = PetscSFCreate(PetscObjectComm((PetscObject)A),&sf);CHKERRQ(ierr); 17310298fd71SBarry Smith ierr = PetscSFSetGraphLayout(sf,A->cmap,A->cmap->n,NULL,PETSC_OWN_POINTER,cwant);CHKERRQ(ierr); 1732e9e74f11SJed Brown ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr); 173372e6a0cfSJed Brown for (i=0; i<n; i++) work[i] = A->cmap->rstart + i; 17348bfbc91cSJed Brown ierr = PetscSFReduceBegin(sf,MPIU_INT,work,cdest,MPIU_REPLACE);CHKERRQ(ierr); 17358bfbc91cSJed Brown ierr = PetscSFReduceEnd(sf,MPIU_INT,work,cdest,MPIU_REPLACE);CHKERRQ(ierr); 173672e6a0cfSJed Brown ierr = PetscSFDestroy(&sf);CHKERRQ(ierr); 173772e6a0cfSJed Brown 173872e6a0cfSJed Brown ierr = ISRestoreIndices(rowp,&rwant);CHKERRQ(ierr); 173972e6a0cfSJed Brown ierr = ISRestoreIndices(colp,&cwant);CHKERRQ(ierr); 174072e6a0cfSJed Brown ierr = MatMPIAIJGetSeqAIJ(A,&aA,&aB,&gcols);CHKERRQ(ierr); 174172e6a0cfSJed Brown 174272e6a0cfSJed Brown /* Find out where my gcols should go */ 17430298fd71SBarry Smith ierr = MatGetSize(aB,NULL,&ng);CHKERRQ(ierr); 174472e6a0cfSJed Brown ierr = PetscMalloc(ng*sizeof(PetscInt),&gcdest);CHKERRQ(ierr); 1745ce94432eSBarry Smith ierr = PetscSFCreate(PetscObjectComm((PetscObject)A),&sf);CHKERRQ(ierr); 17460298fd71SBarry Smith ierr = PetscSFSetGraphLayout(sf,A->cmap,ng,NULL,PETSC_OWN_POINTER,gcols);CHKERRQ(ierr); 1747e9e74f11SJed Brown ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr); 174872e6a0cfSJed Brown ierr = PetscSFBcastBegin(sf,MPIU_INT,cdest,gcdest);CHKERRQ(ierr); 174972e6a0cfSJed Brown ierr = 
PetscSFBcastEnd(sf,MPIU_INT,cdest,gcdest);CHKERRQ(ierr); 175072e6a0cfSJed Brown ierr = PetscSFDestroy(&sf);CHKERRQ(ierr); 175172e6a0cfSJed Brown 175272e6a0cfSJed Brown ierr = PetscMalloc4(m,PetscInt,&dnnz,m,PetscInt,&onnz,m,PetscInt,&tdnnz,m,PetscInt,&tonnz);CHKERRQ(ierr); 175372e6a0cfSJed Brown ierr = PetscMemzero(dnnz,m*sizeof(PetscInt));CHKERRQ(ierr); 175472e6a0cfSJed Brown ierr = PetscMemzero(onnz,m*sizeof(PetscInt));CHKERRQ(ierr); 175572e6a0cfSJed Brown ierr = MatGetRowIJ(aA,0,PETSC_FALSE,PETSC_FALSE,&anz,&ai,&aj,&done);CHKERRQ(ierr); 175672e6a0cfSJed Brown ierr = MatGetRowIJ(aB,0,PETSC_FALSE,PETSC_FALSE,&bnz,&bi,&bj,&done);CHKERRQ(ierr); 175772e6a0cfSJed Brown for (i=0; i<m; i++) { 175872e6a0cfSJed Brown PetscInt row = rdest[i],rowner; 175972e6a0cfSJed Brown ierr = PetscLayoutFindOwner(A->rmap,row,&rowner);CHKERRQ(ierr); 176072e6a0cfSJed Brown for (j=ai[i]; j<ai[i+1]; j++) { 176172e6a0cfSJed Brown PetscInt cowner,col = cdest[aj[j]]; 176272e6a0cfSJed Brown ierr = PetscLayoutFindOwner(A->cmap,col,&cowner);CHKERRQ(ierr); /* Could build an index for the columns to eliminate this search */ 176372e6a0cfSJed Brown if (rowner == cowner) dnnz[i]++; 176472e6a0cfSJed Brown else onnz[i]++; 176572e6a0cfSJed Brown } 176672e6a0cfSJed Brown for (j=bi[i]; j<bi[i+1]; j++) { 176772e6a0cfSJed Brown PetscInt cowner,col = gcdest[bj[j]]; 176872e6a0cfSJed Brown ierr = PetscLayoutFindOwner(A->cmap,col,&cowner);CHKERRQ(ierr); 176972e6a0cfSJed Brown if (rowner == cowner) dnnz[i]++; 177072e6a0cfSJed Brown else onnz[i]++; 177172e6a0cfSJed Brown } 177272e6a0cfSJed Brown } 177372e6a0cfSJed Brown ierr = PetscMemzero(tdnnz,m*sizeof(PetscInt));CHKERRQ(ierr); 177472e6a0cfSJed Brown ierr = PetscMemzero(tonnz,m*sizeof(PetscInt));CHKERRQ(ierr); 177572e6a0cfSJed Brown ierr = PetscSFBcastBegin(rowsf,MPIU_INT,dnnz,tdnnz);CHKERRQ(ierr); 177672e6a0cfSJed Brown ierr = PetscSFBcastEnd(rowsf,MPIU_INT,dnnz,tdnnz);CHKERRQ(ierr); 177772e6a0cfSJed Brown ierr = 
PetscSFBcastBegin(rowsf,MPIU_INT,onnz,tonnz);CHKERRQ(ierr); 177872e6a0cfSJed Brown ierr = PetscSFBcastEnd(rowsf,MPIU_INT,onnz,tonnz);CHKERRQ(ierr); 177972e6a0cfSJed Brown ierr = PetscSFDestroy(&rowsf);CHKERRQ(ierr); 178072e6a0cfSJed Brown 1781ce94432eSBarry Smith ierr = MatCreateAIJ(PetscObjectComm((PetscObject)A),A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N,0,tdnnz,0,tonnz,&Aperm);CHKERRQ(ierr); 178272e6a0cfSJed Brown ierr = MatSeqAIJGetArray(aA,&aa);CHKERRQ(ierr); 178372e6a0cfSJed Brown ierr = MatSeqAIJGetArray(aB,&ba);CHKERRQ(ierr); 178472e6a0cfSJed Brown for (i=0; i<m; i++) { 178572e6a0cfSJed Brown PetscInt *acols = dnnz,*bcols = onnz; /* Repurpose now-unneeded arrays */ 178672e6a0cfSJed Brown PetscInt rowlen; 178772e6a0cfSJed Brown rowlen = ai[i+1] - ai[i]; 178872e6a0cfSJed Brown for (j=0; j<rowlen; j++) acols[j] = cdest[aj[ai[i]+j]]; 178972e6a0cfSJed Brown ierr = MatSetValues(Aperm,1,&rdest[i],rowlen,acols,aa+ai[i],INSERT_VALUES);CHKERRQ(ierr); 179072e6a0cfSJed Brown rowlen = bi[i+1] - bi[i]; 179172e6a0cfSJed Brown for (j=0; j<rowlen; j++) bcols[j] = gcdest[bj[bi[i]+j]]; 179272e6a0cfSJed Brown ierr = MatSetValues(Aperm,1,&rdest[i],rowlen,bcols,ba+bi[i],INSERT_VALUES);CHKERRQ(ierr); 179372e6a0cfSJed Brown } 179472e6a0cfSJed Brown ierr = MatAssemblyBegin(Aperm,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 179572e6a0cfSJed Brown ierr = MatAssemblyEnd(Aperm,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 179672e6a0cfSJed Brown ierr = MatRestoreRowIJ(aA,0,PETSC_FALSE,PETSC_FALSE,&anz,&ai,&aj,&done);CHKERRQ(ierr); 179772e6a0cfSJed Brown ierr = MatRestoreRowIJ(aB,0,PETSC_FALSE,PETSC_FALSE,&bnz,&bi,&bj,&done);CHKERRQ(ierr); 179872e6a0cfSJed Brown ierr = MatSeqAIJRestoreArray(aA,&aa);CHKERRQ(ierr); 179972e6a0cfSJed Brown ierr = MatSeqAIJRestoreArray(aB,&ba);CHKERRQ(ierr); 180072e6a0cfSJed Brown ierr = PetscFree4(dnnz,onnz,tdnnz,tonnz);CHKERRQ(ierr); 180172e6a0cfSJed Brown ierr = PetscFree3(work,rdest,cdest);CHKERRQ(ierr); 180272e6a0cfSJed Brown ierr = PetscFree(gcdest);CHKERRQ(ierr); 
180372e6a0cfSJed Brown if (parcolp) {ierr = ISDestroy(&colp);CHKERRQ(ierr);} 180472e6a0cfSJed Brown *B = Aperm; 180542e855d1Svictor PetscFunctionReturn(0); 180642e855d1Svictor } 180742e855d1Svictor 180842e855d1Svictor #undef __FUNCT__ 18094a2ae208SSatish Balay #define __FUNCT__ "MatGetInfo_MPIAIJ" 1810dfbe8321SBarry Smith PetscErrorCode MatGetInfo_MPIAIJ(Mat matin,MatInfoType flag,MatInfo *info) 1811a66be287SLois Curfman McInnes { 1812a66be287SLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 1813a66be287SLois Curfman McInnes Mat A = mat->A,B = mat->B; 1814dfbe8321SBarry Smith PetscErrorCode ierr; 1815329f5518SBarry Smith PetscReal isend[5],irecv[5]; 1816a66be287SLois Curfman McInnes 18173a40ed3dSBarry Smith PetscFunctionBegin; 18184e220ebcSLois Curfman McInnes info->block_size = 1.0; 18194e220ebcSLois Curfman McInnes ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr); 18202205254eSKarl Rupp 18214e220ebcSLois Curfman McInnes isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded; 18224e220ebcSLois Curfman McInnes isend[3] = info->memory; isend[4] = info->mallocs; 18232205254eSKarl Rupp 18244e220ebcSLois Curfman McInnes ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr); 18252205254eSKarl Rupp 18264e220ebcSLois Curfman McInnes isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded; 18274e220ebcSLois Curfman McInnes isend[3] += info->memory; isend[4] += info->mallocs; 1828a66be287SLois Curfman McInnes if (flag == MAT_LOCAL) { 18294e220ebcSLois Curfman McInnes info->nz_used = isend[0]; 18304e220ebcSLois Curfman McInnes info->nz_allocated = isend[1]; 18314e220ebcSLois Curfman McInnes info->nz_unneeded = isend[2]; 18324e220ebcSLois Curfman McInnes info->memory = isend[3]; 18334e220ebcSLois Curfman McInnes info->mallocs = isend[4]; 1834a66be287SLois Curfman McInnes } else if (flag == MAT_GLOBAL_MAX) { 1835ce94432eSBarry Smith ierr = 
MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr); 18362205254eSKarl Rupp 18374e220ebcSLois Curfman McInnes info->nz_used = irecv[0]; 18384e220ebcSLois Curfman McInnes info->nz_allocated = irecv[1]; 18394e220ebcSLois Curfman McInnes info->nz_unneeded = irecv[2]; 18404e220ebcSLois Curfman McInnes info->memory = irecv[3]; 18414e220ebcSLois Curfman McInnes info->mallocs = irecv[4]; 1842a66be287SLois Curfman McInnes } else if (flag == MAT_GLOBAL_SUM) { 1843ce94432eSBarry Smith ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr); 18442205254eSKarl Rupp 18454e220ebcSLois Curfman McInnes info->nz_used = irecv[0]; 18464e220ebcSLois Curfman McInnes info->nz_allocated = irecv[1]; 18474e220ebcSLois Curfman McInnes info->nz_unneeded = irecv[2]; 18484e220ebcSLois Curfman McInnes info->memory = irecv[3]; 18494e220ebcSLois Curfman McInnes info->mallocs = irecv[4]; 1850a66be287SLois Curfman McInnes } 18514e220ebcSLois Curfman McInnes info->fill_ratio_given = 0; /* no parallel LU/ILU/Cholesky */ 18524e220ebcSLois Curfman McInnes info->fill_ratio_needed = 0; 18534e220ebcSLois Curfman McInnes info->factor_mallocs = 0; 18543a40ed3dSBarry Smith PetscFunctionReturn(0); 1855a66be287SLois Curfman McInnes } 1856a66be287SLois Curfman McInnes 18574a2ae208SSatish Balay #undef __FUNCT__ 18584a2ae208SSatish Balay #define __FUNCT__ "MatSetOption_MPIAIJ" 1859ace3abfcSBarry Smith PetscErrorCode MatSetOption_MPIAIJ(Mat A,MatOption op,PetscBool flg) 1860c74985f6SBarry Smith { 1861c0bbcb79SLois Curfman McInnes Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1862dfbe8321SBarry Smith PetscErrorCode ierr; 1863c74985f6SBarry Smith 18643a40ed3dSBarry Smith PetscFunctionBegin; 186512c028f9SKris Buschelman switch (op) { 1866512a5fc5SBarry Smith case MAT_NEW_NONZERO_LOCATIONS: 186712c028f9SKris Buschelman case MAT_NEW_NONZERO_ALLOCATION_ERR: 186828b2fa4aSMatthew Knepley case MAT_UNUSED_NONZERO_LOCATION_ERR: 
1869a9817697SBarry Smith case MAT_KEEP_NONZERO_PATTERN: 187012c028f9SKris Buschelman case MAT_NEW_NONZERO_LOCATION_ERR: 187112c028f9SKris Buschelman case MAT_USE_INODES: 187212c028f9SKris Buschelman case MAT_IGNORE_ZERO_ENTRIES: 1873fa1f0d2cSMatthew G Knepley MatCheckPreallocated(A,1); 18744e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 18754e0d8c25SBarry Smith ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr); 187612c028f9SKris Buschelman break; 187712c028f9SKris Buschelman case MAT_ROW_ORIENTED: 18784e0d8c25SBarry Smith a->roworiented = flg; 18792205254eSKarl Rupp 18804e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 18814e0d8c25SBarry Smith ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr); 188212c028f9SKris Buschelman break; 18834e0d8c25SBarry Smith case MAT_NEW_DIAGONALS: 1884290bbb0aSBarry Smith ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr); 188512c028f9SKris Buschelman break; 188612c028f9SKris Buschelman case MAT_IGNORE_OFF_PROC_ENTRIES: 18875c0f0b64SBarry Smith a->donotstash = flg; 188812c028f9SKris Buschelman break; 1889ffa07934SHong Zhang case MAT_SPD: 1890ffa07934SHong Zhang A->spd_set = PETSC_TRUE; 1891ffa07934SHong Zhang A->spd = flg; 1892ffa07934SHong Zhang if (flg) { 1893ffa07934SHong Zhang A->symmetric = PETSC_TRUE; 1894ffa07934SHong Zhang A->structurally_symmetric = PETSC_TRUE; 1895ffa07934SHong Zhang A->symmetric_set = PETSC_TRUE; 1896ffa07934SHong Zhang A->structurally_symmetric_set = PETSC_TRUE; 1897ffa07934SHong Zhang } 1898ffa07934SHong Zhang break; 189977e54ba9SKris Buschelman case MAT_SYMMETRIC: 19004e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 190125f421beSHong Zhang break; 190277e54ba9SKris Buschelman case MAT_STRUCTURALLY_SYMMETRIC: 1903eeffb40dSHong Zhang ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 1904eeffb40dSHong Zhang break; 1905bf108f30SBarry Smith case MAT_HERMITIAN: 1906eeffb40dSHong Zhang ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 
1907eeffb40dSHong Zhang break; 1908bf108f30SBarry Smith case MAT_SYMMETRY_ETERNAL: 19094e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 191077e54ba9SKris Buschelman break; 191112c028f9SKris Buschelman default: 1912e32f2f54SBarry Smith SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %d",op); 19133a40ed3dSBarry Smith } 19143a40ed3dSBarry Smith PetscFunctionReturn(0); 1915c74985f6SBarry Smith } 1916c74985f6SBarry Smith 19174a2ae208SSatish Balay #undef __FUNCT__ 19184a2ae208SSatish Balay #define __FUNCT__ "MatGetRow_MPIAIJ" 1919b1d57f15SBarry Smith PetscErrorCode MatGetRow_MPIAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 192039e00950SLois Curfman McInnes { 1921154123eaSLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 192287828ca2SBarry Smith PetscScalar *vworkA,*vworkB,**pvA,**pvB,*v_p; 19236849ba73SBarry Smith PetscErrorCode ierr; 1924d0f46423SBarry Smith PetscInt i,*cworkA,*cworkB,**pcA,**pcB,cstart = matin->cmap->rstart; 1925d0f46423SBarry Smith PetscInt nztot,nzA,nzB,lrow,rstart = matin->rmap->rstart,rend = matin->rmap->rend; 1926b1d57f15SBarry Smith PetscInt *cmap,*idx_p; 192739e00950SLois Curfman McInnes 19283a40ed3dSBarry Smith PetscFunctionBegin; 1929e32f2f54SBarry Smith if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active"); 19307a0afa10SBarry Smith mat->getrowactive = PETSC_TRUE; 19317a0afa10SBarry Smith 193270f0671dSBarry Smith if (!mat->rowvalues && (idx || v)) { 19337a0afa10SBarry Smith /* 19347a0afa10SBarry Smith allocate enough space to hold information from the longest row. 
19357a0afa10SBarry Smith */ 19367a0afa10SBarry Smith Mat_SeqAIJ *Aa = (Mat_SeqAIJ*)mat->A->data,*Ba = (Mat_SeqAIJ*)mat->B->data; 1937b1d57f15SBarry Smith PetscInt max = 1,tmp; 1938d0f46423SBarry Smith for (i=0; i<matin->rmap->n; i++) { 19397a0afa10SBarry Smith tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i]; 19402205254eSKarl Rupp if (max < tmp) max = tmp; 19417a0afa10SBarry Smith } 19421d79065fSBarry Smith ierr = PetscMalloc2(max,PetscScalar,&mat->rowvalues,max,PetscInt,&mat->rowindices);CHKERRQ(ierr); 19437a0afa10SBarry Smith } 19447a0afa10SBarry Smith 1945e7e72b3dSBarry Smith if (row < rstart || row >= rend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only local rows"); 1946abc0e9e4SLois Curfman McInnes lrow = row - rstart; 194739e00950SLois Curfman McInnes 1948154123eaSLois Curfman McInnes pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB; 1949154123eaSLois Curfman McInnes if (!v) {pvA = 0; pvB = 0;} 1950154123eaSLois Curfman McInnes if (!idx) {pcA = 0; if (!v) pcB = 0;} 1951f830108cSBarry Smith ierr = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr); 1952f830108cSBarry Smith ierr = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr); 1953154123eaSLois Curfman McInnes nztot = nzA + nzB; 1954154123eaSLois Curfman McInnes 195570f0671dSBarry Smith cmap = mat->garray; 1956154123eaSLois Curfman McInnes if (v || idx) { 1957154123eaSLois Curfman McInnes if (nztot) { 1958154123eaSLois Curfman McInnes /* Sort by increasing column numbers, assuming A and B already sorted */ 1959b1d57f15SBarry Smith PetscInt imark = -1; 1960154123eaSLois Curfman McInnes if (v) { 196170f0671dSBarry Smith *v = v_p = mat->rowvalues; 196239e00950SLois Curfman McInnes for (i=0; i<nzB; i++) { 196370f0671dSBarry Smith if (cmap[cworkB[i]] < cstart) v_p[i] = vworkB[i]; 1964154123eaSLois Curfman McInnes else break; 1965154123eaSLois Curfman McInnes } 1966154123eaSLois Curfman McInnes imark = i; 196770f0671dSBarry Smith for (i=0; i<nzA; i++) v_p[imark+i] = 
vworkA[i]; 196870f0671dSBarry Smith for (i=imark; i<nzB; i++) v_p[nzA+i] = vworkB[i]; 1969154123eaSLois Curfman McInnes } 1970154123eaSLois Curfman McInnes if (idx) { 197170f0671dSBarry Smith *idx = idx_p = mat->rowindices; 197270f0671dSBarry Smith if (imark > -1) { 197370f0671dSBarry Smith for (i=0; i<imark; i++) { 197470f0671dSBarry Smith idx_p[i] = cmap[cworkB[i]]; 197570f0671dSBarry Smith } 197670f0671dSBarry Smith } else { 1977154123eaSLois Curfman McInnes for (i=0; i<nzB; i++) { 197870f0671dSBarry Smith if (cmap[cworkB[i]] < cstart) idx_p[i] = cmap[cworkB[i]]; 1979154123eaSLois Curfman McInnes else break; 1980154123eaSLois Curfman McInnes } 1981154123eaSLois Curfman McInnes imark = i; 198270f0671dSBarry Smith } 198370f0671dSBarry Smith for (i=0; i<nzA; i++) idx_p[imark+i] = cstart + cworkA[i]; 198470f0671dSBarry Smith for (i=imark; i<nzB; i++) idx_p[nzA+i] = cmap[cworkB[i]]; 198539e00950SLois Curfman McInnes } 19863f97c4b0SBarry Smith } else { 19871ca473b0SSatish Balay if (idx) *idx = 0; 19881ca473b0SSatish Balay if (v) *v = 0; 19891ca473b0SSatish Balay } 1990154123eaSLois Curfman McInnes } 199139e00950SLois Curfman McInnes *nz = nztot; 1992f830108cSBarry Smith ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr); 1993f830108cSBarry Smith ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr); 19943a40ed3dSBarry Smith PetscFunctionReturn(0); 199539e00950SLois Curfman McInnes } 199639e00950SLois Curfman McInnes 19974a2ae208SSatish Balay #undef __FUNCT__ 19984a2ae208SSatish Balay #define __FUNCT__ "MatRestoreRow_MPIAIJ" 1999b1d57f15SBarry Smith PetscErrorCode MatRestoreRow_MPIAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 200039e00950SLois Curfman McInnes { 20017a0afa10SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 20023a40ed3dSBarry Smith 20033a40ed3dSBarry Smith PetscFunctionBegin; 2004e7e72b3dSBarry Smith if (!aij->getrowactive) 
SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow() must be called first"); 20057a0afa10SBarry Smith aij->getrowactive = PETSC_FALSE; 20063a40ed3dSBarry Smith PetscFunctionReturn(0); 200739e00950SLois Curfman McInnes } 200839e00950SLois Curfman McInnes 20094a2ae208SSatish Balay #undef __FUNCT__ 20104a2ae208SSatish Balay #define __FUNCT__ "MatNorm_MPIAIJ" 2011dfbe8321SBarry Smith PetscErrorCode MatNorm_MPIAIJ(Mat mat,NormType type,PetscReal *norm) 2012855ac2c5SLois Curfman McInnes { 2013855ac2c5SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 2014ec8511deSBarry Smith Mat_SeqAIJ *amat = (Mat_SeqAIJ*)aij->A->data,*bmat = (Mat_SeqAIJ*)aij->B->data; 2015dfbe8321SBarry Smith PetscErrorCode ierr; 2016d0f46423SBarry Smith PetscInt i,j,cstart = mat->cmap->rstart; 2017329f5518SBarry Smith PetscReal sum = 0.0; 2018a77337e4SBarry Smith MatScalar *v; 201904ca555eSLois Curfman McInnes 20203a40ed3dSBarry Smith PetscFunctionBegin; 202117699dbbSLois Curfman McInnes if (aij->size == 1) { 202214183eadSLois Curfman McInnes ierr = MatNorm(aij->A,type,norm);CHKERRQ(ierr); 202337fa93a5SLois Curfman McInnes } else { 202404ca555eSLois Curfman McInnes if (type == NORM_FROBENIUS) { 202504ca555eSLois Curfman McInnes v = amat->a; 202604ca555eSLois Curfman McInnes for (i=0; i<amat->nz; i++) { 2027329f5518SBarry Smith sum += PetscRealPart(PetscConj(*v)*(*v)); v++; 202804ca555eSLois Curfman McInnes } 202904ca555eSLois Curfman McInnes v = bmat->a; 203004ca555eSLois Curfman McInnes for (i=0; i<bmat->nz; i++) { 2031329f5518SBarry Smith sum += PetscRealPart(PetscConj(*v)*(*v)); v++; 203204ca555eSLois Curfman McInnes } 2033ce94432eSBarry Smith ierr = MPI_Allreduce(&sum,norm,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 20348f1a2a5eSBarry Smith *norm = PetscSqrtReal(*norm); 20353a40ed3dSBarry Smith } else if (type == NORM_1) { /* max column norm */ 2036329f5518SBarry Smith PetscReal *tmp,*tmp2; 2037b1d57f15SBarry Smith PetscInt *jj,*garray = 
aij->garray; 2038d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp);CHKERRQ(ierr); 2039d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp2);CHKERRQ(ierr); 2040d0f46423SBarry Smith ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr); 204104ca555eSLois Curfman McInnes *norm = 0.0; 204204ca555eSLois Curfman McInnes v = amat->a; jj = amat->j; 204304ca555eSLois Curfman McInnes for (j=0; j<amat->nz; j++) { 2044bfec09a0SHong Zhang tmp[cstart + *jj++] += PetscAbsScalar(*v); v++; 204504ca555eSLois Curfman McInnes } 204604ca555eSLois Curfman McInnes v = bmat->a; jj = bmat->j; 204704ca555eSLois Curfman McInnes for (j=0; j<bmat->nz; j++) { 2048bfec09a0SHong Zhang tmp[garray[*jj++]] += PetscAbsScalar(*v); v++; 204904ca555eSLois Curfman McInnes } 2050ce94432eSBarry Smith ierr = MPI_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 2051d0f46423SBarry Smith for (j=0; j<mat->cmap->N; j++) { 205204ca555eSLois Curfman McInnes if (tmp2[j] > *norm) *norm = tmp2[j]; 205304ca555eSLois Curfman McInnes } 2054606d414cSSatish Balay ierr = PetscFree(tmp);CHKERRQ(ierr); 2055606d414cSSatish Balay ierr = PetscFree(tmp2);CHKERRQ(ierr); 20563a40ed3dSBarry Smith } else if (type == NORM_INFINITY) { /* max row norm */ 2057329f5518SBarry Smith PetscReal ntemp = 0.0; 2058d0f46423SBarry Smith for (j=0; j<aij->A->rmap->n; j++) { 2059bfec09a0SHong Zhang v = amat->a + amat->i[j]; 206004ca555eSLois Curfman McInnes sum = 0.0; 206104ca555eSLois Curfman McInnes for (i=0; i<amat->i[j+1]-amat->i[j]; i++) { 2062cddf8d76SBarry Smith sum += PetscAbsScalar(*v); v++; 206304ca555eSLois Curfman McInnes } 2064bfec09a0SHong Zhang v = bmat->a + bmat->i[j]; 206504ca555eSLois Curfman McInnes for (i=0; i<bmat->i[j+1]-bmat->i[j]; i++) { 2066cddf8d76SBarry Smith sum += PetscAbsScalar(*v); v++; 206704ca555eSLois Curfman McInnes } 2068515d9167SLois Curfman McInnes if (sum > ntemp) ntemp = sum; 
206904ca555eSLois Curfman McInnes } 2070ce94432eSBarry Smith ierr = MPI_Allreduce(&ntemp,norm,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr); 2071ce94432eSBarry Smith } else SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No support for two norm"); 207237fa93a5SLois Curfman McInnes } 20733a40ed3dSBarry Smith PetscFunctionReturn(0); 2074855ac2c5SLois Curfman McInnes } 2075855ac2c5SLois Curfman McInnes 20764a2ae208SSatish Balay #undef __FUNCT__ 20774a2ae208SSatish Balay #define __FUNCT__ "MatTranspose_MPIAIJ" 2078fc4dec0aSBarry Smith PetscErrorCode MatTranspose_MPIAIJ(Mat A,MatReuse reuse,Mat *matout) 2079b7c46309SBarry Smith { 2080b7c46309SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2081da668accSHong Zhang Mat_SeqAIJ *Aloc=(Mat_SeqAIJ*)a->A->data,*Bloc=(Mat_SeqAIJ*)a->B->data; 2082dfbe8321SBarry Smith PetscErrorCode ierr; 208380bcc5a1SJed Brown PetscInt M = A->rmap->N,N = A->cmap->N,ma,na,mb,nb,*ai,*aj,*bi,*bj,row,*cols,*cols_tmp,i; 2084d0f46423SBarry Smith PetscInt cstart = A->cmap->rstart,ncol; 20853a40ed3dSBarry Smith Mat B; 2086a77337e4SBarry Smith MatScalar *array; 2087b7c46309SBarry Smith 20883a40ed3dSBarry Smith PetscFunctionBegin; 2089ce94432eSBarry Smith if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Square matrix only for in-place"); 2090da668accSHong Zhang 209180bcc5a1SJed Brown ma = A->rmap->n; na = A->cmap->n; mb = a->B->rmap->n; nb = a->B->cmap->n; 2092da668accSHong Zhang ai = Aloc->i; aj = Aloc->j; 2093da668accSHong Zhang bi = Bloc->i; bj = Bloc->j; 2094fc73b1b3SBarry Smith if (reuse == MAT_INITIAL_MATRIX || *matout == A) { 209580bcc5a1SJed Brown PetscInt *d_nnz,*g_nnz,*o_nnz; 209680bcc5a1SJed Brown PetscSFNode *oloc; 2097713c93b4SJed Brown PETSC_UNUSED PetscSF sf; 209880bcc5a1SJed Brown 209980bcc5a1SJed Brown ierr = PetscMalloc4(na,PetscInt,&d_nnz,na,PetscInt,&o_nnz,nb,PetscInt,&g_nnz,nb,PetscSFNode,&oloc);CHKERRQ(ierr); 210080bcc5a1SJed Brown 
/* compute d_nnz for preallocation */ 210180bcc5a1SJed Brown ierr = PetscMemzero(d_nnz,na*sizeof(PetscInt));CHKERRQ(ierr); 2102da668accSHong Zhang for (i=0; i<ai[ma]; i++) { 2103da668accSHong Zhang d_nnz[aj[i]]++; 2104da668accSHong Zhang aj[i] += cstart; /* global col index to be used by MatSetValues() */ 2105d4bb536fSBarry Smith } 210680bcc5a1SJed Brown /* compute local off-diagonal contributions */ 21070beca09bSJed Brown ierr = PetscMemzero(g_nnz,nb*sizeof(PetscInt));CHKERRQ(ierr); 210880bcc5a1SJed Brown for (i=0; i<bi[ma]; i++) g_nnz[bj[i]]++; 210980bcc5a1SJed Brown /* map those to global */ 2110ce94432eSBarry Smith ierr = PetscSFCreate(PetscObjectComm((PetscObject)A),&sf);CHKERRQ(ierr); 21110298fd71SBarry Smith ierr = PetscSFSetGraphLayout(sf,A->cmap,nb,NULL,PETSC_USE_POINTER,a->garray);CHKERRQ(ierr); 2112e9e74f11SJed Brown ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr); 211380bcc5a1SJed Brown ierr = PetscMemzero(o_nnz,na*sizeof(PetscInt));CHKERRQ(ierr); 211480bcc5a1SJed Brown ierr = PetscSFReduceBegin(sf,MPIU_INT,g_nnz,o_nnz,MPIU_SUM);CHKERRQ(ierr); 211580bcc5a1SJed Brown ierr = PetscSFReduceEnd(sf,MPIU_INT,g_nnz,o_nnz,MPIU_SUM);CHKERRQ(ierr); 211680bcc5a1SJed Brown ierr = PetscSFDestroy(&sf);CHKERRQ(ierr); 2117d4bb536fSBarry Smith 2118ce94432eSBarry Smith ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr); 2119d0f46423SBarry Smith ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr); 2120a2f3521dSMark F. 
Adams ierr = MatSetBlockSizes(B,A->cmap->bs,A->rmap->bs);CHKERRQ(ierr); 21217adad957SLisandro Dalcin ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr); 212280bcc5a1SJed Brown ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,o_nnz);CHKERRQ(ierr); 212380bcc5a1SJed Brown ierr = PetscFree4(d_nnz,o_nnz,g_nnz,oloc);CHKERRQ(ierr); 2124fc4dec0aSBarry Smith } else { 2125fc4dec0aSBarry Smith B = *matout; 21266ffab4bbSHong Zhang ierr = MatSetOption(B,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr); 21272205254eSKarl Rupp for (i=0; i<ai[ma]; i++) aj[i] += cstart; /* global col index to be used by MatSetValues() */ 2128fc4dec0aSBarry Smith } 2129b7c46309SBarry Smith 2130b7c46309SBarry Smith /* copy over the A part */ 2131da668accSHong Zhang array = Aloc->a; 2132d0f46423SBarry Smith row = A->rmap->rstart; 2133da668accSHong Zhang for (i=0; i<ma; i++) { 2134da668accSHong Zhang ncol = ai[i+1]-ai[i]; 2135da668accSHong Zhang ierr = MatSetValues(B,ncol,aj,1,&row,array,INSERT_VALUES);CHKERRQ(ierr); 21362205254eSKarl Rupp row++; 21372205254eSKarl Rupp array += ncol; aj += ncol; 2138b7c46309SBarry Smith } 2139b7c46309SBarry Smith aj = Aloc->j; 2140da668accSHong Zhang for (i=0; i<ai[ma]; i++) aj[i] -= cstart; /* resume local col index */ 2141b7c46309SBarry Smith 2142b7c46309SBarry Smith /* copy over the B part */ 2143fc73b1b3SBarry Smith ierr = PetscMalloc(bi[mb]*sizeof(PetscInt),&cols);CHKERRQ(ierr); 2144fc73b1b3SBarry Smith ierr = PetscMemzero(cols,bi[mb]*sizeof(PetscInt));CHKERRQ(ierr); 2145da668accSHong Zhang array = Bloc->a; 2146d0f46423SBarry Smith row = A->rmap->rstart; 21472205254eSKarl Rupp for (i=0; i<bi[mb]; i++) cols[i] = a->garray[bj[i]]; 214861a2fbbaSHong Zhang cols_tmp = cols; 2149da668accSHong Zhang for (i=0; i<mb; i++) { 2150da668accSHong Zhang ncol = bi[i+1]-bi[i]; 215161a2fbbaSHong Zhang ierr = MatSetValues(B,ncol,cols_tmp,1,&row,array,INSERT_VALUES);CHKERRQ(ierr); 21522205254eSKarl Rupp row++; 21532205254eSKarl Rupp array += ncol; cols_tmp += ncol; 
  }
  ierr = PetscFree(cols);CHKERRQ(ierr);

  /* Tail of the preceding transpose-type routine (header above this chunk):
     finish assembly of the transposed matrix B, then either hand it back or
     merge it into A for the in-place case. */
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX || *matout != A) {
    *matout = B;
  } else {
    /* in-place: fold B's data into A's header so callers keep using A */
    ierr = MatHeaderMerge(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDiagonalScale_MPIAIJ"
/*
   mat = diag(ll) * mat * diag(rr); either vector may be NULL.
   Right-scaling the off-diagonal block B needs the ghosted entries of rr,
   so the scatter into aij->lvec is started first and completed only after
   the left scaling and the diagonal-block scaling are done, overlapping
   communication with computation.
*/
PetscErrorCode MatDiagonalScale_MPIAIJ(Mat mat,Vec ll,Vec rr)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat            a = aij->A,b = aij->B;
  PetscErrorCode ierr;
  PetscInt       s1,s2,s3;

  PetscFunctionBegin;
  ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
  if (rr) {
    ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
    if (s1!=s3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
    /* Overlap communication with computation. */
    ierr = VecScatterBegin(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  }
  if (ll) {
    ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
    if (s1!=s2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
    /* left scaling is purely local, so B can be row-scaled right away */
    ierr = (*b->ops->diagonalscale)(b,ll,0);CHKERRQ(ierr);
  }
  /* scale the diagonal block */
  ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);

  if (rr) {
    /* Do a scatter end and then right scale the off-diagonal block */
    ierr = VecScatterEnd(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = (*b->ops->diagonalscale)(b,0,aij->lvec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetUnfactored_MPIAIJ"
/* Clears the factored state; only the local diagonal block a->A carries
   factorization data, so only it needs resetting. */
PetscErrorCode MatSetUnfactored_MPIAIJ(Mat A)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAIJ"
/* Collective equality test: compares the local diagonal and off-diagonal
   blocks, then AND-reduces the per-rank result over the communicator. */
PetscErrorCode MatEqual_MPIAIJ(Mat A,Mat B,PetscBool *flag)
{
  Mat_MPIAIJ *matB = (Mat_MPIAIJ*)B->data,*matA = (Mat_MPIAIJ*)A->data;
  Mat            a,b,c,d;
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  a = matA->A; b = matA->B;
  c = matB->A; d = matB->B;

  ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
  if (flg) {
    /* only compare off-diagonal blocks if the diagonal blocks already match */
    ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
  }
  /* every rank must agree; a single differing rank makes the result false */
  ierr = MPI_Allreduce(&flg,flag,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCopy_MPIAIJ"
/* Copies A into B, using the fast block-wise path only when the nonzero
   patterns are declared identical and both matrices share the same copy
   implementation. */
PetscErrorCode MatCopy_MPIAIJ(Mat A,Mat B,MatStructure str)
{
  PetscErrorCode ierr;
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  Mat_MPIAIJ     *b = (Mat_MPIAIJ*)B->data;

  PetscFunctionBegin;
  /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
  if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
    /* because of the column compression in the off-processor part of the matrix a->B,
       the number of columns in a->B and b->B may be different, hence we cannot call
       the MatCopy() directly on the two parts. If need be, we can provide a more
       efficient copy than the MatCopy_Basic() by first uncompressing the a->B matrices
       then copying the submatrices */
    ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
  } else {
    ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
    ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetUp_MPIAIJ"
/* Default MatSetUp(): preallocate with default parameters. */
PetscErrorCode MatSetUp_MPIAIJ(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatMPIAIJSetPreallocation(A,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAXPYGetPreallocation_MPIAIJ"
/* This is the same as MatAXPYGetPreallocation_SeqAIJ, except that the local-to-global map is provided */
static PetscErrorCode MatAXPYGetPreallocation_MPIAIJ(Mat Y,const PetscInt *yltog,Mat X,const PetscInt *xltog,PetscInt *nnz)
{
  PetscInt       i,m=Y->rmap->N;
  Mat_SeqAIJ     *x = (Mat_SeqAIJ*)X->data;
  Mat_SeqAIJ     *y = (Mat_SeqAIJ*)Y->data;
  const PetscInt *xi = x->i,*yi = y->i;

  PetscFunctionBegin;
  /* Set the number of nonzeros in the new matrix: merge-count of the two
     sorted rows (union of the column sets), per row */
  for (i=0; i<m; i++) {
    PetscInt j,k,nzx = xi[i+1] - xi[i],nzy = yi[i+1] - yi[i];
    const PetscInt *xj = x->j+xi[i],*yj = y->j+yi[i];
    nnz[i] = 0;
    for (j=0,k=0; j<nzx; j++) {                   /* Point in X */
      for (; k<nzy && yltog[yj[k]]<xltog[xj[j]]; k++) nnz[i]++; /* Catch up to X */
      if (k<nzy && yltog[yj[k]]==xltog[xj[j]]) k++;             /* Skip duplicate */
      nnz[i]++;
    }
    for (; k<nzy; k++) nnz[i]++;                  /* leftover Y columns */
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAXPY_MPIAIJ"
/*
   Y = a*X + Y.  Three paths:
   - SAME_NONZERO_PATTERN:   raw BLAS axpy on the stored values of both blocks;
   - SUBSET_NONZERO_PATTERN: map X's entries into Y through a cached xtoy index map;
   - otherwise:              preallocate a new matrix for the union pattern, do
                             the basic AXPY into it, and replace Y's header.
*/
PetscErrorCode MatAXPY_MPIAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  PetscErrorCode ierr;
  PetscInt       i;
  Mat_MPIAIJ     *xx = (Mat_MPIAIJ*)X->data,*yy = (Mat_MPIAIJ*)Y->data;
  PetscBLASInt   bnz,one=1;
  Mat_SeqAIJ     *x,*y;

  PetscFunctionBegin;
  if (str == SAME_NONZERO_PATTERN) {
    PetscScalar alpha = a;
    x    = (Mat_SeqAIJ*)xx->A->data;
    ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
    y    = (Mat_SeqAIJ*)yy->A->data;
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
    x    = (Mat_SeqAIJ*)xx->B->data;
    y    = (Mat_SeqAIJ*)yy->B->data;
    ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
  } else if (str == SUBSET_NONZERO_PATTERN) {
    ierr = MatAXPY_SeqAIJ(yy->A,a,xx->A,str);CHKERRQ(ierr);

    x = (Mat_SeqAIJ*)xx->B->data;
    y = (Mat_SeqAIJ*)yy->B->data;
    if (y->xtoy && y->XtoY != xx->B) { /* cached map was built for a different X; rebuild */
      ierr = PetscFree(y->xtoy);CHKERRQ(ierr);
      ierr = MatDestroy(&y->XtoY);CHKERRQ(ierr);
    }
    if (!y->xtoy) { /* get xtoy */
      ierr = MatAXPYGetxtoy_Private(xx->B->rmap->n,x->i,x->j,xx->garray,y->i,y->j,yy->garray,&y->xtoy);CHKERRQ(ierr);
      y->XtoY = xx->B;
      ierr = PetscObjectReference((PetscObject)xx->B);CHKERRQ(ierr);
    }
    for (i=0; i<x->nz; i++) y->a[y->xtoy[i]] += a*(x->a[i]);
  } else {
    Mat      B;
    PetscInt *nnz_d,*nnz_o;
    ierr = PetscMalloc(yy->A->rmap->N*sizeof(PetscInt),&nnz_d);CHKERRQ(ierr);
    ierr = PetscMalloc(yy->B->rmap->N*sizeof(PetscInt),&nnz_o);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)Y),&B);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject)B,((PetscObject)Y)->name);CHKERRQ(ierr);
    ierr = MatSetSizes(B,Y->rmap->n,Y->cmap->n,Y->rmap->N,Y->cmap->N);CHKERRQ(ierr);
    ierr = MatSetBlockSizes(B,Y->rmap->bs,Y->cmap->bs);CHKERRQ(ierr);
    ierr = MatSetType(B,MATMPIAIJ);CHKERRQ(ierr);
    ierr = MatAXPYGetPreallocation_SeqAIJ(yy->A,xx->A,nnz_d);CHKERRQ(ierr);
    ierr = MatAXPYGetPreallocation_MPIAIJ(yy->B,yy->garray,xx->B,xx->garray,nnz_o);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(B,0,nnz_d,0,nnz_o);CHKERRQ(ierr);
    ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
    /* Y takes over B's data; callers keep their Y handle */
    ierr = MatHeaderReplace(Y,B);CHKERRQ(ierr);
    ierr = PetscFree(nnz_d);CHKERRQ(ierr);
    ierr = PetscFree(nnz_o);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

extern PetscErrorCode MatConjugate_SeqAIJ(Mat);

#undef __FUNCT__
#define __FUNCT__ "MatConjugate_MPIAIJ"
/* Complex-conjugates all stored values of both local blocks; a no-op when
   PETSc is built with real scalars. */
PetscErrorCode MatConjugate_MPIAIJ(Mat mat)
{
#if defined(PETSC_USE_COMPLEX)
  PetscErrorCode ierr;
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;

  PetscFunctionBegin;
  ierr = MatConjugate_SeqAIJ(aij->A);CHKERRQ(ierr);
  ierr = MatConjugate_SeqAIJ(aij->B);CHKERRQ(ierr);
#else
  PetscFunctionBegin;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRealPart_MPIAIJ"
/* Replaces every stored value by its real part, in both local blocks. */
PetscErrorCode MatRealPart_MPIAIJ(Mat A)
{
Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 237599cafbc1SBarry Smith PetscErrorCode ierr; 237699cafbc1SBarry Smith 237799cafbc1SBarry Smith PetscFunctionBegin; 237899cafbc1SBarry Smith ierr = MatRealPart(a->A);CHKERRQ(ierr); 237999cafbc1SBarry Smith ierr = MatRealPart(a->B);CHKERRQ(ierr); 238099cafbc1SBarry Smith PetscFunctionReturn(0); 238199cafbc1SBarry Smith } 238299cafbc1SBarry Smith 238399cafbc1SBarry Smith #undef __FUNCT__ 238499cafbc1SBarry Smith #define __FUNCT__ "MatImaginaryPart_MPIAIJ" 238599cafbc1SBarry Smith PetscErrorCode MatImaginaryPart_MPIAIJ(Mat A) 238699cafbc1SBarry Smith { 238799cafbc1SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 238899cafbc1SBarry Smith PetscErrorCode ierr; 238999cafbc1SBarry Smith 239099cafbc1SBarry Smith PetscFunctionBegin; 239199cafbc1SBarry Smith ierr = MatImaginaryPart(a->A);CHKERRQ(ierr); 239299cafbc1SBarry Smith ierr = MatImaginaryPart(a->B);CHKERRQ(ierr); 239399cafbc1SBarry Smith PetscFunctionReturn(0); 239499cafbc1SBarry Smith } 239599cafbc1SBarry Smith 2396519f805aSKarl Rupp #if defined(PETSC_HAVE_PBGL) 2397103bf8bdSMatthew Knepley 2398103bf8bdSMatthew Knepley #include <boost/parallel/mpi/bsp_process_group.hpp> 2399a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_default_graph.hpp> 2400a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_0_block.hpp> 2401a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_preconditioner.hpp> 2402103bf8bdSMatthew Knepley #include <boost/graph/distributed/petsc/interface.hpp> 2403a2c909beSMatthew Knepley #include <boost/multi_array.hpp> 2404d0f46423SBarry Smith #include <boost/parallel/distributed_property_map->hpp> 2405103bf8bdSMatthew Knepley 2406103bf8bdSMatthew Knepley #undef __FUNCT__ 2407103bf8bdSMatthew Knepley #define __FUNCT__ "MatILUFactorSymbolic_MPIAIJ" 2408103bf8bdSMatthew Knepley /* 2409103bf8bdSMatthew Knepley This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu> 2410103bf8bdSMatthew Knepley */ 
24110481f469SBarry Smith PetscErrorCode MatILUFactorSymbolic_MPIAIJ(Mat fact,Mat A, IS isrow, IS iscol, const MatFactorInfo *info) 2412103bf8bdSMatthew Knepley { 2413a2c909beSMatthew Knepley namespace petsc = boost::distributed::petsc; 2414a2c909beSMatthew Knepley 2415a2c909beSMatthew Knepley namespace graph_dist = boost::graph::distributed; 2416a2c909beSMatthew Knepley using boost::graph::distributed::ilu_default::process_group_type; 2417a2c909beSMatthew Knepley using boost::graph::ilu_permuted; 2418a2c909beSMatthew Knepley 2419ace3abfcSBarry Smith PetscBool row_identity, col_identity; 2420776b82aeSLisandro Dalcin PetscContainer c; 2421103bf8bdSMatthew Knepley PetscInt m, n, M, N; 2422103bf8bdSMatthew Knepley PetscErrorCode ierr; 2423103bf8bdSMatthew Knepley 2424103bf8bdSMatthew Knepley PetscFunctionBegin; 2425e32f2f54SBarry Smith if (info->levels != 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only levels = 0 supported for parallel ilu"); 2426103bf8bdSMatthew Knepley ierr = ISIdentity(isrow, &row_identity);CHKERRQ(ierr); 2427103bf8bdSMatthew Knepley ierr = ISIdentity(iscol, &col_identity);CHKERRQ(ierr); 2428f23aa3ddSBarry Smith if (!row_identity || !col_identity) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Row and column permutations must be identity for parallel ILU"); 2429103bf8bdSMatthew Knepley 2430103bf8bdSMatthew Knepley process_group_type pg; 2431a2c909beSMatthew Knepley typedef graph_dist::ilu_default::ilu_level_graph_type lgraph_type; 2432a2c909beSMatthew Knepley lgraph_type *lgraph_p = new lgraph_type(petsc::num_global_vertices(A), pg, petsc::matrix_distribution(A, pg)); 2433a2c909beSMatthew Knepley lgraph_type& level_graph = *lgraph_p; 2434a2c909beSMatthew Knepley graph_dist::ilu_default::graph_type& graph(level_graph.graph); 2435a2c909beSMatthew Knepley 2436103bf8bdSMatthew Knepley petsc::read_matrix(A, graph, get(boost::edge_weight, graph)); 2437a2c909beSMatthew Knepley ilu_permuted(level_graph); 2438103bf8bdSMatthew Knepley 2439103bf8bdSMatthew 
Knepley /* put together the new matrix */ 2440ce94432eSBarry Smith ierr = MatCreate(PetscObjectComm((PetscObject)A), fact);CHKERRQ(ierr); 2441103bf8bdSMatthew Knepley ierr = MatGetLocalSize(A, &m, &n);CHKERRQ(ierr); 2442103bf8bdSMatthew Knepley ierr = MatGetSize(A, &M, &N);CHKERRQ(ierr); 2443719d5645SBarry Smith ierr = MatSetSizes(fact, m, n, M, N);CHKERRQ(ierr); 2444a2f3521dSMark F. Adams ierr = MatSetBlockSizes(fact,A->rmap->bs,A->cmap->bs);CHKERRQ(ierr); 2445719d5645SBarry Smith ierr = MatSetType(fact, ((PetscObject)A)->type_name);CHKERRQ(ierr); 2446719d5645SBarry Smith ierr = MatAssemblyBegin(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2447719d5645SBarry Smith ierr = MatAssemblyEnd(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2448103bf8bdSMatthew Knepley 2449ce94432eSBarry Smith ierr = PetscContainerCreate(PetscObjectComm((PetscObject)A), &c); 2450776b82aeSLisandro Dalcin ierr = PetscContainerSetPointer(c, lgraph_p); 2451719d5645SBarry Smith ierr = PetscObjectCompose((PetscObject) (fact), "graph", (PetscObject) c); 2452bf0cc555SLisandro Dalcin ierr = PetscContainerDestroy(&c); 2453103bf8bdSMatthew Knepley PetscFunctionReturn(0); 2454103bf8bdSMatthew Knepley } 2455103bf8bdSMatthew Knepley 2456103bf8bdSMatthew Knepley #undef __FUNCT__ 2457103bf8bdSMatthew Knepley #define __FUNCT__ "MatLUFactorNumeric_MPIAIJ" 24580481f469SBarry Smith PetscErrorCode MatLUFactorNumeric_MPIAIJ(Mat B,Mat A, const MatFactorInfo *info) 2459103bf8bdSMatthew Knepley { 2460103bf8bdSMatthew Knepley PetscFunctionBegin; 2461103bf8bdSMatthew Knepley PetscFunctionReturn(0); 2462103bf8bdSMatthew Knepley } 2463103bf8bdSMatthew Knepley 2464103bf8bdSMatthew Knepley #undef __FUNCT__ 2465103bf8bdSMatthew Knepley #define __FUNCT__ "MatSolve_MPIAIJ" 2466103bf8bdSMatthew Knepley /* 2467103bf8bdSMatthew Knepley This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu> 2468103bf8bdSMatthew Knepley */ 2469103bf8bdSMatthew Knepley PetscErrorCode MatSolve_MPIAIJ(Mat A, Vec b, Vec x) 
{
  namespace graph_dist = boost::graph::distributed;

  typedef graph_dist::ilu_default::ilu_level_graph_type lgraph_type;
  lgraph_type    *lgraph_p;
  PetscContainer c;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* recover the level graph stashed on A by MatILUFactorSymbolic_MPIAIJ */
  ierr = PetscObjectQuery((PetscObject) A, "graph", (PetscObject*) &c);CHKERRQ(ierr);
  ierr = PetscContainerGetPointer(c, (void**) &lgraph_p);CHKERRQ(ierr);
  ierr = VecCopy(b, x);CHKERRQ(ierr);

  PetscScalar *array_x;
  ierr = VecGetArray(x, &array_x);CHKERRQ(ierr);
  PetscInt sx;
  ierr = VecGetSize(x, &sx);CHKERRQ(ierr);

  PetscScalar *array_b;
  ierr = VecGetArray(b, &array_b);CHKERRQ(ierr);
  PetscInt sb;
  ierr = VecGetSize(b, &sb);CHKERRQ(ierr);

  lgraph_type& level_graph = *lgraph_p;
  graph_dist::ilu_default::graph_type& graph(level_graph.graph);

  /* wrap the raw vec arrays as boost property maps over the graph vertices */
  typedef boost::multi_array_ref<PetscScalar, 1> array_ref_type;
  array_ref_type ref_b(array_b, boost::extents[num_vertices(graph)]);
  array_ref_type ref_x(array_x, boost::extents[num_vertices(graph)]);

  typedef boost::iterator_property_map<array_ref_type::iterator,
                                       boost::property_map<graph_dist::ilu_default::graph_type, boost::vertex_index_t>::type> gvector_type;
  gvector_type vector_b(ref_b.begin(), get(boost::vertex_index, graph));
  gvector_type vector_x(ref_x.begin(), get(boost::vertex_index, graph));

  ilu_set_solve(*lgraph_p, vector_b, vector_x);
  PetscFunctionReturn(0);
}
#endif

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MatRedundant"
/*
   Frees the Mat_Redundant context hung off A (seq or mpi flavor), including
   its communication buffers or its cached sequential submatrix, then calls
   the original destroy routine saved in redund->Destroy.
*/
PetscErrorCode MatDestroy_MatRedundant(Mat A)
{
  PetscErrorCode ierr;
  Mat_Redundant  *redund;
  PetscInt       i;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data;
    redund = a->redundant;
  } else {
    Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data;
    redund = a->redundant;
  }
  if (redund){
    if (redund->matseq) { /* via MatGetSubMatrices() */
      ierr = ISDestroy(&redund->isrow);CHKERRQ(ierr);
      ierr = ISDestroy(&redund->iscol);CHKERRQ(ierr);
      ierr = MatDestroy(&redund->matseq[0]);CHKERRQ(ierr);
      ierr = PetscFree(redund->matseq);CHKERRQ(ierr);
    } else {
      /* hand-rolled MPI path: release send/recv buffers */
      ierr = PetscFree2(redund->send_rank,redund->recv_rank);CHKERRQ(ierr);
      ierr = PetscFree(redund->sbuf_j);CHKERRQ(ierr);
      ierr = PetscFree(redund->sbuf_a);CHKERRQ(ierr);
      for (i=0; i<redund->nrecvs; i++) {
        ierr = PetscFree(redund->rbuf_j[i]);CHKERRQ(ierr);
        ierr = PetscFree(redund->rbuf_a[i]);CHKERRQ(ierr);
      }
      ierr = PetscFree4(redund->sbuf_nz,redund->rbuf_nz,redund->rbuf_j,redund->rbuf_a);CHKERRQ(ierr);
    }

    if (redund->psubcomm) {
      ierr = PetscSubcommDestroy(&redund->psubcomm);CHKERRQ(ierr);
    }
    /* chain to the matrix's original destroy routine */
    ierr = redund->Destroy(A);CHKERRQ(ierr);
    ierr = PetscFree(redund);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRedundantMatrix_MPIAIJ_interlaced"
/* Builds redundant copies of mat on nsubcomm interlaced subcommunicators
   (continues past this chunk). */
PetscErrorCode MatGetRedundantMatrix_MPIAIJ_interlaced(Mat mat,PetscInt nsubcomm,MPI_Comm subcomm,MatReuse reuse,Mat *matredundant)
{
  PetscMPIInt    rank,size;
  MPI_Comm       comm;
  PetscErrorCode ierr;
  PetscInt       nsends=0,nrecvs=0,i,rownz_max=0,M=mat->rmap->N,N=mat->cmap->N;
  PetscMPIInt    *send_rank= NULL,*recv_rank=NULL,subrank,subsize;
  PetscInt       *rowrange = mat->rmap->range;
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat            A = aij->A,B=aij->B,C=*matredundant;
  Mat_SeqAIJ     *a = (Mat_SeqAIJ*)A->data,*b=(Mat_SeqAIJ*)B->data;
  PetscScalar    *sbuf_a;
  PetscInt       nzlocal=a->nz+b->nz;
  PetscInt       j,cstart=mat->cmap->rstart,cend=mat->cmap->rend,row,nzA,nzB,ncols,*cworkA,*cworkB;
Zhang PetscInt rstart=mat->rmap->rstart,rend=mat->rmap->rend,*bmap=aij->garray; 2571b4617e5dSHong Zhang PetscInt *cols,ctmp,lwrite,*rptr,l,*sbuf_j; 2572b4617e5dSHong Zhang MatScalar *aworkA,*aworkB; 2573b4617e5dSHong Zhang PetscScalar *vals; 2574b4617e5dSHong Zhang PetscMPIInt tag1,tag2,tag3,imdex; 2575b4617e5dSHong Zhang MPI_Request *s_waits1=NULL,*s_waits2=NULL,*s_waits3=NULL; 2576b4617e5dSHong Zhang MPI_Request *r_waits1=NULL,*r_waits2=NULL,*r_waits3=NULL; 2577b4617e5dSHong Zhang MPI_Status recv_status,*send_status; 2578b4617e5dSHong Zhang PetscInt *sbuf_nz=NULL,*rbuf_nz=NULL,count; 2579b4617e5dSHong Zhang PetscInt **rbuf_j=NULL; 2580b4617e5dSHong Zhang PetscScalar **rbuf_a=NULL; 2581b4617e5dSHong Zhang Mat_Redundant *redund =NULL; 2582b4617e5dSHong Zhang 2583b4617e5dSHong Zhang PetscFunctionBegin; 2584b4617e5dSHong Zhang ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr); 2585b4617e5dSHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 2586b4617e5dSHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 25875cc03489SHong Zhang ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr); 25885cc03489SHong Zhang ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr); 2589d3b23db5SHong Zhang 2590b4617e5dSHong Zhang if (reuse == MAT_REUSE_MATRIX) { 2591b4617e5dSHong Zhang if (M != mat->rmap->N || N != mat->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong global size"); 25925cc03489SHong Zhang if (subsize == 1) { 25935cc03489SHong Zhang Mat_SeqAIJ *c = (Mat_SeqAIJ*)C->data; 25945cc03489SHong Zhang redund = c->redundant; 25955cc03489SHong Zhang } else { 25965cc03489SHong Zhang Mat_MPIAIJ *c = (Mat_MPIAIJ*)C->data; 25975cc03489SHong Zhang redund = c->redundant; 25985cc03489SHong Zhang } 2599b4617e5dSHong Zhang if (nzlocal != redund->nzlocal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. 
Wrong nzlocal"); 2600b4617e5dSHong Zhang 2601b4617e5dSHong Zhang nsends = redund->nsends; 2602b4617e5dSHong Zhang nrecvs = redund->nrecvs; 2603b4617e5dSHong Zhang send_rank = redund->send_rank; 2604b4617e5dSHong Zhang recv_rank = redund->recv_rank; 2605b4617e5dSHong Zhang sbuf_nz = redund->sbuf_nz; 2606b4617e5dSHong Zhang rbuf_nz = redund->rbuf_nz; 2607b4617e5dSHong Zhang sbuf_j = redund->sbuf_j; 2608b4617e5dSHong Zhang sbuf_a = redund->sbuf_a; 2609b4617e5dSHong Zhang rbuf_j = redund->rbuf_j; 2610b4617e5dSHong Zhang rbuf_a = redund->rbuf_a; 2611b4617e5dSHong Zhang } 2612b4617e5dSHong Zhang 2613b4617e5dSHong Zhang if (reuse == MAT_INITIAL_MATRIX) { 2614b4617e5dSHong Zhang PetscInt nleftover,np_subcomm; 2615b4617e5dSHong Zhang 2616b4617e5dSHong Zhang /* get the destination processors' id send_rank, nsends and nrecvs */ 2617b4617e5dSHong Zhang ierr = PetscMalloc2(size,PetscMPIInt,&send_rank,size,PetscMPIInt,&recv_rank);CHKERRQ(ierr); 2618b4617e5dSHong Zhang 2619b4617e5dSHong Zhang np_subcomm = size/nsubcomm; 2620b4617e5dSHong Zhang nleftover = size - nsubcomm*np_subcomm; 2621b4617e5dSHong Zhang 262222559b1cSHong Zhang /* block of codes below is specific for INTERLACED */ 262322559b1cSHong Zhang /* ------------------------------------------------*/ 2624b4617e5dSHong Zhang nsends = 0; nrecvs = 0; 2625b4617e5dSHong Zhang for (i=0; i<size; i++) { 2626b4617e5dSHong Zhang if (subrank == i/nsubcomm && i != rank) { /* my_subrank == other's subrank */ 262722559b1cSHong Zhang send_rank[nsends++] = i; 2628b4617e5dSHong Zhang recv_rank[nrecvs++] = i; 2629b4617e5dSHong Zhang } 2630b4617e5dSHong Zhang } 2631b4617e5dSHong Zhang if (rank >= size - nleftover) { /* this proc is a leftover processor */ 2632b4617e5dSHong Zhang i = size-nleftover-1; 2633b4617e5dSHong Zhang j = 0; 2634b4617e5dSHong Zhang while (j < nsubcomm - nleftover) { 2635b4617e5dSHong Zhang send_rank[nsends++] = i; 2636b4617e5dSHong Zhang i--; j++; 2637b4617e5dSHong Zhang } 2638b4617e5dSHong Zhang } 2639b4617e5dSHong 
Zhang 2640b4617e5dSHong Zhang if (nleftover && subsize == size/nsubcomm && subrank==subsize-1) { /* this proc recvs from leftover processors */ 2641b4617e5dSHong Zhang for (i=0; i<nleftover; i++) { 2642b4617e5dSHong Zhang recv_rank[nrecvs++] = size-nleftover+i; 2643b4617e5dSHong Zhang } 2644b4617e5dSHong Zhang } 264522559b1cSHong Zhang /*----------------------------------------------*/ 2646b4617e5dSHong Zhang 2647b4617e5dSHong Zhang /* allocate sbuf_j, sbuf_a */ 2648b4617e5dSHong Zhang i = nzlocal + rowrange[rank+1] - rowrange[rank] + 2; 2649b4617e5dSHong Zhang ierr = PetscMalloc(i*sizeof(PetscInt),&sbuf_j);CHKERRQ(ierr); 2650b4617e5dSHong Zhang ierr = PetscMalloc((nzlocal+1)*sizeof(PetscScalar),&sbuf_a);CHKERRQ(ierr); 2651e37c6257SHong Zhang /* 2652e37c6257SHong Zhang ierr = PetscSynchronizedPrintf(comm,"[%d] nsends %d, nrecvs %d\n",rank,nsends,nrecvs);CHKERRQ(ierr); 2653e37c6257SHong Zhang ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr); 2654e37c6257SHong Zhang */ 2655b4617e5dSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 2656b4617e5dSHong Zhang 2657b4617e5dSHong Zhang /* copy mat's local entries into the buffers */ 2658b4617e5dSHong Zhang if (reuse == MAT_INITIAL_MATRIX) { 2659b4617e5dSHong Zhang rownz_max = 0; 2660b4617e5dSHong Zhang rptr = sbuf_j; 2661b4617e5dSHong Zhang cols = sbuf_j + rend-rstart + 1; 2662b4617e5dSHong Zhang vals = sbuf_a; 2663b4617e5dSHong Zhang rptr[0] = 0; 2664b4617e5dSHong Zhang for (i=0; i<rend-rstart; i++) { 2665b4617e5dSHong Zhang row = i + rstart; 2666b4617e5dSHong Zhang nzA = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i]; 2667b4617e5dSHong Zhang ncols = nzA + nzB; 2668b4617e5dSHong Zhang cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i]; 2669b4617e5dSHong Zhang aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i]; 2670b4617e5dSHong Zhang /* load the column indices for this row into cols */ 2671b4617e5dSHong Zhang lwrite = 0; 2672b4617e5dSHong Zhang for (l=0; l<nzB; l++) { 2673b4617e5dSHong Zhang if ((ctmp = 
bmap[cworkB[l]]) < cstart) { 2674b4617e5dSHong Zhang vals[lwrite] = aworkB[l]; 2675b4617e5dSHong Zhang cols[lwrite++] = ctmp; 2676b4617e5dSHong Zhang } 2677b4617e5dSHong Zhang } 2678b4617e5dSHong Zhang for (l=0; l<nzA; l++) { 2679b4617e5dSHong Zhang vals[lwrite] = aworkA[l]; 2680b4617e5dSHong Zhang cols[lwrite++] = cstart + cworkA[l]; 2681b4617e5dSHong Zhang } 2682b4617e5dSHong Zhang for (l=0; l<nzB; l++) { 2683b4617e5dSHong Zhang if ((ctmp = bmap[cworkB[l]]) >= cend) { 2684b4617e5dSHong Zhang vals[lwrite] = aworkB[l]; 2685b4617e5dSHong Zhang cols[lwrite++] = ctmp; 2686b4617e5dSHong Zhang } 2687b4617e5dSHong Zhang } 2688b4617e5dSHong Zhang vals += ncols; 2689b4617e5dSHong Zhang cols += ncols; 2690b4617e5dSHong Zhang rptr[i+1] = rptr[i] + ncols; 2691b4617e5dSHong Zhang if (rownz_max < ncols) rownz_max = ncols; 2692b4617e5dSHong Zhang } 2693b4617e5dSHong Zhang if (rptr[rend-rstart] != a->nz + b->nz) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB, "rptr[%d] %d != %d + %d",rend-rstart,rptr[rend-rstart+1],a->nz,b->nz); 2694b4617e5dSHong Zhang } else { /* only copy matrix values into sbuf_a */ 2695b4617e5dSHong Zhang rptr = sbuf_j; 2696b4617e5dSHong Zhang vals = sbuf_a; 2697b4617e5dSHong Zhang rptr[0] = 0; 2698b4617e5dSHong Zhang for (i=0; i<rend-rstart; i++) { 2699b4617e5dSHong Zhang row = i + rstart; 2700b4617e5dSHong Zhang nzA = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i]; 2701b4617e5dSHong Zhang ncols = nzA + nzB; 2702b4617e5dSHong Zhang cworkB = b->j + b->i[i]; 2703b4617e5dSHong Zhang aworkA = a->a + a->i[i]; 2704b4617e5dSHong Zhang aworkB = b->a + b->i[i]; 2705b4617e5dSHong Zhang lwrite = 0; 2706b4617e5dSHong Zhang for (l=0; l<nzB; l++) { 2707b4617e5dSHong Zhang if ((ctmp = bmap[cworkB[l]]) < cstart) vals[lwrite++] = aworkB[l]; 2708b4617e5dSHong Zhang } 2709b4617e5dSHong Zhang for (l=0; l<nzA; l++) vals[lwrite++] = aworkA[l]; 2710b4617e5dSHong Zhang for (l=0; l<nzB; l++) { 2711b4617e5dSHong Zhang if ((ctmp = bmap[cworkB[l]]) >= cend) vals[lwrite++] = aworkB[l]; 
2712b4617e5dSHong Zhang } 2713b4617e5dSHong Zhang vals += ncols; 2714b4617e5dSHong Zhang rptr[i+1] = rptr[i] + ncols; 2715b4617e5dSHong Zhang } 2716b4617e5dSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 2717b4617e5dSHong Zhang 2718b4617e5dSHong Zhang /* send nzlocal to others, and recv other's nzlocal */ 2719b4617e5dSHong Zhang /*--------------------------------------------------*/ 2720b4617e5dSHong Zhang if (reuse == MAT_INITIAL_MATRIX) { 2721b4617e5dSHong Zhang ierr = PetscMalloc2(3*(nsends + nrecvs)+1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr); 2722b4617e5dSHong Zhang 2723b4617e5dSHong Zhang s_waits2 = s_waits3 + nsends; 2724b4617e5dSHong Zhang s_waits1 = s_waits2 + nsends; 2725b4617e5dSHong Zhang r_waits1 = s_waits1 + nsends; 2726b4617e5dSHong Zhang r_waits2 = r_waits1 + nrecvs; 2727b4617e5dSHong Zhang r_waits3 = r_waits2 + nrecvs; 2728b4617e5dSHong Zhang } else { 2729b4617e5dSHong Zhang ierr = PetscMalloc2(nsends + nrecvs +1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr); 2730b4617e5dSHong Zhang 2731b4617e5dSHong Zhang r_waits3 = s_waits3 + nsends; 2732b4617e5dSHong Zhang } 2733b4617e5dSHong Zhang 2734b4617e5dSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag3);CHKERRQ(ierr); 2735b4617e5dSHong Zhang if (reuse == MAT_INITIAL_MATRIX) { 2736b4617e5dSHong Zhang /* get new tags to keep the communication clean */ 2737b4617e5dSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag1);CHKERRQ(ierr); 2738b4617e5dSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag2);CHKERRQ(ierr); 2739b4617e5dSHong Zhang ierr = PetscMalloc4(nsends,PetscInt,&sbuf_nz,nrecvs,PetscInt,&rbuf_nz,nrecvs,PetscInt*,&rbuf_j,nrecvs,PetscScalar*,&rbuf_a);CHKERRQ(ierr); 2740b4617e5dSHong Zhang 2741b4617e5dSHong Zhang /* post receives of other's nzlocal */ 2742b4617e5dSHong Zhang for (i=0; i<nrecvs; i++) { 2743b4617e5dSHong Zhang ierr = 
MPI_Irecv(rbuf_nz+i,1,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,r_waits1+i);CHKERRQ(ierr); 2744b4617e5dSHong Zhang } 2745b4617e5dSHong Zhang /* send nzlocal to others */ 2746b4617e5dSHong Zhang for (i=0; i<nsends; i++) { 2747b4617e5dSHong Zhang sbuf_nz[i] = nzlocal; 2748b4617e5dSHong Zhang ierr = MPI_Isend(sbuf_nz+i,1,MPIU_INT,send_rank[i],tag1,comm,s_waits1+i);CHKERRQ(ierr); 2749b4617e5dSHong Zhang } 2750b4617e5dSHong Zhang /* wait on receives of nzlocal; allocate space for rbuf_j, rbuf_a */ 2751b4617e5dSHong Zhang count = nrecvs; 2752b4617e5dSHong Zhang while (count) { 2753b4617e5dSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits1,&imdex,&recv_status);CHKERRQ(ierr); 2754b4617e5dSHong Zhang 2755b4617e5dSHong Zhang recv_rank[imdex] = recv_status.MPI_SOURCE; 2756b4617e5dSHong Zhang /* allocate rbuf_a and rbuf_j; then post receives of rbuf_j */ 2757b4617e5dSHong Zhang ierr = PetscMalloc((rbuf_nz[imdex]+1)*sizeof(PetscScalar),&rbuf_a[imdex]);CHKERRQ(ierr); 2758b4617e5dSHong Zhang 2759b4617e5dSHong Zhang i = rowrange[recv_status.MPI_SOURCE+1] - rowrange[recv_status.MPI_SOURCE]; /* number of expected mat->i */ 2760b4617e5dSHong Zhang 2761b4617e5dSHong Zhang rbuf_nz[imdex] += i + 2; 2762b4617e5dSHong Zhang 2763b4617e5dSHong Zhang ierr = PetscMalloc(rbuf_nz[imdex]*sizeof(PetscInt),&rbuf_j[imdex]);CHKERRQ(ierr); 2764b4617e5dSHong Zhang ierr = MPI_Irecv(rbuf_j[imdex],rbuf_nz[imdex],MPIU_INT,recv_status.MPI_SOURCE,tag2,comm,r_waits2+imdex);CHKERRQ(ierr); 2765b4617e5dSHong Zhang count--; 2766b4617e5dSHong Zhang } 2767b4617e5dSHong Zhang /* wait on sends of nzlocal */ 2768b4617e5dSHong Zhang if (nsends) {ierr = MPI_Waitall(nsends,s_waits1,send_status);CHKERRQ(ierr);} 2769b4617e5dSHong Zhang /* send mat->i,j to others, and recv from other's */ 2770b4617e5dSHong Zhang /*------------------------------------------------*/ 2771b4617e5dSHong Zhang for (i=0; i<nsends; i++) { 2772b4617e5dSHong Zhang j = nzlocal + rowrange[rank+1] - rowrange[rank] + 1; 2773b4617e5dSHong Zhang ierr = 
MPI_Isend(sbuf_j,j,MPIU_INT,send_rank[i],tag2,comm,s_waits2+i);CHKERRQ(ierr); 2774b4617e5dSHong Zhang } 2775b4617e5dSHong Zhang /* wait on receives of mat->i,j */ 2776b4617e5dSHong Zhang /*------------------------------*/ 2777b4617e5dSHong Zhang count = nrecvs; 2778b4617e5dSHong Zhang while (count) { 2779b4617e5dSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits2,&imdex,&recv_status);CHKERRQ(ierr); 2780b4617e5dSHong Zhang if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(PETSC_COMM_SELF,1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE); 2781b4617e5dSHong Zhang count--; 2782b4617e5dSHong Zhang } 2783b4617e5dSHong Zhang /* wait on sends of mat->i,j */ 2784b4617e5dSHong Zhang /*---------------------------*/ 2785b4617e5dSHong Zhang if (nsends) { 2786b4617e5dSHong Zhang ierr = MPI_Waitall(nsends,s_waits2,send_status);CHKERRQ(ierr); 2787b4617e5dSHong Zhang } 2788b4617e5dSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 2789b4617e5dSHong Zhang 2790b4617e5dSHong Zhang /* post receives, send and receive mat->a */ 2791b4617e5dSHong Zhang /*----------------------------------------*/ 2792b4617e5dSHong Zhang for (imdex=0; imdex<nrecvs; imdex++) { 2793b4617e5dSHong Zhang ierr = MPI_Irecv(rbuf_a[imdex],rbuf_nz[imdex],MPIU_SCALAR,recv_rank[imdex],tag3,comm,r_waits3+imdex);CHKERRQ(ierr); 2794b4617e5dSHong Zhang } 2795b4617e5dSHong Zhang for (i=0; i<nsends; i++) { 2796b4617e5dSHong Zhang ierr = MPI_Isend(sbuf_a,nzlocal,MPIU_SCALAR,send_rank[i],tag3,comm,s_waits3+i);CHKERRQ(ierr); 2797b4617e5dSHong Zhang } 2798b4617e5dSHong Zhang count = nrecvs; 2799b4617e5dSHong Zhang while (count) { 2800b4617e5dSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits3,&imdex,&recv_status);CHKERRQ(ierr); 2801b4617e5dSHong Zhang if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(PETSC_COMM_SELF,1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE); 2802b4617e5dSHong Zhang count--; 2803b4617e5dSHong Zhang } 2804b4617e5dSHong Zhang if (nsends) { 
2805b4617e5dSHong Zhang ierr = MPI_Waitall(nsends,s_waits3,send_status);CHKERRQ(ierr); 2806b4617e5dSHong Zhang } 2807b4617e5dSHong Zhang 2808b4617e5dSHong Zhang ierr = PetscFree2(s_waits3,send_status);CHKERRQ(ierr); 2809b4617e5dSHong Zhang 2810b4617e5dSHong Zhang /* create redundant matrix */ 2811b4617e5dSHong Zhang /*-------------------------*/ 2812b4617e5dSHong Zhang if (reuse == MAT_INITIAL_MATRIX) { 281319171117SHong Zhang const PetscInt *range; 281419171117SHong Zhang PetscInt rstart_sub,rend_sub,mloc_sub; 281519171117SHong Zhang 2816b4617e5dSHong Zhang /* compute rownz_max for preallocation */ 2817b4617e5dSHong Zhang for (imdex=0; imdex<nrecvs; imdex++) { 2818b4617e5dSHong Zhang j = rowrange[recv_rank[imdex]+1] - rowrange[recv_rank[imdex]]; 2819b4617e5dSHong Zhang rptr = rbuf_j[imdex]; 2820b4617e5dSHong Zhang for (i=0; i<j; i++) { 2821b4617e5dSHong Zhang ncols = rptr[i+1] - rptr[i]; 2822b4617e5dSHong Zhang if (rownz_max < ncols) rownz_max = ncols; 2823b4617e5dSHong Zhang } 2824b4617e5dSHong Zhang } 2825b4617e5dSHong Zhang 2826b4617e5dSHong Zhang ierr = MatCreate(subcomm,&C);CHKERRQ(ierr); 282719171117SHong Zhang 282819171117SHong Zhang /* get local size of redundant matrix 282919171117SHong Zhang - mloc_sub is chosen for PETSC_SUBCOMM_INTERLACED, works for other types, but may not efficient! 
*/ 283019171117SHong Zhang ierr = MatGetOwnershipRanges(mat,&range);CHKERRQ(ierr); 283119171117SHong Zhang rstart_sub = range[nsubcomm*subrank]; 283219171117SHong Zhang if (subrank+1 < subsize) { /* not the last proc in subcomm */ 283319171117SHong Zhang rend_sub = range[nsubcomm*(subrank+1)]; 283419171117SHong Zhang } else { 283519171117SHong Zhang rend_sub = mat->rmap->N; 283619171117SHong Zhang } 283719171117SHong Zhang mloc_sub = rend_sub - rstart_sub; 283819171117SHong Zhang 283934d19554SHong Zhang if (M == N) { 2840b4617e5dSHong Zhang ierr = MatSetSizes(C,mloc_sub,mloc_sub,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr); 284134d19554SHong Zhang } else { /* non-square matrix */ 284234d19554SHong Zhang ierr = MatSetSizes(C,mloc_sub,PETSC_DECIDE,PETSC_DECIDE,mat->cmap->N);CHKERRQ(ierr); 284334d19554SHong Zhang } 2844b4617e5dSHong Zhang ierr = MatSetBlockSizes(C,mat->rmap->bs,mat->cmap->bs);CHKERRQ(ierr); 2845b4617e5dSHong Zhang ierr = MatSetFromOptions(C);CHKERRQ(ierr); 2846b4617e5dSHong Zhang ierr = MatSeqAIJSetPreallocation(C,rownz_max,NULL);CHKERRQ(ierr); 2847b4617e5dSHong Zhang ierr = MatMPIAIJSetPreallocation(C,rownz_max,NULL,rownz_max,NULL);CHKERRQ(ierr); 2848b4617e5dSHong Zhang } else { 2849b4617e5dSHong Zhang C = *matredundant; 2850b4617e5dSHong Zhang } 2851b4617e5dSHong Zhang 2852b4617e5dSHong Zhang /* insert local matrix entries */ 2853b4617e5dSHong Zhang rptr = sbuf_j; 2854b4617e5dSHong Zhang cols = sbuf_j + rend-rstart + 1; 2855b4617e5dSHong Zhang vals = sbuf_a; 2856b4617e5dSHong Zhang for (i=0; i<rend-rstart; i++) { 2857b4617e5dSHong Zhang row = i + rstart; 2858b4617e5dSHong Zhang ncols = rptr[i+1] - rptr[i]; 2859b4617e5dSHong Zhang ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr); 2860b4617e5dSHong Zhang vals += ncols; 2861b4617e5dSHong Zhang cols += ncols; 2862b4617e5dSHong Zhang } 2863b4617e5dSHong Zhang /* insert received matrix entries */ 2864b4617e5dSHong Zhang for (imdex=0; imdex<nrecvs; imdex++) { 2865b4617e5dSHong Zhang 
rstart = rowrange[recv_rank[imdex]]; 2866b4617e5dSHong Zhang rend = rowrange[recv_rank[imdex]+1]; 2867e37c6257SHong Zhang /* printf("[%d] insert rows %d - %d\n",rank,rstart,rend-1); */ 2868b4617e5dSHong Zhang rptr = rbuf_j[imdex]; 2869b4617e5dSHong Zhang cols = rbuf_j[imdex] + rend-rstart + 1; 2870b4617e5dSHong Zhang vals = rbuf_a[imdex]; 2871b4617e5dSHong Zhang for (i=0; i<rend-rstart; i++) { 2872b4617e5dSHong Zhang row = i + rstart; 2873b4617e5dSHong Zhang ncols = rptr[i+1] - rptr[i]; 2874b4617e5dSHong Zhang ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr); 2875b4617e5dSHong Zhang vals += ncols; 2876b4617e5dSHong Zhang cols += ncols; 2877b4617e5dSHong Zhang } 2878b4617e5dSHong Zhang } 2879b4617e5dSHong Zhang ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2880b4617e5dSHong Zhang ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2881b4617e5dSHong Zhang 2882b4617e5dSHong Zhang if (reuse == MAT_INITIAL_MATRIX) { 2883b4617e5dSHong Zhang *matredundant = C; 28845cc03489SHong Zhang 2885b4617e5dSHong Zhang /* create a supporting struct and attach it to C for reuse */ 2886b4617e5dSHong Zhang ierr = PetscNewLog(C,Mat_Redundant,&redund);CHKERRQ(ierr); 28875cc03489SHong Zhang if (subsize == 1) { 28885cc03489SHong Zhang Mat_SeqAIJ *c = (Mat_SeqAIJ*)C->data; 28895cc03489SHong Zhang c->redundant = redund; 28905cc03489SHong Zhang } else { 28915cc03489SHong Zhang Mat_MPIAIJ *c = (Mat_MPIAIJ*)C->data; 28925cc03489SHong Zhang c->redundant = redund; 28935cc03489SHong Zhang } 2894b4617e5dSHong Zhang 2895b4617e5dSHong Zhang redund->nzlocal = nzlocal; 2896b4617e5dSHong Zhang redund->nsends = nsends; 2897b4617e5dSHong Zhang redund->nrecvs = nrecvs; 2898b4617e5dSHong Zhang redund->send_rank = send_rank; 2899b4617e5dSHong Zhang redund->recv_rank = recv_rank; 2900b4617e5dSHong Zhang redund->sbuf_nz = sbuf_nz; 2901b4617e5dSHong Zhang redund->rbuf_nz = rbuf_nz; 2902b4617e5dSHong Zhang redund->sbuf_j = sbuf_j; 2903b4617e5dSHong Zhang 
redund->sbuf_a = sbuf_a; 2904b4617e5dSHong Zhang redund->rbuf_j = rbuf_j; 2905b4617e5dSHong Zhang redund->rbuf_a = rbuf_a; 29060b291e46SHong Zhang redund->psubcomm = NULL; 2907b4617e5dSHong Zhang 2908b4617e5dSHong Zhang redund->Destroy = C->ops->destroy; 2909b4617e5dSHong Zhang C->ops->destroy = MatDestroy_MatRedundant; 2910b4617e5dSHong Zhang } 2911b4617e5dSHong Zhang PetscFunctionReturn(0); 2912b4617e5dSHong Zhang } 2913b4617e5dSHong Zhang 2914b4617e5dSHong Zhang #undef __FUNCT__ 291569db28dcSHong Zhang #define __FUNCT__ "MatGetRedundantMatrix_MPIAIJ" 2916b2bf6370SHong Zhang PetscErrorCode MatGetRedundantMatrix_MPIAIJ(Mat mat,PetscInt nsubcomm,MPI_Comm subcomm,MatReuse reuse,Mat *matredundant) 291769db28dcSHong Zhang { 2918f38d543fSHong Zhang PetscErrorCode ierr; 2919c79c5527SHong Zhang MPI_Comm comm; 2920c79c5527SHong Zhang PetscMPIInt size,subsize; 2921c79c5527SHong Zhang PetscInt mloc_sub,rstart,rend,M=mat->rmap->N,N=mat->cmap->N; 2922c79c5527SHong Zhang Mat_Redundant *redund=NULL; 2923*1f2d8ef4SHong Zhang PetscSubcomm psubcomm=NULL; 2924473f7991SHong Zhang MPI_Comm subcomm_in=subcomm; 2925*1f2d8ef4SHong Zhang Mat *matseq; 2926*1f2d8ef4SHong Zhang IS isrow,iscol; 292769db28dcSHong Zhang 292869db28dcSHong Zhang PetscFunctionBegin; 2929*1f2d8ef4SHong Zhang if (subcomm_in == MPI_COMM_NULL) { /* user does not provide subcomm */ 2930c79c5527SHong Zhang if (reuse == MAT_INITIAL_MATRIX) { 2931*1f2d8ef4SHong Zhang /* create psubcomm, then get subcomm */ 2932ce94432eSBarry Smith ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr); 293369db28dcSHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 29347cb6ea77SHong Zhang if (nsubcomm < 1 || nsubcomm > size) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"nsubcomm must between 1 and %D",size); 29357cb6ea77SHong Zhang 2936d3b23db5SHong Zhang ierr = PetscSubcommCreate(comm,&psubcomm);CHKERRQ(ierr); 2937d3b23db5SHong Zhang ierr = PetscSubcommSetNumber(psubcomm,nsubcomm);CHKERRQ(ierr); 2938c79c5527SHong Zhang 
ierr = PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr); 293919171117SHong Zhang ierr = PetscSubcommSetFromOptions(psubcomm);CHKERRQ(ierr); 2940c79c5527SHong Zhang subcomm = psubcomm->comm; 29417cb6ea77SHong Zhang } else { /* retrieve psubcomm and subcomm */ 2942c79c5527SHong Zhang ierr = PetscObjectGetComm((PetscObject)(*matredundant),&subcomm);CHKERRQ(ierr); 2943c79c5527SHong Zhang ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr); 2944c79c5527SHong Zhang if (subsize == 1) { 2945c79c5527SHong Zhang Mat_SeqAIJ *c = (Mat_SeqAIJ*)(*matredundant)->data; 29467cb6ea77SHong Zhang redund = c->redundant; 2947c79c5527SHong Zhang } else { 2948c79c5527SHong Zhang Mat_MPIAIJ *c = (Mat_MPIAIJ*)(*matredundant)->data; 29497cb6ea77SHong Zhang redund = c->redundant; 2950c79c5527SHong Zhang } 29517cb6ea77SHong Zhang psubcomm = redund->psubcomm; 2952fd7037dcSHong Zhang } 2953*1f2d8ef4SHong Zhang if (psubcomm->type == PETSC_SUBCOMM_INTERLACED) { 29547cb6ea77SHong Zhang ierr = MatGetRedundantMatrix_MPIAIJ_interlaced(mat,nsubcomm,subcomm,reuse,matredundant);CHKERRQ(ierr); 2955*1f2d8ef4SHong Zhang if (reuse == MAT_INITIAL_MATRIX) { /* psubcomm is created in this routine, free it in MatDestroy_MatRedundant() */ 2956*1f2d8ef4SHong Zhang ierr = MPI_Comm_size(psubcomm->comm,&subsize);CHKERRQ(ierr); 2957*1f2d8ef4SHong Zhang if (subsize == 1) { 2958*1f2d8ef4SHong Zhang Mat_SeqAIJ *c = (Mat_SeqAIJ*)(*matredundant)->data; 2959*1f2d8ef4SHong Zhang c->redundant->psubcomm = psubcomm; 2960*1f2d8ef4SHong Zhang } else { 2961*1f2d8ef4SHong Zhang Mat_MPIAIJ *c = (Mat_MPIAIJ*)(*matredundant)->data; 2962*1f2d8ef4SHong Zhang c->redundant->psubcomm = psubcomm ; 2963*1f2d8ef4SHong Zhang } 2964*1f2d8ef4SHong Zhang } 2965*1f2d8ef4SHong Zhang PetscFunctionReturn(0); 2966c79c5527SHong Zhang } 2967c79c5527SHong Zhang } 2968e37c6257SHong Zhang 2969*1f2d8ef4SHong Zhang /* use MPI subcomm via MatGetSubMatrices(); use subcomm_in or psubcomm->comm (psubcomm->type != INTERLACED) */ 
29707cb6ea77SHong Zhang ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr); 2971c79c5527SHong Zhang if (reuse == MAT_INITIAL_MATRIX) { 2972c79c5527SHong Zhang /* create a local sequential matrix matseq[0] */ 2973c79c5527SHong Zhang mloc_sub = PETSC_DECIDE; 2974c79c5527SHong Zhang ierr = PetscSplitOwnership(subcomm,&mloc_sub,&M);CHKERRQ(ierr); 2975c79c5527SHong Zhang ierr = MPI_Scan(&mloc_sub,&rend,1,MPIU_INT,MPI_SUM,subcomm);CHKERRQ(ierr); 2976c79c5527SHong Zhang rstart = rend - mloc_sub; 2977c79c5527SHong Zhang ierr = ISCreateStride(PETSC_COMM_SELF,mloc_sub,rstart,1,&isrow);CHKERRQ(ierr); 2978c79c5527SHong Zhang ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iscol);CHKERRQ(ierr); 2979c79c5527SHong Zhang } else { /* reuse == MAT_REUSE_MATRIX */ 2980c79c5527SHong Zhang if (subsize == 1) { 2981c79c5527SHong Zhang Mat_SeqAIJ *c = (Mat_SeqAIJ*)(*matredundant)->data; 2982c79c5527SHong Zhang redund = c->redundant; 2983c79c5527SHong Zhang } else { 2984c79c5527SHong Zhang Mat_MPIAIJ *c = (Mat_MPIAIJ*)(*matredundant)->data; 2985c79c5527SHong Zhang redund = c->redundant; 2986c79c5527SHong Zhang } 2987c79c5527SHong Zhang 2988c79c5527SHong Zhang isrow = redund->isrow; 2989c79c5527SHong Zhang iscol = redund->iscol; 2990c79c5527SHong Zhang matseq = redund->matseq; 2991c79c5527SHong Zhang } 2992c79c5527SHong Zhang ierr = MatGetSubMatrices(mat,1,&isrow,&iscol,reuse,&matseq);CHKERRQ(ierr); 2993c79c5527SHong Zhang ierr = MatCreateMPIAIJConcatenateSeqAIJ(subcomm,matseq[0],PETSC_DECIDE,reuse,matredundant);CHKERRQ(ierr); 2994c79c5527SHong Zhang 2995c79c5527SHong Zhang if (reuse == MAT_INITIAL_MATRIX) { 2996c79c5527SHong Zhang /* create a supporting struct and attach it to C for reuse */ 2997c79c5527SHong Zhang ierr = PetscNewLog(*matredundant,Mat_Redundant,&redund);CHKERRQ(ierr); 2998c79c5527SHong Zhang if (subsize == 1) { 2999c79c5527SHong Zhang Mat_SeqAIJ *c = (Mat_SeqAIJ*)(*matredundant)->data; 3000c79c5527SHong Zhang c->redundant = redund; 3001c79c5527SHong Zhang } else { 
3002c79c5527SHong Zhang Mat_MPIAIJ *c = (Mat_MPIAIJ*)(*matredundant)->data; 3003c79c5527SHong Zhang c->redundant = redund; 3004c79c5527SHong Zhang } 3005c79c5527SHong Zhang redund->isrow = isrow; 3006c79c5527SHong Zhang redund->iscol = iscol; 3007c79c5527SHong Zhang redund->matseq = matseq; 3008*1f2d8ef4SHong Zhang redund->psubcomm = psubcomm; 3009c79c5527SHong Zhang redund->Destroy = (*matredundant)->ops->destroy; 3010c79c5527SHong Zhang (*matredundant)->ops->destroy = MatDestroy_MatRedundant; 3011c79c5527SHong Zhang } 301269db28dcSHong Zhang PetscFunctionReturn(0); 301369db28dcSHong Zhang } 301469db28dcSHong Zhang 301503bc72f1SMatthew Knepley #undef __FUNCT__ 3016c91732d9SHong Zhang #define __FUNCT__ "MatGetRowMaxAbs_MPIAIJ" 3017c91732d9SHong Zhang PetscErrorCode MatGetRowMaxAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 3018c91732d9SHong Zhang { 3019c91732d9SHong Zhang Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 3020c91732d9SHong Zhang PetscErrorCode ierr; 3021c91732d9SHong Zhang PetscInt i,*idxb = 0; 3022c91732d9SHong Zhang PetscScalar *va,*vb; 3023c91732d9SHong Zhang Vec vtmp; 3024c91732d9SHong Zhang 3025c91732d9SHong Zhang PetscFunctionBegin; 3026c91732d9SHong Zhang ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr); 3027c91732d9SHong Zhang ierr = VecGetArray(v,&va);CHKERRQ(ierr); 3028c91732d9SHong Zhang if (idx) { 3029192daf7cSBarry Smith for (i=0; i<A->rmap->n; i++) { 3030d0f46423SBarry Smith if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart; 3031c91732d9SHong Zhang } 3032c91732d9SHong Zhang } 3033c91732d9SHong Zhang 3034d0f46423SBarry Smith ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr); 3035c91732d9SHong Zhang if (idx) { 3036d0f46423SBarry Smith ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr); 3037c91732d9SHong Zhang } 3038c91732d9SHong Zhang ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr); 3039c91732d9SHong Zhang ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr); 3040c91732d9SHong Zhang 3041d0f46423SBarry Smith for (i=0; 
i<A->rmap->n; i++) { 3042c91732d9SHong Zhang if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) { 3043c91732d9SHong Zhang va[i] = vb[i]; 3044c91732d9SHong Zhang if (idx) idx[i] = a->garray[idxb[i]]; 3045c91732d9SHong Zhang } 3046c91732d9SHong Zhang } 3047c91732d9SHong Zhang 3048c91732d9SHong Zhang ierr = VecRestoreArray(v,&va);CHKERRQ(ierr); 3049c91732d9SHong Zhang ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr); 3050c91732d9SHong Zhang ierr = PetscFree(idxb);CHKERRQ(ierr); 30516bf464f9SBarry Smith ierr = VecDestroy(&vtmp);CHKERRQ(ierr); 3052c91732d9SHong Zhang PetscFunctionReturn(0); 3053c91732d9SHong Zhang } 3054c91732d9SHong Zhang 3055c91732d9SHong Zhang #undef __FUNCT__ 3056c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMinAbs_MPIAIJ" 3057c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMinAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 3058c87e5d42SMatthew Knepley { 3059c87e5d42SMatthew Knepley Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 3060c87e5d42SMatthew Knepley PetscErrorCode ierr; 3061c87e5d42SMatthew Knepley PetscInt i,*idxb = 0; 3062c87e5d42SMatthew Knepley PetscScalar *va,*vb; 3063c87e5d42SMatthew Knepley Vec vtmp; 3064c87e5d42SMatthew Knepley 3065c87e5d42SMatthew Knepley PetscFunctionBegin; 3066c87e5d42SMatthew Knepley ierr = MatGetRowMinAbs(a->A,v,idx);CHKERRQ(ierr); 3067c87e5d42SMatthew Knepley ierr = VecGetArray(v,&va);CHKERRQ(ierr); 3068c87e5d42SMatthew Knepley if (idx) { 3069c87e5d42SMatthew Knepley for (i=0; i<A->cmap->n; i++) { 3070c87e5d42SMatthew Knepley if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart; 3071c87e5d42SMatthew Knepley } 3072c87e5d42SMatthew Knepley } 3073c87e5d42SMatthew Knepley 3074c87e5d42SMatthew Knepley ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr); 3075c87e5d42SMatthew Knepley if (idx) { 3076c87e5d42SMatthew Knepley ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr); 3077c87e5d42SMatthew Knepley } 3078c87e5d42SMatthew Knepley ierr = MatGetRowMinAbs(a->B,vtmp,idxb);CHKERRQ(ierr); 
3079c87e5d42SMatthew Knepley ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr); 3080c87e5d42SMatthew Knepley 3081c87e5d42SMatthew Knepley for (i=0; i<A->rmap->n; i++) { 3082c87e5d42SMatthew Knepley if (PetscAbsScalar(va[i]) > PetscAbsScalar(vb[i])) { 3083c87e5d42SMatthew Knepley va[i] = vb[i]; 3084c87e5d42SMatthew Knepley if (idx) idx[i] = a->garray[idxb[i]]; 3085c87e5d42SMatthew Knepley } 3086c87e5d42SMatthew Knepley } 3087c87e5d42SMatthew Knepley 3088c87e5d42SMatthew Knepley ierr = VecRestoreArray(v,&va);CHKERRQ(ierr); 3089c87e5d42SMatthew Knepley ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr); 3090c87e5d42SMatthew Knepley ierr = PetscFree(idxb);CHKERRQ(ierr); 30916bf464f9SBarry Smith ierr = VecDestroy(&vtmp);CHKERRQ(ierr); 3092c87e5d42SMatthew Knepley PetscFunctionReturn(0); 3093c87e5d42SMatthew Knepley } 3094c87e5d42SMatthew Knepley 3095c87e5d42SMatthew Knepley #undef __FUNCT__ 309603bc72f1SMatthew Knepley #define __FUNCT__ "MatGetRowMin_MPIAIJ" 309703bc72f1SMatthew Knepley PetscErrorCode MatGetRowMin_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 309803bc72f1SMatthew Knepley { 309903bc72f1SMatthew Knepley Mat_MPIAIJ *mat = (Mat_MPIAIJ*) A->data; 3100d0f46423SBarry Smith PetscInt n = A->rmap->n; 3101d0f46423SBarry Smith PetscInt cstart = A->cmap->rstart; 310203bc72f1SMatthew Knepley PetscInt *cmap = mat->garray; 310303bc72f1SMatthew Knepley PetscInt *diagIdx, *offdiagIdx; 310403bc72f1SMatthew Knepley Vec diagV, offdiagV; 310503bc72f1SMatthew Knepley PetscScalar *a, *diagA, *offdiagA; 310603bc72f1SMatthew Knepley PetscInt r; 310703bc72f1SMatthew Knepley PetscErrorCode ierr; 310803bc72f1SMatthew Knepley 310903bc72f1SMatthew Knepley PetscFunctionBegin; 311003bc72f1SMatthew Knepley ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr); 3111ce94432eSBarry Smith ierr = VecCreateSeq(PetscObjectComm((PetscObject)A), n, &diagV);CHKERRQ(ierr); 3112ce94432eSBarry Smith ierr = VecCreateSeq(PetscObjectComm((PetscObject)A), n, &offdiagV);CHKERRQ(ierr); 
311303bc72f1SMatthew Knepley ierr = MatGetRowMin(mat->A, diagV, diagIdx);CHKERRQ(ierr); 311403bc72f1SMatthew Knepley ierr = MatGetRowMin(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr); 311503bc72f1SMatthew Knepley ierr = VecGetArray(v, &a);CHKERRQ(ierr); 311603bc72f1SMatthew Knepley ierr = VecGetArray(diagV, &diagA);CHKERRQ(ierr); 311703bc72f1SMatthew Knepley ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr); 311803bc72f1SMatthew Knepley for (r = 0; r < n; ++r) { 3119028cd4eaSSatish Balay if (PetscAbsScalar(diagA[r]) <= PetscAbsScalar(offdiagA[r])) { 312003bc72f1SMatthew Knepley a[r] = diagA[r]; 312103bc72f1SMatthew Knepley idx[r] = cstart + diagIdx[r]; 312203bc72f1SMatthew Knepley } else { 312303bc72f1SMatthew Knepley a[r] = offdiagA[r]; 312403bc72f1SMatthew Knepley idx[r] = cmap[offdiagIdx[r]]; 312503bc72f1SMatthew Knepley } 312603bc72f1SMatthew Knepley } 312703bc72f1SMatthew Knepley ierr = VecRestoreArray(v, &a);CHKERRQ(ierr); 312803bc72f1SMatthew Knepley ierr = VecRestoreArray(diagV, &diagA);CHKERRQ(ierr); 312903bc72f1SMatthew Knepley ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr); 31306bf464f9SBarry Smith ierr = VecDestroy(&diagV);CHKERRQ(ierr); 31316bf464f9SBarry Smith ierr = VecDestroy(&offdiagV);CHKERRQ(ierr); 313203bc72f1SMatthew Knepley ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr); 313303bc72f1SMatthew Knepley PetscFunctionReturn(0); 313403bc72f1SMatthew Knepley } 313503bc72f1SMatthew Knepley 31365494a064SHong Zhang #undef __FUNCT__ 3137c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMax_MPIAIJ" 3138c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMax_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 3139c87e5d42SMatthew Knepley { 3140c87e5d42SMatthew Knepley Mat_MPIAIJ *mat = (Mat_MPIAIJ*) A->data; 3141c87e5d42SMatthew Knepley PetscInt n = A->rmap->n; 3142c87e5d42SMatthew Knepley PetscInt cstart = A->cmap->rstart; 3143c87e5d42SMatthew Knepley PetscInt *cmap = mat->garray; 3144c87e5d42SMatthew Knepley PetscInt *diagIdx, *offdiagIdx; 
3145c87e5d42SMatthew Knepley Vec diagV, offdiagV; 3146c87e5d42SMatthew Knepley PetscScalar *a, *diagA, *offdiagA; 3147c87e5d42SMatthew Knepley PetscInt r; 3148c87e5d42SMatthew Knepley PetscErrorCode ierr; 3149c87e5d42SMatthew Knepley 3150c87e5d42SMatthew Knepley PetscFunctionBegin; 3151c87e5d42SMatthew Knepley ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr); 3152d11e49fbSSatish Balay ierr = VecCreateSeq(PETSC_COMM_SELF, n, &diagV);CHKERRQ(ierr); 3153d11e49fbSSatish Balay ierr = VecCreateSeq(PETSC_COMM_SELF, n, &offdiagV);CHKERRQ(ierr); 3154c87e5d42SMatthew Knepley ierr = MatGetRowMax(mat->A, diagV, diagIdx);CHKERRQ(ierr); 3155c87e5d42SMatthew Knepley ierr = MatGetRowMax(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr); 3156c87e5d42SMatthew Knepley ierr = VecGetArray(v, &a);CHKERRQ(ierr); 3157c87e5d42SMatthew Knepley ierr = VecGetArray(diagV, &diagA);CHKERRQ(ierr); 3158c87e5d42SMatthew Knepley ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr); 3159c87e5d42SMatthew Knepley for (r = 0; r < n; ++r) { 3160c87e5d42SMatthew Knepley if (PetscAbsScalar(diagA[r]) >= PetscAbsScalar(offdiagA[r])) { 3161c87e5d42SMatthew Knepley a[r] = diagA[r]; 3162c87e5d42SMatthew Knepley idx[r] = cstart + diagIdx[r]; 3163c87e5d42SMatthew Knepley } else { 3164c87e5d42SMatthew Knepley a[r] = offdiagA[r]; 3165c87e5d42SMatthew Knepley idx[r] = cmap[offdiagIdx[r]]; 3166c87e5d42SMatthew Knepley } 3167c87e5d42SMatthew Knepley } 3168c87e5d42SMatthew Knepley ierr = VecRestoreArray(v, &a);CHKERRQ(ierr); 3169c87e5d42SMatthew Knepley ierr = VecRestoreArray(diagV, &diagA);CHKERRQ(ierr); 3170c87e5d42SMatthew Knepley ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr); 31716bf464f9SBarry Smith ierr = VecDestroy(&diagV);CHKERRQ(ierr); 31726bf464f9SBarry Smith ierr = VecDestroy(&offdiagV);CHKERRQ(ierr); 3173c87e5d42SMatthew Knepley ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr); 3174c87e5d42SMatthew Knepley PetscFunctionReturn(0); 3175c87e5d42SMatthew Knepley } 
3176c87e5d42SMatthew Knepley 3177c87e5d42SMatthew Knepley #undef __FUNCT__ 3178d1adec66SJed Brown #define __FUNCT__ "MatGetSeqNonzeroStructure_MPIAIJ" 3179d1adec66SJed Brown PetscErrorCode MatGetSeqNonzeroStructure_MPIAIJ(Mat mat,Mat *newmat) 31805494a064SHong Zhang { 31815494a064SHong Zhang PetscErrorCode ierr; 3182f6d58c54SBarry Smith Mat *dummy; 31835494a064SHong Zhang 31845494a064SHong Zhang PetscFunctionBegin; 3185f6d58c54SBarry Smith ierr = MatGetSubMatrix_MPIAIJ_All(mat,MAT_DO_NOT_GET_VALUES,MAT_INITIAL_MATRIX,&dummy);CHKERRQ(ierr); 3186f6d58c54SBarry Smith *newmat = *dummy; 3187f6d58c54SBarry Smith ierr = PetscFree(dummy);CHKERRQ(ierr); 31885494a064SHong Zhang PetscFunctionReturn(0); 31895494a064SHong Zhang } 31905494a064SHong Zhang 31917087cfbeSBarry Smith extern PetscErrorCode MatFDColoringApply_AIJ(Mat,MatFDColoring,Vec,MatStructure*,void*); 3192bbead8a2SBarry Smith 3193bbead8a2SBarry Smith #undef __FUNCT__ 3194bbead8a2SBarry Smith #define __FUNCT__ "MatInvertBlockDiagonal_MPIAIJ" 3195713ccfa9SJed Brown PetscErrorCode MatInvertBlockDiagonal_MPIAIJ(Mat A,const PetscScalar **values) 3196bbead8a2SBarry Smith { 3197bbead8a2SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*) A->data; 3198bbead8a2SBarry Smith PetscErrorCode ierr; 3199bbead8a2SBarry Smith 3200bbead8a2SBarry Smith PetscFunctionBegin; 3201bbead8a2SBarry Smith ierr = MatInvertBlockDiagonal(a->A,values);CHKERRQ(ierr); 3202bbead8a2SBarry Smith PetscFunctionReturn(0); 3203bbead8a2SBarry Smith } 3204bbead8a2SBarry Smith 320573a71a0fSBarry Smith #undef __FUNCT__ 320673a71a0fSBarry Smith #define __FUNCT__ "MatSetRandom_MPIAIJ" 320773a71a0fSBarry Smith static PetscErrorCode MatSetRandom_MPIAIJ(Mat x,PetscRandom rctx) 320873a71a0fSBarry Smith { 320973a71a0fSBarry Smith PetscErrorCode ierr; 321073a71a0fSBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)x->data; 321173a71a0fSBarry Smith 321273a71a0fSBarry Smith PetscFunctionBegin; 321373a71a0fSBarry Smith ierr = MatSetRandom(aij->A,rctx);CHKERRQ(ierr); 321473a71a0fSBarry 
Smith ierr = MatSetRandom(aij->B,rctx);CHKERRQ(ierr); 321573a71a0fSBarry Smith ierr = MatAssemblyBegin(x,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 321673a71a0fSBarry Smith ierr = MatAssemblyEnd(x,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 321773a71a0fSBarry Smith PetscFunctionReturn(0); 321873a71a0fSBarry Smith } 3219bbead8a2SBarry Smith 32208a729477SBarry Smith /* -------------------------------------------------------------------*/ 3221cda55fadSBarry Smith static struct _MatOps MatOps_Values = {MatSetValues_MPIAIJ, 3222cda55fadSBarry Smith MatGetRow_MPIAIJ, 3223cda55fadSBarry Smith MatRestoreRow_MPIAIJ, 3224cda55fadSBarry Smith MatMult_MPIAIJ, 322597304618SKris Buschelman /* 4*/ MatMultAdd_MPIAIJ, 32267c922b88SBarry Smith MatMultTranspose_MPIAIJ, 32277c922b88SBarry Smith MatMultTransposeAdd_MPIAIJ, 3228519f805aSKarl Rupp #if defined(PETSC_HAVE_PBGL) 3229103bf8bdSMatthew Knepley MatSolve_MPIAIJ, 3230103bf8bdSMatthew Knepley #else 3231cda55fadSBarry Smith 0, 3232103bf8bdSMatthew Knepley #endif 3233cda55fadSBarry Smith 0, 3234cda55fadSBarry Smith 0, 323597304618SKris Buschelman /*10*/ 0, 3236cda55fadSBarry Smith 0, 3237cda55fadSBarry Smith 0, 323841f059aeSBarry Smith MatSOR_MPIAIJ, 3239b7c46309SBarry Smith MatTranspose_MPIAIJ, 324097304618SKris Buschelman /*15*/ MatGetInfo_MPIAIJ, 3241cda55fadSBarry Smith MatEqual_MPIAIJ, 3242cda55fadSBarry Smith MatGetDiagonal_MPIAIJ, 3243cda55fadSBarry Smith MatDiagonalScale_MPIAIJ, 3244cda55fadSBarry Smith MatNorm_MPIAIJ, 324597304618SKris Buschelman /*20*/ MatAssemblyBegin_MPIAIJ, 3246cda55fadSBarry Smith MatAssemblyEnd_MPIAIJ, 3247cda55fadSBarry Smith MatSetOption_MPIAIJ, 3248cda55fadSBarry Smith MatZeroEntries_MPIAIJ, 3249d519adbfSMatthew Knepley /*24*/ MatZeroRows_MPIAIJ, 3250cda55fadSBarry Smith 0, 3251519f805aSKarl Rupp #if defined(PETSC_HAVE_PBGL) 3252719d5645SBarry Smith 0, 3253103bf8bdSMatthew Knepley #else 3254cda55fadSBarry Smith 0, 3255103bf8bdSMatthew Knepley #endif 3256cda55fadSBarry Smith 0, 3257cda55fadSBarry Smith 0, 
32584994cf47SJed Brown /*29*/ MatSetUp_MPIAIJ, 3259519f805aSKarl Rupp #if defined(PETSC_HAVE_PBGL) 3260719d5645SBarry Smith 0, 3261103bf8bdSMatthew Knepley #else 3262cda55fadSBarry Smith 0, 3263103bf8bdSMatthew Knepley #endif 3264cda55fadSBarry Smith 0, 3265cda55fadSBarry Smith 0, 3266cda55fadSBarry Smith 0, 3267d519adbfSMatthew Knepley /*34*/ MatDuplicate_MPIAIJ, 3268cda55fadSBarry Smith 0, 3269cda55fadSBarry Smith 0, 3270cda55fadSBarry Smith 0, 3271cda55fadSBarry Smith 0, 3272d519adbfSMatthew Knepley /*39*/ MatAXPY_MPIAIJ, 3273cda55fadSBarry Smith MatGetSubMatrices_MPIAIJ, 3274cda55fadSBarry Smith MatIncreaseOverlap_MPIAIJ, 3275cda55fadSBarry Smith MatGetValues_MPIAIJ, 3276cb5b572fSBarry Smith MatCopy_MPIAIJ, 3277d519adbfSMatthew Knepley /*44*/ MatGetRowMax_MPIAIJ, 3278cda55fadSBarry Smith MatScale_MPIAIJ, 3279cda55fadSBarry Smith 0, 3280cda55fadSBarry Smith 0, 3281564f14d6SBarry Smith MatZeroRowsColumns_MPIAIJ, 328273a71a0fSBarry Smith /*49*/ MatSetRandom_MPIAIJ, 3283cda55fadSBarry Smith 0, 3284cda55fadSBarry Smith 0, 3285cda55fadSBarry Smith 0, 3286cda55fadSBarry Smith 0, 3287d519adbfSMatthew Knepley /*54*/ MatFDColoringCreate_MPIAIJ, 3288cda55fadSBarry Smith 0, 3289cda55fadSBarry Smith MatSetUnfactored_MPIAIJ, 329072e6a0cfSJed Brown MatPermute_MPIAIJ, 3291cda55fadSBarry Smith 0, 3292d519adbfSMatthew Knepley /*59*/ MatGetSubMatrix_MPIAIJ, 3293e03a110bSBarry Smith MatDestroy_MPIAIJ, 3294e03a110bSBarry Smith MatView_MPIAIJ, 3295357abbc8SBarry Smith 0, 3296f996eeb8SHong Zhang MatMatMatMult_MPIAIJ_MPIAIJ_MPIAIJ, 3297f996eeb8SHong Zhang /*64*/ MatMatMatMultSymbolic_MPIAIJ_MPIAIJ_MPIAIJ, 3298f996eeb8SHong Zhang MatMatMatMultNumeric_MPIAIJ_MPIAIJ_MPIAIJ, 3299a2243be0SBarry Smith 0, 3300a2243be0SBarry Smith 0, 3301a2243be0SBarry Smith 0, 3302d519adbfSMatthew Knepley /*69*/ MatGetRowMaxAbs_MPIAIJ, 3303c87e5d42SMatthew Knepley MatGetRowMinAbs_MPIAIJ, 3304a2243be0SBarry Smith 0, 3305a2243be0SBarry Smith MatSetColoring_MPIAIJ, 3306dcf5cc72SBarry Smith 0, 330797304618SKris 
Buschelman MatSetValuesAdifor_MPIAIJ, 33083acb8795SBarry Smith /*75*/ MatFDColoringApply_AIJ, 330997304618SKris Buschelman 0, 331097304618SKris Buschelman 0, 331197304618SKris Buschelman 0, 3312f1f41ecbSJed Brown MatFindZeroDiagonals_MPIAIJ, 331397304618SKris Buschelman /*80*/ 0, 331497304618SKris Buschelman 0, 331597304618SKris Buschelman 0, 33165bba2384SShri Abhyankar /*83*/ MatLoad_MPIAIJ, 33176284ec50SHong Zhang 0, 33186284ec50SHong Zhang 0, 33196284ec50SHong Zhang 0, 33206284ec50SHong Zhang 0, 3321865e5f61SKris Buschelman 0, 3322d519adbfSMatthew Knepley /*89*/ MatMatMult_MPIAIJ_MPIAIJ, 332326be0446SHong Zhang MatMatMultSymbolic_MPIAIJ_MPIAIJ, 332426be0446SHong Zhang MatMatMultNumeric_MPIAIJ_MPIAIJ, 3325cf3ca8ceSHong Zhang MatPtAP_MPIAIJ_MPIAIJ, 3326cf3ca8ceSHong Zhang MatPtAPSymbolic_MPIAIJ_MPIAIJ, 3327cf3ca8ceSHong Zhang /*94*/ MatPtAPNumeric_MPIAIJ_MPIAIJ, 33287a7894deSKris Buschelman 0, 33297a7894deSKris Buschelman 0, 33307a7894deSKris Buschelman 0, 33317a7894deSKris Buschelman 0, 3332d519adbfSMatthew Knepley /*99*/ 0, 3333d2b207f1SPeter Brune 0, 3334d2b207f1SPeter Brune 0, 33352fd7e33dSBarry Smith MatConjugate_MPIAIJ, 33362fd7e33dSBarry Smith 0, 3337d519adbfSMatthew Knepley /*104*/MatSetValuesRow_MPIAIJ, 333899cafbc1SBarry Smith MatRealPart_MPIAIJ, 333969db28dcSHong Zhang MatImaginaryPart_MPIAIJ, 334069db28dcSHong Zhang 0, 334169db28dcSHong Zhang 0, 3342d519adbfSMatthew Knepley /*109*/0, 334303bc72f1SMatthew Knepley MatGetRedundantMatrix_MPIAIJ, 33445494a064SHong Zhang MatGetRowMin_MPIAIJ, 33455494a064SHong Zhang 0, 33465494a064SHong Zhang 0, 3347d1adec66SJed Brown /*114*/MatGetSeqNonzeroStructure_MPIAIJ, 3348bd0c2dcbSBarry Smith 0, 3349bd0c2dcbSBarry Smith 0, 3350bd0c2dcbSBarry Smith 0, 3351bd0c2dcbSBarry Smith 0, 33528fb81238SShri Abhyankar /*119*/0, 33538fb81238SShri Abhyankar 0, 33548fb81238SShri Abhyankar 0, 3355d6037b41SHong Zhang 0, 3356b9614d88SDmitry Karpeev MatGetMultiProcBlock_MPIAIJ, 3357f2c98031SJed Brown /*124*/MatFindNonzeroRows_MPIAIJ, 
33580716a85fSBarry Smith MatGetColumnNorms_MPIAIJ, 3359bbead8a2SBarry Smith MatInvertBlockDiagonal_MPIAIJ, 3360b9614d88SDmitry Karpeev 0, 336137868618SMatthew G Knepley MatGetSubMatricesParallel_MPIAIJ, 3362187b3c17SHong Zhang /*129*/0, 3363187b3c17SHong Zhang MatTransposeMatMult_MPIAIJ_MPIAIJ, 3364187b3c17SHong Zhang MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ, 3365187b3c17SHong Zhang MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ, 3366187b3c17SHong Zhang 0, 3367187b3c17SHong Zhang /*134*/0, 3368187b3c17SHong Zhang 0, 3369187b3c17SHong Zhang 0, 3370187b3c17SHong Zhang 0, 33713964eb88SJed Brown 0, 33723964eb88SJed Brown /*139*/0, 3373187b3c17SHong Zhang 0 3374bd0c2dcbSBarry Smith }; 337536ce4990SBarry Smith 33762e8a6d31SBarry Smith /* ----------------------------------------------------------------------------------------*/ 33772e8a6d31SBarry Smith 33784a2ae208SSatish Balay #undef __FUNCT__ 33794a2ae208SSatish Balay #define __FUNCT__ "MatStoreValues_MPIAIJ" 33807087cfbeSBarry Smith PetscErrorCode MatStoreValues_MPIAIJ(Mat mat) 33812e8a6d31SBarry Smith { 33822e8a6d31SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 3383dfbe8321SBarry Smith PetscErrorCode ierr; 33842e8a6d31SBarry Smith 33852e8a6d31SBarry Smith PetscFunctionBegin; 33862e8a6d31SBarry Smith ierr = MatStoreValues(aij->A);CHKERRQ(ierr); 33872e8a6d31SBarry Smith ierr = MatStoreValues(aij->B);CHKERRQ(ierr); 33882e8a6d31SBarry Smith PetscFunctionReturn(0); 33892e8a6d31SBarry Smith } 33902e8a6d31SBarry Smith 33914a2ae208SSatish Balay #undef __FUNCT__ 33924a2ae208SSatish Balay #define __FUNCT__ "MatRetrieveValues_MPIAIJ" 33937087cfbeSBarry Smith PetscErrorCode MatRetrieveValues_MPIAIJ(Mat mat) 33942e8a6d31SBarry Smith { 33952e8a6d31SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 3396dfbe8321SBarry Smith PetscErrorCode ierr; 33972e8a6d31SBarry Smith 33982e8a6d31SBarry Smith PetscFunctionBegin; 33992e8a6d31SBarry Smith ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr); 34002e8a6d31SBarry Smith ierr = 
MatRetrieveValues(aij->B);CHKERRQ(ierr); 34012e8a6d31SBarry Smith PetscFunctionReturn(0); 34022e8a6d31SBarry Smith } 34038a729477SBarry Smith 34044a2ae208SSatish Balay #undef __FUNCT__ 3405a23d5eceSKris Buschelman #define __FUNCT__ "MatMPIAIJSetPreallocation_MPIAIJ" 34067087cfbeSBarry Smith PetscErrorCode MatMPIAIJSetPreallocation_MPIAIJ(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 3407a23d5eceSKris Buschelman { 3408a23d5eceSKris Buschelman Mat_MPIAIJ *b; 3409dfbe8321SBarry Smith PetscErrorCode ierr; 3410a23d5eceSKris Buschelman 3411a23d5eceSKris Buschelman PetscFunctionBegin; 341226283091SBarry Smith ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr); 341326283091SBarry Smith ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr); 3414a23d5eceSKris Buschelman b = (Mat_MPIAIJ*)B->data; 3415899cda47SBarry Smith 3416526dfc15SBarry Smith if (!B->preallocated) { 3417899cda47SBarry Smith /* Explicitly create 2 MATSEQAIJ matrices. */ 3418899cda47SBarry Smith ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr); 3419d0f46423SBarry Smith ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr); 3420f9e9af59SJed Brown ierr = MatSetBlockSizes(b->A,B->rmap->bs,B->cmap->bs);CHKERRQ(ierr); 3421899cda47SBarry Smith ierr = MatSetType(b->A,MATSEQAIJ);CHKERRQ(ierr); 3422899cda47SBarry Smith ierr = PetscLogObjectParent(B,b->A);CHKERRQ(ierr); 3423899cda47SBarry Smith ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr); 3424d0f46423SBarry Smith ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr); 3425f9e9af59SJed Brown ierr = MatSetBlockSizes(b->B,B->rmap->bs,B->cmap->bs);CHKERRQ(ierr); 3426899cda47SBarry Smith ierr = MatSetType(b->B,MATSEQAIJ);CHKERRQ(ierr); 3427899cda47SBarry Smith ierr = PetscLogObjectParent(B,b->B);CHKERRQ(ierr); 3428526dfc15SBarry Smith } 3429899cda47SBarry Smith 3430c60e587dSKris Buschelman ierr = MatSeqAIJSetPreallocation(b->A,d_nz,d_nnz);CHKERRQ(ierr); 3431c60e587dSKris 
Buschelman ierr = MatSeqAIJSetPreallocation(b->B,o_nz,o_nnz);CHKERRQ(ierr); 3432526dfc15SBarry Smith B->preallocated = PETSC_TRUE; 3433a23d5eceSKris Buschelman PetscFunctionReturn(0); 3434a23d5eceSKris Buschelman } 3435a23d5eceSKris Buschelman 34364a2ae208SSatish Balay #undef __FUNCT__ 34374a2ae208SSatish Balay #define __FUNCT__ "MatDuplicate_MPIAIJ" 3438dfbe8321SBarry Smith PetscErrorCode MatDuplicate_MPIAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat) 3439d6dfbf8fSBarry Smith { 3440d6dfbf8fSBarry Smith Mat mat; 3441416022c9SBarry Smith Mat_MPIAIJ *a,*oldmat = (Mat_MPIAIJ*)matin->data; 3442dfbe8321SBarry Smith PetscErrorCode ierr; 3443d6dfbf8fSBarry Smith 34443a40ed3dSBarry Smith PetscFunctionBegin; 3445416022c9SBarry Smith *newmat = 0; 3446ce94432eSBarry Smith ierr = MatCreate(PetscObjectComm((PetscObject)matin),&mat);CHKERRQ(ierr); 3447d0f46423SBarry Smith ierr = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr); 3448a2f3521dSMark F. Adams ierr = MatSetBlockSizes(mat,matin->rmap->bs,matin->cmap->bs);CHKERRQ(ierr); 34497adad957SLisandro Dalcin ierr = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr); 34501d5dac46SHong Zhang ierr = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr); 3451273d9f13SBarry Smith a = (Mat_MPIAIJ*)mat->data; 3452e1b6402fSHong Zhang 3453d5f3da31SBarry Smith mat->factortype = matin->factortype; 3454d0f46423SBarry Smith mat->rmap->bs = matin->rmap->bs; 3455a2f3521dSMark F. 
Adams mat->cmap->bs = matin->cmap->bs; 3456c456f294SBarry Smith mat->assembled = PETSC_TRUE; 3457e7641de0SSatish Balay mat->insertmode = NOT_SET_VALUES; 3458273d9f13SBarry Smith mat->preallocated = PETSC_TRUE; 3459d6dfbf8fSBarry Smith 346017699dbbSLois Curfman McInnes a->size = oldmat->size; 346117699dbbSLois Curfman McInnes a->rank = oldmat->rank; 3462e7641de0SSatish Balay a->donotstash = oldmat->donotstash; 3463e7641de0SSatish Balay a->roworiented = oldmat->roworiented; 3464e7641de0SSatish Balay a->rowindices = 0; 3465bcd2baecSBarry Smith a->rowvalues = 0; 3466bcd2baecSBarry Smith a->getrowactive = PETSC_FALSE; 3467d6dfbf8fSBarry Smith 34681e1e43feSBarry Smith ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr); 34691e1e43feSBarry Smith ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr); 3470899cda47SBarry Smith 34712ee70a88SLois Curfman McInnes if (oldmat->colmap) { 3472aa482453SBarry Smith #if defined(PETSC_USE_CTABLE) 34730f5bd95cSBarry Smith ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr); 3474b1fc9764SSatish Balay #else 3475d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N)*sizeof(PetscInt),&a->colmap);CHKERRQ(ierr); 3476d0f46423SBarry Smith ierr = PetscLogObjectMemory(mat,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr); 3477d0f46423SBarry Smith ierr = PetscMemcpy(a->colmap,oldmat->colmap,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr); 3478b1fc9764SSatish Balay #endif 3479416022c9SBarry Smith } else a->colmap = 0; 34803f41c07dSBarry Smith if (oldmat->garray) { 3481b1d57f15SBarry Smith PetscInt len; 3482d0f46423SBarry Smith len = oldmat->B->cmap->n; 3483b1d57f15SBarry Smith ierr = PetscMalloc((len+1)*sizeof(PetscInt),&a->garray);CHKERRQ(ierr); 348452e6d16bSBarry Smith ierr = PetscLogObjectMemory(mat,len*sizeof(PetscInt));CHKERRQ(ierr); 3485b1d57f15SBarry Smith if (len) { ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr); } 3486416022c9SBarry Smith } else a->garray = 0; 
3487d6dfbf8fSBarry Smith 3488416022c9SBarry Smith ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr); 348952e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->lvec);CHKERRQ(ierr); 3490a56f8943SBarry Smith ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr); 349152e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->Mvctx);CHKERRQ(ierr); 34922e8a6d31SBarry Smith ierr = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr); 349352e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->A);CHKERRQ(ierr); 34942e8a6d31SBarry Smith ierr = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr); 349552e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->B);CHKERRQ(ierr); 3496140e18c1SBarry Smith ierr = PetscFunctionListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr); 34978a729477SBarry Smith *newmat = mat; 34983a40ed3dSBarry Smith PetscFunctionReturn(0); 34998a729477SBarry Smith } 3500416022c9SBarry Smith 35011a4ee126SBarry Smith 35021a4ee126SBarry Smith 35034a2ae208SSatish Balay #undef __FUNCT__ 35045bba2384SShri Abhyankar #define __FUNCT__ "MatLoad_MPIAIJ" 3505112444f4SShri Abhyankar PetscErrorCode MatLoad_MPIAIJ(Mat newMat, PetscViewer viewer) 35068fb81238SShri Abhyankar { 35078fb81238SShri Abhyankar PetscScalar *vals,*svals; 3508ce94432eSBarry Smith MPI_Comm comm; 35098fb81238SShri Abhyankar PetscErrorCode ierr; 35101a4ee126SBarry Smith PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag; 35118fb81238SShri Abhyankar PetscInt i,nz,j,rstart,rend,mmax,maxnz = 0,grows,gcols; 35128fb81238SShri Abhyankar PetscInt header[4],*rowlengths = 0,M,N,m,*cols; 35130298fd71SBarry Smith PetscInt *ourlens = NULL,*procsnz = NULL,*offlens = NULL,jj,*mycols,*smycols; 35148fb81238SShri Abhyankar PetscInt cend,cstart,n,*rowners,sizesset=1; 35158fb81238SShri Abhyankar int fd; 351608ea439dSMark F. 
Adams PetscInt bs = 1; 35178fb81238SShri Abhyankar 35188fb81238SShri Abhyankar PetscFunctionBegin; 3519ce94432eSBarry Smith ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr); 35208fb81238SShri Abhyankar ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 35218fb81238SShri Abhyankar ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 35228fb81238SShri Abhyankar if (!rank) { 35238fb81238SShri Abhyankar ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr); 35248fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,(char*)header,4,PETSC_INT);CHKERRQ(ierr); 35258fb81238SShri Abhyankar if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"not matrix object"); 35268fb81238SShri Abhyankar } 35278fb81238SShri Abhyankar 35280298fd71SBarry Smith ierr = PetscOptionsBegin(comm,NULL,"Options for loading SEQAIJ matrix","Mat");CHKERRQ(ierr); 35290298fd71SBarry Smith ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr); 353008ea439dSMark F. Adams ierr = PetscOptionsEnd();CHKERRQ(ierr); 353108ea439dSMark F. 
Adams 35328fb81238SShri Abhyankar if (newMat->rmap->n < 0 && newMat->rmap->N < 0 && newMat->cmap->n < 0 && newMat->cmap->N < 0) sizesset = 0; 35338fb81238SShri Abhyankar 35348fb81238SShri Abhyankar ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr); 35358fb81238SShri Abhyankar M = header[1]; N = header[2]; 35368fb81238SShri Abhyankar /* If global rows/cols are set to PETSC_DECIDE, set it to the sizes given in the file */ 35378fb81238SShri Abhyankar if (sizesset && newMat->rmap->N < 0) newMat->rmap->N = M; 35388fb81238SShri Abhyankar if (sizesset && newMat->cmap->N < 0) newMat->cmap->N = N; 35398fb81238SShri Abhyankar 35408fb81238SShri Abhyankar /* If global sizes are set, check if they are consistent with that given in the file */ 35418fb81238SShri Abhyankar if (sizesset) { 35428fb81238SShri Abhyankar ierr = MatGetSize(newMat,&grows,&gcols);CHKERRQ(ierr); 35438fb81238SShri Abhyankar } 3544abd38a8fSBarry Smith if (sizesset && newMat->rmap->N != grows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of rows:Matrix in file has (%d) and input matrix has (%d)",M,grows); 3545abd38a8fSBarry Smith if (sizesset && newMat->cmap->N != gcols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of cols:Matrix in file has (%d) and input matrix has (%d)",N,gcols); 35468fb81238SShri Abhyankar 354708ea439dSMark F. Adams /* determine ownership of all (block) rows */ 354808ea439dSMark F. Adams if (M%bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of rows (%d) and block size (%d)",M,bs); 354908ea439dSMark F. 
Adams if (newMat->rmap->n < 0) m = bs*((M/bs)/size + (((M/bs) % size) > rank)); /* PETSC_DECIDE */ 35504683f7a4SShri Abhyankar else m = newMat->rmap->n; /* Set by user */ 35518fb81238SShri Abhyankar 35528fb81238SShri Abhyankar ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr); 35538fb81238SShri Abhyankar ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr); 35548fb81238SShri Abhyankar 35558fb81238SShri Abhyankar /* First process needs enough room for process with most rows */ 35568fb81238SShri Abhyankar if (!rank) { 35578fb81238SShri Abhyankar mmax = rowners[1]; 35585c4ea359SMatthew G Knepley for (i=2; i<=size; i++) { 35598fb81238SShri Abhyankar mmax = PetscMax(mmax, rowners[i]); 35608fb81238SShri Abhyankar } 35613964eb88SJed Brown } else mmax = -1; /* unused, but compilers complain */ 35628fb81238SShri Abhyankar 35638fb81238SShri Abhyankar rowners[0] = 0; 35648fb81238SShri Abhyankar for (i=2; i<=size; i++) { 35658fb81238SShri Abhyankar rowners[i] += rowners[i-1]; 35668fb81238SShri Abhyankar } 35678fb81238SShri Abhyankar rstart = rowners[rank]; 35688fb81238SShri Abhyankar rend = rowners[rank+1]; 35698fb81238SShri Abhyankar 35708fb81238SShri Abhyankar /* distribute row lengths to all processors */ 35715aa9a6beSBarry Smith ierr = PetscMalloc2(m,PetscInt,&ourlens,m,PetscInt,&offlens);CHKERRQ(ierr); 35728fb81238SShri Abhyankar if (!rank) { 35738fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,ourlens,m,PETSC_INT);CHKERRQ(ierr); 35745c4ea359SMatthew G Knepley ierr = PetscMalloc(mmax*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr); 35758fb81238SShri Abhyankar ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr); 35768fb81238SShri Abhyankar ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr); 35778fb81238SShri Abhyankar for (j=0; j<m; j++) { 35788fb81238SShri Abhyankar procsnz[0] += ourlens[j]; 35798fb81238SShri Abhyankar } 35808fb81238SShri Abhyankar for (i=1; i<size; i++) { 35818fb81238SShri Abhyankar ierr = 
PetscBinaryRead(fd,rowlengths,rowners[i+1]-rowners[i],PETSC_INT);CHKERRQ(ierr); 35828fb81238SShri Abhyankar /* calculate the number of nonzeros on each processor */ 35838fb81238SShri Abhyankar for (j=0; j<rowners[i+1]-rowners[i]; j++) { 35848fb81238SShri Abhyankar procsnz[i] += rowlengths[j]; 35858fb81238SShri Abhyankar } 3586a25532f0SBarry Smith ierr = MPIULong_Send(rowlengths,rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr); 35878fb81238SShri Abhyankar } 35888fb81238SShri Abhyankar ierr = PetscFree(rowlengths);CHKERRQ(ierr); 35898fb81238SShri Abhyankar } else { 3590a25532f0SBarry Smith ierr = MPIULong_Recv(ourlens,m,MPIU_INT,0,tag,comm);CHKERRQ(ierr); 35918fb81238SShri Abhyankar } 35928fb81238SShri Abhyankar 35938fb81238SShri Abhyankar if (!rank) { 35948fb81238SShri Abhyankar /* determine max buffer needed and allocate it */ 35958fb81238SShri Abhyankar maxnz = 0; 35968fb81238SShri Abhyankar for (i=0; i<size; i++) { 35978fb81238SShri Abhyankar maxnz = PetscMax(maxnz,procsnz[i]); 35988fb81238SShri Abhyankar } 35998fb81238SShri Abhyankar ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr); 36008fb81238SShri Abhyankar 36018fb81238SShri Abhyankar /* read in my part of the matrix column indices */ 36028fb81238SShri Abhyankar nz = procsnz[0]; 36038fb81238SShri Abhyankar ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr); 36048fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr); 36058fb81238SShri Abhyankar 36068fb81238SShri Abhyankar /* read in every one elses and ship off */ 36078fb81238SShri Abhyankar for (i=1; i<size; i++) { 36088fb81238SShri Abhyankar nz = procsnz[i]; 36098fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr); 3610a25532f0SBarry Smith ierr = MPIULong_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 36118fb81238SShri Abhyankar } 36128fb81238SShri Abhyankar ierr = PetscFree(cols);CHKERRQ(ierr); 36138fb81238SShri Abhyankar } else { 36148fb81238SShri Abhyankar /* 
determine buffer space needed for message */ 36158fb81238SShri Abhyankar nz = 0; 36168fb81238SShri Abhyankar for (i=0; i<m; i++) { 36178fb81238SShri Abhyankar nz += ourlens[i]; 36188fb81238SShri Abhyankar } 36198fb81238SShri Abhyankar ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr); 36208fb81238SShri Abhyankar 36218fb81238SShri Abhyankar /* receive message of column indices*/ 3622a25532f0SBarry Smith ierr = MPIULong_Recv(mycols,nz,MPIU_INT,0,tag,comm);CHKERRQ(ierr); 36238fb81238SShri Abhyankar } 36248fb81238SShri Abhyankar 36258fb81238SShri Abhyankar /* determine column ownership if matrix is not square */ 36268fb81238SShri Abhyankar if (N != M) { 36278fb81238SShri Abhyankar if (newMat->cmap->n < 0) n = N/size + ((N % size) > rank); 36288fb81238SShri Abhyankar else n = newMat->cmap->n; 36298fb81238SShri Abhyankar ierr = MPI_Scan(&n,&cend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 36308fb81238SShri Abhyankar cstart = cend - n; 36318fb81238SShri Abhyankar } else { 36328fb81238SShri Abhyankar cstart = rstart; 36338fb81238SShri Abhyankar cend = rend; 36348fb81238SShri Abhyankar n = cend - cstart; 36358fb81238SShri Abhyankar } 36368fb81238SShri Abhyankar 36378fb81238SShri Abhyankar /* loop over local rows, determining number of off diagonal entries */ 36388fb81238SShri Abhyankar ierr = PetscMemzero(offlens,m*sizeof(PetscInt));CHKERRQ(ierr); 36398fb81238SShri Abhyankar jj = 0; 36408fb81238SShri Abhyankar for (i=0; i<m; i++) { 36418fb81238SShri Abhyankar for (j=0; j<ourlens[i]; j++) { 36428fb81238SShri Abhyankar if (mycols[jj] < cstart || mycols[jj] >= cend) offlens[i]++; 36438fb81238SShri Abhyankar jj++; 36448fb81238SShri Abhyankar } 36458fb81238SShri Abhyankar } 36468fb81238SShri Abhyankar 36478fb81238SShri Abhyankar for (i=0; i<m; i++) { 36488fb81238SShri Abhyankar ourlens[i] -= offlens[i]; 36498fb81238SShri Abhyankar } 36508fb81238SShri Abhyankar if (!sizesset) { 36518fb81238SShri Abhyankar ierr = MatSetSizes(newMat,m,n,M,N);CHKERRQ(ierr); 36528fb81238SShri 
Abhyankar } 365308ea439dSMark F. Adams 365408ea439dSMark F. Adams if (bs > 1) {ierr = MatSetBlockSize(newMat,bs);CHKERRQ(ierr);} 365508ea439dSMark F. Adams 36568fb81238SShri Abhyankar ierr = MatMPIAIJSetPreallocation(newMat,0,ourlens,0,offlens);CHKERRQ(ierr); 36578fb81238SShri Abhyankar 36588fb81238SShri Abhyankar for (i=0; i<m; i++) { 36598fb81238SShri Abhyankar ourlens[i] += offlens[i]; 36608fb81238SShri Abhyankar } 36618fb81238SShri Abhyankar 36628fb81238SShri Abhyankar if (!rank) { 36638fb81238SShri Abhyankar ierr = PetscMalloc((maxnz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr); 36648fb81238SShri Abhyankar 36658fb81238SShri Abhyankar /* read in my part of the matrix numerical values */ 36668fb81238SShri Abhyankar nz = procsnz[0]; 36678fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr); 36688fb81238SShri Abhyankar 36698fb81238SShri Abhyankar /* insert into matrix */ 36708fb81238SShri Abhyankar jj = rstart; 36718fb81238SShri Abhyankar smycols = mycols; 36728fb81238SShri Abhyankar svals = vals; 36738fb81238SShri Abhyankar for (i=0; i<m; i++) { 36748fb81238SShri Abhyankar ierr = MatSetValues_MPIAIJ(newMat,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr); 36758fb81238SShri Abhyankar smycols += ourlens[i]; 36768fb81238SShri Abhyankar svals += ourlens[i]; 36778fb81238SShri Abhyankar jj++; 36788fb81238SShri Abhyankar } 36798fb81238SShri Abhyankar 36808fb81238SShri Abhyankar /* read in other processors and ship out */ 36818fb81238SShri Abhyankar for (i=1; i<size; i++) { 36828fb81238SShri Abhyankar nz = procsnz[i]; 36838fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr); 3684a25532f0SBarry Smith ierr = MPIULong_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newMat)->tag,comm);CHKERRQ(ierr); 36858fb81238SShri Abhyankar } 36868fb81238SShri Abhyankar ierr = PetscFree(procsnz);CHKERRQ(ierr); 36878fb81238SShri Abhyankar } else { 36888fb81238SShri Abhyankar /* receive numeric values */ 36898fb81238SShri 
Abhyankar ierr = PetscMalloc((nz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr); 36908fb81238SShri Abhyankar 36918fb81238SShri Abhyankar /* receive message of values*/ 3692a25532f0SBarry Smith ierr = MPIULong_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newMat)->tag,comm);CHKERRQ(ierr); 36938fb81238SShri Abhyankar 36948fb81238SShri Abhyankar /* insert into matrix */ 36958fb81238SShri Abhyankar jj = rstart; 36968fb81238SShri Abhyankar smycols = mycols; 36978fb81238SShri Abhyankar svals = vals; 36988fb81238SShri Abhyankar for (i=0; i<m; i++) { 36998fb81238SShri Abhyankar ierr = MatSetValues_MPIAIJ(newMat,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr); 37008fb81238SShri Abhyankar smycols += ourlens[i]; 37018fb81238SShri Abhyankar svals += ourlens[i]; 37028fb81238SShri Abhyankar jj++; 37038fb81238SShri Abhyankar } 37048fb81238SShri Abhyankar } 37058fb81238SShri Abhyankar ierr = PetscFree2(ourlens,offlens);CHKERRQ(ierr); 37068fb81238SShri Abhyankar ierr = PetscFree(vals);CHKERRQ(ierr); 37078fb81238SShri Abhyankar ierr = PetscFree(mycols);CHKERRQ(ierr); 37088fb81238SShri Abhyankar ierr = PetscFree(rowners);CHKERRQ(ierr); 37098fb81238SShri Abhyankar ierr = MatAssemblyBegin(newMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 37108fb81238SShri Abhyankar ierr = MatAssemblyEnd(newMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 37118fb81238SShri Abhyankar PetscFunctionReturn(0); 37128fb81238SShri Abhyankar } 37138fb81238SShri Abhyankar 37148fb81238SShri Abhyankar #undef __FUNCT__ 37154a2ae208SSatish Balay #define __FUNCT__ "MatGetSubMatrix_MPIAIJ" 37164aa3045dSJed Brown PetscErrorCode MatGetSubMatrix_MPIAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat) 37174aa3045dSJed Brown { 37184aa3045dSJed Brown PetscErrorCode ierr; 37194aa3045dSJed Brown IS iscol_local; 37204aa3045dSJed Brown PetscInt csize; 37214aa3045dSJed Brown 37224aa3045dSJed Brown PetscFunctionBegin; 37234aa3045dSJed Brown ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr); 3724b79d0421SJed Brown if (call == 
MAT_REUSE_MATRIX) { 3725b79d0421SJed Brown ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr); 3726e32f2f54SBarry Smith if (!iscol_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse"); 3727b79d0421SJed Brown } else { 3728c5bfad50SMark F. Adams PetscInt cbs; 3729c5bfad50SMark F. Adams ierr = ISGetBlockSize(iscol,&cbs);CHKERRQ(ierr); 37304aa3045dSJed Brown ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr); 3731c5bfad50SMark F. Adams ierr = ISSetBlockSize(iscol_local,cbs);CHKERRQ(ierr); 3732b79d0421SJed Brown } 37334aa3045dSJed Brown ierr = MatGetSubMatrix_MPIAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr); 3734b79d0421SJed Brown if (call == MAT_INITIAL_MATRIX) { 3735b79d0421SJed Brown ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr); 37366bf464f9SBarry Smith ierr = ISDestroy(&iscol_local);CHKERRQ(ierr); 3737b79d0421SJed Brown } 37384aa3045dSJed Brown PetscFunctionReturn(0); 37394aa3045dSJed Brown } 37404aa3045dSJed Brown 374129dcf524SDmitry Karpeev extern PetscErrorCode MatGetSubMatrices_MPIAIJ_Local(Mat,PetscInt,const IS[],const IS[],MatReuse,PetscBool*,Mat*); 37424aa3045dSJed Brown #undef __FUNCT__ 37434aa3045dSJed Brown #define __FUNCT__ "MatGetSubMatrix_MPIAIJ_Private" 3744a0ff6018SBarry Smith /* 374529da9460SBarry Smith Not great since it makes two copies of the submatrix, first an SeqAIJ 374629da9460SBarry Smith in local and then by concatenating the local matrices the end result. 374729da9460SBarry Smith Writing it directly would be much like MatGetSubMatrices_MPIAIJ() 37484aa3045dSJed Brown 37494aa3045dSJed Brown Note: This requires a sequential iscol with all indices. 
3750a0ff6018SBarry Smith */ 37514aa3045dSJed Brown PetscErrorCode MatGetSubMatrix_MPIAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat) 3752a0ff6018SBarry Smith { 3753dfbe8321SBarry Smith PetscErrorCode ierr; 375432dcc486SBarry Smith PetscMPIInt rank,size; 3755a2f3521dSMark F. Adams PetscInt i,m,n,rstart,row,rend,nz,*cwork,j,bs,cbs; 375629dcf524SDmitry Karpeev PetscInt *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal,ncol; 375729dcf524SDmitry Karpeev PetscBool allcolumns, colflag; 375829dcf524SDmitry Karpeev Mat M,Mreuse; 3759a77337e4SBarry Smith MatScalar *vwork,*aa; 3760ce94432eSBarry Smith MPI_Comm comm; 376100e6dbe6SBarry Smith Mat_SeqAIJ *aij; 37627e2c5f70SBarry Smith 3763a0ff6018SBarry Smith PetscFunctionBegin; 3764ce94432eSBarry Smith ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr); 37651dab6e02SBarry Smith ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 37661dab6e02SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 376700e6dbe6SBarry Smith 376829dcf524SDmitry Karpeev ierr = ISIdentity(iscol,&colflag);CHKERRQ(ierr); 376929dcf524SDmitry Karpeev ierr = ISGetLocalSize(iscol,&ncol);CHKERRQ(ierr); 377029dcf524SDmitry Karpeev if (colflag && ncol == mat->cmap->N) { 377129dcf524SDmitry Karpeev allcolumns = PETSC_TRUE; 377229dcf524SDmitry Karpeev } else { 377329dcf524SDmitry Karpeev allcolumns = PETSC_FALSE; 377429dcf524SDmitry Karpeev } 3775fee21e36SBarry Smith if (call == MAT_REUSE_MATRIX) { 3776fee21e36SBarry Smith ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject*)&Mreuse);CHKERRQ(ierr); 3777e32f2f54SBarry Smith if (!Mreuse) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse"); 377829dcf524SDmitry Karpeev ierr = MatGetSubMatrices_MPIAIJ_Local(mat,1,&isrow,&iscol,MAT_REUSE_MATRIX,&allcolumns,&Mreuse);CHKERRQ(ierr); 3779fee21e36SBarry Smith } else { 378029dcf524SDmitry Karpeev ierr = 
MatGetSubMatrices_MPIAIJ_Local(mat,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&allcolumns,&Mreuse);CHKERRQ(ierr); 3781fee21e36SBarry Smith } 3782a0ff6018SBarry Smith 3783a0ff6018SBarry Smith /* 3784a0ff6018SBarry Smith m - number of local rows 3785a0ff6018SBarry Smith n - number of columns (same on all processors) 3786a0ff6018SBarry Smith rstart - first row in new global matrix generated 3787a0ff6018SBarry Smith */ 3788fee21e36SBarry Smith ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr); 3789a2f3521dSMark F. Adams ierr = MatGetBlockSizes(Mreuse,&bs,&cbs);CHKERRQ(ierr); 3790a0ff6018SBarry Smith if (call == MAT_INITIAL_MATRIX) { 3791fee21e36SBarry Smith aij = (Mat_SeqAIJ*)(Mreuse)->data; 379200e6dbe6SBarry Smith ii = aij->i; 379300e6dbe6SBarry Smith jj = aij->j; 379400e6dbe6SBarry Smith 3795a0ff6018SBarry Smith /* 379600e6dbe6SBarry Smith Determine the number of non-zeros in the diagonal and off-diagonal 379700e6dbe6SBarry Smith portions of the matrix in order to do correct preallocation 3798a0ff6018SBarry Smith */ 379900e6dbe6SBarry Smith 380000e6dbe6SBarry Smith /* first get start and end of "diagonal" columns */ 38016a6a5d1dSBarry Smith if (csize == PETSC_DECIDE) { 3802ab50ec6bSBarry Smith ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr); 3803ab50ec6bSBarry Smith if (mglobal == n) { /* square matrix */ 3804e2c4fddaSBarry Smith nlocal = m; 38056a6a5d1dSBarry Smith } else { 3806ab50ec6bSBarry Smith nlocal = n/size + ((n % size) > rank); 3807ab50ec6bSBarry Smith } 3808ab50ec6bSBarry Smith } else { 38096a6a5d1dSBarry Smith nlocal = csize; 38106a6a5d1dSBarry Smith } 3811b1d57f15SBarry Smith ierr = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 381200e6dbe6SBarry Smith rstart = rend - nlocal; 381365e19b50SBarry Smith if (rank == size - 1 && rend != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n); 381400e6dbe6SBarry Smith 381500e6dbe6SBarry Smith /* next, compute all the lengths */ 
3816b1d57f15SBarry Smith ierr = PetscMalloc((2*m+1)*sizeof(PetscInt),&dlens);CHKERRQ(ierr); 381700e6dbe6SBarry Smith olens = dlens + m; 381800e6dbe6SBarry Smith for (i=0; i<m; i++) { 381900e6dbe6SBarry Smith jend = ii[i+1] - ii[i]; 382000e6dbe6SBarry Smith olen = 0; 382100e6dbe6SBarry Smith dlen = 0; 382200e6dbe6SBarry Smith for (j=0; j<jend; j++) { 382300e6dbe6SBarry Smith if (*jj < rstart || *jj >= rend) olen++; 382400e6dbe6SBarry Smith else dlen++; 382500e6dbe6SBarry Smith jj++; 382600e6dbe6SBarry Smith } 382700e6dbe6SBarry Smith olens[i] = olen; 382800e6dbe6SBarry Smith dlens[i] = dlen; 382900e6dbe6SBarry Smith } 3830f69a0ea3SMatthew Knepley ierr = MatCreate(comm,&M);CHKERRQ(ierr); 3831f69a0ea3SMatthew Knepley ierr = MatSetSizes(M,m,nlocal,PETSC_DECIDE,n);CHKERRQ(ierr); 3832a2f3521dSMark F. Adams ierr = MatSetBlockSizes(M,bs,cbs);CHKERRQ(ierr); 38337adad957SLisandro Dalcin ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr); 3834e2d9671bSKris Buschelman ierr = MatMPIAIJSetPreallocation(M,0,dlens,0,olens);CHKERRQ(ierr); 3835606d414cSSatish Balay ierr = PetscFree(dlens);CHKERRQ(ierr); 3836a0ff6018SBarry Smith } else { 3837b1d57f15SBarry Smith PetscInt ml,nl; 3838a0ff6018SBarry Smith 3839a0ff6018SBarry Smith M = *newmat; 3840a0ff6018SBarry Smith ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr); 3841e32f2f54SBarry Smith if (ml != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request"); 3842a0ff6018SBarry Smith ierr = MatZeroEntries(M);CHKERRQ(ierr); 3843c48de900SBarry Smith /* 3844c48de900SBarry Smith The next two lines are needed so we may call MatSetValues_MPIAIJ() below directly, 3845c48de900SBarry Smith rather than the slower MatSetValues(). 
3846c48de900SBarry Smith */ 3847c48de900SBarry Smith M->was_assembled = PETSC_TRUE; 3848c48de900SBarry Smith M->assembled = PETSC_FALSE; 3849a0ff6018SBarry Smith } 3850a0ff6018SBarry Smith ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr); 3851fee21e36SBarry Smith aij = (Mat_SeqAIJ*)(Mreuse)->data; 385200e6dbe6SBarry Smith ii = aij->i; 385300e6dbe6SBarry Smith jj = aij->j; 385400e6dbe6SBarry Smith aa = aij->a; 3855a0ff6018SBarry Smith for (i=0; i<m; i++) { 3856a0ff6018SBarry Smith row = rstart + i; 385700e6dbe6SBarry Smith nz = ii[i+1] - ii[i]; 385800e6dbe6SBarry Smith cwork = jj; jj += nz; 385900e6dbe6SBarry Smith vwork = aa; aa += nz; 38608c638d02SBarry Smith ierr = MatSetValues_MPIAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr); 3861a0ff6018SBarry Smith } 3862a0ff6018SBarry Smith 3863a0ff6018SBarry Smith ierr = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3864a0ff6018SBarry Smith ierr = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3865a0ff6018SBarry Smith *newmat = M; 3866fee21e36SBarry Smith 3867fee21e36SBarry Smith /* save submatrix used in processor for next request */ 3868fee21e36SBarry Smith if (call == MAT_INITIAL_MATRIX) { 3869fee21e36SBarry Smith ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr); 3870bf0cc555SLisandro Dalcin ierr = MatDestroy(&Mreuse);CHKERRQ(ierr); 3871fee21e36SBarry Smith } 3872a0ff6018SBarry Smith PetscFunctionReturn(0); 3873a0ff6018SBarry Smith } 3874273d9f13SBarry Smith 38754a2ae208SSatish Balay #undef __FUNCT__ 3876ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR_MPIAIJ" 38777087cfbeSBarry Smith PetscErrorCode MatMPIAIJSetPreallocationCSR_MPIAIJ(Mat B,const PetscInt Ii[],const PetscInt J[],const PetscScalar v[]) 3878ccd8e176SBarry Smith { 3879899cda47SBarry Smith PetscInt m,cstart, cend,j,nnz,i,d; 3880899cda47SBarry Smith PetscInt *d_nnz,*o_nnz,nnz_max = 0,rstart,ii; 3881ccd8e176SBarry Smith const PetscInt *JJ; 3882ccd8e176SBarry Smith 
PetscScalar *values; 3883ccd8e176SBarry Smith PetscErrorCode ierr; 3884ccd8e176SBarry Smith 3885ccd8e176SBarry Smith PetscFunctionBegin; 3886e32f2f54SBarry Smith if (Ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ii[0] must be 0 it is %D",Ii[0]); 3887899cda47SBarry Smith 388826283091SBarry Smith ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr); 388926283091SBarry Smith ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr); 3890d0f46423SBarry Smith m = B->rmap->n; 3891d0f46423SBarry Smith cstart = B->cmap->rstart; 3892d0f46423SBarry Smith cend = B->cmap->rend; 3893d0f46423SBarry Smith rstart = B->rmap->rstart; 3894899cda47SBarry Smith 38951d79065fSBarry Smith ierr = PetscMalloc2(m,PetscInt,&d_nnz,m,PetscInt,&o_nnz);CHKERRQ(ierr); 3896ccd8e176SBarry Smith 3897ecc77c7aSBarry Smith #if defined(PETSC_USE_DEBUGGING) 3898ecc77c7aSBarry Smith for (i=0; i<m; i++) { 3899ecc77c7aSBarry Smith nnz = Ii[i+1]- Ii[i]; 3900ecc77c7aSBarry Smith JJ = J + Ii[i]; 3901e32f2f54SBarry Smith if (nnz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative %D number of columns",i,nnz); 3902ecc77c7aSBarry Smith if (nnz && (JJ[0] < 0)) SETERRRQ1(PETSC_ERR_ARG_WRONGSTATE,"Row %D starts with negative column index",i,j); 3903d0f46423SBarry Smith if (nnz && (JJ[nnz-1] >= B->cmap->N) SETERRRQ3(PETSC_ERR_ARG_WRONGSTATE,"Row %D ends with too large a column index %D (max allowed %D)",i,JJ[nnz-1],B->cmap->N); 3904ecc77c7aSBarry Smith } 3905ecc77c7aSBarry Smith #endif 3906ecc77c7aSBarry Smith 3907ccd8e176SBarry Smith for (i=0; i<m; i++) { 3908b7940d39SSatish Balay nnz = Ii[i+1]- Ii[i]; 3909b7940d39SSatish Balay JJ = J + Ii[i]; 3910ccd8e176SBarry Smith nnz_max = PetscMax(nnz_max,nnz); 3911ccd8e176SBarry Smith d = 0; 39120daa03b5SJed Brown for (j=0; j<nnz; j++) { 39130daa03b5SJed Brown if (cstart <= JJ[j] && JJ[j] < cend) d++; 3914ccd8e176SBarry Smith } 3915ccd8e176SBarry Smith d_nnz[i] = d; 3916ccd8e176SBarry Smith o_nnz[i] = nnz - d; 3917ccd8e176SBarry Smith } 
3918ccd8e176SBarry Smith ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,o_nnz);CHKERRQ(ierr); 39191d79065fSBarry Smith ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr); 3920ccd8e176SBarry Smith 3921ccd8e176SBarry Smith if (v) values = (PetscScalar*)v; 3922ccd8e176SBarry Smith else { 3923ccd8e176SBarry Smith ierr = PetscMalloc((nnz_max+1)*sizeof(PetscScalar),&values);CHKERRQ(ierr); 3924ccd8e176SBarry Smith ierr = PetscMemzero(values,nnz_max*sizeof(PetscScalar));CHKERRQ(ierr); 3925ccd8e176SBarry Smith } 3926ccd8e176SBarry Smith 3927ccd8e176SBarry Smith for (i=0; i<m; i++) { 3928ccd8e176SBarry Smith ii = i + rstart; 3929b7940d39SSatish Balay nnz = Ii[i+1]- Ii[i]; 3930b7940d39SSatish Balay ierr = MatSetValues_MPIAIJ(B,1,&ii,nnz,J+Ii[i],values+(v ? Ii[i] : 0),INSERT_VALUES);CHKERRQ(ierr); 3931ccd8e176SBarry Smith } 3932ccd8e176SBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3933ccd8e176SBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3934ccd8e176SBarry Smith 3935ccd8e176SBarry Smith if (!v) { 3936ccd8e176SBarry Smith ierr = PetscFree(values);CHKERRQ(ierr); 3937ccd8e176SBarry Smith } 39387827cd58SJed Brown ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr); 3939ccd8e176SBarry Smith PetscFunctionReturn(0); 3940ccd8e176SBarry Smith } 3941ccd8e176SBarry Smith 3942ccd8e176SBarry Smith #undef __FUNCT__ 3943ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR" 39441eea217eSSatish Balay /*@ 3945ccd8e176SBarry Smith MatMPIAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in AIJ format 3946ccd8e176SBarry Smith (the default parallel PETSc format). 3947ccd8e176SBarry Smith 3948ccd8e176SBarry Smith Collective on MPI_Comm 3949ccd8e176SBarry Smith 3950ccd8e176SBarry Smith Input Parameters: 3951a1661176SMatthew Knepley + B - the matrix 3952ccd8e176SBarry Smith . i - the indices into j for the start of each local row (starts with zero) 39530daa03b5SJed Brown . 
j - the column indices for each local row (starts with zero) 3954ccd8e176SBarry Smith - v - optional values in the matrix 3955ccd8e176SBarry Smith 3956ccd8e176SBarry Smith Level: developer 3957ccd8e176SBarry Smith 395812251496SSatish Balay Notes: 395912251496SSatish Balay The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc; 396012251496SSatish Balay thus you CANNOT change the matrix entries by changing the values of a[] after you have 396112251496SSatish Balay called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays. 396212251496SSatish Balay 396312251496SSatish Balay The i and j indices are 0 based, and i indices are indices corresponding to the local j array. 396412251496SSatish Balay 396512251496SSatish Balay The format which is used for the sparse matrix input, is equivalent to a 396612251496SSatish Balay row-major ordering.. i.e for the following matrix, the input data expected is 396712251496SSatish Balay as shown: 396812251496SSatish Balay 396912251496SSatish Balay 1 0 0 397012251496SSatish Balay 2 0 3 P0 397112251496SSatish Balay ------- 397212251496SSatish Balay 4 5 6 P1 397312251496SSatish Balay 397412251496SSatish Balay Process0 [P0]: rows_owned=[0,1] 397512251496SSatish Balay i = {0,1,3} [size = nrow+1 = 2+1] 397612251496SSatish Balay j = {0,0,2} [size = nz = 6] 397712251496SSatish Balay v = {1,2,3} [size = nz = 6] 397812251496SSatish Balay 397912251496SSatish Balay Process1 [P1]: rows_owned=[2] 398012251496SSatish Balay i = {0,3} [size = nrow+1 = 1+1] 398112251496SSatish Balay j = {0,1,2} [size = nz = 6] 398212251496SSatish Balay v = {4,5,6} [size = nz = 6] 398312251496SSatish Balay 3984ccd8e176SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 3985ccd8e176SBarry Smith 398669b1f4b7SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatCreateAIJ(), MPIAIJ, 39878d7a6e47SBarry Smith MatCreateSeqAIJWithArrays(), 
MatCreateMPIAIJWithSplitArrays() 3988ccd8e176SBarry Smith @*/ 39897087cfbeSBarry Smith PetscErrorCode MatMPIAIJSetPreallocationCSR(Mat B,const PetscInt i[],const PetscInt j[], const PetscScalar v[]) 3990ccd8e176SBarry Smith { 39914ac538c5SBarry Smith PetscErrorCode ierr; 3992ccd8e176SBarry Smith 3993ccd8e176SBarry Smith PetscFunctionBegin; 39944ac538c5SBarry Smith ierr = PetscTryMethod(B,"MatMPIAIJSetPreallocationCSR_C",(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,i,j,v));CHKERRQ(ierr); 3995ccd8e176SBarry Smith PetscFunctionReturn(0); 3996ccd8e176SBarry Smith } 3997ccd8e176SBarry Smith 3998ccd8e176SBarry Smith #undef __FUNCT__ 39994a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJSetPreallocation" 4000273d9f13SBarry Smith /*@C 4001ccd8e176SBarry Smith MatMPIAIJSetPreallocation - Preallocates memory for a sparse parallel matrix in AIJ format 4002273d9f13SBarry Smith (the default parallel PETSc format). For good matrix assembly performance 4003273d9f13SBarry Smith the user should preallocate the matrix storage by setting the parameters 4004273d9f13SBarry Smith d_nz (or d_nnz) and o_nz (or o_nnz). By setting these parameters accurately, 4005273d9f13SBarry Smith performance can be increased by more than a factor of 50. 4006273d9f13SBarry Smith 4007273d9f13SBarry Smith Collective on MPI_Comm 4008273d9f13SBarry Smith 4009273d9f13SBarry Smith Input Parameters: 4010273d9f13SBarry Smith + A - the matrix 4011273d9f13SBarry Smith . d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix 4012273d9f13SBarry Smith (same value is used for all local rows) 4013273d9f13SBarry Smith . d_nnz - array containing the number of nonzeros in the various rows of the 4014273d9f13SBarry Smith DIAGONAL portion of the local submatrix (possibly different for each row) 40150298fd71SBarry Smith or NULL, if d_nz is used to specify the nonzero structure. 4016273d9f13SBarry Smith The size of this array is equal to the number of local rows, i.e 'm'. 
40173287b5eaSJed Brown For matrices that will be factored, you must leave room for (and set) 40183287b5eaSJed Brown the diagonal entry even if it is zero. 4019273d9f13SBarry Smith . o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local 4020273d9f13SBarry Smith submatrix (same value is used for all local rows). 4021273d9f13SBarry Smith - o_nnz - array containing the number of nonzeros in the various rows of the 4022273d9f13SBarry Smith OFF-DIAGONAL portion of the local submatrix (possibly different for 40230298fd71SBarry Smith each row) or NULL, if o_nz is used to specify the nonzero 4024273d9f13SBarry Smith structure. The size of this array is equal to the number 4025273d9f13SBarry Smith of local rows, i.e 'm'. 4026273d9f13SBarry Smith 402749a6f317SBarry Smith If the *_nnz parameter is given then the *_nz parameter is ignored 402849a6f317SBarry Smith 4029273d9f13SBarry Smith The AIJ format (also called the Yale sparse matrix format or 4030ccd8e176SBarry Smith compressed row storage (CSR)), is fully compatible with standard Fortran 77 40310598bfebSBarry Smith storage. The stored row and column indices begin with zero. 40320598bfebSBarry Smith See the <A href="../../docs/manual.pdf#nameddest=ch_mat">Mat chapter of the users manual</A> for details. 4033273d9f13SBarry Smith 4034273d9f13SBarry Smith The parallel matrix is partitioned such that the first m0 rows belong to 4035273d9f13SBarry Smith process 0, the next m1 rows belong to process 1, the next m2 rows belong 4036273d9f13SBarry Smith to process 2 etc.. where m0,m1,m2... are the input parameter 'm'. 
4037273d9f13SBarry Smith 
4038273d9f13SBarry Smith    The DIAGONAL portion of the local submatrix of a processor can be defined 
4039a05b864aSJed Brown    as the submatrix which is obtained by extracting the part corresponding to 
4040a05b864aSJed Brown    the rows r1-r2 and columns c1-c2 of the global matrix, where r1 is the 
4041a05b864aSJed Brown    first row that belongs to the processor, r2 is the last row belonging to 
4042a05b864aSJed Brown    this processor, and c1-c2 is the range of indices of the local part of a 
4043a05b864aSJed Brown    vector suitable for applying the matrix to. This is an mxn matrix. In the 
4044a05b864aSJed Brown    common case of a square matrix, the row and column ranges are the same and 
4045a05b864aSJed Brown    the DIAGONAL part is also square. The remaining portion of the local 
4046a05b864aSJed Brown    submatrix (mxN) constitutes the OFF-DIAGONAL portion. 
4047273d9f13SBarry Smith 
4048273d9f13SBarry Smith    If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored. 
4049273d9f13SBarry Smith 
4050aa95bbe8SBarry Smith    You can call MatGetInfo() to get information on how effective the preallocation was; 
4051aa95bbe8SBarry Smith    for example the fields mallocs,nz_allocated,nz_used,nz_unneeded; 
4052aa95bbe8SBarry Smith    You can also run with the option -info and look for messages with the string 
4053aa95bbe8SBarry Smith    malloc in them to see if additional memory allocation was needed. 
4054aa95bbe8SBarry Smith 
4055273d9f13SBarry Smith    Example usage: 
4056273d9f13SBarry Smith 
4057273d9f13SBarry Smith    Consider the following 8x8 matrix with 34 non-zero values, that is 
4058273d9f13SBarry Smith    assembled across 3 processors. Lets assume that proc0 owns 3 rows, 
4059273d9f13SBarry Smith    proc1 owns 3 rows, proc2 owns 2 rows. 
This division can be shown 4060273d9f13SBarry Smith as follows: 4061273d9f13SBarry Smith 4062273d9f13SBarry Smith .vb 4063273d9f13SBarry Smith 1 2 0 | 0 3 0 | 0 4 4064273d9f13SBarry Smith Proc0 0 5 6 | 7 0 0 | 8 0 4065273d9f13SBarry Smith 9 0 10 | 11 0 0 | 12 0 4066273d9f13SBarry Smith ------------------------------------- 4067273d9f13SBarry Smith 13 0 14 | 15 16 17 | 0 0 4068273d9f13SBarry Smith Proc1 0 18 0 | 19 20 21 | 0 0 4069273d9f13SBarry Smith 0 0 0 | 22 23 0 | 24 0 4070273d9f13SBarry Smith ------------------------------------- 4071273d9f13SBarry Smith Proc2 25 26 27 | 0 0 28 | 29 0 4072273d9f13SBarry Smith 30 0 0 | 31 32 33 | 0 34 4073273d9f13SBarry Smith .ve 4074273d9f13SBarry Smith 4075273d9f13SBarry Smith This can be represented as a collection of submatrices as: 4076273d9f13SBarry Smith 4077273d9f13SBarry Smith .vb 4078273d9f13SBarry Smith A B C 4079273d9f13SBarry Smith D E F 4080273d9f13SBarry Smith G H I 4081273d9f13SBarry Smith .ve 4082273d9f13SBarry Smith 4083273d9f13SBarry Smith Where the submatrices A,B,C are owned by proc0, D,E,F are 4084273d9f13SBarry Smith owned by proc1, G,H,I are owned by proc2. 4085273d9f13SBarry Smith 4086273d9f13SBarry Smith The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 4087273d9f13SBarry Smith The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 4088273d9f13SBarry Smith The 'M','N' parameters are 8,8, and have the same values on all procs. 4089273d9f13SBarry Smith 4090273d9f13SBarry Smith The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are 4091273d9f13SBarry Smith submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices 4092273d9f13SBarry Smith corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively. 4093273d9f13SBarry Smith Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL 4094273d9f13SBarry Smith part as SeqAIJ matrices. for eg: proc1 will store [E] as a SeqAIJ 4095273d9f13SBarry Smith matrix, ans [DF] as another SeqAIJ matrix. 
4096273d9f13SBarry Smith 
4097273d9f13SBarry Smith    When d_nz, o_nz parameters are specified, d_nz storage elements are 
4098273d9f13SBarry Smith    allocated for every row of the local diagonal submatrix, and o_nz 
4099273d9f13SBarry Smith    storage locations are allocated for every row of the OFF-DIAGONAL submat. 
4100273d9f13SBarry Smith    One way to choose d_nz and o_nz is to use the max nonzeros per local 
4101273d9f13SBarry Smith    rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices. 
4102273d9f13SBarry Smith    In this case, the values of d_nz,o_nz are: 
4103273d9f13SBarry Smith .vb 
4104273d9f13SBarry Smith      proc0 : dnz = 2, o_nz = 2 
4105273d9f13SBarry Smith      proc1 : dnz = 3, o_nz = 2 
4106273d9f13SBarry Smith      proc2 : dnz = 1, o_nz = 4 
4107273d9f13SBarry Smith .ve 
4108273d9f13SBarry Smith    We are allocating m*(d_nz+o_nz) storage locations for every proc. This 
4109273d9f13SBarry Smith    translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10 
4110273d9f13SBarry Smith    for proc2. i.e we are using 12+15+10=37 storage locations to store 
4111273d9f13SBarry Smith    34 values. 
4112273d9f13SBarry Smith 
4113273d9f13SBarry Smith    When d_nnz, o_nnz parameters are specified, the storage is specified 
4114273d9f13SBarry Smith    for every row, corresponding to both DIAGONAL and OFF-DIAGONAL submatrices. 
4115273d9f13SBarry Smith    In the above case the values for d_nnz,o_nnz are: 
4116273d9f13SBarry Smith .vb 
4117273d9f13SBarry Smith      proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2] 
4118273d9f13SBarry Smith      proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1] 
4119273d9f13SBarry Smith      proc2: d_nnz = [1,1] and o_nnz = [4,4] 
4120273d9f13SBarry Smith .ve 
4121273d9f13SBarry Smith    Here the space allocated is the sum of all the above values i.e 34, and 
4122273d9f13SBarry Smith    hence pre-allocation is perfect. 
4123273d9f13SBarry Smith 4124273d9f13SBarry Smith Level: intermediate 4125273d9f13SBarry Smith 4126273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 4127273d9f13SBarry Smith 412869b1f4b7SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatCreateAIJ(), MatMPIAIJSetPreallocationCSR(), 4129ab978733SBarry Smith MPIAIJ, MatGetInfo(), PetscSplitOwnership() 4130273d9f13SBarry Smith @*/ 41317087cfbeSBarry Smith PetscErrorCode MatMPIAIJSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 4132273d9f13SBarry Smith { 41334ac538c5SBarry Smith PetscErrorCode ierr; 4134273d9f13SBarry Smith 4135273d9f13SBarry Smith PetscFunctionBegin; 41366ba663aaSJed Brown PetscValidHeaderSpecific(B,MAT_CLASSID,1); 41376ba663aaSJed Brown PetscValidType(B,1); 41384ac538c5SBarry Smith ierr = PetscTryMethod(B,"MatMPIAIJSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr); 4139273d9f13SBarry Smith PetscFunctionReturn(0); 4140273d9f13SBarry Smith } 4141273d9f13SBarry Smith 41424a2ae208SSatish Balay #undef __FUNCT__ 41432fb0ec9aSBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithArrays" 414458d36128SBarry Smith /*@ 41452fb0ec9aSBarry Smith MatCreateMPIAIJWithArrays - creates a MPI AIJ matrix using arrays that contain in standard 41462fb0ec9aSBarry Smith CSR format the local rows. 41472fb0ec9aSBarry Smith 41482fb0ec9aSBarry Smith Collective on MPI_Comm 41492fb0ec9aSBarry Smith 41502fb0ec9aSBarry Smith Input Parameters: 41512fb0ec9aSBarry Smith + comm - MPI communicator 41522fb0ec9aSBarry Smith . m - number of local rows (Cannot be PETSC_DECIDE) 41532fb0ec9aSBarry Smith . n - This value should be the same as the local size used in creating the 41542fb0ec9aSBarry Smith x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have 41552fb0ec9aSBarry Smith calculated if N is given) For square matrices n is almost always m. 
41562fb0ec9aSBarry Smith . M - number of global rows (or PETSC_DETERMINE to have calculated if m is given) 41572fb0ec9aSBarry Smith . N - number of global columns (or PETSC_DETERMINE to have calculated if n is given) 41582fb0ec9aSBarry Smith . i - row indices 41592fb0ec9aSBarry Smith . j - column indices 41602fb0ec9aSBarry Smith - a - matrix values 41612fb0ec9aSBarry Smith 41622fb0ec9aSBarry Smith Output Parameter: 41632fb0ec9aSBarry Smith . mat - the matrix 416403bfb495SBarry Smith 41652fb0ec9aSBarry Smith Level: intermediate 41662fb0ec9aSBarry Smith 41672fb0ec9aSBarry Smith Notes: 41682fb0ec9aSBarry Smith The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc; 41692fb0ec9aSBarry Smith thus you CANNOT change the matrix entries by changing the values of a[] after you have 41708d7a6e47SBarry Smith called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays. 41712fb0ec9aSBarry Smith 417212251496SSatish Balay The i and j indices are 0 based, and i indices are indices corresponding to the local j array. 417312251496SSatish Balay 417412251496SSatish Balay The format which is used for the sparse matrix input, is equivalent to a 417512251496SSatish Balay row-major ordering.. 
i.e for the following matrix, the input data expected is 417612251496SSatish Balay as shown: 417712251496SSatish Balay 417812251496SSatish Balay 1 0 0 417912251496SSatish Balay 2 0 3 P0 418012251496SSatish Balay ------- 418112251496SSatish Balay 4 5 6 P1 418212251496SSatish Balay 418312251496SSatish Balay Process0 [P0]: rows_owned=[0,1] 418412251496SSatish Balay i = {0,1,3} [size = nrow+1 = 2+1] 418512251496SSatish Balay j = {0,0,2} [size = nz = 6] 418612251496SSatish Balay v = {1,2,3} [size = nz = 6] 418712251496SSatish Balay 418812251496SSatish Balay Process1 [P1]: rows_owned=[2] 418912251496SSatish Balay i = {0,3} [size = nrow+1 = 1+1] 419012251496SSatish Balay j = {0,1,2} [size = nz = 6] 419112251496SSatish Balay v = {4,5,6} [size = nz = 6] 41922fb0ec9aSBarry Smith 41932fb0ec9aSBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 41942fb0ec9aSBarry Smith 41952fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 419669b1f4b7SBarry Smith MPIAIJ, MatCreateAIJ(), MatCreateMPIAIJWithSplitArrays() 41972fb0ec9aSBarry Smith @*/ 41987087cfbeSBarry Smith PetscErrorCode MatCreateMPIAIJWithArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat) 41992fb0ec9aSBarry Smith { 42002fb0ec9aSBarry Smith PetscErrorCode ierr; 42012fb0ec9aSBarry Smith 42022fb0ec9aSBarry Smith PetscFunctionBegin; 420369b1f4b7SBarry Smith if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0"); 4204e32f2f54SBarry Smith if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative"); 42052fb0ec9aSBarry Smith ierr = MatCreate(comm,mat);CHKERRQ(ierr); 4206d4146a68SBarry Smith ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr); 4207a2f3521dSMark F. 
Adams /* ierr = MatSetBlockSizes(M,bs,cbs);CHKERRQ(ierr); */ 42082fb0ec9aSBarry Smith ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr); 42092fb0ec9aSBarry Smith ierr = MatMPIAIJSetPreallocationCSR(*mat,i,j,a);CHKERRQ(ierr); 42102fb0ec9aSBarry Smith PetscFunctionReturn(0); 42112fb0ec9aSBarry Smith } 42122fb0ec9aSBarry Smith 42132fb0ec9aSBarry Smith #undef __FUNCT__ 421469b1f4b7SBarry Smith #define __FUNCT__ "MatCreateAIJ" 4215273d9f13SBarry Smith /*@C 421669b1f4b7SBarry Smith MatCreateAIJ - Creates a sparse parallel matrix in AIJ format 4217273d9f13SBarry Smith (the default parallel PETSc format). For good matrix assembly performance 4218273d9f13SBarry Smith the user should preallocate the matrix storage by setting the parameters 4219273d9f13SBarry Smith d_nz (or d_nnz) and o_nz (or o_nnz). By setting these parameters accurately, 4220273d9f13SBarry Smith performance can be increased by more than a factor of 50. 4221273d9f13SBarry Smith 4222273d9f13SBarry Smith Collective on MPI_Comm 4223273d9f13SBarry Smith 4224273d9f13SBarry Smith Input Parameters: 4225273d9f13SBarry Smith + comm - MPI communicator 4226273d9f13SBarry Smith . m - number of local rows (or PETSC_DECIDE to have calculated if M is given) 4227273d9f13SBarry Smith This value should be the same as the local size used in creating the 4228273d9f13SBarry Smith y vector for the matrix-vector product y = Ax. 4229273d9f13SBarry Smith . n - This value should be the same as the local size used in creating the 4230273d9f13SBarry Smith x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have 4231273d9f13SBarry Smith calculated if N is given) For square matrices n is almost always m. 4232273d9f13SBarry Smith . M - number of global rows (or PETSC_DETERMINE to have calculated if m is given) 4233273d9f13SBarry Smith . N - number of global columns (or PETSC_DETERMINE to have calculated if n is given) 4234273d9f13SBarry Smith . 
 d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix 
4235273d9f13SBarry Smith            (same value is used for all local rows) 
4236273d9f13SBarry Smith .  d_nnz - array containing the number of nonzeros in the various rows of the 
4237273d9f13SBarry Smith            DIAGONAL portion of the local submatrix (possibly different for each row) 
42380298fd71SBarry Smith            or NULL, if d_nz is used to specify the nonzero structure. 
4239273d9f13SBarry Smith            The size of this array is equal to the number of local rows, i.e 'm'. 
4240273d9f13SBarry Smith .  o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local 
4241273d9f13SBarry Smith            submatrix (same value is used for all local rows). 
4242273d9f13SBarry Smith -  o_nnz - array containing the number of nonzeros in the various rows of the 
4243273d9f13SBarry Smith            OFF-DIAGONAL portion of the local submatrix (possibly different for 
42440298fd71SBarry Smith            each row) or NULL, if o_nz is used to specify the nonzero 
4245273d9f13SBarry Smith            structure. The size of this array is equal to the number 
4246273d9f13SBarry Smith            of local rows, i.e 'm'. 
4247273d9f13SBarry Smith 
4248273d9f13SBarry Smith    Output Parameter: 
4249273d9f13SBarry Smith .  A - the matrix 
4250273d9f13SBarry Smith 
4251175b88e8SBarry Smith    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(), 
4252ae1d86c5SBarry Smith    MatXXXXSetPreallocation() paradigm instead of this routine directly. 
4253175b88e8SBarry Smith    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation] 
4254175b88e8SBarry Smith 
4255273d9f13SBarry Smith    Notes: 
425649a6f317SBarry Smith    If the *_nnz parameter is given then the *_nz parameter is ignored 
4257273d9f13SBarry Smith 
4258273d9f13SBarry Smith    m,n,M,N parameters specify the size of the matrix, and its partitioning across 
4259273d9f13SBarry Smith    processors, while d_nz,d_nnz,o_nz,o_nnz parameters specify the approximate 
4260273d9f13SBarry Smith    storage requirements for this matrix. 
4261273d9f13SBarry Smith 
4262273d9f13SBarry Smith    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one 
4263273d9f13SBarry Smith    processor then it must be used on all processors that share the object for 
4264273d9f13SBarry Smith    that argument. 
4265273d9f13SBarry Smith 
4266273d9f13SBarry Smith    The user MUST specify either the local or global matrix dimensions 
4267273d9f13SBarry Smith    (possibly both). 
4268273d9f13SBarry Smith 
426933a7c187SSatish Balay    The parallel matrix is partitioned across processors such that the 
427033a7c187SSatish Balay    first m0 rows belong to process 0, the next m1 rows belong to 
427133a7c187SSatish Balay    process 1, the next m2 rows belong to process 2 etc.. where 
427233a7c187SSatish Balay    m0,m1,m2,.. are the input parameter 'm'. i.e each processor stores 
427333a7c187SSatish Balay    values corresponding to [m x N] submatrix. 
4274273d9f13SBarry Smith 
427533a7c187SSatish Balay    The columns are logically partitioned with the n0 columns belonging 
427633a7c187SSatish Balay    to 0th partition, the next n1 columns belonging to the next 
427733a7c187SSatish Balay    partition etc.. where n0,n1,n2... are the input parameter 'n'. 
427833a7c187SSatish Balay 
427933a7c187SSatish Balay    The DIAGONAL portion of the local submatrix on any given processor 
428033a7c187SSatish Balay    is the submatrix corresponding to the rows and columns m,n 
428133a7c187SSatish Balay    corresponding to the given processor. i.e diagonal matrix on 
428233a7c187SSatish Balay    process 0 is [m0 x n0], diagonal matrix on process 1 is [m1 x n1] 
428333a7c187SSatish Balay    etc. The remaining portion of the local submatrix [m x (N-n)] 
428433a7c187SSatish Balay    constitute the OFF-DIAGONAL portion. The example below better 
428533a7c187SSatish Balay    illustrates this concept. 
428633a7c187SSatish Balay 428733a7c187SSatish Balay For a square global matrix we define each processor's diagonal portion 428833a7c187SSatish Balay to be its local rows and the corresponding columns (a square submatrix); 428933a7c187SSatish Balay each processor's off-diagonal portion encompasses the remainder of the 429033a7c187SSatish Balay local matrix (a rectangular submatrix). 4291273d9f13SBarry Smith 4292273d9f13SBarry Smith If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored. 4293273d9f13SBarry Smith 429497d05335SKris Buschelman When calling this routine with a single process communicator, a matrix of 429597d05335SKris Buschelman type SEQAIJ is returned. If a matrix of type MPIAIJ is desired for this 429697d05335SKris Buschelman type of communicator, use the construction mechanism: 429778102f6cSMatthew Knepley MatCreate(...,&A); MatSetType(A,MATMPIAIJ); MatSetSizes(A, m,n,M,N); MatMPIAIJSetPreallocation(A,...); 429897d05335SKris Buschelman 4299273d9f13SBarry Smith By default, this format uses inodes (identical nodes) when possible. 4300273d9f13SBarry Smith We search for consecutive rows with the same nonzero structure, thereby 4301273d9f13SBarry Smith reusing matrix information to achieve increased efficiency. 4302273d9f13SBarry Smith 4303273d9f13SBarry Smith Options Database Keys: 4304923f20ffSKris Buschelman + -mat_no_inode - Do not use inodes 4305923f20ffSKris Buschelman . -mat_inode_limit <limit> - Sets inode limit (max limit=5) 4306273d9f13SBarry Smith - -mat_aij_oneindex - Internally use indexing starting at 1 4307273d9f13SBarry Smith rather than 0. Note that when calling MatSetValues(), 4308273d9f13SBarry Smith the user still MUST index entries starting at 0! 4309273d9f13SBarry Smith 4310273d9f13SBarry Smith 4311273d9f13SBarry Smith Example usage: 4312273d9f13SBarry Smith 4313273d9f13SBarry Smith Consider the following 8x8 matrix with 34 non-zero values, that is 4314273d9f13SBarry Smith assembled across 3 processors. 
Lets assume that proc0 owns 3 rows, 4315273d9f13SBarry Smith proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown 4316273d9f13SBarry Smith as follows: 4317273d9f13SBarry Smith 4318273d9f13SBarry Smith .vb 4319273d9f13SBarry Smith 1 2 0 | 0 3 0 | 0 4 4320273d9f13SBarry Smith Proc0 0 5 6 | 7 0 0 | 8 0 4321273d9f13SBarry Smith 9 0 10 | 11 0 0 | 12 0 4322273d9f13SBarry Smith ------------------------------------- 4323273d9f13SBarry Smith 13 0 14 | 15 16 17 | 0 0 4324273d9f13SBarry Smith Proc1 0 18 0 | 19 20 21 | 0 0 4325273d9f13SBarry Smith 0 0 0 | 22 23 0 | 24 0 4326273d9f13SBarry Smith ------------------------------------- 4327273d9f13SBarry Smith Proc2 25 26 27 | 0 0 28 | 29 0 4328273d9f13SBarry Smith 30 0 0 | 31 32 33 | 0 34 4329273d9f13SBarry Smith .ve 4330273d9f13SBarry Smith 4331273d9f13SBarry Smith This can be represented as a collection of submatrices as: 4332273d9f13SBarry Smith 4333273d9f13SBarry Smith .vb 4334273d9f13SBarry Smith A B C 4335273d9f13SBarry Smith D E F 4336273d9f13SBarry Smith G H I 4337273d9f13SBarry Smith .ve 4338273d9f13SBarry Smith 4339273d9f13SBarry Smith Where the submatrices A,B,C are owned by proc0, D,E,F are 4340273d9f13SBarry Smith owned by proc1, G,H,I are owned by proc2. 4341273d9f13SBarry Smith 4342273d9f13SBarry Smith The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 4343273d9f13SBarry Smith The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 4344273d9f13SBarry Smith The 'M','N' parameters are 8,8, and have the same values on all procs. 4345273d9f13SBarry Smith 4346273d9f13SBarry Smith The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are 4347273d9f13SBarry Smith submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices 4348273d9f13SBarry Smith corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively. 4349273d9f13SBarry Smith Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL 4350273d9f13SBarry Smith part as SeqAIJ matrices. 
 for eg: proc1 will store [E] as a SeqAIJ 
4351273d9f13SBarry Smith    matrix, and [DF] as another SeqAIJ matrix. 
4352273d9f13SBarry Smith 
4353273d9f13SBarry Smith    When d_nz, o_nz parameters are specified, d_nz storage elements are 
4354273d9f13SBarry Smith    allocated for every row of the local diagonal submatrix, and o_nz 
4355273d9f13SBarry Smith    storage locations are allocated for every row of the OFF-DIAGONAL submat. 
4356273d9f13SBarry Smith    One way to choose d_nz and o_nz is to use the max nonzeros per local 
4357273d9f13SBarry Smith    rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices. 
4358273d9f13SBarry Smith    In this case, the values of d_nz,o_nz are: 
4359273d9f13SBarry Smith .vb 
4360273d9f13SBarry Smith      proc0 : dnz = 2, o_nz = 2 
4361273d9f13SBarry Smith      proc1 : dnz = 3, o_nz = 2 
4362273d9f13SBarry Smith      proc2 : dnz = 1, o_nz = 4 
4363273d9f13SBarry Smith .ve 
4364273d9f13SBarry Smith    We are allocating m*(d_nz+o_nz) storage locations for every proc. This 
4365273d9f13SBarry Smith    translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10 
4366273d9f13SBarry Smith    for proc2. i.e we are using 12+15+10=37 storage locations to store 
4367273d9f13SBarry Smith    34 values. 
4368273d9f13SBarry Smith 
4369273d9f13SBarry Smith    When d_nnz, o_nnz parameters are specified, the storage is specified 
4370273d9f13SBarry Smith    for every row, corresponding to both DIAGONAL and OFF-DIAGONAL submatrices. 
4371273d9f13SBarry Smith    In the above case the values for d_nnz,o_nnz are: 
4372273d9f13SBarry Smith .vb 
4373273d9f13SBarry Smith      proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2] 
4374273d9f13SBarry Smith      proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1] 
4375273d9f13SBarry Smith      proc2: d_nnz = [1,1] and o_nnz = [4,4] 
4376273d9f13SBarry Smith .ve 
4377273d9f13SBarry Smith    Here the space allocated is the sum of all the above values i.e 34, and 
4378273d9f13SBarry Smith    hence pre-allocation is perfect. 
4379273d9f13SBarry Smith 4380273d9f13SBarry Smith Level: intermediate 4381273d9f13SBarry Smith 4382273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 4383273d9f13SBarry Smith 4384ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 43852fb0ec9aSBarry Smith MPIAIJ, MatCreateMPIAIJWithArrays() 4386273d9f13SBarry Smith @*/ 438769b1f4b7SBarry Smith PetscErrorCode MatCreateAIJ(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A) 4388273d9f13SBarry Smith { 43896849ba73SBarry Smith PetscErrorCode ierr; 4390b1d57f15SBarry Smith PetscMPIInt size; 4391273d9f13SBarry Smith 4392273d9f13SBarry Smith PetscFunctionBegin; 4393f69a0ea3SMatthew Knepley ierr = MatCreate(comm,A);CHKERRQ(ierr); 4394f69a0ea3SMatthew Knepley ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr); 4395273d9f13SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 4396273d9f13SBarry Smith if (size > 1) { 4397273d9f13SBarry Smith ierr = MatSetType(*A,MATMPIAIJ);CHKERRQ(ierr); 4398273d9f13SBarry Smith ierr = MatMPIAIJSetPreallocation(*A,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr); 4399273d9f13SBarry Smith } else { 4400273d9f13SBarry Smith ierr = MatSetType(*A,MATSEQAIJ);CHKERRQ(ierr); 4401273d9f13SBarry Smith ierr = MatSeqAIJSetPreallocation(*A,d_nz,d_nnz);CHKERRQ(ierr); 4402273d9f13SBarry Smith } 4403273d9f13SBarry Smith PetscFunctionReturn(0); 4404273d9f13SBarry Smith } 4405195d93cdSBarry Smith 44064a2ae208SSatish Balay #undef __FUNCT__ 44074a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJGetSeqAIJ" 44089230625dSJed Brown PetscErrorCode MatMPIAIJGetSeqAIJ(Mat A,Mat *Ad,Mat *Ao,const PetscInt *colmap[]) 4409195d93cdSBarry Smith { 4410195d93cdSBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 4411b1d57f15SBarry Smith 4412195d93cdSBarry Smith PetscFunctionBegin; 4413195d93cdSBarry Smith *Ad = a->A; 4414195d93cdSBarry 
Smith *Ao = a->B; 4415195d93cdSBarry Smith *colmap = a->garray; 4416195d93cdSBarry Smith PetscFunctionReturn(0); 4417195d93cdSBarry Smith } 4418a2243be0SBarry Smith 4419a2243be0SBarry Smith #undef __FUNCT__ 4420a2243be0SBarry Smith #define __FUNCT__ "MatSetColoring_MPIAIJ" 4421dfbe8321SBarry Smith PetscErrorCode MatSetColoring_MPIAIJ(Mat A,ISColoring coloring) 4422a2243be0SBarry Smith { 4423dfbe8321SBarry Smith PetscErrorCode ierr; 4424b1d57f15SBarry Smith PetscInt i; 4425a2243be0SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 4426a2243be0SBarry Smith 4427a2243be0SBarry Smith PetscFunctionBegin; 44288ee2e534SBarry Smith if (coloring->ctype == IS_COLORING_GLOBAL) { 442908b6dcc0SBarry Smith ISColoringValue *allcolors,*colors; 4430a2243be0SBarry Smith ISColoring ocoloring; 4431a2243be0SBarry Smith 4432a2243be0SBarry Smith /* set coloring for diagonal portion */ 4433a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->A,coloring);CHKERRQ(ierr); 4434a2243be0SBarry Smith 4435a2243be0SBarry Smith /* set coloring for off-diagonal portion */ 4436ce94432eSBarry Smith ierr = ISAllGatherColors(PetscObjectComm((PetscObject)A),coloring->n,coloring->colors,NULL,&allcolors);CHKERRQ(ierr); 4437d0f46423SBarry Smith ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); 4438d0f46423SBarry Smith for (i=0; i<a->B->cmap->n; i++) { 4439a2243be0SBarry Smith colors[i] = allcolors[a->garray[i]]; 4440a2243be0SBarry Smith } 4441a2243be0SBarry Smith ierr = PetscFree(allcolors);CHKERRQ(ierr); 4442d0f46423SBarry Smith ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr); 4443a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr); 44446bf464f9SBarry Smith ierr = ISColoringDestroy(&ocoloring);CHKERRQ(ierr); 4445a2243be0SBarry Smith } else if (coloring->ctype == IS_COLORING_GHOSTED) { 444608b6dcc0SBarry Smith ISColoringValue *colors; 4447b1d57f15SBarry Smith PetscInt *larray; 4448a2243be0SBarry 
Smith ISColoring ocoloring; 4449a2243be0SBarry Smith 4450a2243be0SBarry Smith /* set coloring for diagonal portion */ 4451d0f46423SBarry Smith ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr); 4452d0f46423SBarry Smith for (i=0; i<a->A->cmap->n; i++) { 4453d0f46423SBarry Smith larray[i] = i + A->cmap->rstart; 4454a2243be0SBarry Smith } 44550298fd71SBarry Smith ierr = ISGlobalToLocalMappingApply(A->cmap->mapping,IS_GTOLM_MASK,a->A->cmap->n,larray,NULL,larray);CHKERRQ(ierr); 4456d0f46423SBarry Smith ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); 4457d0f46423SBarry Smith for (i=0; i<a->A->cmap->n; i++) { 4458a2243be0SBarry Smith colors[i] = coloring->colors[larray[i]]; 4459a2243be0SBarry Smith } 4460a2243be0SBarry Smith ierr = PetscFree(larray);CHKERRQ(ierr); 4461d0f46423SBarry Smith ierr = ISColoringCreate(PETSC_COMM_SELF,coloring->n,a->A->cmap->n,colors,&ocoloring);CHKERRQ(ierr); 4462a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->A,ocoloring);CHKERRQ(ierr); 44636bf464f9SBarry Smith ierr = ISColoringDestroy(&ocoloring);CHKERRQ(ierr); 4464a2243be0SBarry Smith 4465a2243be0SBarry Smith /* set coloring for off-diagonal portion */ 4466d0f46423SBarry Smith ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr); 44670298fd71SBarry Smith ierr = ISGlobalToLocalMappingApply(A->cmap->mapping,IS_GTOLM_MASK,a->B->cmap->n,a->garray,NULL,larray);CHKERRQ(ierr); 4468d0f46423SBarry Smith ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); 4469d0f46423SBarry Smith for (i=0; i<a->B->cmap->n; i++) { 4470a2243be0SBarry Smith colors[i] = coloring->colors[larray[i]]; 4471a2243be0SBarry Smith } 4472a2243be0SBarry Smith ierr = PetscFree(larray);CHKERRQ(ierr); 4473d0f46423SBarry Smith ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr); 4474a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr); 
44756bf464f9SBarry Smith ierr = ISColoringDestroy(&ocoloring);CHKERRQ(ierr); 44766bf464f9SBarry Smith } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support ISColoringType %d",(int)coloring->ctype); 4477a2243be0SBarry Smith PetscFunctionReturn(0); 4478a2243be0SBarry Smith } 4479a2243be0SBarry Smith 4480779c1a83SBarry Smith #undef __FUNCT__ 4481779c1a83SBarry Smith #define __FUNCT__ "MatSetValuesAdifor_MPIAIJ" 4482b1d57f15SBarry Smith PetscErrorCode MatSetValuesAdifor_MPIAIJ(Mat A,PetscInt nl,void *advalues) 4483779c1a83SBarry Smith { 4484779c1a83SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 4485dfbe8321SBarry Smith PetscErrorCode ierr; 4486779c1a83SBarry Smith 4487779c1a83SBarry Smith PetscFunctionBegin; 4488779c1a83SBarry Smith ierr = MatSetValuesAdifor_SeqAIJ(a->A,nl,advalues);CHKERRQ(ierr); 4489779c1a83SBarry Smith ierr = MatSetValuesAdifor_SeqAIJ(a->B,nl,advalues);CHKERRQ(ierr); 4490a2243be0SBarry Smith PetscFunctionReturn(0); 4491a2243be0SBarry Smith } 4492c5d6d63eSBarry Smith 4493c5d6d63eSBarry Smith #undef __FUNCT__ 449490431a8fSHong Zhang #define __FUNCT__ "MatCreateMPIAIJConcatenateSeqAIJSymbolic" 449590431a8fSHong Zhang PetscErrorCode MatCreateMPIAIJConcatenateSeqAIJSymbolic(MPI_Comm comm,Mat inmat,PetscInt n,Mat *outmat) 44969b8102ccSHong Zhang { 44979b8102ccSHong Zhang PetscErrorCode ierr; 4498a2f3521dSMark F. Adams PetscInt m,N,i,rstart,nnz,*dnz,*onz,sum,bs,cbs; 44999b8102ccSHong Zhang PetscInt *indx; 45009b8102ccSHong Zhang 45019b8102ccSHong Zhang PetscFunctionBegin; 45029b8102ccSHong Zhang /* This routine will ONLY return MPIAIJ type matrix */ 45039b8102ccSHong Zhang ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr); 4504a2f3521dSMark F. 
Adams ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr); 45059b8102ccSHong Zhang if (n == PETSC_DECIDE) { 45069b8102ccSHong Zhang ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr); 45079b8102ccSHong Zhang } 4508a22543b6SHong Zhang /* Check sum(n) = N */ 4509a95133b1SBarry Smith ierr = MPI_Allreduce(&n,&sum,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 4510a22543b6SHong Zhang if (sum != N) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Sum of local columns != global columns %d",N); 4511a22543b6SHong Zhang 45129b8102ccSHong Zhang ierr = MPI_Scan(&m, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 45139b8102ccSHong Zhang rstart -= m; 45149b8102ccSHong Zhang 45159b8102ccSHong Zhang ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr); 45169b8102ccSHong Zhang for (i=0; i<m; i++) { 45170298fd71SBarry Smith ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,NULL);CHKERRQ(ierr); 45189b8102ccSHong Zhang ierr = MatPreallocateSet(i+rstart,nnz,indx,dnz,onz);CHKERRQ(ierr); 45190298fd71SBarry Smith ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,NULL);CHKERRQ(ierr); 45209b8102ccSHong Zhang } 45219b8102ccSHong Zhang 45229b8102ccSHong Zhang ierr = MatCreate(comm,outmat);CHKERRQ(ierr); 45239b8102ccSHong Zhang ierr = MatSetSizes(*outmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 4524a2f3521dSMark F. 
Adams ierr = MatSetBlockSizes(*outmat,bs,cbs);CHKERRQ(ierr); 45259b8102ccSHong Zhang ierr = MatSetType(*outmat,MATMPIAIJ);CHKERRQ(ierr); 45269b8102ccSHong Zhang ierr = MatMPIAIJSetPreallocation(*outmat,0,dnz,0,onz);CHKERRQ(ierr); 45279b8102ccSHong Zhang ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 45289b8102ccSHong Zhang PetscFunctionReturn(0); 45299b8102ccSHong Zhang } 45309b8102ccSHong Zhang 45319b8102ccSHong Zhang #undef __FUNCT__ 453290431a8fSHong Zhang #define __FUNCT__ "MatCreateMPIAIJConcatenateSeqAIJNumeric" 453390431a8fSHong Zhang PetscErrorCode MatCreateMPIAIJConcatenateSeqAIJNumeric(MPI_Comm comm,Mat inmat,PetscInt n,Mat outmat) 45349b8102ccSHong Zhang { 45359b8102ccSHong Zhang PetscErrorCode ierr; 45369b8102ccSHong Zhang PetscInt m,N,i,rstart,nnz,Ii; 45379b8102ccSHong Zhang PetscInt *indx; 45389b8102ccSHong Zhang PetscScalar *values; 45399b8102ccSHong Zhang 45409b8102ccSHong Zhang PetscFunctionBegin; 45419b8102ccSHong Zhang ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr); 45420298fd71SBarry Smith ierr = MatGetOwnershipRange(outmat,&rstart,NULL);CHKERRQ(ierr); 45439b8102ccSHong Zhang for (i=0; i<m; i++) { 45449b8102ccSHong Zhang ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr); 45459b8102ccSHong Zhang Ii = i + rstart; 45463c79b8e7SHong Zhang ierr = MatSetValues(outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr); 45479b8102ccSHong Zhang ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr); 45489b8102ccSHong Zhang } 45499b8102ccSHong Zhang ierr = MatAssemblyBegin(outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 45509b8102ccSHong Zhang ierr = MatAssemblyEnd(outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 45519b8102ccSHong Zhang PetscFunctionReturn(0); 45529b8102ccSHong Zhang } 45539b8102ccSHong Zhang 45549b8102ccSHong Zhang #undef __FUNCT__ 455590431a8fSHong Zhang #define __FUNCT__ "MatCreateMPIAIJConcatenateSeqAIJ" 4556bc08b0f1SBarry Smith /*@ 455790431a8fSHong Zhang MatCreateMPIAIJConcatenateSeqAIJ - Creates a single 
large PETSc matrix by concatenating sequential 455851dd7536SBarry Smith matrices from each processor 4559c5d6d63eSBarry Smith 4560c5d6d63eSBarry Smith Collective on MPI_Comm 4561c5d6d63eSBarry Smith 4562c5d6d63eSBarry Smith Input Parameters: 456351dd7536SBarry Smith + comm - the communicators the parallel matrix will live on 4564d6bb3c2dSHong Zhang . inmat - the input sequential matrices 45650e36024fSHong Zhang . n - number of local columns (or PETSC_DECIDE) 4566d6bb3c2dSHong Zhang - scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 456751dd7536SBarry Smith 456851dd7536SBarry Smith Output Parameter: 456951dd7536SBarry Smith . outmat - the parallel matrix generated 4570c5d6d63eSBarry Smith 45717e25d530SSatish Balay Level: advanced 45727e25d530SSatish Balay 4573f08fae4eSHong Zhang Notes: The number of columns of the matrix in EACH processor MUST be the same. 4574c5d6d63eSBarry Smith 4575c5d6d63eSBarry Smith @*/ 457690431a8fSHong Zhang PetscErrorCode MatCreateMPIAIJConcatenateSeqAIJ(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat) 4577c5d6d63eSBarry Smith { 4578dfbe8321SBarry Smith PetscErrorCode ierr; 4579f4703a44SHong Zhang PetscMPIInt size; 4580c5d6d63eSBarry Smith 4581c5d6d63eSBarry Smith PetscFunctionBegin; 4582f4703a44SHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 45839b8102ccSHong Zhang ierr = PetscLogEventBegin(MAT_Merge,inmat,0,0,0);CHKERRQ(ierr); 4584f4703a44SHong Zhang if (size == 1) { 4585f4703a44SHong Zhang if (scall == MAT_INITIAL_MATRIX) { 4586f4703a44SHong Zhang ierr = MatDuplicate(inmat,MAT_COPY_VALUES,outmat);CHKERRQ(ierr); 4587f4703a44SHong Zhang } else { 4588f4703a44SHong Zhang ierr = MatCopy(inmat,*outmat,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 4589f4703a44SHong Zhang } 4590f4703a44SHong Zhang } else { 4591d6bb3c2dSHong Zhang if (scall == MAT_INITIAL_MATRIX) { 459290431a8fSHong Zhang ierr = MatCreateMPIAIJConcatenateSeqAIJSymbolic(comm,inmat,n,outmat);CHKERRQ(ierr); 45930e36024fSHong Zhang } 459490431a8fSHong Zhang ierr 
= MatCreateMPIAIJConcatenateSeqAIJNumeric(comm,inmat,n,*outmat);CHKERRQ(ierr); 4595f4703a44SHong Zhang } 45969b8102ccSHong Zhang ierr = PetscLogEventEnd(MAT_Merge,inmat,0,0,0);CHKERRQ(ierr); 4597c5d6d63eSBarry Smith PetscFunctionReturn(0); 4598c5d6d63eSBarry Smith } 4599c5d6d63eSBarry Smith 4600c5d6d63eSBarry Smith #undef __FUNCT__ 4601c5d6d63eSBarry Smith #define __FUNCT__ "MatFileSplit" 4602dfbe8321SBarry Smith PetscErrorCode MatFileSplit(Mat A,char *outfile) 4603c5d6d63eSBarry Smith { 4604dfbe8321SBarry Smith PetscErrorCode ierr; 460532dcc486SBarry Smith PetscMPIInt rank; 4606b1d57f15SBarry Smith PetscInt m,N,i,rstart,nnz; 4607de4209c5SBarry Smith size_t len; 4608b1d57f15SBarry Smith const PetscInt *indx; 4609c5d6d63eSBarry Smith PetscViewer out; 4610c5d6d63eSBarry Smith char *name; 4611c5d6d63eSBarry Smith Mat B; 4612b3cc6726SBarry Smith const PetscScalar *values; 4613c5d6d63eSBarry Smith 4614c5d6d63eSBarry Smith PetscFunctionBegin; 4615c5d6d63eSBarry Smith ierr = MatGetLocalSize(A,&m,0);CHKERRQ(ierr); 4616c5d6d63eSBarry Smith ierr = MatGetSize(A,0,&N);CHKERRQ(ierr); 4617f204ca49SKris Buschelman /* Should this be the type of the diagonal block of A? */ 4618f69a0ea3SMatthew Knepley ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr); 4619f69a0ea3SMatthew Knepley ierr = MatSetSizes(B,m,N,m,N);CHKERRQ(ierr); 4620a2f3521dSMark F. 
Adams ierr = MatSetBlockSizes(B,A->rmap->bs,A->cmap->bs);CHKERRQ(ierr); 4621f204ca49SKris Buschelman ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr); 46220298fd71SBarry Smith ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr); 4623c5d6d63eSBarry Smith ierr = MatGetOwnershipRange(A,&rstart,0);CHKERRQ(ierr); 4624c5d6d63eSBarry Smith for (i=0; i<m; i++) { 4625c5d6d63eSBarry Smith ierr = MatGetRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr); 4626c5d6d63eSBarry Smith ierr = MatSetValues(B,1,&i,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr); 4627c5d6d63eSBarry Smith ierr = MatRestoreRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr); 4628c5d6d63eSBarry Smith } 4629c5d6d63eSBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 4630c5d6d63eSBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 4631c5d6d63eSBarry Smith 4632ce94432eSBarry Smith ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);CHKERRQ(ierr); 4633c5d6d63eSBarry Smith ierr = PetscStrlen(outfile,&len);CHKERRQ(ierr); 4634c5d6d63eSBarry Smith ierr = PetscMalloc((len+5)*sizeof(char),&name);CHKERRQ(ierr); 4635c5d6d63eSBarry Smith sprintf(name,"%s.%d",outfile,rank); 4636852598b0SBarry Smith ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_APPEND,&out);CHKERRQ(ierr); 4637a2ea699eSBarry Smith ierr = PetscFree(name);CHKERRQ(ierr); 4638c5d6d63eSBarry Smith ierr = MatView(B,out);CHKERRQ(ierr); 46396bf464f9SBarry Smith ierr = PetscViewerDestroy(&out);CHKERRQ(ierr); 46406bf464f9SBarry Smith ierr = MatDestroy(&B);CHKERRQ(ierr); 4641c5d6d63eSBarry Smith PetscFunctionReturn(0); 4642c5d6d63eSBarry Smith } 4643e5f2cdd8SHong Zhang 464409573ac7SBarry Smith extern PetscErrorCode MatDestroy_MPIAIJ(Mat); 464551a7d1a8SHong Zhang #undef __FUNCT__ 464651a7d1a8SHong Zhang #define __FUNCT__ "MatDestroy_MPIAIJ_SeqsToMPI" 46477087cfbeSBarry Smith PetscErrorCode MatDestroy_MPIAIJ_SeqsToMPI(Mat A) 464851a7d1a8SHong Zhang { 464951a7d1a8SHong Zhang PetscErrorCode ierr; 4650671beff6SHong 
Zhang Mat_Merge_SeqsToMPI *merge; 4651776b82aeSLisandro Dalcin PetscContainer container; 465251a7d1a8SHong Zhang 465351a7d1a8SHong Zhang PetscFunctionBegin; 4654671beff6SHong Zhang ierr = PetscObjectQuery((PetscObject)A,"MatMergeSeqsToMPI",(PetscObject*)&container);CHKERRQ(ierr); 4655671beff6SHong Zhang if (container) { 4656776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(container,(void**)&merge);CHKERRQ(ierr); 465751a7d1a8SHong Zhang ierr = PetscFree(merge->id_r);CHKERRQ(ierr); 46583e06a4e6SHong Zhang ierr = PetscFree(merge->len_s);CHKERRQ(ierr); 46593e06a4e6SHong Zhang ierr = PetscFree(merge->len_r);CHKERRQ(ierr); 466051a7d1a8SHong Zhang ierr = PetscFree(merge->bi);CHKERRQ(ierr); 466151a7d1a8SHong Zhang ierr = PetscFree(merge->bj);CHKERRQ(ierr); 4662533163c2SBarry Smith ierr = PetscFree(merge->buf_ri[0]);CHKERRQ(ierr); 466302c68681SHong Zhang ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr); 4664533163c2SBarry Smith ierr = PetscFree(merge->buf_rj[0]);CHKERRQ(ierr); 466502c68681SHong Zhang ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr); 466605b42c5fSBarry Smith ierr = PetscFree(merge->coi);CHKERRQ(ierr); 466705b42c5fSBarry Smith ierr = PetscFree(merge->coj);CHKERRQ(ierr); 466805b42c5fSBarry Smith ierr = PetscFree(merge->owners_co);CHKERRQ(ierr); 46696bf464f9SBarry Smith ierr = PetscLayoutDestroy(&merge->rowmap);CHKERRQ(ierr); 4670bf0cc555SLisandro Dalcin ierr = PetscFree(merge);CHKERRQ(ierr); 4671671beff6SHong Zhang ierr = PetscObjectCompose((PetscObject)A,"MatMergeSeqsToMPI",0);CHKERRQ(ierr); 4672671beff6SHong Zhang } 467351a7d1a8SHong Zhang ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr); 467451a7d1a8SHong Zhang PetscFunctionReturn(0); 467551a7d1a8SHong Zhang } 467651a7d1a8SHong Zhang 4677c6db04a5SJed Brown #include <../src/mat/utils/freespace.h> 4678c6db04a5SJed Brown #include <petscbt.h> 46794ebed01fSBarry Smith 4680e5f2cdd8SHong Zhang #undef __FUNCT__ 468190431a8fSHong Zhang #define __FUNCT__ "MatCreateMPIAIJSumSeqAIJNumeric" 468290431a8fSHong Zhang 
PetscErrorCode MatCreateMPIAIJSumSeqAIJNumeric(Mat seqmat,Mat mpimat) 468355d1abb9SHong Zhang { 468455d1abb9SHong Zhang PetscErrorCode ierr; 4685ce94432eSBarry Smith MPI_Comm comm; 468655d1abb9SHong Zhang Mat_SeqAIJ *a =(Mat_SeqAIJ*)seqmat->data; 4687b1d57f15SBarry Smith PetscMPIInt size,rank,taga,*len_s; 4688a2ea699eSBarry Smith PetscInt N=mpimat->cmap->N,i,j,*owners,*ai=a->i,*aj; 4689b1d57f15SBarry Smith PetscInt proc,m; 4690b1d57f15SBarry Smith PetscInt **buf_ri,**buf_rj; 4691b1d57f15SBarry Smith PetscInt k,anzi,*bj_i,*bi,*bj,arow,bnzi,nextaj; 4692b1d57f15SBarry Smith PetscInt nrows,**buf_ri_k,**nextrow,**nextai; 469355d1abb9SHong Zhang MPI_Request *s_waits,*r_waits; 469455d1abb9SHong Zhang MPI_Status *status; 4695a77337e4SBarry Smith MatScalar *aa=a->a; 4696dd6ea824SBarry Smith MatScalar **abuf_r,*ba_i; 469755d1abb9SHong Zhang Mat_Merge_SeqsToMPI *merge; 4698776b82aeSLisandro Dalcin PetscContainer container; 469955d1abb9SHong Zhang 470055d1abb9SHong Zhang PetscFunctionBegin; 4701bedda5b1SHong Zhang ierr = PetscObjectGetComm((PetscObject)mpimat,&comm);CHKERRQ(ierr); 47024ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr); 47033c2c1871SHong Zhang 470455d1abb9SHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 470555d1abb9SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 470655d1abb9SHong Zhang 470755d1abb9SHong Zhang ierr = PetscObjectQuery((PetscObject)mpimat,"MatMergeSeqsToMPI",(PetscObject*)&container);CHKERRQ(ierr); 4708776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(container,(void**)&merge);CHKERRQ(ierr); 4709bf0cc555SLisandro Dalcin 471055d1abb9SHong Zhang bi = merge->bi; 471155d1abb9SHong Zhang bj = merge->bj; 471255d1abb9SHong Zhang buf_ri = merge->buf_ri; 471355d1abb9SHong Zhang buf_rj = merge->buf_rj; 471455d1abb9SHong Zhang 471555d1abb9SHong Zhang ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr); 47167a2fc3feSBarry Smith owners = merge->rowmap->range; 471755d1abb9SHong 
Zhang len_s = merge->len_s; 471855d1abb9SHong Zhang 471955d1abb9SHong Zhang /* send and recv matrix values */ 472055d1abb9SHong Zhang /*-----------------------------*/ 4721357abbc8SBarry Smith ierr = PetscObjectGetNewTag((PetscObject)mpimat,&taga);CHKERRQ(ierr); 472255d1abb9SHong Zhang ierr = PetscPostIrecvScalar(comm,taga,merge->nrecv,merge->id_r,merge->len_r,&abuf_r,&r_waits);CHKERRQ(ierr); 472355d1abb9SHong Zhang 472455d1abb9SHong Zhang ierr = PetscMalloc((merge->nsend+1)*sizeof(MPI_Request),&s_waits);CHKERRQ(ierr); 472555d1abb9SHong Zhang for (proc=0,k=0; proc<size; proc++) { 472655d1abb9SHong Zhang if (!len_s[proc]) continue; 472755d1abb9SHong Zhang i = owners[proc]; 472855d1abb9SHong Zhang ierr = MPI_Isend(aa+ai[i],len_s[proc],MPIU_MATSCALAR,proc,taga,comm,s_waits+k);CHKERRQ(ierr); 472955d1abb9SHong Zhang k++; 473055d1abb9SHong Zhang } 473155d1abb9SHong Zhang 47320c468ba9SBarry Smith if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,r_waits,status);CHKERRQ(ierr);} 47330c468ba9SBarry Smith if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,s_waits,status);CHKERRQ(ierr);} 473455d1abb9SHong Zhang ierr = PetscFree(status);CHKERRQ(ierr); 473555d1abb9SHong Zhang 473655d1abb9SHong Zhang ierr = PetscFree(s_waits);CHKERRQ(ierr); 473755d1abb9SHong Zhang ierr = PetscFree(r_waits);CHKERRQ(ierr); 473855d1abb9SHong Zhang 473955d1abb9SHong Zhang /* insert mat values of mpimat */ 474055d1abb9SHong Zhang /*----------------------------*/ 4741a77337e4SBarry Smith ierr = PetscMalloc(N*sizeof(PetscScalar),&ba_i);CHKERRQ(ierr); 47420572522cSBarry Smith ierr = PetscMalloc3(merge->nrecv,PetscInt*,&buf_ri_k,merge->nrecv,PetscInt*,&nextrow,merge->nrecv,PetscInt*,&nextai);CHKERRQ(ierr); 474355d1abb9SHong Zhang 474455d1abb9SHong Zhang for (k=0; k<merge->nrecv; k++) { 474555d1abb9SHong Zhang buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */ 474655d1abb9SHong Zhang nrows = *(buf_ri_k[k]); 474755d1abb9SHong Zhang nextrow[k] = buf_ri_k[k]+1; /* next row number of k-th 
recved i-structure */ 474855d1abb9SHong Zhang nextai[k] = buf_ri_k[k] + (nrows + 1); /* poins to the next i-structure of k-th recved i-structure */ 474955d1abb9SHong Zhang } 475055d1abb9SHong Zhang 475155d1abb9SHong Zhang /* set values of ba */ 47527a2fc3feSBarry Smith m = merge->rowmap->n; 475355d1abb9SHong Zhang for (i=0; i<m; i++) { 475455d1abb9SHong Zhang arow = owners[rank] + i; 475555d1abb9SHong Zhang bj_i = bj+bi[i]; /* col indices of the i-th row of mpimat */ 475655d1abb9SHong Zhang bnzi = bi[i+1] - bi[i]; 4757a77337e4SBarry Smith ierr = PetscMemzero(ba_i,bnzi*sizeof(PetscScalar));CHKERRQ(ierr); 475855d1abb9SHong Zhang 475955d1abb9SHong Zhang /* add local non-zero vals of this proc's seqmat into ba */ 476055d1abb9SHong Zhang anzi = ai[arow+1] - ai[arow]; 476155d1abb9SHong Zhang aj = a->j + ai[arow]; 476255d1abb9SHong Zhang aa = a->a + ai[arow]; 476355d1abb9SHong Zhang nextaj = 0; 476455d1abb9SHong Zhang for (j=0; nextaj<anzi; j++) { 476555d1abb9SHong Zhang if (*(bj_i + j) == aj[nextaj]) { /* bcol == acol */ 476655d1abb9SHong Zhang ba_i[j] += aa[nextaj++]; 476755d1abb9SHong Zhang } 476855d1abb9SHong Zhang } 476955d1abb9SHong Zhang 477055d1abb9SHong Zhang /* add received vals into ba */ 477155d1abb9SHong Zhang for (k=0; k<merge->nrecv; k++) { /* k-th received message */ 477255d1abb9SHong Zhang /* i-th row */ 477355d1abb9SHong Zhang if (i == *nextrow[k]) { 477455d1abb9SHong Zhang anzi = *(nextai[k]+1) - *nextai[k]; 477555d1abb9SHong Zhang aj = buf_rj[k] + *(nextai[k]); 477655d1abb9SHong Zhang aa = abuf_r[k] + *(nextai[k]); 477755d1abb9SHong Zhang nextaj = 0; 477855d1abb9SHong Zhang for (j=0; nextaj<anzi; j++) { 477955d1abb9SHong Zhang if (*(bj_i + j) == aj[nextaj]) { /* bcol == acol */ 478055d1abb9SHong Zhang ba_i[j] += aa[nextaj++]; 478155d1abb9SHong Zhang } 478255d1abb9SHong Zhang } 478355d1abb9SHong Zhang nextrow[k]++; nextai[k]++; 478455d1abb9SHong Zhang } 478555d1abb9SHong Zhang } 478655d1abb9SHong Zhang ierr = 
MatSetValues(mpimat,1,&arow,bnzi,bj_i,ba_i,INSERT_VALUES);CHKERRQ(ierr); 478755d1abb9SHong Zhang } 478855d1abb9SHong Zhang ierr = MatAssemblyBegin(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 478955d1abb9SHong Zhang ierr = MatAssemblyEnd(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 479055d1abb9SHong Zhang 4791533163c2SBarry Smith ierr = PetscFree(abuf_r[0]);CHKERRQ(ierr); 479255d1abb9SHong Zhang ierr = PetscFree(abuf_r);CHKERRQ(ierr); 479355d1abb9SHong Zhang ierr = PetscFree(ba_i);CHKERRQ(ierr); 47941d79065fSBarry Smith ierr = PetscFree3(buf_ri_k,nextrow,nextai);CHKERRQ(ierr); 47954ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr); 479655d1abb9SHong Zhang PetscFunctionReturn(0); 479755d1abb9SHong Zhang } 479838f152feSBarry Smith 47996bc0bbbfSBarry Smith extern PetscErrorCode MatDestroy_MPIAIJ_SeqsToMPI(Mat); 48006bc0bbbfSBarry Smith 480138f152feSBarry Smith #undef __FUNCT__ 480290431a8fSHong Zhang #define __FUNCT__ "MatCreateMPIAIJSumSeqAIJSymbolic" 480390431a8fSHong Zhang PetscErrorCode MatCreateMPIAIJSumSeqAIJSymbolic(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,Mat *mpimat) 4804e5f2cdd8SHong Zhang { 4805f08fae4eSHong Zhang PetscErrorCode ierr; 480655a3bba9SHong Zhang Mat B_mpi; 4807c2234fe3SHong Zhang Mat_SeqAIJ *a=(Mat_SeqAIJ*)seqmat->data; 4808b1d57f15SBarry Smith PetscMPIInt size,rank,tagi,tagj,*len_s,*len_si,*len_ri; 4809b1d57f15SBarry Smith PetscInt **buf_rj,**buf_ri,**buf_ri_k; 4810d0f46423SBarry Smith PetscInt M=seqmat->rmap->n,N=seqmat->cmap->n,i,*owners,*ai=a->i,*aj=a->j; 4811a2f3521dSMark F. 
Adams PetscInt len,proc,*dnz,*onz,bs,cbs; 4812b1d57f15SBarry Smith PetscInt k,anzi,*bi,*bj,*lnk,nlnk,arow,bnzi,nspacedouble=0; 4813b1d57f15SBarry Smith PetscInt nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextai; 481455d1abb9SHong Zhang MPI_Request *si_waits,*sj_waits,*ri_waits,*rj_waits; 481558cb9c82SHong Zhang MPI_Status *status; 48160298fd71SBarry Smith PetscFreeSpaceList free_space=NULL,current_space=NULL; 4817be0fcf8dSHong Zhang PetscBT lnkbt; 481851a7d1a8SHong Zhang Mat_Merge_SeqsToMPI *merge; 4819776b82aeSLisandro Dalcin PetscContainer container; 482002c68681SHong Zhang 4821e5f2cdd8SHong Zhang PetscFunctionBegin; 48224ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr); 48233c2c1871SHong Zhang 482438f152feSBarry Smith /* make sure it is a PETSc comm */ 48250298fd71SBarry Smith ierr = PetscCommDuplicate(comm,&comm,NULL);CHKERRQ(ierr); 4826e5f2cdd8SHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 4827e5f2cdd8SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 482855d1abb9SHong Zhang 482951a7d1a8SHong Zhang ierr = PetscNew(Mat_Merge_SeqsToMPI,&merge);CHKERRQ(ierr); 4830c2234fe3SHong Zhang ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr); 4831e5f2cdd8SHong Zhang 48326abd8857SHong Zhang /* determine row ownership */ 4833f08fae4eSHong Zhang /*---------------------------------------------------------*/ 483426283091SBarry Smith ierr = PetscLayoutCreate(comm,&merge->rowmap);CHKERRQ(ierr); 483526283091SBarry Smith ierr = PetscLayoutSetLocalSize(merge->rowmap,m);CHKERRQ(ierr); 483626283091SBarry Smith ierr = PetscLayoutSetSize(merge->rowmap,M);CHKERRQ(ierr); 483726283091SBarry Smith ierr = PetscLayoutSetBlockSize(merge->rowmap,1);CHKERRQ(ierr); 483826283091SBarry Smith ierr = PetscLayoutSetUp(merge->rowmap);CHKERRQ(ierr); 4839b1d57f15SBarry Smith ierr = PetscMalloc(size*sizeof(PetscMPIInt),&len_si);CHKERRQ(ierr); 4840b1d57f15SBarry Smith ierr = 
PetscMalloc(size*sizeof(PetscMPIInt),&merge->len_s);CHKERRQ(ierr); 484155d1abb9SHong Zhang 48427a2fc3feSBarry Smith m = merge->rowmap->n; 48437a2fc3feSBarry Smith owners = merge->rowmap->range; 48446abd8857SHong Zhang 48456abd8857SHong Zhang /* determine the number of messages to send, their lengths */ 48466abd8857SHong Zhang /*---------------------------------------------------------*/ 48473e06a4e6SHong Zhang len_s = merge->len_s; 484851a7d1a8SHong Zhang 48492257cef7SHong Zhang len = 0; /* length of buf_si[] */ 4850c2234fe3SHong Zhang merge->nsend = 0; 4851409913e3SHong Zhang for (proc=0; proc<size; proc++) { 48522257cef7SHong Zhang len_si[proc] = 0; 48533e06a4e6SHong Zhang if (proc == rank) { 48546abd8857SHong Zhang len_s[proc] = 0; 48553e06a4e6SHong Zhang } else { 485602c68681SHong Zhang len_si[proc] = owners[proc+1] - owners[proc] + 1; 48573e06a4e6SHong Zhang len_s[proc] = ai[owners[proc+1]] - ai[owners[proc]]; /* num of rows to be sent to [proc] */ 48583e06a4e6SHong Zhang } 48593e06a4e6SHong Zhang if (len_s[proc]) { 4860c2234fe3SHong Zhang merge->nsend++; 48612257cef7SHong Zhang nrows = 0; 48622257cef7SHong Zhang for (i=owners[proc]; i<owners[proc+1]; i++) { 48632257cef7SHong Zhang if (ai[i+1] > ai[i]) nrows++; 48642257cef7SHong Zhang } 48652257cef7SHong Zhang len_si[proc] = 2*(nrows+1); 48662257cef7SHong Zhang len += len_si[proc]; 4867409913e3SHong Zhang } 486858cb9c82SHong Zhang } 4869409913e3SHong Zhang 48702257cef7SHong Zhang /* determine the number and length of messages to receive for ij-structure */ 48712257cef7SHong Zhang /*-------------------------------------------------------------------------*/ 48720298fd71SBarry Smith ierr = PetscGatherNumberOfMessages(comm,NULL,len_s,&merge->nrecv);CHKERRQ(ierr); 487355d1abb9SHong Zhang ierr = PetscGatherMessageLengths2(comm,merge->nsend,merge->nrecv,len_s,len_si,&merge->id_r,&merge->len_r,&len_ri);CHKERRQ(ierr); 4874671beff6SHong Zhang 48753e06a4e6SHong Zhang /* post the Irecv of j-structure */ 48763e06a4e6SHong 
Zhang /*-------------------------------*/ 48772c72b5baSSatish Balay ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr); 48783e06a4e6SHong Zhang ierr = PetscPostIrecvInt(comm,tagj,merge->nrecv,merge->id_r,merge->len_r,&buf_rj,&rj_waits);CHKERRQ(ierr); 487902c68681SHong Zhang 48803e06a4e6SHong Zhang /* post the Isend of j-structure */ 4881affca5deSHong Zhang /*--------------------------------*/ 48821d79065fSBarry Smith ierr = PetscMalloc2(merge->nsend,MPI_Request,&si_waits,merge->nsend,MPI_Request,&sj_waits);CHKERRQ(ierr); 48833e06a4e6SHong Zhang 48842257cef7SHong Zhang for (proc=0, k=0; proc<size; proc++) { 4885409913e3SHong Zhang if (!len_s[proc]) continue; 488602c68681SHong Zhang i = owners[proc]; 4887b1d57f15SBarry Smith ierr = MPI_Isend(aj+ai[i],len_s[proc],MPIU_INT,proc,tagj,comm,sj_waits+k);CHKERRQ(ierr); 488851a7d1a8SHong Zhang k++; 488951a7d1a8SHong Zhang } 489051a7d1a8SHong Zhang 48913e06a4e6SHong Zhang /* receives and sends of j-structure are complete */ 48923e06a4e6SHong Zhang /*------------------------------------------------*/ 48930c468ba9SBarry Smith if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,rj_waits,status);CHKERRQ(ierr);} 48940c468ba9SBarry Smith if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,sj_waits,status);CHKERRQ(ierr);} 489502c68681SHong Zhang 489602c68681SHong Zhang /* send and recv i-structure */ 489702c68681SHong Zhang /*---------------------------*/ 48982c72b5baSSatish Balay ierr = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr); 489902c68681SHong Zhang ierr = PetscPostIrecvInt(comm,tagi,merge->nrecv,merge->id_r,len_ri,&buf_ri,&ri_waits);CHKERRQ(ierr); 490002c68681SHong Zhang 4901b1d57f15SBarry Smith ierr = PetscMalloc((len+1)*sizeof(PetscInt),&buf_s);CHKERRQ(ierr); 49023e06a4e6SHong Zhang buf_si = buf_s; /* points to the beginning of k-th msg to be sent */ 49032257cef7SHong Zhang for (proc=0,k=0; proc<size; proc++) { 490402c68681SHong Zhang if (!len_s[proc]) continue; 49053e06a4e6SHong Zhang /* form outgoing message for 
i-structure: 49063e06a4e6SHong Zhang buf_si[0]: nrows to be sent 49073e06a4e6SHong Zhang [1:nrows]: row index (global) 49083e06a4e6SHong Zhang [nrows+1:2*nrows+1]: i-structure index 49093e06a4e6SHong Zhang */ 49103e06a4e6SHong Zhang /*-------------------------------------------*/ 49112257cef7SHong Zhang nrows = len_si[proc]/2 - 1; 49123e06a4e6SHong Zhang buf_si_i = buf_si + nrows+1; 49133e06a4e6SHong Zhang buf_si[0] = nrows; 49143e06a4e6SHong Zhang buf_si_i[0] = 0; 49153e06a4e6SHong Zhang nrows = 0; 49163e06a4e6SHong Zhang for (i=owners[proc]; i<owners[proc+1]; i++) { 49173e06a4e6SHong Zhang anzi = ai[i+1] - ai[i]; 49183e06a4e6SHong Zhang if (anzi) { 49193e06a4e6SHong Zhang buf_si_i[nrows+1] = buf_si_i[nrows] + anzi; /* i-structure */ 49203e06a4e6SHong Zhang buf_si[nrows+1] = i-owners[proc]; /* local row index */ 49213e06a4e6SHong Zhang nrows++; 49223e06a4e6SHong Zhang } 49233e06a4e6SHong Zhang } 4924b1d57f15SBarry Smith ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,si_waits+k);CHKERRQ(ierr); 492502c68681SHong Zhang k++; 49262257cef7SHong Zhang buf_si += len_si[proc]; 492702c68681SHong Zhang } 49282257cef7SHong Zhang 49290c468ba9SBarry Smith if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,ri_waits,status);CHKERRQ(ierr);} 49300c468ba9SBarry Smith if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,si_waits,status);CHKERRQ(ierr);} 493102c68681SHong Zhang 4932ae15b995SBarry Smith ierr = PetscInfo2(seqmat,"nsend: %D, nrecv: %D\n",merge->nsend,merge->nrecv);CHKERRQ(ierr); 49333e06a4e6SHong Zhang for (i=0; i<merge->nrecv; i++) { 4934ae15b995SBarry Smith ierr = PetscInfo3(seqmat,"recv len_ri=%D, len_rj=%D from [%D]\n",len_ri[i],merge->len_r[i],merge->id_r[i]);CHKERRQ(ierr); 49353e06a4e6SHong Zhang } 49363e06a4e6SHong Zhang 49373e06a4e6SHong Zhang ierr = PetscFree(len_si);CHKERRQ(ierr); 493802c68681SHong Zhang ierr = PetscFree(len_ri);CHKERRQ(ierr); 493902c68681SHong Zhang ierr = PetscFree(rj_waits);CHKERRQ(ierr); 49401d79065fSBarry Smith ierr = 
PetscFree2(si_waits,sj_waits);CHKERRQ(ierr); 49412257cef7SHong Zhang ierr = PetscFree(ri_waits);CHKERRQ(ierr); 49423e06a4e6SHong Zhang ierr = PetscFree(buf_s);CHKERRQ(ierr); 4943bcc1bcd5SHong Zhang ierr = PetscFree(status);CHKERRQ(ierr); 494458cb9c82SHong Zhang 4945bcc1bcd5SHong Zhang /* compute a local seq matrix in each processor */ 4946bcc1bcd5SHong Zhang /*----------------------------------------------*/ 494758cb9c82SHong Zhang /* allocate bi array and free space for accumulating nonzero column info */ 4948b1d57f15SBarry Smith ierr = PetscMalloc((m+1)*sizeof(PetscInt),&bi);CHKERRQ(ierr); 494958cb9c82SHong Zhang bi[0] = 0; 495058cb9c82SHong Zhang 4951be0fcf8dSHong Zhang /* create and initialize a linked list */ 4952be0fcf8dSHong Zhang nlnk = N+1; 4953be0fcf8dSHong Zhang ierr = PetscLLCreate(N,N,nlnk,lnk,lnkbt);CHKERRQ(ierr); 495458cb9c82SHong Zhang 4955bcc1bcd5SHong Zhang /* initial FreeSpace size is 2*(num of local nnz(seqmat)) */ 4956bcc1bcd5SHong Zhang len = ai[owners[rank+1]] - ai[owners[rank]]; 4957a1a86e44SBarry Smith ierr = PetscFreeSpaceGet((PetscInt)(2*len+1),&free_space);CHKERRQ(ierr); 49582205254eSKarl Rupp 495958cb9c82SHong Zhang current_space = free_space; 496058cb9c82SHong Zhang 4961bcc1bcd5SHong Zhang /* determine symbolic info for each local row */ 49620572522cSBarry Smith ierr = PetscMalloc3(merge->nrecv,PetscInt*,&buf_ri_k,merge->nrecv,PetscInt*,&nextrow,merge->nrecv,PetscInt*,&nextai);CHKERRQ(ierr); 49631d79065fSBarry Smith 49643e06a4e6SHong Zhang for (k=0; k<merge->nrecv; k++) { 49652257cef7SHong Zhang buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */ 49663e06a4e6SHong Zhang nrows = *buf_ri_k[k]; 49673e06a4e6SHong Zhang nextrow[k] = buf_ri_k[k] + 1; /* next row number of k-th recved i-structure */ 49682257cef7SHong Zhang nextai[k] = buf_ri_k[k] + (nrows + 1); /* poins to the next i-structure of k-th recved i-structure */ 49693e06a4e6SHong Zhang } 49702257cef7SHong Zhang 4971bcc1bcd5SHong Zhang ierr = 
MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr); 4972bcc1bcd5SHong Zhang len = 0; 497358cb9c82SHong Zhang for (i=0; i<m; i++) { 497458cb9c82SHong Zhang bnzi = 0; 497558cb9c82SHong Zhang /* add local non-zero cols of this proc's seqmat into lnk */ 497658cb9c82SHong Zhang arow = owners[rank] + i; 497758cb9c82SHong Zhang anzi = ai[arow+1] - ai[arow]; 497858cb9c82SHong Zhang aj = a->j + ai[arow]; 4979dadf0e6bSHong Zhang ierr = PetscLLAddSorted(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr); 498058cb9c82SHong Zhang bnzi += nlnk; 498158cb9c82SHong Zhang /* add received col data into lnk */ 498251a7d1a8SHong Zhang for (k=0; k<merge->nrecv; k++) { /* k-th received message */ 498355d1abb9SHong Zhang if (i == *nextrow[k]) { /* i-th row */ 49843e06a4e6SHong Zhang anzi = *(nextai[k]+1) - *nextai[k]; 49853e06a4e6SHong Zhang aj = buf_rj[k] + *nextai[k]; 4986dadf0e6bSHong Zhang ierr = PetscLLAddSorted(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr); 49873e06a4e6SHong Zhang bnzi += nlnk; 49883e06a4e6SHong Zhang nextrow[k]++; nextai[k]++; 49893e06a4e6SHong Zhang } 499058cb9c82SHong Zhang } 4991bcc1bcd5SHong Zhang if (len < bnzi) len = bnzi; /* =max(bnzi) */ 499258cb9c82SHong Zhang 499358cb9c82SHong Zhang /* if free space is not available, make more free space */ 499458cb9c82SHong Zhang if (current_space->local_remaining<bnzi) { 49954238b7adSHong Zhang ierr = PetscFreeSpaceGet(bnzi+current_space->total_array_size,¤t_space);CHKERRQ(ierr); 499658cb9c82SHong Zhang nspacedouble++; 499758cb9c82SHong Zhang } 499858cb9c82SHong Zhang /* copy data into free space, then initialize lnk */ 4999be0fcf8dSHong Zhang ierr = PetscLLClean(N,N,bnzi,lnk,current_space->array,lnkbt);CHKERRQ(ierr); 5000bcc1bcd5SHong Zhang ierr = MatPreallocateSet(i+owners[rank],bnzi,current_space->array,dnz,onz);CHKERRQ(ierr); 5001bcc1bcd5SHong Zhang 500258cb9c82SHong Zhang current_space->array += bnzi; 500358cb9c82SHong Zhang current_space->local_used += bnzi; 500458cb9c82SHong Zhang current_space->local_remaining -= bnzi; 
500558cb9c82SHong Zhang 500658cb9c82SHong Zhang bi[i+1] = bi[i] + bnzi; 500758cb9c82SHong Zhang } 5008bcc1bcd5SHong Zhang 50091d79065fSBarry Smith ierr = PetscFree3(buf_ri_k,nextrow,nextai);CHKERRQ(ierr); 5010bcc1bcd5SHong Zhang 5011b1d57f15SBarry Smith ierr = PetscMalloc((bi[m]+1)*sizeof(PetscInt),&bj);CHKERRQ(ierr); 5012a1a86e44SBarry Smith ierr = PetscFreeSpaceContiguous(&free_space,bj);CHKERRQ(ierr); 5013be0fcf8dSHong Zhang ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr); 5014409913e3SHong Zhang 5015bcc1bcd5SHong Zhang /* create symbolic parallel matrix B_mpi */ 5016bcc1bcd5SHong Zhang /*---------------------------------------*/ 5017a2f3521dSMark F. Adams ierr = MatGetBlockSizes(seqmat,&bs,&cbs);CHKERRQ(ierr); 5018f69a0ea3SMatthew Knepley ierr = MatCreate(comm,&B_mpi);CHKERRQ(ierr); 501954b84b50SHong Zhang if (n==PETSC_DECIDE) { 5020f69a0ea3SMatthew Knepley ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,N);CHKERRQ(ierr); 502154b84b50SHong Zhang } else { 5022f69a0ea3SMatthew Knepley ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 502354b84b50SHong Zhang } 5024a2f3521dSMark F. 
Adams ierr = MatSetBlockSizes(B_mpi,bs,cbs);CHKERRQ(ierr); 5025bcc1bcd5SHong Zhang ierr = MatSetType(B_mpi,MATMPIAIJ);CHKERRQ(ierr); 5026bcc1bcd5SHong Zhang ierr = MatMPIAIJSetPreallocation(B_mpi,0,dnz,0,onz);CHKERRQ(ierr); 5027bcc1bcd5SHong Zhang ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 50287e63b356SHong Zhang ierr = MatSetOption(B_mpi,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr); 502958cb9c82SHong Zhang 503090431a8fSHong Zhang /* B_mpi is not ready for use - assembly will be done by MatCreateMPIAIJSumSeqAIJNumeric() */ 50316abd8857SHong Zhang B_mpi->assembled = PETSC_FALSE; 5032affca5deSHong Zhang B_mpi->ops->destroy = MatDestroy_MPIAIJ_SeqsToMPI; 5033affca5deSHong Zhang merge->bi = bi; 5034affca5deSHong Zhang merge->bj = bj; 503502c68681SHong Zhang merge->buf_ri = buf_ri; 503602c68681SHong Zhang merge->buf_rj = buf_rj; 50370298fd71SBarry Smith merge->coi = NULL; 50380298fd71SBarry Smith merge->coj = NULL; 50390298fd71SBarry Smith merge->owners_co = NULL; 5040affca5deSHong Zhang 5041bf0cc555SLisandro Dalcin ierr = PetscCommDestroy(&comm);CHKERRQ(ierr); 5042bf0cc555SLisandro Dalcin 5043affca5deSHong Zhang /* attach the supporting struct to B_mpi for reuse */ 5044776b82aeSLisandro Dalcin ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr); 5045776b82aeSLisandro Dalcin ierr = PetscContainerSetPointer(container,merge);CHKERRQ(ierr); 5046affca5deSHong Zhang ierr = PetscObjectCompose((PetscObject)B_mpi,"MatMergeSeqsToMPI",(PetscObject)container);CHKERRQ(ierr); 5047bf0cc555SLisandro Dalcin ierr = PetscContainerDestroy(&container);CHKERRQ(ierr); 5048affca5deSHong Zhang *mpimat = B_mpi; 504938f152feSBarry Smith 50504ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr); 5051e5f2cdd8SHong Zhang PetscFunctionReturn(0); 5052e5f2cdd8SHong Zhang } 505325616d81SHong Zhang 505438f152feSBarry Smith #undef __FUNCT__ 505590431a8fSHong Zhang #define __FUNCT__ "MatCreateMPIAIJSumSeqAIJ" 5056d4036a1aSHong 
Zhang /*@C 505790431a8fSHong Zhang MatCreateMPIAIJSumSeqAIJ - Creates a MPIAIJ matrix by adding sequential 5058d4036a1aSHong Zhang matrices from each processor 5059d4036a1aSHong Zhang 5060d4036a1aSHong Zhang Collective on MPI_Comm 5061d4036a1aSHong Zhang 5062d4036a1aSHong Zhang Input Parameters: 5063d4036a1aSHong Zhang + comm - the communicators the parallel matrix will live on 5064d4036a1aSHong Zhang . seqmat - the input sequential matrices 5065d4036a1aSHong Zhang . m - number of local rows (or PETSC_DECIDE) 5066d4036a1aSHong Zhang . n - number of local columns (or PETSC_DECIDE) 5067d4036a1aSHong Zhang - scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 5068d4036a1aSHong Zhang 5069d4036a1aSHong Zhang Output Parameter: 5070d4036a1aSHong Zhang . mpimat - the parallel matrix generated 5071d4036a1aSHong Zhang 5072d4036a1aSHong Zhang Level: advanced 5073d4036a1aSHong Zhang 5074d4036a1aSHong Zhang Notes: 5075d4036a1aSHong Zhang The dimensions of the sequential matrix in each processor MUST be the same. 5076d4036a1aSHong Zhang The input seqmat is included into the container "Mat_Merge_SeqsToMPI", and will be 5077d4036a1aSHong Zhang destroyed when mpimat is destroyed. Call PetscObjectQuery() to access seqmat. 
5078d4036a1aSHong Zhang @*/ 507990431a8fSHong Zhang PetscErrorCode MatCreateMPIAIJSumSeqAIJ(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,MatReuse scall,Mat *mpimat) 508055d1abb9SHong Zhang { 508155d1abb9SHong Zhang PetscErrorCode ierr; 50827e63b356SHong Zhang PetscMPIInt size; 508355d1abb9SHong Zhang 508455d1abb9SHong Zhang PetscFunctionBegin; 50857e63b356SHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 50867e63b356SHong Zhang if (size == 1) { 50877e63b356SHong Zhang ierr = PetscLogEventBegin(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr); 50887e63b356SHong Zhang if (scall == MAT_INITIAL_MATRIX) { 50897e63b356SHong Zhang ierr = MatDuplicate(seqmat,MAT_COPY_VALUES,mpimat);CHKERRQ(ierr); 50907e63b356SHong Zhang } else { 50917e63b356SHong Zhang ierr = MatCopy(seqmat,*mpimat,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 50927e63b356SHong Zhang } 50937e63b356SHong Zhang ierr = PetscLogEventEnd(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr); 50947e63b356SHong Zhang PetscFunctionReturn(0); 50957e63b356SHong Zhang } 50964ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr); 509755d1abb9SHong Zhang if (scall == MAT_INITIAL_MATRIX) { 509890431a8fSHong Zhang ierr = MatCreateMPIAIJSumSeqAIJSymbolic(comm,seqmat,m,n,mpimat);CHKERRQ(ierr); 509955d1abb9SHong Zhang } 510090431a8fSHong Zhang ierr = MatCreateMPIAIJSumSeqAIJNumeric(seqmat,*mpimat);CHKERRQ(ierr); 51014ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr); 510255d1abb9SHong Zhang PetscFunctionReturn(0); 510355d1abb9SHong Zhang } 51044ebed01fSBarry Smith 510525616d81SHong Zhang #undef __FUNCT__ 51064a2b5492SBarry Smith #define __FUNCT__ "MatMPIAIJGetLocalMat" 5107bc08b0f1SBarry Smith /*@ 51084a2b5492SBarry Smith MatMPIAIJGetLocalMat - Creates a SeqAIJ from a MPIAIJ matrix by taking all its local rows and putting them into a sequential vector with 51098661ff28SBarry Smith mlocal rows and n columns. 
#undef __FUNCT__
#define __FUNCT__ "MatMPIAIJGetLocalMat"
/*@
    MatMPIAIJGetLocalMat - Creates a SeqAIJ from a MPIAIJ matrix by taking all its local rows and putting them into a sequential matrix with
          mlocal rows and n columns. Where mlocal is the row count obtained with MatGetLocalSize() and n is the global column count obtained
          with MatGetSize()

    Not Collective

   Input Parameters:
+    A - the matrix
.    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

   Output Parameter:
.    A_loc - the local sequential matrix generated

    Level: developer

.seealso: MatGetOwnershipRange(), MatMPIAIJGetLocalMatCondensed()

@*/
PetscErrorCode MatMPIAIJGetLocalMat(Mat A,MatReuse scall,Mat *A_loc)
{
  PetscErrorCode ierr;
  Mat_MPIAIJ     *mpimat=(Mat_MPIAIJ*)A->data;
  /* a = diagonal block (local columns), b = off-diagonal block (columns mapped via garray) */
  Mat_SeqAIJ     *mat,*a=(Mat_SeqAIJ*)(mpimat->A)->data,*b=(Mat_SeqAIJ*)(mpimat->B)->data;
  PetscInt       *ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j,*cmap=mpimat->garray;
  MatScalar      *aa=a->a,*ba=b->a,*cam;
  PetscScalar    *ca;
  PetscInt       am=A->rmap->n,i,j,k,cstart=A->cmap->rstart;
  PetscInt       *ci,*cj,col,ncols_d,ncols_o,jo;
  PetscBool      match;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&match);CHKERRQ(ierr);
  if (!match) SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP,"Requires MPIAIJ matrix as input");
  ierr = PetscLogEventBegin(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    /* row i of the result holds all diagonal-block plus off-diagonal-block entries of row i */
    ierr  = PetscMalloc((1+am)*sizeof(PetscInt),&ci);CHKERRQ(ierr);
    ci[0] = 0;
    for (i=0; i<am; i++) {
      ci[i+1] = ci[i] + (ai[i+1] - ai[i]) + (bi[i+1] - bi[i]);
    }
    ierr = PetscMalloc((1+ci[am])*sizeof(PetscInt),&cj);CHKERRQ(ierr);
    ierr = PetscMalloc((1+ci[am])*sizeof(PetscScalar),&ca);CHKERRQ(ierr);
    k    = 0;
    /* Merge each row so the global column indices come out sorted:
       off-diagonal columns < cstart, then the diagonal block, then the rest.
       The aj/aa and bj/ba cursors advance monotonically across rows. */
    for (i=0; i<am; i++) {
      ncols_o = bi[i+1] - bi[i];
      ncols_d = ai[i+1] - ai[i];
      /* off-diagonal portion of A (columns left of the diagonal block) */
      for (jo=0; jo<ncols_o; jo++) {
        col = cmap[*bj];
        if (col >= cstart) break;
        cj[k]   = col; bj++;
        ca[k++] = *ba++;
      }
      /* diagonal portion of A */
      for (j=0; j<ncols_d; j++) {
        cj[k]   = cstart + *aj++;
        ca[k++] = *aa++;
      }
      /* off-diagonal portion of A (remaining columns, right of the diagonal block) */
      for (j=jo; j<ncols_o; j++) {
        cj[k]   = cmap[*bj++];
        ca[k++] = *ba++;
      }
    }
    /* put together the new matrix */
    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,A->cmap->N,ci,cj,ca,A_loc);CHKERRQ(ierr);
    /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */
    /* Since these are PETSc arrays, change flags to free them as necessary. */
    mat          = (Mat_SeqAIJ*)(*A_loc)->data;
    mat->free_a  = PETSC_TRUE;
    mat->free_ij = PETSC_TRUE;
    mat->nonew   = 0;
  } else if (scall == MAT_REUSE_MATRIX) {
    /* structure is unchanged; overwrite only the values, in the same merge order as above */
    mat = (Mat_SeqAIJ*)(*A_loc)->data;
    ci  = mat->i; cj = mat->j; cam = mat->a;
    for (i=0; i<am; i++) {
      /* off-diagonal portion of A */
      ncols_o = bi[i+1] - bi[i];
      for (jo=0; jo<ncols_o; jo++) {
        col = cmap[*bj];
        if (col >= cstart) break;
        *cam++ = *ba++; bj++;
      }
      /* diagonal portion of A */
      ncols_d = ai[i+1] - ai[i];
      for (j=0; j<ncols_d; j++) *cam++ = *aa++;
      /* off-diagonal portion of A */
      for (j=jo; j<ncols_o; j++) {
        *cam++ = *ba++; bj++;
      }
    }
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall);
  ierr = PetscLogEventEnd(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatMPIAIJGetLocalMatCondensed"
/*@C
    MatMPIAIJGetLocalMatCondensed - Creates a SeqAIJ matrix from an MPIAIJ matrix by taking all its local rows and NON-ZERO columns

    Not Collective

   Input Parameters:
+    A - the matrix
.    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
-    row, col - index sets of rows and columns to extract (or NULL)

   Output Parameter:
.    A_loc - the local sequential matrix generated

    Level: developer

.seealso: MatGetOwnershipRange(), MatMPIAIJGetLocalMat()

@*/
PetscErrorCode MatMPIAIJGetLocalMatCondensed(Mat A,MatReuse scall,IS *row,IS *col,Mat *A_loc)
{
  Mat_MPIAIJ     *a=(Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,start,end,ncols,nzA,nzB,*cmap,imark,*idx;
  IS             isrowa,iscola;
  Mat            *aloc;
  PetscBool      match;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&match);CHKERRQ(ierr);
  if (!match) SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP,"Requires MPIAIJ matrix as input");
  ierr = PetscLogEventBegin(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr);
  if (!row) {
    /* default row IS: this process's owned rows */
    start = A->rmap->rstart; end = A->rmap->rend;
    ierr  = ISCreateStride(PETSC_COMM_SELF,end-start,start,1,&isrowa);CHKERRQ(ierr);
  } else {
    isrowa = *row;
  }
  if (!col) {
    /* default column IS: global columns with local nonzeros, in sorted order —
       off-diagonal columns < cstart (garray is sorted), then the whole diagonal
       block, then the remaining off-diagonal columns */
    start = A->cmap->rstart;
    cmap  = a->garray;
    nzA   = a->A->cmap->n;
    nzB   = a->B->cmap->n;
    ierr  = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr);
    ncols = 0;
    for (i=0; i<nzB; i++) {
      if (cmap[i] < start) idx[ncols++] = cmap[i];
      else break;
    }
    imark = i;
    for (i=0; i<nzA; i++) idx[ncols++] = start + i;
    for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i];
    /* PETSC_OWN_POINTER: the IS takes over idx, no copy and no free here */
    ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,PETSC_OWN_POINTER,&iscola);CHKERRQ(ierr);
  } else {
    iscola = *col;
  }
  if (scall != MAT_INITIAL_MATRIX) {
    /* MatGetSubMatrices expects a caller-allocated array of Mat for reuse */
    ierr    = PetscMalloc(sizeof(Mat),&aloc);CHKERRQ(ierr);
    aloc[0] = *A_loc;
  }
  ierr   = MatGetSubMatrices(A,1,&isrowa,&iscola,scall,&aloc);CHKERRQ(ierr);
  *A_loc = aloc[0];
  ierr   = PetscFree(aloc);CHKERRQ(ierr);
  /* destroy only the index sets we created ourselves */
  if (!row) {
    ierr = ISDestroy(&isrowa);CHKERRQ(ierr);
  }
  if (!col) {
    ierr = ISDestroy(&iscola);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatGetBrowsOfAcols"
/*@C
    MatGetBrowsOfAcols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns of local A

    Collective on Mat

   Input Parameters:
+    A,B - the matrices in mpiaij format
.    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
-    rowb, colb - index sets of rows and columns of B to extract (or NULL)

   Output Parameter:
+    rowb, colb - index sets of rows and columns of B to extract
-    B_seq - the sequential matrix generated

    Level: developer

@*/
PetscErrorCode MatGetBrowsOfAcols(Mat A,Mat B,MatReuse scall,IS *rowb,IS *colb,Mat *B_seq)
{
  Mat_MPIAIJ     *a=(Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       *idx,i,start,ncols,nzA,nzB,*cmap,imark;
  IS             isrowb,iscolb;
  Mat            *bseq=NULL;

  PetscFunctionBegin;
  /* A's column layout must match B's row layout for A*B to make sense */
  if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend) {
    SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%D, %D) != (%D,%D)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend);
  }
  ierr = PetscLogEventBegin(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr);

  if (scall == MAT_INITIAL_MATRIX) {
    /* rows of B to fetch = nonzero columns of local A, in sorted global order */
    start = A->cmap->rstart;
    cmap  = a->garray;
    nzA   = a->A->cmap->n;
    nzB   = a->B->cmap->n;
    ierr  = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr);
    ncols = 0;
    for (i=0; i<nzB; i++) {  /* row < local row index */
      if (cmap[i] < start) idx[ncols++] = cmap[i];
      else break;
    }
    imark = i;
    for (i=0; i<nzA; i++) idx[ncols++] = start + i;  /* local rows */
    for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i]; /* row > local row index */
    /* PETSC_OWN_POINTER: isrowb takes over idx */
    ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,PETSC_OWN_POINTER,&isrowb);CHKERRQ(ierr);
    /* all columns of B are kept */
    ierr = ISCreateStride(PETSC_COMM_SELF,B->cmap->N,0,1,&iscolb);CHKERRQ(ierr);
  } else {
    if (!rowb || !colb) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"IS rowb and colb must be provided for MAT_REUSE_MATRIX");
    isrowb = *rowb; iscolb = *colb;
    /* MatGetSubMatrices expects a caller-allocated array of Mat for reuse */
    ierr    = PetscMalloc(sizeof(Mat),&bseq);CHKERRQ(ierr);
    bseq[0] = *B_seq;
  }
  ierr   = MatGetSubMatrices(B,1,&isrowb,&iscolb,scall,&bseq);CHKERRQ(ierr);
  *B_seq = bseq[0];
  ierr   = PetscFree(bseq);CHKERRQ(ierr);
  /* hand the index sets back to the caller for reuse, or destroy them */
  if (!rowb) {
    ierr = ISDestroy(&isrowb);CHKERRQ(ierr);
  } else {
    *rowb = isrowb;
  }
  if (!colb) {
    ierr = ISDestroy(&iscolb);CHKERRQ(ierr);
  } else {
    *colb = iscolb;
  }
  ierr = PetscLogEventEnd(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
5357429d309bSHong Zhang 5358429d309bSHong Zhang Input Parameters: 5359429d309bSHong Zhang + A,B - the matrices in mpiaij format 5360598bc09dSHong Zhang - scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 5361429d309bSHong Zhang 5362429d309bSHong Zhang Output Parameter: 53630298fd71SBarry Smith + startsj_s - starting point in B's sending j-arrays, saved for MAT_REUSE (or NULL) 53640298fd71SBarry Smith . startsj_r - starting point in B's receiving j-arrays, saved for MAT_REUSE (or NULL) 53650298fd71SBarry Smith . bufa_ptr - array for sending matrix values, saved for MAT_REUSE (or NULL) 5366598bc09dSHong Zhang - B_oth - the sequential matrix generated with size aBn=a->B->cmap->n by B->cmap->N 5367429d309bSHong Zhang 5368429d309bSHong Zhang Level: developer 5369429d309bSHong Zhang 5370f8487c73SHong Zhang */ 5371b7f45c76SHong Zhang PetscErrorCode MatGetBrowsOfAoCols_MPIAIJ(Mat A,Mat B,MatReuse scall,PetscInt **startsj_s,PetscInt **startsj_r,MatScalar **bufa_ptr,Mat *B_oth) 5372429d309bSHong Zhang { 5373a6b2eed2SHong Zhang VecScatter_MPI_General *gen_to,*gen_from; 5374429d309bSHong Zhang PetscErrorCode ierr; 5375899cda47SBarry Smith Mat_MPIAIJ *a=(Mat_MPIAIJ*)A->data; 537687025532SHong Zhang Mat_SeqAIJ *b_oth; 5377a6b2eed2SHong Zhang VecScatter ctx =a->Mvctx; 5378ce94432eSBarry Smith MPI_Comm comm; 53797adad957SLisandro Dalcin PetscMPIInt *rprocs,*sprocs,tag=((PetscObject)ctx)->tag,rank; 5380d0f46423SBarry Smith PetscInt *rowlen,*bufj,*bufJ,ncols,aBn=a->B->cmap->n,row,*b_othi,*b_othj; 5381dd6ea824SBarry Smith PetscScalar *rvalues,*svalues; 5382dd6ea824SBarry Smith MatScalar *b_otha,*bufa,*bufA; 5383e42f35eeSHong Zhang PetscInt i,j,k,l,ll,nrecvs,nsends,nrows,*srow,*rstarts,*rstartsj = 0,*sstarts,*sstartsj,len; 53840298fd71SBarry Smith MPI_Request *rwaits = NULL,*swaits = NULL; 538587025532SHong Zhang MPI_Status *sstatus,rstatus; 5386aa5bb8c0SSatish Balay PetscMPIInt jj; 5387e42f35eeSHong Zhang PetscInt *cols,sbs,rbs; 5388ba8c8a56SBarry Smith PetscScalar *vals; 
5389429d309bSHong Zhang 5390429d309bSHong Zhang PetscFunctionBegin; 5391ce94432eSBarry Smith ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr); 5392d0f46423SBarry Smith if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend) { 5393e32f2f54SBarry Smith SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%d, %d) != (%d,%d)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend); 5394429d309bSHong Zhang } 53954ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr); 5396a6b2eed2SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 5397a6b2eed2SHong Zhang 5398a6b2eed2SHong Zhang gen_to = (VecScatter_MPI_General*)ctx->todata; 5399a6b2eed2SHong Zhang gen_from = (VecScatter_MPI_General*)ctx->fromdata; 5400e42f35eeSHong Zhang rvalues = gen_from->values; /* holds the length of receiving row */ 5401e42f35eeSHong Zhang svalues = gen_to->values; /* holds the length of sending row */ 5402a6b2eed2SHong Zhang nrecvs = gen_from->n; 5403a6b2eed2SHong Zhang nsends = gen_to->n; 5404d7ee0231SBarry Smith 5405d7ee0231SBarry Smith ierr = PetscMalloc2(nrecvs,MPI_Request,&rwaits,nsends,MPI_Request,&swaits);CHKERRQ(ierr); 5406a6b2eed2SHong Zhang srow = gen_to->indices; /* local row index to be sent */ 5407a6b2eed2SHong Zhang sstarts = gen_to->starts; 5408a6b2eed2SHong Zhang sprocs = gen_to->procs; 5409a6b2eed2SHong Zhang sstatus = gen_to->sstatus; 5410e42f35eeSHong Zhang sbs = gen_to->bs; 5411e42f35eeSHong Zhang rstarts = gen_from->starts; 5412e42f35eeSHong Zhang rprocs = gen_from->procs; 5413e42f35eeSHong Zhang rbs = gen_from->bs; 5414429d309bSHong Zhang 5415b7f45c76SHong Zhang if (!startsj_s || !bufa_ptr) scall = MAT_INITIAL_MATRIX; 5416429d309bSHong Zhang if (scall == MAT_INITIAL_MATRIX) { 5417a6b2eed2SHong Zhang /* i-array */ 5418a6b2eed2SHong Zhang /*---------*/ 5419a6b2eed2SHong Zhang /* post receives */ 5420a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++) { 5421e42f35eeSHong 
Zhang rowlen = (PetscInt*)rvalues + rstarts[i]*rbs; 5422e42f35eeSHong Zhang nrows = (rstarts[i+1]-rstarts[i])*rbs; /* num of indices to be received */ 542387025532SHong Zhang ierr = MPI_Irecv(rowlen,nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 5424429d309bSHong Zhang } 5425a6b2eed2SHong Zhang 5426a6b2eed2SHong Zhang /* pack the outgoing message */ 54271d79065fSBarry Smith ierr = PetscMalloc2(nsends+1,PetscInt,&sstartsj,nrecvs+1,PetscInt,&rstartsj);CHKERRQ(ierr); 54282205254eSKarl Rupp 54292205254eSKarl Rupp sstartsj[0] = 0; 54302205254eSKarl Rupp rstartsj[0] = 0; 5431a6b2eed2SHong Zhang len = 0; /* total length of j or a array to be sent */ 5432a6b2eed2SHong Zhang k = 0; 5433a6b2eed2SHong Zhang for (i=0; i<nsends; i++) { 5434e42f35eeSHong Zhang rowlen = (PetscInt*)svalues + sstarts[i]*sbs; 5435e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 543687025532SHong Zhang for (j=0; j<nrows; j++) { 5437d0f46423SBarry Smith row = srow[k] + B->rmap->range[rank]; /* global row idx */ 5438e42f35eeSHong Zhang for (l=0; l<sbs; l++) { 54390298fd71SBarry Smith ierr = MatGetRow_MPIAIJ(B,row+l,&ncols,NULL,NULL);CHKERRQ(ierr); /* rowlength */ 54402205254eSKarl Rupp 5441e42f35eeSHong Zhang rowlen[j*sbs+l] = ncols; 54422205254eSKarl Rupp 5443e42f35eeSHong Zhang len += ncols; 54440298fd71SBarry Smith ierr = MatRestoreRow_MPIAIJ(B,row+l,&ncols,NULL,NULL);CHKERRQ(ierr); 5445e42f35eeSHong Zhang } 5446a6b2eed2SHong Zhang k++; 5447429d309bSHong Zhang } 5448e42f35eeSHong Zhang ierr = MPI_Isend(rowlen,nrows*sbs,MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 54492205254eSKarl Rupp 5450dea91ad1SHong Zhang sstartsj[i+1] = len; /* starting point of (i+1)-th outgoing msg in bufj and bufa */ 5451429d309bSHong Zhang } 545287025532SHong Zhang /* recvs and sends of i-array are completed */ 545387025532SHong Zhang i = nrecvs; 545487025532SHong Zhang while (i--) { 5455aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 
545687025532SHong Zhang } 54570c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 5458e42f35eeSHong Zhang 5459a6b2eed2SHong Zhang /* allocate buffers for sending j and a arrays */ 5460a6b2eed2SHong Zhang ierr = PetscMalloc((len+1)*sizeof(PetscInt),&bufj);CHKERRQ(ierr); 5461a6b2eed2SHong Zhang ierr = PetscMalloc((len+1)*sizeof(PetscScalar),&bufa);CHKERRQ(ierr); 5462a6b2eed2SHong Zhang 546387025532SHong Zhang /* create i-array of B_oth */ 546487025532SHong Zhang ierr = PetscMalloc((aBn+2)*sizeof(PetscInt),&b_othi);CHKERRQ(ierr); 54652205254eSKarl Rupp 546687025532SHong Zhang b_othi[0] = 0; 5467a6b2eed2SHong Zhang len = 0; /* total length of j or a array to be received */ 5468a6b2eed2SHong Zhang k = 0; 5469a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++) { 5470fd0ff01cSHong Zhang rowlen = (PetscInt*)rvalues + rstarts[i]*rbs; 5471e42f35eeSHong Zhang nrows = rbs*(rstarts[i+1]-rstarts[i]); /* num of rows to be recieved */ 547287025532SHong Zhang for (j=0; j<nrows; j++) { 547387025532SHong Zhang b_othi[k+1] = b_othi[k] + rowlen[j]; 5474a6b2eed2SHong Zhang len += rowlen[j]; k++; 5475a6b2eed2SHong Zhang } 5476dea91ad1SHong Zhang rstartsj[i+1] = len; /* starting point of (i+1)-th incoming msg in bufj and bufa */ 5477a6b2eed2SHong Zhang } 5478a6b2eed2SHong Zhang 547987025532SHong Zhang /* allocate space for j and a arrrays of B_oth */ 548087025532SHong Zhang ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(PetscInt),&b_othj);CHKERRQ(ierr); 5481dd6ea824SBarry Smith ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(MatScalar),&b_otha);CHKERRQ(ierr); 5482a6b2eed2SHong Zhang 548387025532SHong Zhang /* j-array */ 548487025532SHong Zhang /*---------*/ 5485a6b2eed2SHong Zhang /* post receives of j-array */ 5486a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++) { 548787025532SHong Zhang nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */ 548887025532SHong Zhang ierr = 
MPI_Irecv(b_othj+rstartsj[i],nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 5489a6b2eed2SHong Zhang } 5490e42f35eeSHong Zhang 5491e42f35eeSHong Zhang /* pack the outgoing message j-array */ 5492a6b2eed2SHong Zhang k = 0; 5493a6b2eed2SHong Zhang for (i=0; i<nsends; i++) { 5494e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 5495a6b2eed2SHong Zhang bufJ = bufj+sstartsj[i]; 549687025532SHong Zhang for (j=0; j<nrows; j++) { 5497d0f46423SBarry Smith row = srow[k++] + B->rmap->range[rank]; /* global row idx */ 5498e42f35eeSHong Zhang for (ll=0; ll<sbs; ll++) { 54990298fd71SBarry Smith ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,&cols,NULL);CHKERRQ(ierr); 5500a6b2eed2SHong Zhang for (l=0; l<ncols; l++) { 5501a6b2eed2SHong Zhang *bufJ++ = cols[l]; 550287025532SHong Zhang } 55030298fd71SBarry Smith ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,&cols,NULL);CHKERRQ(ierr); 5504e42f35eeSHong Zhang } 550587025532SHong Zhang } 550687025532SHong Zhang ierr = MPI_Isend(bufj+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 550787025532SHong Zhang } 550887025532SHong Zhang 550987025532SHong Zhang /* recvs and sends of j-array are completed */ 551087025532SHong Zhang i = nrecvs; 551187025532SHong Zhang while (i--) { 5512aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 551387025532SHong Zhang } 55140c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 551587025532SHong Zhang } else if (scall == MAT_REUSE_MATRIX) { 5516b7f45c76SHong Zhang sstartsj = *startsj_s; 55171d79065fSBarry Smith rstartsj = *startsj_r; 551887025532SHong Zhang bufa = *bufa_ptr; 551987025532SHong Zhang b_oth = (Mat_SeqAIJ*)(*B_oth)->data; 552087025532SHong Zhang b_otha = b_oth->a; 5521f23aa3ddSBarry Smith } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Matrix P does not posses an object container"); 552287025532SHong Zhang 552387025532SHong Zhang /* a-array */ 
552487025532SHong Zhang /*---------*/ 552587025532SHong Zhang /* post receives of a-array */ 552687025532SHong Zhang for (i=0; i<nrecvs; i++) { 552787025532SHong Zhang nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */ 552887025532SHong Zhang ierr = MPI_Irecv(b_otha+rstartsj[i],nrows,MPIU_SCALAR,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 552987025532SHong Zhang } 5530e42f35eeSHong Zhang 5531e42f35eeSHong Zhang /* pack the outgoing message a-array */ 553287025532SHong Zhang k = 0; 553387025532SHong Zhang for (i=0; i<nsends; i++) { 5534e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 553587025532SHong Zhang bufA = bufa+sstartsj[i]; 553687025532SHong Zhang for (j=0; j<nrows; j++) { 5537d0f46423SBarry Smith row = srow[k++] + B->rmap->range[rank]; /* global row idx */ 5538e42f35eeSHong Zhang for (ll=0; ll<sbs; ll++) { 55390298fd71SBarry Smith ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,NULL,&vals);CHKERRQ(ierr); 554087025532SHong Zhang for (l=0; l<ncols; l++) { 5541a6b2eed2SHong Zhang *bufA++ = vals[l]; 5542a6b2eed2SHong Zhang } 55430298fd71SBarry Smith ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,NULL,&vals);CHKERRQ(ierr); 5544e42f35eeSHong Zhang } 5545a6b2eed2SHong Zhang } 554687025532SHong Zhang ierr = MPI_Isend(bufa+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_SCALAR,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 5547a6b2eed2SHong Zhang } 554887025532SHong Zhang /* recvs and sends of a-array are completed */ 554987025532SHong Zhang i = nrecvs; 555087025532SHong Zhang while (i--) { 5551aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 555287025532SHong Zhang } 55530c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 5554d7ee0231SBarry Smith ierr = PetscFree2(rwaits,swaits);CHKERRQ(ierr); 5555a6b2eed2SHong Zhang 555687025532SHong Zhang if (scall == MAT_INITIAL_MATRIX) { 5557a6b2eed2SHong Zhang /* put together the new matrix */ 5558d0f46423SBarry Smith ierr = 
MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,aBn,B->cmap->N,b_othi,b_othj,b_otha,B_oth);CHKERRQ(ierr); 5559a6b2eed2SHong Zhang 5560a6b2eed2SHong Zhang /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */ 5561a6b2eed2SHong Zhang /* Since these are PETSc arrays, change flags to free them as necessary. */ 556287025532SHong Zhang b_oth = (Mat_SeqAIJ*)(*B_oth)->data; 5563e6b907acSBarry Smith b_oth->free_a = PETSC_TRUE; 5564e6b907acSBarry Smith b_oth->free_ij = PETSC_TRUE; 556587025532SHong Zhang b_oth->nonew = 0; 5566a6b2eed2SHong Zhang 5567a6b2eed2SHong Zhang ierr = PetscFree(bufj);CHKERRQ(ierr); 5568b7f45c76SHong Zhang if (!startsj_s || !bufa_ptr) { 55691d79065fSBarry Smith ierr = PetscFree2(sstartsj,rstartsj);CHKERRQ(ierr); 5570dea91ad1SHong Zhang ierr = PetscFree(bufa_ptr);CHKERRQ(ierr); 5571dea91ad1SHong Zhang } else { 5572b7f45c76SHong Zhang *startsj_s = sstartsj; 55731d79065fSBarry Smith *startsj_r = rstartsj; 557487025532SHong Zhang *bufa_ptr = bufa; 557587025532SHong Zhang } 5576dea91ad1SHong Zhang } 55774ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr); 5578429d309bSHong Zhang PetscFunctionReturn(0); 5579429d309bSHong Zhang } 5580ccd8e176SBarry Smith 558143eb5e2fSMatthew Knepley #undef __FUNCT__ 558243eb5e2fSMatthew Knepley #define __FUNCT__ "MatGetCommunicationStructs" 558343eb5e2fSMatthew Knepley /*@C 558443eb5e2fSMatthew Knepley MatGetCommunicationStructs - Provides access to the communication structures used in matrix-vector multiplication. 558543eb5e2fSMatthew Knepley 558643eb5e2fSMatthew Knepley Not Collective 558743eb5e2fSMatthew Knepley 558843eb5e2fSMatthew Knepley Input Parameters: 558943eb5e2fSMatthew Knepley . A - The matrix in mpiaij format 559043eb5e2fSMatthew Knepley 559143eb5e2fSMatthew Knepley Output Parameter: 559243eb5e2fSMatthew Knepley + lvec - The local vector holding off-process values from the argument to a matrix-vector product 559343eb5e2fSMatthew Knepley . 
colmap - A map from global column index to local index into lvec 559443eb5e2fSMatthew Knepley - multScatter - A scatter from the argument of a matrix-vector product to lvec 559543eb5e2fSMatthew Knepley 559643eb5e2fSMatthew Knepley Level: developer 559743eb5e2fSMatthew Knepley 559843eb5e2fSMatthew Knepley @*/ 559943eb5e2fSMatthew Knepley #if defined(PETSC_USE_CTABLE) 56007087cfbeSBarry Smith PetscErrorCode MatGetCommunicationStructs(Mat A, Vec *lvec, PetscTable *colmap, VecScatter *multScatter) 560143eb5e2fSMatthew Knepley #else 56027087cfbeSBarry Smith PetscErrorCode MatGetCommunicationStructs(Mat A, Vec *lvec, PetscInt *colmap[], VecScatter *multScatter) 560343eb5e2fSMatthew Knepley #endif 560443eb5e2fSMatthew Knepley { 560543eb5e2fSMatthew Knepley Mat_MPIAIJ *a; 560643eb5e2fSMatthew Knepley 560743eb5e2fSMatthew Knepley PetscFunctionBegin; 56080700a824SBarry Smith PetscValidHeaderSpecific(A, MAT_CLASSID, 1); 5609e414b56bSJed Brown PetscValidPointer(lvec, 2); 5610e414b56bSJed Brown PetscValidPointer(colmap, 3); 5611e414b56bSJed Brown PetscValidPointer(multScatter, 4); 561243eb5e2fSMatthew Knepley a = (Mat_MPIAIJ*) A->data; 561343eb5e2fSMatthew Knepley if (lvec) *lvec = a->lvec; 561443eb5e2fSMatthew Knepley if (colmap) *colmap = a->colmap; 561543eb5e2fSMatthew Knepley if (multScatter) *multScatter = a->Mvctx; 561643eb5e2fSMatthew Knepley PetscFunctionReturn(0); 561743eb5e2fSMatthew Knepley } 561843eb5e2fSMatthew Knepley 56198cc058d9SJed Brown PETSC_EXTERN PetscErrorCode MatConvert_MPIAIJ_MPIAIJCRL(Mat,MatType,MatReuse,Mat*); 56208cc058d9SJed Brown PETSC_EXTERN PetscErrorCode MatConvert_MPIAIJ_MPIAIJPERM(Mat,MatType,MatReuse,Mat*); 56218cc058d9SJed Brown PETSC_EXTERN PetscErrorCode MatConvert_MPIAIJ_MPISBAIJ(Mat,MatType,MatReuse,Mat*); 562217667f90SBarry Smith 5623fc4dec0aSBarry Smith #undef __FUNCT__ 5624fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultNumeric_MPIDense_MPIAIJ" 5625fc4dec0aSBarry Smith /* 5626fc4dec0aSBarry Smith Computes (B'*A')' since computing 
B*A directly is untenable 5627fc4dec0aSBarry Smith 5628fc4dec0aSBarry Smith n p p 5629fc4dec0aSBarry Smith ( ) ( ) ( ) 5630fc4dec0aSBarry Smith m ( A ) * n ( B ) = m ( C ) 5631fc4dec0aSBarry Smith ( ) ( ) ( ) 5632fc4dec0aSBarry Smith 5633fc4dec0aSBarry Smith */ 5634fc4dec0aSBarry Smith PetscErrorCode MatMatMultNumeric_MPIDense_MPIAIJ(Mat A,Mat B,Mat C) 5635fc4dec0aSBarry Smith { 5636fc4dec0aSBarry Smith PetscErrorCode ierr; 5637fc4dec0aSBarry Smith Mat At,Bt,Ct; 5638fc4dec0aSBarry Smith 5639fc4dec0aSBarry Smith PetscFunctionBegin; 5640fc4dec0aSBarry Smith ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr); 5641fc4dec0aSBarry Smith ierr = MatTranspose(B,MAT_INITIAL_MATRIX,&Bt);CHKERRQ(ierr); 5642fc4dec0aSBarry Smith ierr = MatMatMult(Bt,At,MAT_INITIAL_MATRIX,1.0,&Ct);CHKERRQ(ierr); 56436bf464f9SBarry Smith ierr = MatDestroy(&At);CHKERRQ(ierr); 56446bf464f9SBarry Smith ierr = MatDestroy(&Bt);CHKERRQ(ierr); 5645fc4dec0aSBarry Smith ierr = MatTranspose(Ct,MAT_REUSE_MATRIX,&C);CHKERRQ(ierr); 56466bf464f9SBarry Smith ierr = MatDestroy(&Ct);CHKERRQ(ierr); 5647fc4dec0aSBarry Smith PetscFunctionReturn(0); 5648fc4dec0aSBarry Smith } 5649fc4dec0aSBarry Smith 5650fc4dec0aSBarry Smith #undef __FUNCT__ 5651fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultSymbolic_MPIDense_MPIAIJ" 5652fc4dec0aSBarry Smith PetscErrorCode MatMatMultSymbolic_MPIDense_MPIAIJ(Mat A,Mat B,PetscReal fill,Mat *C) 5653fc4dec0aSBarry Smith { 5654fc4dec0aSBarry Smith PetscErrorCode ierr; 5655d0f46423SBarry Smith PetscInt m=A->rmap->n,n=B->cmap->n; 5656fc4dec0aSBarry Smith Mat Cmat; 5657fc4dec0aSBarry Smith 5658fc4dec0aSBarry Smith PetscFunctionBegin; 5659e32f2f54SBarry Smith if (A->cmap->n != B->rmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"A->cmap->n %d != B->rmap->n %d\n",A->cmap->n,B->rmap->n); 5660ce94432eSBarry Smith ierr = MatCreate(PetscObjectComm((PetscObject)A),&Cmat);CHKERRQ(ierr); 5661fc4dec0aSBarry Smith ierr = MatSetSizes(Cmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 
5662a2f3521dSMark F. Adams ierr = MatSetBlockSizes(Cmat,A->rmap->bs,B->cmap->bs);CHKERRQ(ierr); 5663fc4dec0aSBarry Smith ierr = MatSetType(Cmat,MATMPIDENSE);CHKERRQ(ierr); 56640298fd71SBarry Smith ierr = MatMPIDenseSetPreallocation(Cmat,NULL);CHKERRQ(ierr); 566538556019SBarry Smith ierr = MatAssemblyBegin(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 566638556019SBarry Smith ierr = MatAssemblyEnd(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 5667f75ecaa4SHong Zhang 5668f75ecaa4SHong Zhang Cmat->ops->matmultnumeric = MatMatMultNumeric_MPIDense_MPIAIJ; 56692205254eSKarl Rupp 5670fc4dec0aSBarry Smith *C = Cmat; 5671fc4dec0aSBarry Smith PetscFunctionReturn(0); 5672fc4dec0aSBarry Smith } 5673fc4dec0aSBarry Smith 5674fc4dec0aSBarry Smith /* ----------------------------------------------------------------*/ 5675fc4dec0aSBarry Smith #undef __FUNCT__ 5676fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMult_MPIDense_MPIAIJ" 5677fc4dec0aSBarry Smith PetscErrorCode MatMatMult_MPIDense_MPIAIJ(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C) 5678fc4dec0aSBarry Smith { 5679fc4dec0aSBarry Smith PetscErrorCode ierr; 5680fc4dec0aSBarry Smith 5681fc4dec0aSBarry Smith PetscFunctionBegin; 5682fc4dec0aSBarry Smith if (scall == MAT_INITIAL_MATRIX) { 56833ff4c91cSHong Zhang ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr); 5684fc4dec0aSBarry Smith ierr = MatMatMultSymbolic_MPIDense_MPIAIJ(A,B,fill,C);CHKERRQ(ierr); 56853ff4c91cSHong Zhang ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr); 5686fc4dec0aSBarry Smith } 56873ff4c91cSHong Zhang ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr); 5688fc4dec0aSBarry Smith ierr = MatMatMultNumeric_MPIDense_MPIAIJ(A,B,*C);CHKERRQ(ierr); 56893ff4c91cSHong Zhang ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr); 5690fc4dec0aSBarry Smith PetscFunctionReturn(0); 5691fc4dec0aSBarry Smith } 5692fc4dec0aSBarry Smith 5693611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS) 56948cc058d9SJed Brown 
PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mumps(Mat,MatFactorType,Mat*); 5695611f576cSBarry Smith #endif 56963bf14a46SMatthew Knepley #if defined(PETSC_HAVE_PASTIX) 56978cc058d9SJed Brown PETSC_EXTERN PetscErrorCode MatGetFactor_mpiaij_pastix(Mat,MatFactorType,Mat*); 56983bf14a46SMatthew Knepley #endif 5699611f576cSBarry Smith #if defined(PETSC_HAVE_SUPERLU_DIST) 57008cc058d9SJed Brown PETSC_EXTERN PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat,MatFactorType,Mat*); 5701611f576cSBarry Smith #endif 570217f1a0eaSHong Zhang #if defined(PETSC_HAVE_CLIQUE) 57038cc058d9SJed Brown PETSC_EXTERN PetscErrorCode MatGetFactor_aij_clique(Mat,MatFactorType,Mat*); 570417f1a0eaSHong Zhang #endif 57055c9eb25fSBarry Smith 5706ccd8e176SBarry Smith /*MC 5707ccd8e176SBarry Smith MATMPIAIJ - MATMPIAIJ = "mpiaij" - A matrix type to be used for parallel sparse matrices. 5708ccd8e176SBarry Smith 5709ccd8e176SBarry Smith Options Database Keys: 5710ccd8e176SBarry Smith . -mat_type mpiaij - sets the matrix type to "mpiaij" during a call to MatSetFromOptions() 5711ccd8e176SBarry Smith 5712ccd8e176SBarry Smith Level: beginner 5713ccd8e176SBarry Smith 571469b1f4b7SBarry Smith .seealso: MatCreateAIJ() 5715ccd8e176SBarry Smith M*/ 5716ccd8e176SBarry Smith 5717ccd8e176SBarry Smith #undef __FUNCT__ 5718ccd8e176SBarry Smith #define __FUNCT__ "MatCreate_MPIAIJ" 57198cc058d9SJed Brown PETSC_EXTERN PetscErrorCode MatCreate_MPIAIJ(Mat B) 5720ccd8e176SBarry Smith { 5721ccd8e176SBarry Smith Mat_MPIAIJ *b; 5722ccd8e176SBarry Smith PetscErrorCode ierr; 5723ccd8e176SBarry Smith PetscMPIInt size; 5724ccd8e176SBarry Smith 5725ccd8e176SBarry Smith PetscFunctionBegin; 5726ce94432eSBarry Smith ierr = MPI_Comm_size(PetscObjectComm((PetscObject)B),&size);CHKERRQ(ierr); 57272205254eSKarl Rupp 572838f2d2fdSLisandro Dalcin ierr = PetscNewLog(B,Mat_MPIAIJ,&b);CHKERRQ(ierr); 5729ccd8e176SBarry Smith B->data = (void*)b; 5730ccd8e176SBarry Smith ierr = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct 
_MatOps));CHKERRQ(ierr); 5731ccd8e176SBarry Smith B->assembled = PETSC_FALSE; 5732ccd8e176SBarry Smith B->insertmode = NOT_SET_VALUES; 5733ccd8e176SBarry Smith b->size = size; 57342205254eSKarl Rupp 5735ce94432eSBarry Smith ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)B),&b->rank);CHKERRQ(ierr); 5736ccd8e176SBarry Smith 5737ccd8e176SBarry Smith /* build cache for off array entries formed */ 5738ce94432eSBarry Smith ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr); 57392205254eSKarl Rupp 5740ccd8e176SBarry Smith b->donotstash = PETSC_FALSE; 5741ccd8e176SBarry Smith b->colmap = 0; 5742ccd8e176SBarry Smith b->garray = 0; 5743ccd8e176SBarry Smith b->roworiented = PETSC_TRUE; 5744ccd8e176SBarry Smith 5745ccd8e176SBarry Smith /* stuff used for matrix vector multiply */ 57460298fd71SBarry Smith b->lvec = NULL; 57470298fd71SBarry Smith b->Mvctx = NULL; 5748ccd8e176SBarry Smith 5749ccd8e176SBarry Smith /* stuff for MatGetRow() */ 5750ccd8e176SBarry Smith b->rowindices = 0; 5751ccd8e176SBarry Smith b->rowvalues = 0; 5752ccd8e176SBarry Smith b->getrowactive = PETSC_FALSE; 5753ccd8e176SBarry Smith 5754bbf3fe20SPaul Mullowney /* flexible pointer used in CUSP/CUSPARSE classes */ 57550298fd71SBarry Smith b->spptr = NULL; 5756f60c3dc2SHong Zhang 5757611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS) 5758bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetFactor_mumps_C",MatGetFactor_aij_mumps);CHKERRQ(ierr); 5759611f576cSBarry Smith #endif 57603bf14a46SMatthew Knepley #if defined(PETSC_HAVE_PASTIX) 5761bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetFactor_pastix_C",MatGetFactor_mpiaij_pastix);CHKERRQ(ierr); 57623bf14a46SMatthew Knepley #endif 5763611f576cSBarry Smith #if defined(PETSC_HAVE_SUPERLU_DIST) 5764bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetFactor_superlu_dist_C",MatGetFactor_mpiaij_superlu_dist);CHKERRQ(ierr); 5765611f576cSBarry Smith 
#endif 576617f1a0eaSHong Zhang #if defined(PETSC_HAVE_CLIQUE) 5767bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetFactor_clique_C",MatGetFactor_aij_clique);CHKERRQ(ierr); 576817f1a0eaSHong Zhang #endif 5769bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)B,"MatStoreValues_C",MatStoreValues_MPIAIJ);CHKERRQ(ierr); 5770bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)B,"MatRetrieveValues_C",MatRetrieveValues_MPIAIJ);CHKERRQ(ierr); 5771bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetDiagonalBlock_C",MatGetDiagonalBlock_MPIAIJ);CHKERRQ(ierr); 5772bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)B,"MatIsTranspose_C",MatIsTranspose_MPIAIJ);CHKERRQ(ierr); 5773bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAIJSetPreallocation_C",MatMPIAIJSetPreallocation_MPIAIJ);CHKERRQ(ierr); 5774bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C",MatMPIAIJSetPreallocationCSR_MPIAIJ);CHKERRQ(ierr); 5775bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)B,"MatDiagonalScaleLocal_C",MatDiagonalScaleLocal_MPIAIJ);CHKERRQ(ierr); 5776bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpiaij_mpiaijperm_C",MatConvert_MPIAIJ_MPIAIJPERM);CHKERRQ(ierr); 5777bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpiaij_mpiaijcrl_C",MatConvert_MPIAIJ_MPIAIJCRL);CHKERRQ(ierr); 5778bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpiaij_mpisbaij_C",MatConvert_MPIAIJ_MPISBAIJ);CHKERRQ(ierr); 5779bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)B,"MatMatMult_mpidense_mpiaij_C",MatMatMult_MPIDense_MPIAIJ);CHKERRQ(ierr); 5780bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)B,"MatMatMultSymbolic_mpidense_mpiaij_C",MatMatMultSymbolic_MPIDense_MPIAIJ);CHKERRQ(ierr); 
5781bdf89e91SBarry Smith ierr = PetscObjectComposeFunction((PetscObject)B,"MatMatMultNumeric_mpidense_mpiaij_C",MatMatMultNumeric_MPIDense_MPIAIJ);CHKERRQ(ierr); 578217667f90SBarry Smith ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIAIJ);CHKERRQ(ierr); 5783ccd8e176SBarry Smith PetscFunctionReturn(0); 5784ccd8e176SBarry Smith } 578581824310SBarry Smith 578603bfb495SBarry Smith #undef __FUNCT__ 578703bfb495SBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithSplitArrays" 578858d36128SBarry Smith /*@ 578903bfb495SBarry Smith MatCreateMPIAIJWithSplitArrays - creates a MPI AIJ matrix using arrays that contain the "diagonal" 579003bfb495SBarry Smith and "off-diagonal" part of the matrix in CSR format. 579103bfb495SBarry Smith 579203bfb495SBarry Smith Collective on MPI_Comm 579303bfb495SBarry Smith 579403bfb495SBarry Smith Input Parameters: 579503bfb495SBarry Smith + comm - MPI communicator 579603bfb495SBarry Smith . m - number of local rows (Cannot be PETSC_DECIDE) 579703bfb495SBarry Smith . n - This value should be the same as the local size used in creating the 579803bfb495SBarry Smith x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have 579903bfb495SBarry Smith calculated if N is given) For square matrices n is almost always m. 580003bfb495SBarry Smith . M - number of global rows (or PETSC_DETERMINE to have calculated if m is given) 580103bfb495SBarry Smith . N - number of global columns (or PETSC_DETERMINE to have calculated if n is given) 580203bfb495SBarry Smith . i - row indices for "diagonal" portion of matrix 580303bfb495SBarry Smith . j - column indices 580403bfb495SBarry Smith . a - matrix values 580503bfb495SBarry Smith . oi - row indices for "off-diagonal" portion of matrix 580603bfb495SBarry Smith . oj - column indices 580703bfb495SBarry Smith - oa - matrix values 580803bfb495SBarry Smith 580903bfb495SBarry Smith Output Parameter: 581003bfb495SBarry Smith . 
mat - the matrix 581103bfb495SBarry Smith 581203bfb495SBarry Smith Level: advanced 581303bfb495SBarry Smith 581403bfb495SBarry Smith Notes: 5815292fb18eSBarry Smith The i, j, and a arrays ARE NOT copied by this routine into the internal format used by PETSc. The user 5816292fb18eSBarry Smith must free the arrays once the matrix has been destroyed and not before. 581703bfb495SBarry Smith 581803bfb495SBarry Smith The i and j indices are 0 based 581903bfb495SBarry Smith 582069b1f4b7SBarry Smith See MatCreateAIJ() for the definition of "diagonal" and "off-diagonal" portion of the matrix 582103bfb495SBarry Smith 58227b55108eSBarry Smith This sets local rows and cannot be used to set off-processor values. 58237b55108eSBarry Smith 5824dca341c0SJed Brown Use of this routine is discouraged because it is inflexible and cumbersome to use. It is extremely rare that a 5825dca341c0SJed Brown legacy application natively assembles into exactly this split format. The code to do so is nontrivial and does 5826dca341c0SJed Brown not easily support in-place reassembly. It is recommended to use MatSetValues() (or a variant thereof) because 5827dca341c0SJed Brown the resulting assembly is easier to implement, will work with any matrix format, and the user does not have to 5828dca341c0SJed Brown keep track of the underlying array. Use MatSetOption(A,MAT_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE) to disable all 5829dca341c0SJed Brown communication if it is known that only local entries will be set. 
583003bfb495SBarry Smith 583103bfb495SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 583203bfb495SBarry Smith 583303bfb495SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 583469b1f4b7SBarry Smith MPIAIJ, MatCreateAIJ(), MatCreateMPIAIJWithArrays() 583503bfb495SBarry Smith @*/ 58362205254eSKarl Rupp PetscErrorCode MatCreateMPIAIJWithSplitArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt i[],PetscInt j[],PetscScalar a[],PetscInt oi[], PetscInt oj[],PetscScalar oa[],Mat *mat) 583703bfb495SBarry Smith { 583803bfb495SBarry Smith PetscErrorCode ierr; 583903bfb495SBarry Smith Mat_MPIAIJ *maij; 584003bfb495SBarry Smith 584103bfb495SBarry Smith PetscFunctionBegin; 5842e32f2f54SBarry Smith if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative"); 5843ea345e14SBarry Smith if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0"); 5844ea345e14SBarry Smith if (oi[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"oi (row indices) must start with 0"); 584503bfb495SBarry Smith ierr = MatCreate(comm,mat);CHKERRQ(ierr); 584603bfb495SBarry Smith ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr); 584703bfb495SBarry Smith ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr); 584803bfb495SBarry Smith maij = (Mat_MPIAIJ*) (*mat)->data; 58492205254eSKarl Rupp 58508d7a6e47SBarry Smith (*mat)->preallocated = PETSC_TRUE; 585103bfb495SBarry Smith 585226283091SBarry Smith ierr = PetscLayoutSetUp((*mat)->rmap);CHKERRQ(ierr); 585326283091SBarry Smith ierr = PetscLayoutSetUp((*mat)->cmap);CHKERRQ(ierr); 585403bfb495SBarry Smith 585503bfb495SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,n,i,j,a,&maij->A);CHKERRQ(ierr); 5856d0f46423SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,(*mat)->cmap->N,oi,oj,oa,&maij->B);CHKERRQ(ierr); 
585703bfb495SBarry Smith 58588d7a6e47SBarry Smith ierr = MatAssemblyBegin(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 58598d7a6e47SBarry Smith ierr = MatAssemblyEnd(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 58608d7a6e47SBarry Smith ierr = MatAssemblyBegin(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 58618d7a6e47SBarry Smith ierr = MatAssemblyEnd(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 58628d7a6e47SBarry Smith 586303bfb495SBarry Smith ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 586403bfb495SBarry Smith ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 5865dca341c0SJed Brown ierr = MatSetOption(*mat,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr); 586603bfb495SBarry Smith PetscFunctionReturn(0); 586703bfb495SBarry Smith } 586803bfb495SBarry Smith 586981824310SBarry Smith /* 587081824310SBarry Smith Special version for direct calls from Fortran 587181824310SBarry Smith */ 5872b45d2f2cSJed Brown #include <petsc-private/fortranimpl.h> 58737087cfbeSBarry Smith 587481824310SBarry Smith #if defined(PETSC_HAVE_FORTRAN_CAPS) 587581824310SBarry Smith #define matsetvaluesmpiaij_ MATSETVALUESMPIAIJ 587681824310SBarry Smith #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) 587781824310SBarry Smith #define matsetvaluesmpiaij_ matsetvaluesmpiaij 587881824310SBarry Smith #endif 587981824310SBarry Smith 588081824310SBarry Smith /* Change these macros so can be used in void function */ 588181824310SBarry Smith #undef CHKERRQ 5882e32f2f54SBarry Smith #define CHKERRQ(ierr) CHKERRABORT(PETSC_COMM_WORLD,ierr) 588381824310SBarry Smith #undef SETERRQ2 5884e32f2f54SBarry Smith #define SETERRQ2(comm,ierr,b,c,d) CHKERRABORT(comm,ierr) 58854994cf47SJed Brown #undef SETERRQ3 58864994cf47SJed Brown #define SETERRQ3(comm,ierr,b,c,d,e) CHKERRABORT(comm,ierr) 588781824310SBarry Smith #undef SETERRQ 5888e32f2f54SBarry Smith #define SETERRQ(c,ierr,b) CHKERRABORT(c,ierr) 588981824310SBarry Smith 589081824310SBarry Smith #undef __FUNCT__ 589181824310SBarry Smith 
#define __FUNCT__ "matsetvaluesmpiaij_" 58928cc058d9SJed Brown PETSC_EXTERN void PETSC_STDCALL matsetvaluesmpiaij_(Mat *mmat,PetscInt *mm,const PetscInt im[],PetscInt *mn,const PetscInt in[],const PetscScalar v[],InsertMode *maddv,PetscErrorCode *_ierr) 589381824310SBarry Smith { 589481824310SBarry Smith Mat mat = *mmat; 589581824310SBarry Smith PetscInt m = *mm, n = *mn; 589681824310SBarry Smith InsertMode addv = *maddv; 589781824310SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 589881824310SBarry Smith PetscScalar value; 589981824310SBarry Smith PetscErrorCode ierr; 5900899cda47SBarry Smith 59014994cf47SJed Brown MatCheckPreallocated(mat,1); 59022205254eSKarl Rupp if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv; 59032205254eSKarl Rupp 590481824310SBarry Smith #if defined(PETSC_USE_DEBUG) 5905f23aa3ddSBarry Smith else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values"); 590681824310SBarry Smith #endif 590781824310SBarry Smith { 5908d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend; 5909d0f46423SBarry Smith PetscInt cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col; 5910ace3abfcSBarry Smith PetscBool roworiented = aij->roworiented; 591181824310SBarry Smith 591281824310SBarry Smith /* Some Variables required in the macro */ 591381824310SBarry Smith Mat A = aij->A; 591481824310SBarry Smith Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data; 591581824310SBarry Smith PetscInt *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j; 5916dd6ea824SBarry Smith MatScalar *aa = a->a; 5917ace3abfcSBarry Smith PetscBool ignorezeroentries = (((a->ignorezeroentries)&&(addv==ADD_VALUES)) ? 
PETSC_TRUE : PETSC_FALSE); 591881824310SBarry Smith Mat B = aij->B; 591981824310SBarry Smith Mat_SeqAIJ *b = (Mat_SeqAIJ*)B->data; 5920d0f46423SBarry Smith PetscInt *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n; 5921dd6ea824SBarry Smith MatScalar *ba = b->a; 592281824310SBarry Smith 592381824310SBarry Smith PetscInt *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2; 592481824310SBarry Smith PetscInt nonew = a->nonew; 5925dd6ea824SBarry Smith MatScalar *ap1,*ap2; 592681824310SBarry Smith 592781824310SBarry Smith PetscFunctionBegin; 592881824310SBarry Smith for (i=0; i<m; i++) { 592981824310SBarry Smith if (im[i] < 0) continue; 593081824310SBarry Smith #if defined(PETSC_USE_DEBUG) 5931e32f2f54SBarry Smith if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1); 593281824310SBarry Smith #endif 593381824310SBarry Smith if (im[i] >= rstart && im[i] < rend) { 593481824310SBarry Smith row = im[i] - rstart; 593581824310SBarry Smith lastcol1 = -1; 593681824310SBarry Smith rp1 = aj + ai[row]; 593781824310SBarry Smith ap1 = aa + ai[row]; 593881824310SBarry Smith rmax1 = aimax[row]; 593981824310SBarry Smith nrow1 = ailen[row]; 594081824310SBarry Smith low1 = 0; 594181824310SBarry Smith high1 = nrow1; 594281824310SBarry Smith lastcol2 = -1; 594381824310SBarry Smith rp2 = bj + bi[row]; 594481824310SBarry Smith ap2 = ba + bi[row]; 594581824310SBarry Smith rmax2 = bimax[row]; 594681824310SBarry Smith nrow2 = bilen[row]; 594781824310SBarry Smith low2 = 0; 594881824310SBarry Smith high2 = nrow2; 594981824310SBarry Smith 595081824310SBarry Smith for (j=0; j<n; j++) { 59512205254eSKarl Rupp if (roworiented) value = v[i*n+j]; 59522205254eSKarl Rupp else value = v[i+j*m]; 595381824310SBarry Smith if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue; 595481824310SBarry Smith if (in[j] >= cstart && in[j] < cend) 
{ 595581824310SBarry Smith col = in[j] - cstart; 595681824310SBarry Smith MatSetValues_SeqAIJ_A_Private(row,col,value,addv); 595781824310SBarry Smith } else if (in[j] < 0) continue; 595881824310SBarry Smith #if defined(PETSC_USE_DEBUG) 5959cb9801acSJed Brown else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1); 596081824310SBarry Smith #endif 596181824310SBarry Smith else { 596281824310SBarry Smith if (mat->was_assembled) { 596381824310SBarry Smith if (!aij->colmap) { 5964ab9863d7SBarry Smith ierr = MatCreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 596581824310SBarry Smith } 596681824310SBarry Smith #if defined(PETSC_USE_CTABLE) 596781824310SBarry Smith ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr); 596881824310SBarry Smith col--; 596981824310SBarry Smith #else 597081824310SBarry Smith col = aij->colmap[in[j]] - 1; 597181824310SBarry Smith #endif 597281824310SBarry Smith if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) { 5973ab9863d7SBarry Smith ierr = MatDisAssemble_MPIAIJ(mat);CHKERRQ(ierr); 597481824310SBarry Smith col = in[j]; 597581824310SBarry Smith /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */ 597681824310SBarry Smith B = aij->B; 597781824310SBarry Smith b = (Mat_SeqAIJ*)B->data; 597881824310SBarry Smith bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; 597981824310SBarry Smith rp2 = bj + bi[row]; 598081824310SBarry Smith ap2 = ba + bi[row]; 598181824310SBarry Smith rmax2 = bimax[row]; 598281824310SBarry Smith nrow2 = bilen[row]; 598381824310SBarry Smith low2 = 0; 598481824310SBarry Smith high2 = nrow2; 5985d0f46423SBarry Smith bm = aij->B->rmap->n; 598681824310SBarry Smith ba = b->a; 598781824310SBarry Smith } 598881824310SBarry Smith } else col = in[j]; 598981824310SBarry Smith MatSetValues_SeqAIJ_B_Private(row,col,value,addv); 599081824310SBarry Smith } 599181824310SBarry Smith } 59922205254eSKarl Rupp } else if 
(!aij->donotstash) { 599381824310SBarry Smith if (roworiented) { 5994ace3abfcSBarry Smith ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 599581824310SBarry Smith } else { 5996ace3abfcSBarry Smith ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 599781824310SBarry Smith } 599881824310SBarry Smith } 599981824310SBarry Smith } 60002205254eSKarl Rupp } 600181824310SBarry Smith PetscFunctionReturnVoid(); 600281824310SBarry Smith } 600303bfb495SBarry Smith 6004