18a729477SBarry Smith 2c6db04a5SJed Brown #include <../src/mat/impls/aij/mpi/mpiaij.h> /*I "petscmat.h" I*/ 3c6db04a5SJed Brown #include <petscblaslapack.h> 48a729477SBarry Smith 501bebe75SBarry Smith /*MC 601bebe75SBarry Smith MATAIJ - MATAIJ = "aij" - A matrix type to be used for sparse matrices. 701bebe75SBarry Smith 801bebe75SBarry Smith This matrix type is identical to MATSEQAIJ when constructed with a single process communicator, 901bebe75SBarry Smith and MATMPIAIJ otherwise. As a result, for single process communicators, 1001bebe75SBarry Smith MatSeqAIJSetPreallocation is supported, and similarly MatMPIAIJSetPreallocation is supported 1101bebe75SBarry Smith for communicators controlling multiple processes. It is recommended that you call both of 1201bebe75SBarry Smith the above preallocation routines for simplicity. 1301bebe75SBarry Smith 1401bebe75SBarry Smith Options Database Keys: 1501bebe75SBarry Smith . -mat_type aij - sets the matrix type to "aij" during a call to MatSetFromOptions() 1601bebe75SBarry Smith 1701bebe75SBarry Smith Developer Notes: Subclasses include MATAIJCUSP, MATAIJPERM, MATAIJCRL, and also automatically switches over to use inodes when 1801bebe75SBarry Smith enough exist. 1901bebe75SBarry Smith 2001bebe75SBarry Smith Level: beginner 2101bebe75SBarry Smith 2201bebe75SBarry Smith .seealso: MatCreateMPIAIJ(), MatCreateSeqAIJ(), MATSEQAIJ,MATMPIAIJ 2301bebe75SBarry Smith M*/ 2401bebe75SBarry Smith 2501bebe75SBarry Smith /*MC 2601bebe75SBarry Smith MATAIJCRL - MATAIJCRL = "aijcrl" - A matrix type to be used for sparse matrices. 2701bebe75SBarry Smith 2801bebe75SBarry Smith This matrix type is identical to MATSEQAIJCRL when constructed with a single process communicator, 2901bebe75SBarry Smith and MATMPIAIJCRL otherwise. 
As a result, for single process communicators, 3001bebe75SBarry Smith MatSeqAIJSetPreallocation() is supported, and similarly MatMPIAIJSetPreallocation() is supported 3101bebe75SBarry Smith for communicators controlling multiple processes. It is recommended that you call both of 3201bebe75SBarry Smith the above preallocation routines for simplicity. 3301bebe75SBarry Smith 3401bebe75SBarry Smith Options Database Keys: 3501bebe75SBarry Smith . -mat_type aijcrl - sets the matrix type to "aijcrl" during a call to MatSetFromOptions() 3601bebe75SBarry Smith 3701bebe75SBarry Smith Level: beginner 3801bebe75SBarry Smith 3901bebe75SBarry Smith .seealso: MatCreateMPIAIJCRL,MATSEQAIJCRL,MATMPIAIJCRL, MATSEQAIJCRL, MATMPIAIJCRL 4001bebe75SBarry Smith M*/ 4101bebe75SBarry Smith 42dd6ea824SBarry Smith #undef __FUNCT__ 4327d4218bSShri Abhyankar #define __FUNCT__ "MatFindNonZeroRows_MPIAIJ" 4427d4218bSShri Abhyankar PetscErrorCode MatFindNonZeroRows_MPIAIJ(Mat M,IS *keptrows) 4527d4218bSShri Abhyankar { 4627d4218bSShri Abhyankar PetscErrorCode ierr; 4727d4218bSShri Abhyankar Mat_MPIAIJ *mat = (Mat_MPIAIJ*)M->data; 4827d4218bSShri Abhyankar Mat_SeqAIJ *a = (Mat_SeqAIJ*)mat->A->data; 4927d4218bSShri Abhyankar Mat_SeqAIJ *b = (Mat_SeqAIJ*)mat->B->data; 5027d4218bSShri Abhyankar const PetscInt *ia,*ib; 5127d4218bSShri Abhyankar const MatScalar *aa,*bb; 5227d4218bSShri Abhyankar PetscInt na,nb,i,j,*rows,cnt=0,n0rows; 5327d4218bSShri Abhyankar PetscInt m = M->rmap->n,rstart = M->rmap->rstart; 5427d4218bSShri Abhyankar 5527d4218bSShri Abhyankar PetscFunctionBegin; 5627d4218bSShri Abhyankar *keptrows = 0; 5727d4218bSShri Abhyankar ia = a->i; 5827d4218bSShri Abhyankar ib = b->i; 5927d4218bSShri Abhyankar for (i=0; i<m; i++) { 6027d4218bSShri Abhyankar na = ia[i+1] - ia[i]; 6127d4218bSShri Abhyankar nb = ib[i+1] - ib[i]; 6227d4218bSShri Abhyankar if (!na && !nb) { 6327d4218bSShri Abhyankar cnt++; 6427d4218bSShri Abhyankar goto ok1; 6527d4218bSShri Abhyankar } 6627d4218bSShri Abhyankar aa = 
a->a + ia[i]; 6727d4218bSShri Abhyankar for (j=0; j<na; j++) { 6827d4218bSShri Abhyankar if (aa[j] != 0.0) goto ok1; 6927d4218bSShri Abhyankar } 7027d4218bSShri Abhyankar bb = b->a + ib[i]; 7127d4218bSShri Abhyankar for (j=0; j <nb; j++) { 7227d4218bSShri Abhyankar if (bb[j] != 0.0) goto ok1; 7327d4218bSShri Abhyankar } 7427d4218bSShri Abhyankar cnt++; 7527d4218bSShri Abhyankar ok1:; 7627d4218bSShri Abhyankar } 7727d4218bSShri Abhyankar ierr = MPI_Allreduce(&cnt,&n0rows,1,MPIU_INT,MPI_SUM,((PetscObject)M)->comm);CHKERRQ(ierr); 7827d4218bSShri Abhyankar if (!n0rows) PetscFunctionReturn(0); 7927d4218bSShri Abhyankar ierr = PetscMalloc((M->rmap->n-cnt)*sizeof(PetscInt),&rows);CHKERRQ(ierr); 8027d4218bSShri Abhyankar cnt = 0; 8127d4218bSShri Abhyankar for (i=0; i<m; i++) { 8227d4218bSShri Abhyankar na = ia[i+1] - ia[i]; 8327d4218bSShri Abhyankar nb = ib[i+1] - ib[i]; 8427d4218bSShri Abhyankar if (!na && !nb) continue; 8527d4218bSShri Abhyankar aa = a->a + ia[i]; 8627d4218bSShri Abhyankar for(j=0; j<na;j++) { 8727d4218bSShri Abhyankar if (aa[j] != 0.0) { 8827d4218bSShri Abhyankar rows[cnt++] = rstart + i; 8927d4218bSShri Abhyankar goto ok2; 9027d4218bSShri Abhyankar } 9127d4218bSShri Abhyankar } 9227d4218bSShri Abhyankar bb = b->a + ib[i]; 9327d4218bSShri Abhyankar for (j=0; j<nb; j++) { 9427d4218bSShri Abhyankar if (bb[j] != 0.0) { 9527d4218bSShri Abhyankar rows[cnt++] = rstart + i; 9627d4218bSShri Abhyankar goto ok2; 9727d4218bSShri Abhyankar } 9827d4218bSShri Abhyankar } 9927d4218bSShri Abhyankar ok2:; 10027d4218bSShri Abhyankar } 10127d4218bSShri Abhyankar ierr = ISCreateGeneral(PETSC_COMM_WORLD,cnt,rows,PETSC_OWN_POINTER,keptrows);CHKERRQ(ierr); 10227d4218bSShri Abhyankar PetscFunctionReturn(0); 10327d4218bSShri Abhyankar } 10427d4218bSShri Abhyankar 10527d4218bSShri Abhyankar #undef __FUNCT__ 1060716a85fSBarry Smith #define __FUNCT__ "MatGetColumnNorms_MPIAIJ" 1070716a85fSBarry Smith PetscErrorCode MatGetColumnNorms_MPIAIJ(Mat A,NormType type,PetscReal *norms) 
1080716a85fSBarry Smith { 1090716a85fSBarry Smith PetscErrorCode ierr; 1100716a85fSBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)A->data; 1110716a85fSBarry Smith PetscInt i,n,*garray = aij->garray; 1120716a85fSBarry Smith Mat_SeqAIJ *a_aij = (Mat_SeqAIJ*) aij->A->data; 1130716a85fSBarry Smith Mat_SeqAIJ *b_aij = (Mat_SeqAIJ*) aij->B->data; 1140716a85fSBarry Smith PetscReal *work; 1150716a85fSBarry Smith 1160716a85fSBarry Smith PetscFunctionBegin; 1170716a85fSBarry Smith ierr = MatGetSize(A,PETSC_NULL,&n);CHKERRQ(ierr); 1180716a85fSBarry Smith ierr = PetscMalloc(n*sizeof(PetscReal),&work);CHKERRQ(ierr); 1190716a85fSBarry Smith ierr = PetscMemzero(work,n*sizeof(PetscReal));CHKERRQ(ierr); 1200716a85fSBarry Smith if (type == NORM_2) { 1210716a85fSBarry Smith for (i=0; i<a_aij->i[aij->A->rmap->n]; i++) { 1220716a85fSBarry Smith work[A->cmap->rstart + a_aij->j[i]] += PetscAbsScalar(a_aij->a[i]*a_aij->a[i]); 1230716a85fSBarry Smith } 1240716a85fSBarry Smith for (i=0; i<b_aij->i[aij->B->rmap->n]; i++) { 1250716a85fSBarry Smith work[garray[b_aij->j[i]]] += PetscAbsScalar(b_aij->a[i]*b_aij->a[i]); 1260716a85fSBarry Smith } 1270716a85fSBarry Smith } else if (type == NORM_1) { 1280716a85fSBarry Smith for (i=0; i<a_aij->i[aij->A->rmap->n]; i++) { 1290716a85fSBarry Smith work[A->cmap->rstart + a_aij->j[i]] += PetscAbsScalar(a_aij->a[i]); 1300716a85fSBarry Smith } 1310716a85fSBarry Smith for (i=0; i<b_aij->i[aij->B->rmap->n]; i++) { 1320716a85fSBarry Smith work[garray[b_aij->j[i]]] += PetscAbsScalar(b_aij->a[i]); 1330716a85fSBarry Smith } 1340716a85fSBarry Smith } else if (type == NORM_INFINITY) { 1350716a85fSBarry Smith for (i=0; i<a_aij->i[aij->A->rmap->n]; i++) { 1360716a85fSBarry Smith work[A->cmap->rstart + a_aij->j[i]] = PetscMax(PetscAbsScalar(a_aij->a[i]), work[A->cmap->rstart + a_aij->j[i]]); 1370716a85fSBarry Smith } 1380716a85fSBarry Smith for (i=0; i<b_aij->i[aij->B->rmap->n]; i++) { 1390716a85fSBarry Smith work[garray[b_aij->j[i]]] = 
PetscMax(PetscAbsScalar(b_aij->a[i]),work[garray[b_aij->j[i]]]); 1400716a85fSBarry Smith } 1410716a85fSBarry Smith 1420716a85fSBarry Smith } else SETERRQ(((PetscObject)A)->comm,PETSC_ERR_ARG_WRONG,"Unknown NormType"); 1430716a85fSBarry Smith if (type == NORM_INFINITY) { 1440716a85fSBarry Smith ierr = MPI_Allreduce(work,norms,n,MPIU_REAL,MPIU_MAX,A->hdr.comm);CHKERRQ(ierr); 1450716a85fSBarry Smith } else { 1460716a85fSBarry Smith ierr = MPI_Allreduce(work,norms,n,MPIU_REAL,MPIU_SUM,A->hdr.comm);CHKERRQ(ierr); 1470716a85fSBarry Smith } 1480716a85fSBarry Smith ierr = PetscFree(work);CHKERRQ(ierr); 1490716a85fSBarry Smith if (type == NORM_2) { 1500716a85fSBarry Smith for (i=0; i<n; i++) norms[i] = sqrt(norms[i]); 1510716a85fSBarry Smith } 1520716a85fSBarry Smith PetscFunctionReturn(0); 1530716a85fSBarry Smith } 1540716a85fSBarry Smith 1550716a85fSBarry Smith #undef __FUNCT__ 156dd6ea824SBarry Smith #define __FUNCT__ "MatDistribute_MPIAIJ" 157dd6ea824SBarry Smith /* 158dd6ea824SBarry Smith Distributes a SeqAIJ matrix across a set of processes. Code stolen from 159dd6ea824SBarry Smith MatLoad_MPIAIJ(). Horrible lack of reuse. Should be a routine for each matrix type. 
160dd6ea824SBarry Smith 161dd6ea824SBarry Smith Only for square matrices 162dd6ea824SBarry Smith */ 163dd6ea824SBarry Smith PetscErrorCode MatDistribute_MPIAIJ(MPI_Comm comm,Mat gmat,PetscInt m,MatReuse reuse,Mat *inmat) 164dd6ea824SBarry Smith { 165dd6ea824SBarry Smith PetscMPIInt rank,size; 166dd6ea824SBarry Smith PetscInt *rowners,*dlens,*olens,i,rstart,rend,j,jj,nz,*gmataj,cnt,row,*ld; 167dd6ea824SBarry Smith PetscErrorCode ierr; 168dd6ea824SBarry Smith Mat mat; 169dd6ea824SBarry Smith Mat_SeqAIJ *gmata; 170dd6ea824SBarry Smith PetscMPIInt tag; 171dd6ea824SBarry Smith MPI_Status status; 172ace3abfcSBarry Smith PetscBool aij; 173dd6ea824SBarry Smith MatScalar *gmataa,*ao,*ad,*gmataarestore=0; 174dd6ea824SBarry Smith 175dd6ea824SBarry Smith PetscFunctionBegin; 176dd6ea824SBarry Smith CHKMEMQ; 177dd6ea824SBarry Smith ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 178dd6ea824SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 179dd6ea824SBarry Smith if (!rank) { 180dd6ea824SBarry Smith ierr = PetscTypeCompare((PetscObject)gmat,MATSEQAIJ,&aij);CHKERRQ(ierr); 18165e19b50SBarry Smith if (!aij) SETERRQ1(((PetscObject)gmat)->comm,PETSC_ERR_SUP,"Currently no support for input matrix of type %s\n",((PetscObject)gmat)->type_name); 182dd6ea824SBarry Smith } 183dd6ea824SBarry Smith if (reuse == MAT_INITIAL_MATRIX) { 184dd6ea824SBarry Smith ierr = MatCreate(comm,&mat);CHKERRQ(ierr); 185dd6ea824SBarry Smith ierr = MatSetSizes(mat,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 186dd6ea824SBarry Smith ierr = MatSetType(mat,MATAIJ);CHKERRQ(ierr); 187dd6ea824SBarry Smith ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr); 188dd6ea824SBarry Smith ierr = PetscMalloc2(m,PetscInt,&dlens,m,PetscInt,&olens);CHKERRQ(ierr); 189dd6ea824SBarry Smith ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr); 190dd6ea824SBarry Smith rowners[0] = 0; 191dd6ea824SBarry Smith for (i=2; i<=size; i++) { 192dd6ea824SBarry Smith rowners[i] += 
rowners[i-1]; 193dd6ea824SBarry Smith } 194dd6ea824SBarry Smith rstart = rowners[rank]; 195dd6ea824SBarry Smith rend = rowners[rank+1]; 196dd6ea824SBarry Smith ierr = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr); 197dd6ea824SBarry Smith if (!rank) { 198dd6ea824SBarry Smith gmata = (Mat_SeqAIJ*) gmat->data; 199dd6ea824SBarry Smith /* send row lengths to all processors */ 200dd6ea824SBarry Smith for (i=0; i<m; i++) dlens[i] = gmata->ilen[i]; 201dd6ea824SBarry Smith for (i=1; i<size; i++) { 202dd6ea824SBarry Smith ierr = MPI_Send(gmata->ilen + rowners[i],rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr); 203dd6ea824SBarry Smith } 204dd6ea824SBarry Smith /* determine number diagonal and off-diagonal counts */ 205dd6ea824SBarry Smith ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr); 206dd6ea824SBarry Smith ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr); 207dd6ea824SBarry Smith ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr); 208dd6ea824SBarry Smith jj = 0; 209dd6ea824SBarry Smith for (i=0; i<m; i++) { 210dd6ea824SBarry Smith for (j=0; j<dlens[i]; j++) { 211dd6ea824SBarry Smith if (gmata->j[jj] < rstart) ld[i]++; 212dd6ea824SBarry Smith if (gmata->j[jj] < rstart || gmata->j[jj] >= rend) olens[i]++; 213dd6ea824SBarry Smith jj++; 214dd6ea824SBarry Smith } 215dd6ea824SBarry Smith } 216dd6ea824SBarry Smith /* send column indices to other processes */ 217dd6ea824SBarry Smith for (i=1; i<size; i++) { 218dd6ea824SBarry Smith nz = gmata->i[rowners[i+1]]-gmata->i[rowners[i]]; 219dd6ea824SBarry Smith ierr = MPI_Send(&nz,1,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 220dd6ea824SBarry Smith ierr = MPI_Send(gmata->j + gmata->i[rowners[i]],nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 221dd6ea824SBarry Smith } 222dd6ea824SBarry Smith 223dd6ea824SBarry Smith /* send numerical values to other processes */ 224dd6ea824SBarry Smith for (i=1; i<size; i++) { 225dd6ea824SBarry Smith nz = gmata->i[rowners[i+1]]-gmata->i[rowners[i]]; 226dd6ea824SBarry Smith 
ierr = MPI_Send(gmata->a + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr); 227dd6ea824SBarry Smith } 228dd6ea824SBarry Smith gmataa = gmata->a; 229dd6ea824SBarry Smith gmataj = gmata->j; 230dd6ea824SBarry Smith 231dd6ea824SBarry Smith } else { 232dd6ea824SBarry Smith /* receive row lengths */ 233dd6ea824SBarry Smith ierr = MPI_Recv(dlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 234dd6ea824SBarry Smith /* receive column indices */ 235dd6ea824SBarry Smith ierr = MPI_Recv(&nz,1,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 236dd6ea824SBarry Smith ierr = PetscMalloc2(nz,PetscScalar,&gmataa,nz,PetscInt,&gmataj);CHKERRQ(ierr); 237dd6ea824SBarry Smith ierr = MPI_Recv(gmataj,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 238dd6ea824SBarry Smith /* determine number diagonal and off-diagonal counts */ 239dd6ea824SBarry Smith ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr); 240dd6ea824SBarry Smith ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr); 241dd6ea824SBarry Smith ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr); 242dd6ea824SBarry Smith jj = 0; 243dd6ea824SBarry Smith for (i=0; i<m; i++) { 244dd6ea824SBarry Smith for (j=0; j<dlens[i]; j++) { 245dd6ea824SBarry Smith if (gmataj[jj] < rstart) ld[i]++; 246dd6ea824SBarry Smith if (gmataj[jj] < rstart || gmataj[jj] >= rend) olens[i]++; 247dd6ea824SBarry Smith jj++; 248dd6ea824SBarry Smith } 249dd6ea824SBarry Smith } 250dd6ea824SBarry Smith /* receive numerical values */ 251dd6ea824SBarry Smith ierr = PetscMemzero(gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); 252dd6ea824SBarry Smith ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr); 253dd6ea824SBarry Smith } 254dd6ea824SBarry Smith /* set preallocation */ 255dd6ea824SBarry Smith for (i=0; i<m; i++) { 256dd6ea824SBarry Smith dlens[i] -= olens[i]; 257dd6ea824SBarry Smith } 258dd6ea824SBarry Smith ierr = MatSeqAIJSetPreallocation(mat,0,dlens);CHKERRQ(ierr); 259dd6ea824SBarry Smith ierr = 
MatMPIAIJSetPreallocation(mat,0,dlens,0,olens);CHKERRQ(ierr); 260dd6ea824SBarry Smith 261dd6ea824SBarry Smith for (i=0; i<m; i++) { 262dd6ea824SBarry Smith dlens[i] += olens[i]; 263dd6ea824SBarry Smith } 264dd6ea824SBarry Smith cnt = 0; 265dd6ea824SBarry Smith for (i=0; i<m; i++) { 266dd6ea824SBarry Smith row = rstart + i; 267dd6ea824SBarry Smith ierr = MatSetValues(mat,1,&row,dlens[i],gmataj+cnt,gmataa+cnt,INSERT_VALUES);CHKERRQ(ierr); 268dd6ea824SBarry Smith cnt += dlens[i]; 269dd6ea824SBarry Smith } 270dd6ea824SBarry Smith if (rank) { 271dd6ea824SBarry Smith ierr = PetscFree2(gmataa,gmataj);CHKERRQ(ierr); 272dd6ea824SBarry Smith } 273dd6ea824SBarry Smith ierr = PetscFree2(dlens,olens);CHKERRQ(ierr); 274dd6ea824SBarry Smith ierr = PetscFree(rowners);CHKERRQ(ierr); 275dd6ea824SBarry Smith ((Mat_MPIAIJ*)(mat->data))->ld = ld; 276dd6ea824SBarry Smith *inmat = mat; 277dd6ea824SBarry Smith } else { /* column indices are already set; only need to move over numerical values from process 0 */ 278dd6ea824SBarry Smith Mat_SeqAIJ *Ad = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->A->data; 279dd6ea824SBarry Smith Mat_SeqAIJ *Ao = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->B->data; 280dd6ea824SBarry Smith mat = *inmat; 281dd6ea824SBarry Smith ierr = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr); 282dd6ea824SBarry Smith if (!rank) { 283dd6ea824SBarry Smith /* send numerical values to other processes */ 284dd6ea824SBarry Smith gmata = (Mat_SeqAIJ*) gmat->data; 285dd6ea824SBarry Smith ierr = MatGetOwnershipRanges(mat,(const PetscInt**)&rowners);CHKERRQ(ierr); 286dd6ea824SBarry Smith gmataa = gmata->a; 287dd6ea824SBarry Smith for (i=1; i<size; i++) { 288dd6ea824SBarry Smith nz = gmata->i[rowners[i+1]]-gmata->i[rowners[i]]; 289dd6ea824SBarry Smith ierr = MPI_Send(gmataa + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr); 290dd6ea824SBarry Smith } 291dd6ea824SBarry Smith nz = gmata->i[rowners[1]]-gmata->i[rowners[0]]; 292dd6ea824SBarry Smith } else { 
293dd6ea824SBarry Smith /* receive numerical values from process 0*/ 294dd6ea824SBarry Smith nz = Ad->nz + Ao->nz; 295dd6ea824SBarry Smith ierr = PetscMalloc(nz*sizeof(PetscScalar),&gmataa);CHKERRQ(ierr); gmataarestore = gmataa; 296dd6ea824SBarry Smith ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr); 297dd6ea824SBarry Smith } 298dd6ea824SBarry Smith /* transfer numerical values into the diagonal A and off diagonal B parts of mat */ 299dd6ea824SBarry Smith ld = ((Mat_MPIAIJ*)(mat->data))->ld; 300dd6ea824SBarry Smith ad = Ad->a; 301dd6ea824SBarry Smith ao = Ao->a; 302d0f46423SBarry Smith if (mat->rmap->n) { 303dd6ea824SBarry Smith i = 0; 304dd6ea824SBarry Smith nz = ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz; 305dd6ea824SBarry Smith nz = Ad->i[i+1] - Ad->i[i]; ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz; 306dd6ea824SBarry Smith } 307d0f46423SBarry Smith for (i=1; i<mat->rmap->n; i++) { 308dd6ea824SBarry Smith nz = Ao->i[i] - Ao->i[i-1] - ld[i-1] + ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz; 309dd6ea824SBarry Smith nz = Ad->i[i+1] - Ad->i[i]; ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz; 310dd6ea824SBarry Smith } 311dd6ea824SBarry Smith i--; 312d0f46423SBarry Smith if (mat->rmap->n) { 313dd6ea824SBarry Smith nz = Ao->i[i+1] - Ao->i[i] - ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz; 314dd6ea824SBarry Smith } 315dd6ea824SBarry Smith if (rank) { 316dd6ea824SBarry Smith ierr = PetscFree(gmataarestore);CHKERRQ(ierr); 317dd6ea824SBarry Smith } 318dd6ea824SBarry Smith } 319dd6ea824SBarry Smith ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 320dd6ea824SBarry Smith ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 321dd6ea824SBarry Smith CHKMEMQ; 322dd6ea824SBarry Smith 
PetscFunctionReturn(0); 323dd6ea824SBarry Smith } 324dd6ea824SBarry Smith 3250f5bd95cSBarry Smith /* 3260f5bd95cSBarry Smith Local utility routine that creates a mapping from the global column 3279e25ed09SBarry Smith number to the local number in the off-diagonal part of the local 3280f5bd95cSBarry Smith storage of the matrix. When PETSC_USE_CTABLE is used this is scalable at 3290f5bd95cSBarry Smith a slightly higher hash table cost; without it it is not scalable (each processor 3300f5bd95cSBarry Smith has an order N integer array but is fast to acess. 3319e25ed09SBarry Smith */ 3324a2ae208SSatish Balay #undef __FUNCT__ 3334a2ae208SSatish Balay #define __FUNCT__ "CreateColmap_MPIAIJ_Private" 334dfbe8321SBarry Smith PetscErrorCode CreateColmap_MPIAIJ_Private(Mat mat) 3359e25ed09SBarry Smith { 33644a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 3376849ba73SBarry Smith PetscErrorCode ierr; 338d0f46423SBarry Smith PetscInt n = aij->B->cmap->n,i; 339dbb450caSBarry Smith 3403a40ed3dSBarry Smith PetscFunctionBegin; 341aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 342273d9f13SBarry Smith ierr = PetscTableCreate(n,&aij->colmap);CHKERRQ(ierr); 343b1fc9764SSatish Balay for (i=0; i<n; i++){ 3440f5bd95cSBarry Smith ierr = PetscTableAdd(aij->colmap,aij->garray[i]+1,i+1);CHKERRQ(ierr); 345b1fc9764SSatish Balay } 346b1fc9764SSatish Balay #else 347d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscInt),&aij->colmap);CHKERRQ(ierr); 348d0f46423SBarry Smith ierr = PetscLogObjectMemory(mat,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr); 349d0f46423SBarry Smith ierr = PetscMemzero(aij->colmap,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr); 350905e6a2fSBarry Smith for (i=0; i<n; i++) aij->colmap[aij->garray[i]] = i+1; 351b1fc9764SSatish Balay #endif 3523a40ed3dSBarry Smith PetscFunctionReturn(0); 3539e25ed09SBarry Smith } 3549e25ed09SBarry Smith 35530770e4dSSatish Balay #define MatSetValues_SeqAIJ_A_Private(row,col,value,addv) \ 
3560520107fSSatish Balay { \ 3577cd84e04SBarry Smith if (col <= lastcol1) low1 = 0; else high1 = nrow1; \ 358fd3458f5SBarry Smith lastcol1 = col;\ 359fd3458f5SBarry Smith while (high1-low1 > 5) { \ 360fd3458f5SBarry Smith t = (low1+high1)/2; \ 361fd3458f5SBarry Smith if (rp1[t] > col) high1 = t; \ 362fd3458f5SBarry Smith else low1 = t; \ 363ba4e3ef2SSatish Balay } \ 364fd3458f5SBarry Smith for (_i=low1; _i<high1; _i++) { \ 365fd3458f5SBarry Smith if (rp1[_i] > col) break; \ 366fd3458f5SBarry Smith if (rp1[_i] == col) { \ 367fd3458f5SBarry Smith if (addv == ADD_VALUES) ap1[_i] += value; \ 368fd3458f5SBarry Smith else ap1[_i] = value; \ 36930770e4dSSatish Balay goto a_noinsert; \ 3700520107fSSatish Balay } \ 3710520107fSSatish Balay } \ 372e44c0bd4SBarry Smith if (value == 0.0 && ignorezeroentries) {low1 = 0; high1 = nrow1;goto a_noinsert;} \ 373e44c0bd4SBarry Smith if (nonew == 1) {low1 = 0; high1 = nrow1; goto a_noinsert;} \ 374e32f2f54SBarry Smith if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \ 375fef13f97SBarry Smith MatSeqXAIJReallocateAIJ(A,am,1,nrow1,row,col,rmax1,aa,ai,aj,rp1,ap1,aimax,nonew,MatScalar); \ 376669a8dbcSSatish Balay N = nrow1++ - 1; a->nz++; high1++; \ 3770520107fSSatish Balay /* shift up all the later entries in this row */ \ 3780520107fSSatish Balay for (ii=N; ii>=_i; ii--) { \ 379fd3458f5SBarry Smith rp1[ii+1] = rp1[ii]; \ 380fd3458f5SBarry Smith ap1[ii+1] = ap1[ii]; \ 3810520107fSSatish Balay } \ 382fd3458f5SBarry Smith rp1[_i] = col; \ 383fd3458f5SBarry Smith ap1[_i] = value; \ 38430770e4dSSatish Balay a_noinsert: ; \ 385fd3458f5SBarry Smith ailen[row] = nrow1; \ 3860520107fSSatish Balay } 3870a198c4cSBarry Smith 388085a36d4SBarry Smith 38930770e4dSSatish Balay #define MatSetValues_SeqAIJ_B_Private(row,col,value,addv) \ 39030770e4dSSatish Balay { \ 3917cd84e04SBarry Smith if (col <= lastcol2) low2 = 0; else high2 = nrow2; \ 392fd3458f5SBarry Smith lastcol2 = 
col;\ 393fd3458f5SBarry Smith while (high2-low2 > 5) { \ 394fd3458f5SBarry Smith t = (low2+high2)/2; \ 395fd3458f5SBarry Smith if (rp2[t] > col) high2 = t; \ 396fd3458f5SBarry Smith else low2 = t; \ 397ba4e3ef2SSatish Balay } \ 398fd3458f5SBarry Smith for (_i=low2; _i<high2; _i++) { \ 399fd3458f5SBarry Smith if (rp2[_i] > col) break; \ 400fd3458f5SBarry Smith if (rp2[_i] == col) { \ 401fd3458f5SBarry Smith if (addv == ADD_VALUES) ap2[_i] += value; \ 402fd3458f5SBarry Smith else ap2[_i] = value; \ 40330770e4dSSatish Balay goto b_noinsert; \ 40430770e4dSSatish Balay } \ 40530770e4dSSatish Balay } \ 406e44c0bd4SBarry Smith if (value == 0.0 && ignorezeroentries) {low2 = 0; high2 = nrow2; goto b_noinsert;} \ 407e44c0bd4SBarry Smith if (nonew == 1) {low2 = 0; high2 = nrow2; goto b_noinsert;} \ 408e32f2f54SBarry Smith if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \ 409fef13f97SBarry Smith MatSeqXAIJReallocateAIJ(B,bm,1,nrow2,row,col,rmax2,ba,bi,bj,rp2,ap2,bimax,nonew,MatScalar); \ 410669a8dbcSSatish Balay N = nrow2++ - 1; b->nz++; high2++; \ 41130770e4dSSatish Balay /* shift up all the later entries in this row */ \ 41230770e4dSSatish Balay for (ii=N; ii>=_i; ii--) { \ 413fd3458f5SBarry Smith rp2[ii+1] = rp2[ii]; \ 414fd3458f5SBarry Smith ap2[ii+1] = ap2[ii]; \ 41530770e4dSSatish Balay } \ 416fd3458f5SBarry Smith rp2[_i] = col; \ 417fd3458f5SBarry Smith ap2[_i] = value; \ 41830770e4dSSatish Balay b_noinsert: ; \ 419fd3458f5SBarry Smith bilen[row] = nrow2; \ 42030770e4dSSatish Balay } 42130770e4dSSatish Balay 4224a2ae208SSatish Balay #undef __FUNCT__ 4232fd7e33dSBarry Smith #define __FUNCT__ "MatSetValuesRow_MPIAIJ" 4242fd7e33dSBarry Smith PetscErrorCode MatSetValuesRow_MPIAIJ(Mat A,PetscInt row,const PetscScalar v[]) 4252fd7e33dSBarry Smith { 4262fd7e33dSBarry Smith Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data; 4272fd7e33dSBarry Smith Mat_SeqAIJ *a = (Mat_SeqAIJ*)mat->A->data,*b = 
(Mat_SeqAIJ*)mat->B->data; 4282fd7e33dSBarry Smith PetscErrorCode ierr; 4292fd7e33dSBarry Smith PetscInt l,*garray = mat->garray,diag; 4302fd7e33dSBarry Smith 4312fd7e33dSBarry Smith PetscFunctionBegin; 4322fd7e33dSBarry Smith /* code only works for square matrices A */ 4332fd7e33dSBarry Smith 4342fd7e33dSBarry Smith /* find size of row to the left of the diagonal part */ 4352fd7e33dSBarry Smith ierr = MatGetOwnershipRange(A,&diag,0);CHKERRQ(ierr); 4362fd7e33dSBarry Smith row = row - diag; 4372fd7e33dSBarry Smith for (l=0; l<b->i[row+1]-b->i[row]; l++) { 4382fd7e33dSBarry Smith if (garray[b->j[b->i[row]+l]] > diag) break; 4392fd7e33dSBarry Smith } 4402fd7e33dSBarry Smith ierr = PetscMemcpy(b->a+b->i[row],v,l*sizeof(PetscScalar));CHKERRQ(ierr); 4412fd7e33dSBarry Smith 4422fd7e33dSBarry Smith /* diagonal part */ 4432fd7e33dSBarry Smith ierr = PetscMemcpy(a->a+a->i[row],v+l,(a->i[row+1]-a->i[row])*sizeof(PetscScalar));CHKERRQ(ierr); 4442fd7e33dSBarry Smith 4452fd7e33dSBarry Smith /* right of diagonal part */ 4462fd7e33dSBarry Smith ierr = PetscMemcpy(b->a+b->i[row]+l,v+l+a->i[row+1]-a->i[row],(b->i[row+1]-b->i[row]-l)*sizeof(PetscScalar));CHKERRQ(ierr); 4472fd7e33dSBarry Smith PetscFunctionReturn(0); 4482fd7e33dSBarry Smith } 4492fd7e33dSBarry Smith 4502fd7e33dSBarry Smith #undef __FUNCT__ 4514a2ae208SSatish Balay #define __FUNCT__ "MatSetValues_MPIAIJ" 452b1d57f15SBarry Smith PetscErrorCode MatSetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv) 4538a729477SBarry Smith { 45444a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 45587828ca2SBarry Smith PetscScalar value; 456dfbe8321SBarry Smith PetscErrorCode ierr; 457d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend; 458d0f46423SBarry Smith PetscInt cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col; 459ace3abfcSBarry Smith PetscBool roworiented = aij->roworiented; 4608a729477SBarry Smith 
4610520107fSSatish Balay /* Some Variables required in the macro */ 4624ee7247eSSatish Balay Mat A = aij->A; 4634ee7247eSSatish Balay Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data; 46457809a77SBarry Smith PetscInt *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j; 465a77337e4SBarry Smith MatScalar *aa = a->a; 466ace3abfcSBarry Smith PetscBool ignorezeroentries = a->ignorezeroentries; 46730770e4dSSatish Balay Mat B = aij->B; 46830770e4dSSatish Balay Mat_SeqAIJ *b = (Mat_SeqAIJ*)B->data; 469d0f46423SBarry Smith PetscInt *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n; 470a77337e4SBarry Smith MatScalar *ba = b->a; 47130770e4dSSatish Balay 472fd3458f5SBarry Smith PetscInt *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2; 473fd3458f5SBarry Smith PetscInt nonew = a->nonew; 474a77337e4SBarry Smith MatScalar *ap1,*ap2; 4754ee7247eSSatish Balay 4763a40ed3dSBarry Smith PetscFunctionBegin; 47771fd2e92SBarry Smith if (v) PetscValidScalarPointer(v,6); 4788a729477SBarry Smith for (i=0; i<m; i++) { 4795ef9f2a5SBarry Smith if (im[i] < 0) continue; 4802515c552SBarry Smith #if defined(PETSC_USE_DEBUG) 481e32f2f54SBarry Smith if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1); 4820a198c4cSBarry Smith #endif 4834b0e389bSBarry Smith if (im[i] >= rstart && im[i] < rend) { 4844b0e389bSBarry Smith row = im[i] - rstart; 485fd3458f5SBarry Smith lastcol1 = -1; 486fd3458f5SBarry Smith rp1 = aj + ai[row]; 487fd3458f5SBarry Smith ap1 = aa + ai[row]; 488fd3458f5SBarry Smith rmax1 = aimax[row]; 489fd3458f5SBarry Smith nrow1 = ailen[row]; 490fd3458f5SBarry Smith low1 = 0; 491fd3458f5SBarry Smith high1 = nrow1; 492fd3458f5SBarry Smith lastcol2 = -1; 493fd3458f5SBarry Smith rp2 = bj + bi[row]; 494d498b1e9SBarry Smith ap2 = ba + bi[row]; 495fd3458f5SBarry Smith rmax2 = bimax[row]; 496d498b1e9SBarry Smith nrow2 = bilen[row]; 497fd3458f5SBarry 
Smith low2 = 0; 498fd3458f5SBarry Smith high2 = nrow2; 499fd3458f5SBarry Smith 5001eb62cbbSBarry Smith for (j=0; j<n; j++) { 50116371a99SBarry Smith if (v) {if (roworiented) value = v[i*n+j]; else value = v[i+j*m];} else value = 0.0; 502abc0a331SBarry Smith if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue; 503fd3458f5SBarry Smith if (in[j] >= cstart && in[j] < cend){ 504fd3458f5SBarry Smith col = in[j] - cstart; 50530770e4dSSatish Balay MatSetValues_SeqAIJ_A_Private(row,col,value,addv); 506273d9f13SBarry Smith } else if (in[j] < 0) continue; 5072515c552SBarry Smith #if defined(PETSC_USE_DEBUG) 508cb9801acSJed Brown else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1); 5090a198c4cSBarry Smith #endif 5101eb62cbbSBarry Smith else { 511227d817aSBarry Smith if (mat->was_assembled) { 512905e6a2fSBarry Smith if (!aij->colmap) { 513905e6a2fSBarry Smith ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 514905e6a2fSBarry Smith } 515aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 5160f5bd95cSBarry Smith ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr); 517fa46199cSSatish Balay col--; 518b1fc9764SSatish Balay #else 519905e6a2fSBarry Smith col = aij->colmap[in[j]] - 1; 520b1fc9764SSatish Balay #endif 521ec8511deSBarry Smith if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) { 5222493cbb0SBarry Smith ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr); 5234b0e389bSBarry Smith col = in[j]; 5249bf004c3SSatish Balay /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */ 525f9508a3cSSatish Balay B = aij->B; 526f9508a3cSSatish Balay b = (Mat_SeqAIJ*)B->data; 527e44c0bd4SBarry Smith bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; ba = b->a; 528d498b1e9SBarry Smith rp2 = bj + bi[row]; 529d498b1e9SBarry Smith ap2 = ba + bi[row]; 530d498b1e9SBarry Smith rmax2 = bimax[row]; 531d498b1e9SBarry Smith nrow2 = bilen[row]; 532d498b1e9SBarry 
Smith low2 = 0; 533d498b1e9SBarry Smith high2 = nrow2; 534d0f46423SBarry Smith bm = aij->B->rmap->n; 535f9508a3cSSatish Balay ba = b->a; 536d6dfbf8fSBarry Smith } 537c48de900SBarry Smith } else col = in[j]; 53830770e4dSSatish Balay MatSetValues_SeqAIJ_B_Private(row,col,value,addv); 5391eb62cbbSBarry Smith } 5401eb62cbbSBarry Smith } 5415ef9f2a5SBarry Smith } else { 5424cb17eb5SBarry Smith if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]); 54390f02eecSBarry Smith if (!aij->donotstash) { 544d36fbae8SSatish Balay if (roworiented) { 545ace3abfcSBarry Smith ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 546d36fbae8SSatish Balay } else { 547ace3abfcSBarry Smith ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 5484b0e389bSBarry Smith } 5491eb62cbbSBarry Smith } 5508a729477SBarry Smith } 55190f02eecSBarry Smith } 5523a40ed3dSBarry Smith PetscFunctionReturn(0); 5538a729477SBarry Smith } 5548a729477SBarry Smith 5554a2ae208SSatish Balay #undef __FUNCT__ 5564a2ae208SSatish Balay #define __FUNCT__ "MatGetValues_MPIAIJ" 557b1d57f15SBarry Smith PetscErrorCode MatGetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[]) 558b49de8d1SLois Curfman McInnes { 559b49de8d1SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 560dfbe8321SBarry Smith PetscErrorCode ierr; 561d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend; 562d0f46423SBarry Smith PetscInt cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col; 563b49de8d1SLois Curfman McInnes 5643a40ed3dSBarry Smith PetscFunctionBegin; 565b49de8d1SLois Curfman McInnes for (i=0; i<m; i++) { 566e32f2f54SBarry Smith if (idxm[i] < 0) continue; /* 
SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/ 567e32f2f54SBarry Smith if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1); 568b49de8d1SLois Curfman McInnes if (idxm[i] >= rstart && idxm[i] < rend) { 569b49de8d1SLois Curfman McInnes row = idxm[i] - rstart; 570b49de8d1SLois Curfman McInnes for (j=0; j<n; j++) { 571e32f2f54SBarry Smith if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */ 572e32f2f54SBarry Smith if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1); 573b49de8d1SLois Curfman McInnes if (idxn[j] >= cstart && idxn[j] < cend){ 574b49de8d1SLois Curfman McInnes col = idxn[j] - cstart; 575b49de8d1SLois Curfman McInnes ierr = MatGetValues(aij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr); 576fa852ad4SSatish Balay } else { 577905e6a2fSBarry Smith if (!aij->colmap) { 578905e6a2fSBarry Smith ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 579905e6a2fSBarry Smith } 580aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 5810f5bd95cSBarry Smith ierr = PetscTableFind(aij->colmap,idxn[j]+1,&col);CHKERRQ(ierr); 582fa46199cSSatish Balay col --; 583b1fc9764SSatish Balay #else 584905e6a2fSBarry Smith col = aij->colmap[idxn[j]] - 1; 585b1fc9764SSatish Balay #endif 586e60e1c95SSatish Balay if ((col < 0) || (aij->garray[col] != idxn[j])) *(v+i*n+j) = 0.0; 587d9d09a02SSatish Balay else { 588b49de8d1SLois Curfman McInnes ierr = MatGetValues(aij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr); 589b49de8d1SLois Curfman McInnes } 590b49de8d1SLois Curfman McInnes } 591b49de8d1SLois Curfman McInnes } 592a8c6a408SBarry Smith } else { 593e32f2f54SBarry Smith SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported"); 594b49de8d1SLois Curfman McInnes } 595b49de8d1SLois Curfman McInnes } 5963a40ed3dSBarry Smith 
PetscFunctionReturn(0); 597b49de8d1SLois Curfman McInnes } 598bc5ccf88SSatish Balay 599bd0c2dcbSBarry Smith extern PetscErrorCode MatMultDiagonalBlock_MPIAIJ(Mat,Vec,Vec); 600bd0c2dcbSBarry Smith 6014a2ae208SSatish Balay #undef __FUNCT__ 6024a2ae208SSatish Balay #define __FUNCT__ "MatAssemblyBegin_MPIAIJ" 603dfbe8321SBarry Smith PetscErrorCode MatAssemblyBegin_MPIAIJ(Mat mat,MatAssemblyType mode) 604bc5ccf88SSatish Balay { 605bc5ccf88SSatish Balay Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 606dfbe8321SBarry Smith PetscErrorCode ierr; 607b1d57f15SBarry Smith PetscInt nstash,reallocs; 608bc5ccf88SSatish Balay InsertMode addv; 609bc5ccf88SSatish Balay 610bc5ccf88SSatish Balay PetscFunctionBegin; 6114cb17eb5SBarry Smith if (aij->donotstash || mat->nooffprocentries) { 612bc5ccf88SSatish Balay PetscFunctionReturn(0); 613bc5ccf88SSatish Balay } 614bc5ccf88SSatish Balay 615bc5ccf88SSatish Balay /* make sure all processors are either in INSERTMODE or ADDMODE */ 6167adad957SLisandro Dalcin ierr = MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,((PetscObject)mat)->comm);CHKERRQ(ierr); 617e7e72b3dSBarry Smith if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(((PetscObject)mat)->comm,PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added"); 618bc5ccf88SSatish Balay mat->insertmode = addv; /* in case this processor had no cache */ 619bc5ccf88SSatish Balay 620d0f46423SBarry Smith ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr); 6218798bf22SSatish Balay ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr); 622ae15b995SBarry Smith ierr = PetscInfo2(aij->A,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr); 623bc5ccf88SSatish Balay PetscFunctionReturn(0); 624bc5ccf88SSatish Balay } 625bc5ccf88SSatish Balay 6264a2ae208SSatish Balay #undef __FUNCT__ 6274a2ae208SSatish Balay #define __FUNCT__ "MatAssemblyEnd_MPIAIJ" 628dfbe8321SBarry Smith PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat 
mat,MatAssemblyType mode) 629bc5ccf88SSatish Balay { 630bc5ccf88SSatish Balay Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 63191c97fd4SSatish Balay Mat_SeqAIJ *a=(Mat_SeqAIJ *)aij->A->data; 6326849ba73SBarry Smith PetscErrorCode ierr; 633b1d57f15SBarry Smith PetscMPIInt n; 634b1d57f15SBarry Smith PetscInt i,j,rstart,ncols,flg; 635e44c0bd4SBarry Smith PetscInt *row,*col; 636ace3abfcSBarry Smith PetscBool other_disassembled; 63787828ca2SBarry Smith PetscScalar *val; 638bc5ccf88SSatish Balay InsertMode addv = mat->insertmode; 639bc5ccf88SSatish Balay 64091c97fd4SSatish Balay /* do not use 'b = (Mat_SeqAIJ *)aij->B->data' as B can be reset in disassembly */ 641bc5ccf88SSatish Balay PetscFunctionBegin; 6424cb17eb5SBarry Smith if (!aij->donotstash && !mat->nooffprocentries) { 643a2d1c673SSatish Balay while (1) { 6448798bf22SSatish Balay ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr); 645a2d1c673SSatish Balay if (!flg) break; 646a2d1c673SSatish Balay 647bc5ccf88SSatish Balay for (i=0; i<n;) { 648bc5ccf88SSatish Balay /* Now identify the consecutive vals belonging to the same row */ 649bc5ccf88SSatish Balay for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; } 650bc5ccf88SSatish Balay if (j < n) ncols = j-i; 651bc5ccf88SSatish Balay else ncols = n-i; 652bc5ccf88SSatish Balay /* Now assemble all these values with a single function call */ 653bc5ccf88SSatish Balay ierr = MatSetValues_MPIAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr); 654bc5ccf88SSatish Balay i = j; 655bc5ccf88SSatish Balay } 656bc5ccf88SSatish Balay } 6578798bf22SSatish Balay ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr); 658bc5ccf88SSatish Balay } 659bc5ccf88SSatish Balay ierr = MatAssemblyBegin(aij->A,mode);CHKERRQ(ierr); 660bc5ccf88SSatish Balay ierr = MatAssemblyEnd(aij->A,mode);CHKERRQ(ierr); 661bc5ccf88SSatish Balay 662bc5ccf88SSatish Balay /* determine if any processor has disassembled, if so we must 663bc5ccf88SSatish Balay also 
disassemble ourselfs, in order that we may reassemble. */ 664bc5ccf88SSatish Balay /* 665bc5ccf88SSatish Balay if nonzero structure of submatrix B cannot change then we know that 666bc5ccf88SSatish Balay no processor disassembled thus we can skip this stuff 667bc5ccf88SSatish Balay */ 668bc5ccf88SSatish Balay if (!((Mat_SeqAIJ*)aij->B->data)->nonew) { 6697adad957SLisandro Dalcin ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPI_INT,MPI_PROD,((PetscObject)mat)->comm);CHKERRQ(ierr); 670bc5ccf88SSatish Balay if (mat->was_assembled && !other_disassembled) { 671bc5ccf88SSatish Balay ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr); 672ad59fb31SSatish Balay } 673ad59fb31SSatish Balay } 674bc5ccf88SSatish Balay if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) { 675bc5ccf88SSatish Balay ierr = MatSetUpMultiply_MPIAIJ(mat);CHKERRQ(ierr); 676bc5ccf88SSatish Balay } 6774e0d8c25SBarry Smith ierr = MatSetOption(aij->B,MAT_USE_INODES,PETSC_FALSE);CHKERRQ(ierr); 6784e35b6f3SSatish Balay ierr = MatSetOption(aij->B,MAT_CHECK_COMPRESSED_ROW,PETSC_FALSE);CHKERRQ(ierr); 679bc5ccf88SSatish Balay ierr = MatAssemblyBegin(aij->B,mode);CHKERRQ(ierr); 680bc5ccf88SSatish Balay ierr = MatAssemblyEnd(aij->B,mode);CHKERRQ(ierr); 681bc5ccf88SSatish Balay 6821d79065fSBarry Smith ierr = PetscFree2(aij->rowvalues,aij->rowindices);CHKERRQ(ierr); 683606d414cSSatish Balay aij->rowvalues = 0; 684a30b2313SHong Zhang 685a30b2313SHong Zhang /* used by MatAXPY() */ 68691c97fd4SSatish Balay a->xtoy = 0; ((Mat_SeqAIJ *)aij->B->data)->xtoy = 0; /* b->xtoy = 0 */ 68791c97fd4SSatish Balay a->XtoY = 0; ((Mat_SeqAIJ *)aij->B->data)->XtoY = 0; /* b->XtoY = 0 */ 688a30b2313SHong Zhang 6896bf464f9SBarry Smith ierr = VecDestroy(&aij->diag);CHKERRQ(ierr); 690bd0c2dcbSBarry Smith if (a->inode.size) mat->ops->multdiagonalblock = MatMultDiagonalBlock_MPIAIJ; 691bc5ccf88SSatish Balay PetscFunctionReturn(0); 692bc5ccf88SSatish Balay } 693bc5ccf88SSatish Balay 6944a2ae208SSatish Balay #undef __FUNCT__ 
6954a2ae208SSatish Balay #define __FUNCT__ "MatZeroEntries_MPIAIJ" 696dfbe8321SBarry Smith PetscErrorCode MatZeroEntries_MPIAIJ(Mat A) 6971eb62cbbSBarry Smith { 69844a69424SLois Curfman McInnes Mat_MPIAIJ *l = (Mat_MPIAIJ*)A->data; 699dfbe8321SBarry Smith PetscErrorCode ierr; 7003a40ed3dSBarry Smith 7013a40ed3dSBarry Smith PetscFunctionBegin; 70278b31e54SBarry Smith ierr = MatZeroEntries(l->A);CHKERRQ(ierr); 70378b31e54SBarry Smith ierr = MatZeroEntries(l->B);CHKERRQ(ierr); 7043a40ed3dSBarry Smith PetscFunctionReturn(0); 7051eb62cbbSBarry Smith } 7061eb62cbbSBarry Smith 7074a2ae208SSatish Balay #undef __FUNCT__ 7084a2ae208SSatish Balay #define __FUNCT__ "MatZeroRows_MPIAIJ" 7092b40b63fSBarry Smith PetscErrorCode MatZeroRows_MPIAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b) 7101eb62cbbSBarry Smith { 71144a69424SLois Curfman McInnes Mat_MPIAIJ *l = (Mat_MPIAIJ*)A->data; 7126849ba73SBarry Smith PetscErrorCode ierr; 7137adad957SLisandro Dalcin PetscMPIInt size = l->size,imdex,n,rank = l->rank,tag = ((PetscObject)A)->tag,lastidx = -1; 714d0f46423SBarry Smith PetscInt i,*owners = A->rmap->range; 715b1d57f15SBarry Smith PetscInt *nprocs,j,idx,nsends,row; 716b1d57f15SBarry Smith PetscInt nmax,*svalues,*starts,*owner,nrecvs; 717b1d57f15SBarry Smith PetscInt *rvalues,count,base,slen,*source; 718d0f46423SBarry Smith PetscInt *lens,*lrows,*values,rstart=A->rmap->rstart; 7197adad957SLisandro Dalcin MPI_Comm comm = ((PetscObject)A)->comm; 7201eb62cbbSBarry Smith MPI_Request *send_waits,*recv_waits; 7211eb62cbbSBarry Smith MPI_Status recv_status,*send_status; 72297b48c8fSBarry Smith const PetscScalar *xx; 72397b48c8fSBarry Smith PetscScalar *bb; 7246543fbbaSBarry Smith #if defined(PETSC_DEBUG) 725ace3abfcSBarry Smith PetscBool found = PETSC_FALSE; 7266543fbbaSBarry Smith #endif 7271eb62cbbSBarry Smith 7283a40ed3dSBarry Smith PetscFunctionBegin; 7291eb62cbbSBarry Smith /* first count number of contributors to each processor */ 730b1d57f15SBarry Smith ierr 
= PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); 731b1d57f15SBarry Smith ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr); 732b1d57f15SBarry Smith ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/ 7336543fbbaSBarry Smith j = 0; 7341eb62cbbSBarry Smith for (i=0; i<N; i++) { 7356543fbbaSBarry Smith if (lastidx > (idx = rows[i])) j = 0; 7366543fbbaSBarry Smith lastidx = idx; 7376543fbbaSBarry Smith for (; j<size; j++) { 7381eb62cbbSBarry Smith if (idx >= owners[j] && idx < owners[j+1]) { 7396543fbbaSBarry Smith nprocs[2*j]++; 7406543fbbaSBarry Smith nprocs[2*j+1] = 1; 7416543fbbaSBarry Smith owner[i] = j; 7426543fbbaSBarry Smith #if defined(PETSC_DEBUG) 7436543fbbaSBarry Smith found = PETSC_TRUE; 7446543fbbaSBarry Smith #endif 7456543fbbaSBarry Smith break; 7461eb62cbbSBarry Smith } 7471eb62cbbSBarry Smith } 7486543fbbaSBarry Smith #if defined(PETSC_DEBUG) 749e32f2f54SBarry Smith if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index out of range"); 7506543fbbaSBarry Smith found = PETSC_FALSE; 7516543fbbaSBarry Smith #endif 7521eb62cbbSBarry Smith } 753c1dc657dSBarry Smith nsends = 0; for (i=0; i<size; i++) { nsends += nprocs[2*i+1];} 7541eb62cbbSBarry Smith 7557367270fSBarry Smith if (A->nooffproczerorows) { 7567367270fSBarry Smith if (nsends > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"You called MatSetOption(,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) but set an off process zero row"); 7577367270fSBarry Smith nrecvs = nsends; 7587367270fSBarry Smith nmax = N; 7597367270fSBarry Smith } else { 7601eb62cbbSBarry Smith /* inform other processors of number of messages and max length*/ 761c1dc657dSBarry Smith ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr); 7627367270fSBarry Smith } 7631eb62cbbSBarry Smith 7641eb62cbbSBarry Smith /* post receives: */ 765b1d57f15SBarry Smith ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr); 766b0a32e0cSBarry Smith ierr = 
PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr); 7671eb62cbbSBarry Smith for (i=0; i<nrecvs; i++) { 768b1d57f15SBarry Smith ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr); 7691eb62cbbSBarry Smith } 7701eb62cbbSBarry Smith 7711eb62cbbSBarry Smith /* do sends: 7721eb62cbbSBarry Smith 1) starts[i] gives the starting index in svalues for stuff going to 7731eb62cbbSBarry Smith the ith processor 7741eb62cbbSBarry Smith */ 775b1d57f15SBarry Smith ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr); 776b0a32e0cSBarry Smith ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr); 777b1d57f15SBarry Smith ierr = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr); 7781eb62cbbSBarry Smith starts[0] = 0; 779c1dc657dSBarry Smith for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];} 7801eb62cbbSBarry Smith for (i=0; i<N; i++) { 7811eb62cbbSBarry Smith svalues[starts[owner[i]]++] = rows[i]; 7821eb62cbbSBarry Smith } 7831eb62cbbSBarry Smith 7841eb62cbbSBarry Smith starts[0] = 0; 785c1dc657dSBarry Smith for (i=1; i<size+1; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];} 7861eb62cbbSBarry Smith count = 0; 78717699dbbSLois Curfman McInnes for (i=0; i<size; i++) { 788c1dc657dSBarry Smith if (nprocs[2*i+1]) { 789b1d57f15SBarry Smith ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr); 7901eb62cbbSBarry Smith } 7911eb62cbbSBarry Smith } 792606d414cSSatish Balay ierr = PetscFree(starts);CHKERRQ(ierr); 7931eb62cbbSBarry Smith 79417699dbbSLois Curfman McInnes base = owners[rank]; 7951eb62cbbSBarry Smith 7961eb62cbbSBarry Smith /* wait on receives */ 7971d79065fSBarry Smith ierr = PetscMalloc2(nrecvs,PetscInt,&lens,nrecvs,PetscInt,&source);CHKERRQ(ierr); 7981eb62cbbSBarry Smith count = nrecvs; slen = 0; 7991eb62cbbSBarry Smith while (count) { 800ca161407SBarry Smith ierr = 
MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr); 8011eb62cbbSBarry Smith /* unpack receives into our local space */ 802b1d57f15SBarry Smith ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr); 803d6dfbf8fSBarry Smith source[imdex] = recv_status.MPI_SOURCE; 804d6dfbf8fSBarry Smith lens[imdex] = n; 8051eb62cbbSBarry Smith slen += n; 8061eb62cbbSBarry Smith count--; 8071eb62cbbSBarry Smith } 808606d414cSSatish Balay ierr = PetscFree(recv_waits);CHKERRQ(ierr); 8091eb62cbbSBarry Smith 8101eb62cbbSBarry Smith /* move the data into the send scatter */ 811b1d57f15SBarry Smith ierr = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr); 8121eb62cbbSBarry Smith count = 0; 8131eb62cbbSBarry Smith for (i=0; i<nrecvs; i++) { 8141eb62cbbSBarry Smith values = rvalues + i*nmax; 8151eb62cbbSBarry Smith for (j=0; j<lens[i]; j++) { 8161eb62cbbSBarry Smith lrows[count++] = values[j] - base; 8171eb62cbbSBarry Smith } 8181eb62cbbSBarry Smith } 819606d414cSSatish Balay ierr = PetscFree(rvalues);CHKERRQ(ierr); 8201d79065fSBarry Smith ierr = PetscFree2(lens,source);CHKERRQ(ierr); 821606d414cSSatish Balay ierr = PetscFree(owner);CHKERRQ(ierr); 822606d414cSSatish Balay ierr = PetscFree(nprocs);CHKERRQ(ierr); 8231eb62cbbSBarry Smith 82497b48c8fSBarry Smith /* fix right hand side if needed */ 82597b48c8fSBarry Smith if (x && b) { 82697b48c8fSBarry Smith ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr); 82797b48c8fSBarry Smith ierr = VecGetArray(b,&bb);CHKERRQ(ierr); 828564f14d6SBarry Smith for (i=0; i<slen; i++) { 82997b48c8fSBarry Smith bb[lrows[i]] = diag*xx[lrows[i]]; 83097b48c8fSBarry Smith } 83197b48c8fSBarry Smith ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr); 83297b48c8fSBarry Smith ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr); 83397b48c8fSBarry Smith } 8346eb55b6aSBarry Smith /* 8356eb55b6aSBarry Smith Zero the required rows. 
If the "diagonal block" of the matrix 836a8c7a070SBarry Smith is square and the user wishes to set the diagonal we use separate 8376eb55b6aSBarry Smith code so that MatSetValues() is not called for each diagonal allocating 8386eb55b6aSBarry Smith new memory, thus calling lots of mallocs and slowing things down. 8396eb55b6aSBarry Smith 8406eb55b6aSBarry Smith */ 841e2d53e46SBarry Smith /* must zero l->B before l->A because the (diag) case below may put values into l->B*/ 8422b40b63fSBarry Smith ierr = MatZeroRows(l->B,slen,lrows,0.0,0,0);CHKERRQ(ierr); 843d0f46423SBarry Smith if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) { 8442b40b63fSBarry Smith ierr = MatZeroRows(l->A,slen,lrows,diag,0,0);CHKERRQ(ierr); 845f4df32b1SMatthew Knepley } else if (diag != 0.0) { 8462b40b63fSBarry Smith ierr = MatZeroRows(l->A,slen,lrows,0.0,0,0);CHKERRQ(ierr); 847fa46199cSSatish Balay if (((Mat_SeqAIJ*)l->A->data)->nonew) { 848e32f2f54SBarry Smith SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options\n\ 849512a5fc5SBarry Smith MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR"); 8506525c446SSatish Balay } 851e2d53e46SBarry Smith for (i = 0; i < slen; i++) { 852e2d53e46SBarry Smith row = lrows[i] + rstart; 853f4df32b1SMatthew Knepley ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr); 854e2d53e46SBarry Smith } 855e2d53e46SBarry Smith ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 856e2d53e46SBarry Smith ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 8576eb55b6aSBarry Smith } else { 8582b40b63fSBarry Smith ierr = MatZeroRows(l->A,slen,lrows,0.0,0,0);CHKERRQ(ierr); 8596eb55b6aSBarry Smith } 860606d414cSSatish Balay ierr = PetscFree(lrows);CHKERRQ(ierr); 86172dacd9aSBarry Smith 8621eb62cbbSBarry Smith /* wait on sends */ 8631eb62cbbSBarry Smith if (nsends) { 864b0a32e0cSBarry Smith ierr = 
PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr); 865ca161407SBarry Smith ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr); 866606d414cSSatish Balay ierr = PetscFree(send_status);CHKERRQ(ierr); 8671eb62cbbSBarry Smith } 868606d414cSSatish Balay ierr = PetscFree(send_waits);CHKERRQ(ierr); 869606d414cSSatish Balay ierr = PetscFree(svalues);CHKERRQ(ierr); 8703a40ed3dSBarry Smith PetscFunctionReturn(0); 8711eb62cbbSBarry Smith } 8721eb62cbbSBarry Smith 8734a2ae208SSatish Balay #undef __FUNCT__ 8749c7c4993SBarry Smith #define __FUNCT__ "MatZeroRowsColumns_MPIAIJ" 8759c7c4993SBarry Smith PetscErrorCode MatZeroRowsColumns_MPIAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b) 8769c7c4993SBarry Smith { 8779c7c4993SBarry Smith Mat_MPIAIJ *l = (Mat_MPIAIJ*)A->data; 8789c7c4993SBarry Smith PetscErrorCode ierr; 8799c7c4993SBarry Smith PetscMPIInt size = l->size,imdex,n,rank = l->rank,tag = ((PetscObject)A)->tag,lastidx = -1; 8809c7c4993SBarry Smith PetscInt i,*owners = A->rmap->range; 881564f14d6SBarry Smith PetscInt *nprocs,j,idx,nsends; 8829c7c4993SBarry Smith PetscInt nmax,*svalues,*starts,*owner,nrecvs; 8839c7c4993SBarry Smith PetscInt *rvalues,count,base,slen,*source; 884564f14d6SBarry Smith PetscInt *lens,*lrows,*values,m; 8859c7c4993SBarry Smith MPI_Comm comm = ((PetscObject)A)->comm; 8869c7c4993SBarry Smith MPI_Request *send_waits,*recv_waits; 8879c7c4993SBarry Smith MPI_Status recv_status,*send_status; 8889c7c4993SBarry Smith const PetscScalar *xx; 889564f14d6SBarry Smith PetscScalar *bb,*mask; 890564f14d6SBarry Smith Vec xmask,lmask; 891564f14d6SBarry Smith Mat_SeqAIJ *aij = (Mat_SeqAIJ*)l->B->data; 892564f14d6SBarry Smith const PetscInt *aj, *ii,*ridx; 893564f14d6SBarry Smith PetscScalar *aa; 8949c7c4993SBarry Smith #if defined(PETSC_DEBUG) 8959c7c4993SBarry Smith PetscBool found = PETSC_FALSE; 8969c7c4993SBarry Smith #endif 8979c7c4993SBarry Smith 8989c7c4993SBarry Smith PetscFunctionBegin; 8999c7c4993SBarry 
Smith /* first count number of contributors to each processor */ 9009c7c4993SBarry Smith ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); 9019c7c4993SBarry Smith ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr); 9029c7c4993SBarry Smith ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/ 9039c7c4993SBarry Smith j = 0; 9049c7c4993SBarry Smith for (i=0; i<N; i++) { 9059c7c4993SBarry Smith if (lastidx > (idx = rows[i])) j = 0; 9069c7c4993SBarry Smith lastidx = idx; 9079c7c4993SBarry Smith for (; j<size; j++) { 9089c7c4993SBarry Smith if (idx >= owners[j] && idx < owners[j+1]) { 9099c7c4993SBarry Smith nprocs[2*j]++; 9109c7c4993SBarry Smith nprocs[2*j+1] = 1; 9119c7c4993SBarry Smith owner[i] = j; 9129c7c4993SBarry Smith #if defined(PETSC_DEBUG) 9139c7c4993SBarry Smith found = PETSC_TRUE; 9149c7c4993SBarry Smith #endif 9159c7c4993SBarry Smith break; 9169c7c4993SBarry Smith } 9179c7c4993SBarry Smith } 9189c7c4993SBarry Smith #if defined(PETSC_DEBUG) 9199c7c4993SBarry Smith if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index out of range"); 9209c7c4993SBarry Smith found = PETSC_FALSE; 9219c7c4993SBarry Smith #endif 9229c7c4993SBarry Smith } 9239c7c4993SBarry Smith nsends = 0; for (i=0; i<size; i++) { nsends += nprocs[2*i+1];} 9249c7c4993SBarry Smith 9259c7c4993SBarry Smith /* inform other processors of number of messages and max length*/ 9269c7c4993SBarry Smith ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr); 9279c7c4993SBarry Smith 9289c7c4993SBarry Smith /* post receives: */ 9299c7c4993SBarry Smith ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr); 9309c7c4993SBarry Smith ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr); 9319c7c4993SBarry Smith for (i=0; i<nrecvs; i++) { 9329c7c4993SBarry Smith ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr); 9339c7c4993SBarry Smith } 
9349c7c4993SBarry Smith 9359c7c4993SBarry Smith /* do sends: 9369c7c4993SBarry Smith 1) starts[i] gives the starting index in svalues for stuff going to 9379c7c4993SBarry Smith the ith processor 9389c7c4993SBarry Smith */ 9399c7c4993SBarry Smith ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr); 9409c7c4993SBarry Smith ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr); 9419c7c4993SBarry Smith ierr = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr); 9429c7c4993SBarry Smith starts[0] = 0; 9439c7c4993SBarry Smith for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];} 9449c7c4993SBarry Smith for (i=0; i<N; i++) { 9459c7c4993SBarry Smith svalues[starts[owner[i]]++] = rows[i]; 9469c7c4993SBarry Smith } 9479c7c4993SBarry Smith 9489c7c4993SBarry Smith starts[0] = 0; 9499c7c4993SBarry Smith for (i=1; i<size+1; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];} 9509c7c4993SBarry Smith count = 0; 9519c7c4993SBarry Smith for (i=0; i<size; i++) { 9529c7c4993SBarry Smith if (nprocs[2*i+1]) { 9539c7c4993SBarry Smith ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr); 9549c7c4993SBarry Smith } 9559c7c4993SBarry Smith } 9569c7c4993SBarry Smith ierr = PetscFree(starts);CHKERRQ(ierr); 9579c7c4993SBarry Smith 9589c7c4993SBarry Smith base = owners[rank]; 9599c7c4993SBarry Smith 9609c7c4993SBarry Smith /* wait on receives */ 9619c7c4993SBarry Smith ierr = PetscMalloc2(nrecvs,PetscInt,&lens,nrecvs,PetscInt,&source);CHKERRQ(ierr); 9629c7c4993SBarry Smith count = nrecvs; slen = 0; 9639c7c4993SBarry Smith while (count) { 9649c7c4993SBarry Smith ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr); 9659c7c4993SBarry Smith /* unpack receives into our local space */ 9669c7c4993SBarry Smith ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr); 9679c7c4993SBarry Smith source[imdex] = recv_status.MPI_SOURCE; 9689c7c4993SBarry Smith lens[imdex] = n; 
9699c7c4993SBarry Smith slen += n; 9709c7c4993SBarry Smith count--; 9719c7c4993SBarry Smith } 9729c7c4993SBarry Smith ierr = PetscFree(recv_waits);CHKERRQ(ierr); 9739c7c4993SBarry Smith 9749c7c4993SBarry Smith /* move the data into the send scatter */ 9759c7c4993SBarry Smith ierr = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr); 9769c7c4993SBarry Smith count = 0; 9779c7c4993SBarry Smith for (i=0; i<nrecvs; i++) { 9789c7c4993SBarry Smith values = rvalues + i*nmax; 9799c7c4993SBarry Smith for (j=0; j<lens[i]; j++) { 9809c7c4993SBarry Smith lrows[count++] = values[j] - base; 9819c7c4993SBarry Smith } 9829c7c4993SBarry Smith } 9839c7c4993SBarry Smith ierr = PetscFree(rvalues);CHKERRQ(ierr); 9849c7c4993SBarry Smith ierr = PetscFree2(lens,source);CHKERRQ(ierr); 9859c7c4993SBarry Smith ierr = PetscFree(owner);CHKERRQ(ierr); 9869c7c4993SBarry Smith ierr = PetscFree(nprocs);CHKERRQ(ierr); 987564f14d6SBarry Smith /* lrows are the local rows to be zeroed, slen is the number of local rows */ 9889c7c4993SBarry Smith 989564f14d6SBarry Smith /* zero diagonal part of matrix */ 990564f14d6SBarry Smith ierr = MatZeroRowsColumns(l->A,slen,lrows,diag,x,b);CHKERRQ(ierr); 9919c7c4993SBarry Smith 992564f14d6SBarry Smith /* handle off diagonal part of matrix */ 993564f14d6SBarry Smith ierr = MatGetVecs(A,&xmask,PETSC_NULL);CHKERRQ(ierr); 994564f14d6SBarry Smith ierr = VecDuplicate(l->lvec,&lmask);CHKERRQ(ierr); 995564f14d6SBarry Smith ierr = VecGetArray(xmask,&bb);CHKERRQ(ierr); 9969c7c4993SBarry Smith for (i=0; i<slen; i++) { 997564f14d6SBarry Smith bb[lrows[i]] = 1; 9989c7c4993SBarry Smith } 999564f14d6SBarry Smith ierr = VecRestoreArray(xmask,&bb);CHKERRQ(ierr); 1000564f14d6SBarry Smith ierr = VecScatterBegin(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1001564f14d6SBarry Smith ierr = VecScatterEnd(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 10026bf464f9SBarry Smith ierr = VecDestroy(&xmask);CHKERRQ(ierr); 1003564f14d6SBarry Smith 
ierr = VecScatterBegin(l->Mvctx,x,l->lvec,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1004564f14d6SBarry Smith ierr = VecScatterEnd(l->Mvctx,x,l->lvec,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1005564f14d6SBarry Smith ierr = VecGetArrayRead(l->lvec,&xx);CHKERRQ(ierr); 1006564f14d6SBarry Smith ierr = VecGetArray(lmask,&mask);CHKERRQ(ierr); 1007564f14d6SBarry Smith ierr = VecGetArray(b,&bb);CHKERRQ(ierr); 1008564f14d6SBarry Smith 1009564f14d6SBarry Smith /* remove zeroed rows of off diagonal matrix */ 1010564f14d6SBarry Smith ii = aij->i; 1011564f14d6SBarry Smith for (i=0; i<slen; i++) { 1012564f14d6SBarry Smith ierr = PetscMemzero(aij->a + ii[lrows[i]],(ii[lrows[i]+1] - ii[lrows[i]])*sizeof(PetscScalar));CHKERRQ(ierr); 10139c7c4993SBarry Smith } 1014564f14d6SBarry Smith 1015564f14d6SBarry Smith /* loop over all elements of off process part of matrix zeroing removed columns*/ 1016564f14d6SBarry Smith if (aij->compressedrow.use){ 1017564f14d6SBarry Smith m = aij->compressedrow.nrows; 1018564f14d6SBarry Smith ii = aij->compressedrow.i; 1019564f14d6SBarry Smith ridx = aij->compressedrow.rindex; 1020564f14d6SBarry Smith for (i=0; i<m; i++){ 1021564f14d6SBarry Smith n = ii[i+1] - ii[i]; 1022564f14d6SBarry Smith aj = aij->j + ii[i]; 1023564f14d6SBarry Smith aa = aij->a + ii[i]; 1024564f14d6SBarry Smith 1025564f14d6SBarry Smith for (j=0; j<n; j++) { 102625266a92SSatish Balay if (PetscAbsScalar(mask[*aj])) { 1027564f14d6SBarry Smith bb[*ridx] -= *aa*xx[*aj]; 1028564f14d6SBarry Smith *aa = 0.0; 1029564f14d6SBarry Smith } 1030564f14d6SBarry Smith aa++; 1031564f14d6SBarry Smith aj++; 1032564f14d6SBarry Smith } 1033564f14d6SBarry Smith ridx++; 1034564f14d6SBarry Smith } 1035564f14d6SBarry Smith } else { /* do not use compressed row format */ 1036564f14d6SBarry Smith m = l->B->rmap->n; 1037564f14d6SBarry Smith for (i=0; i<m; i++) { 1038564f14d6SBarry Smith n = ii[i+1] - ii[i]; 1039564f14d6SBarry Smith aj = aij->j + ii[i]; 1040564f14d6SBarry Smith aa = aij->a + ii[i]; 
1041564f14d6SBarry Smith for (j=0; j<n; j++) { 104225266a92SSatish Balay if (PetscAbsScalar(mask[*aj])) { 1043564f14d6SBarry Smith bb[i] -= *aa*xx[*aj]; 1044564f14d6SBarry Smith *aa = 0.0; 1045564f14d6SBarry Smith } 1046564f14d6SBarry Smith aa++; 1047564f14d6SBarry Smith aj++; 1048564f14d6SBarry Smith } 1049564f14d6SBarry Smith } 1050564f14d6SBarry Smith } 1051564f14d6SBarry Smith ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr); 1052564f14d6SBarry Smith ierr = VecRestoreArray(lmask,&mask);CHKERRQ(ierr); 1053564f14d6SBarry Smith ierr = VecRestoreArrayRead(l->lvec,&xx);CHKERRQ(ierr); 10546bf464f9SBarry Smith ierr = VecDestroy(&lmask);CHKERRQ(ierr); 10559c7c4993SBarry Smith ierr = PetscFree(lrows);CHKERRQ(ierr); 10569c7c4993SBarry Smith 10579c7c4993SBarry Smith /* wait on sends */ 10589c7c4993SBarry Smith if (nsends) { 10599c7c4993SBarry Smith ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr); 10609c7c4993SBarry Smith ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr); 10619c7c4993SBarry Smith ierr = PetscFree(send_status);CHKERRQ(ierr); 10629c7c4993SBarry Smith } 10639c7c4993SBarry Smith ierr = PetscFree(send_waits);CHKERRQ(ierr); 10649c7c4993SBarry Smith ierr = PetscFree(svalues);CHKERRQ(ierr); 10659c7c4993SBarry Smith 10669c7c4993SBarry Smith PetscFunctionReturn(0); 10679c7c4993SBarry Smith } 10689c7c4993SBarry Smith 10699c7c4993SBarry Smith #undef __FUNCT__ 10704a2ae208SSatish Balay #define __FUNCT__ "MatMult_MPIAIJ" 1071dfbe8321SBarry Smith PetscErrorCode MatMult_MPIAIJ(Mat A,Vec xx,Vec yy) 10721eb62cbbSBarry Smith { 1073416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1074dfbe8321SBarry Smith PetscErrorCode ierr; 1075b1d57f15SBarry Smith PetscInt nt; 1076416022c9SBarry Smith 10773a40ed3dSBarry Smith PetscFunctionBegin; 1078a2ce50c7SBarry Smith ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr); 107965e19b50SBarry Smith if (nt != A->cmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A (%D) and xx 
(%D)",A->cmap->n,nt); 1080ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1081f830108cSBarry Smith ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr); 1082ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1083f830108cSBarry Smith ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr); 10843a40ed3dSBarry Smith PetscFunctionReturn(0); 10851eb62cbbSBarry Smith } 10861eb62cbbSBarry Smith 10874a2ae208SSatish Balay #undef __FUNCT__ 1088bd0c2dcbSBarry Smith #define __FUNCT__ "MatMultDiagonalBlock_MPIAIJ" 1089bd0c2dcbSBarry Smith PetscErrorCode MatMultDiagonalBlock_MPIAIJ(Mat A,Vec bb,Vec xx) 1090bd0c2dcbSBarry Smith { 1091bd0c2dcbSBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1092bd0c2dcbSBarry Smith PetscErrorCode ierr; 1093bd0c2dcbSBarry Smith 1094bd0c2dcbSBarry Smith PetscFunctionBegin; 1095bd0c2dcbSBarry Smith ierr = MatMultDiagonalBlock(a->A,bb,xx);CHKERRQ(ierr); 1096bd0c2dcbSBarry Smith PetscFunctionReturn(0); 1097bd0c2dcbSBarry Smith } 1098bd0c2dcbSBarry Smith 1099bd0c2dcbSBarry Smith #undef __FUNCT__ 11004a2ae208SSatish Balay #define __FUNCT__ "MatMultAdd_MPIAIJ" 1101dfbe8321SBarry Smith PetscErrorCode MatMultAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz) 1102da3a660dSBarry Smith { 1103416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1104dfbe8321SBarry Smith PetscErrorCode ierr; 11053a40ed3dSBarry Smith 11063a40ed3dSBarry Smith PetscFunctionBegin; 1107ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1108f830108cSBarry Smith ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr); 1109ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1110f830108cSBarry Smith ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr); 11113a40ed3dSBarry Smith PetscFunctionReturn(0); 1112da3a660dSBarry Smith } 
1113da3a660dSBarry Smith 11144a2ae208SSatish Balay #undef __FUNCT__ 11154a2ae208SSatish Balay #define __FUNCT__ "MatMultTranspose_MPIAIJ" 1116dfbe8321SBarry Smith PetscErrorCode MatMultTranspose_MPIAIJ(Mat A,Vec xx,Vec yy) 1117da3a660dSBarry Smith { 1118416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1119dfbe8321SBarry Smith PetscErrorCode ierr; 1120ace3abfcSBarry Smith PetscBool merged; 1121da3a660dSBarry Smith 11223a40ed3dSBarry Smith PetscFunctionBegin; 1123a5ff213dSBarry Smith ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr); 1124da3a660dSBarry Smith /* do nondiagonal part */ 11257c922b88SBarry Smith ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr); 1126a5ff213dSBarry Smith if (!merged) { 1127da3a660dSBarry Smith /* send it on its way */ 1128ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1129da3a660dSBarry Smith /* do local part */ 11307c922b88SBarry Smith ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr); 1131da3a660dSBarry Smith /* receive remote parts: note this assumes the values are not actually */ 1132a5ff213dSBarry Smith /* added in yy until the next line, */ 1133ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1134a5ff213dSBarry Smith } else { 1135a5ff213dSBarry Smith /* do local part */ 1136a5ff213dSBarry Smith ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr); 1137a5ff213dSBarry Smith /* send it on its way */ 1138ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1139a5ff213dSBarry Smith /* values actually were received in the Begin() but we need to call this nop */ 1140ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1141a5ff213dSBarry Smith } 11423a40ed3dSBarry Smith PetscFunctionReturn(0); 1143da3a660dSBarry Smith } 1144da3a660dSBarry Smith 1145cd0d46ebSvictorle 
EXTERN_C_BEGIN 1146cd0d46ebSvictorle #undef __FUNCT__ 11475fbd3699SBarry Smith #define __FUNCT__ "MatIsTranspose_MPIAIJ" 11487087cfbeSBarry Smith PetscErrorCode MatIsTranspose_MPIAIJ(Mat Amat,Mat Bmat,PetscReal tol,PetscBool *f) 1149cd0d46ebSvictorle { 11504f423910Svictorle MPI_Comm comm; 1151cd0d46ebSvictorle Mat_MPIAIJ *Aij = (Mat_MPIAIJ *) Amat->data, *Bij; 115266501d38Svictorle Mat Adia = Aij->A, Bdia, Aoff,Boff,*Aoffs,*Boffs; 1153cd0d46ebSvictorle IS Me,Notme; 11546849ba73SBarry Smith PetscErrorCode ierr; 1155b1d57f15SBarry Smith PetscInt M,N,first,last,*notme,i; 1156b1d57f15SBarry Smith PetscMPIInt size; 1157cd0d46ebSvictorle 1158cd0d46ebSvictorle PetscFunctionBegin; 115942e5f5b4Svictorle 116042e5f5b4Svictorle /* Easy test: symmetric diagonal block */ 116166501d38Svictorle Bij = (Mat_MPIAIJ *) Bmat->data; Bdia = Bij->A; 11625485867bSBarry Smith ierr = MatIsTranspose(Adia,Bdia,tol,f);CHKERRQ(ierr); 1163cd0d46ebSvictorle if (!*f) PetscFunctionReturn(0); 11644f423910Svictorle ierr = PetscObjectGetComm((PetscObject)Amat,&comm);CHKERRQ(ierr); 1165b1d57f15SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 1166b1d57f15SBarry Smith if (size == 1) PetscFunctionReturn(0); 116742e5f5b4Svictorle 116842e5f5b4Svictorle /* Hard test: off-diagonal block. This takes a MatGetSubMatrix. 
*/ 1169cd0d46ebSvictorle ierr = MatGetSize(Amat,&M,&N);CHKERRQ(ierr); 1170cd0d46ebSvictorle ierr = MatGetOwnershipRange(Amat,&first,&last);CHKERRQ(ierr); 1171b1d57f15SBarry Smith ierr = PetscMalloc((N-last+first)*sizeof(PetscInt),¬me);CHKERRQ(ierr); 1172cd0d46ebSvictorle for (i=0; i<first; i++) notme[i] = i; 1173cd0d46ebSvictorle for (i=last; i<M; i++) notme[i-last+first] = i; 117470b3c8c7SBarry Smith ierr = ISCreateGeneral(MPI_COMM_SELF,N-last+first,notme,PETSC_COPY_VALUES,&Notme);CHKERRQ(ierr); 1175268466fbSBarry Smith ierr = ISCreateStride(MPI_COMM_SELF,last-first,first,1,&Me);CHKERRQ(ierr); 1176268466fbSBarry Smith ierr = MatGetSubMatrices(Amat,1,&Me,&Notme,MAT_INITIAL_MATRIX,&Aoffs);CHKERRQ(ierr); 117766501d38Svictorle Aoff = Aoffs[0]; 1178268466fbSBarry Smith ierr = MatGetSubMatrices(Bmat,1,&Notme,&Me,MAT_INITIAL_MATRIX,&Boffs);CHKERRQ(ierr); 117966501d38Svictorle Boff = Boffs[0]; 11805485867bSBarry Smith ierr = MatIsTranspose(Aoff,Boff,tol,f);CHKERRQ(ierr); 118166501d38Svictorle ierr = MatDestroyMatrices(1,&Aoffs);CHKERRQ(ierr); 118266501d38Svictorle ierr = MatDestroyMatrices(1,&Boffs);CHKERRQ(ierr); 11836bf464f9SBarry Smith ierr = ISDestroy(&Me);CHKERRQ(ierr); 11846bf464f9SBarry Smith ierr = ISDestroy(&Notme);CHKERRQ(ierr); 11853e0d0d19SHong Zhang ierr = PetscFree(notme);CHKERRQ(ierr); 1186cd0d46ebSvictorle PetscFunctionReturn(0); 1187cd0d46ebSvictorle } 1188cd0d46ebSvictorle EXTERN_C_END 1189cd0d46ebSvictorle 11904a2ae208SSatish Balay #undef __FUNCT__ 11914a2ae208SSatish Balay #define __FUNCT__ "MatMultTransposeAdd_MPIAIJ" 1192dfbe8321SBarry Smith PetscErrorCode MatMultTransposeAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz) 1193da3a660dSBarry Smith { 1194416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1195dfbe8321SBarry Smith PetscErrorCode ierr; 1196da3a660dSBarry Smith 11973a40ed3dSBarry Smith PetscFunctionBegin; 1198da3a660dSBarry Smith /* do nondiagonal part */ 11997c922b88SBarry Smith ierr = 
(*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr); 1200da3a660dSBarry Smith /* send it on its way */ 1201ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 1202da3a660dSBarry Smith /* do local part */ 12037c922b88SBarry Smith ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr); 1204a5ff213dSBarry Smith /* receive remote parts */ 1205ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 12063a40ed3dSBarry Smith PetscFunctionReturn(0); 1207da3a660dSBarry Smith } 1208da3a660dSBarry Smith 12091eb62cbbSBarry Smith /* 12101eb62cbbSBarry Smith This only works correctly for square matrices where the subblock A->A is the 12111eb62cbbSBarry Smith diagonal block 12121eb62cbbSBarry Smith */ 12134a2ae208SSatish Balay #undef __FUNCT__ 12144a2ae208SSatish Balay #define __FUNCT__ "MatGetDiagonal_MPIAIJ" 1215dfbe8321SBarry Smith PetscErrorCode MatGetDiagonal_MPIAIJ(Mat A,Vec v) 12161eb62cbbSBarry Smith { 1217dfbe8321SBarry Smith PetscErrorCode ierr; 1218416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 12193a40ed3dSBarry Smith 12203a40ed3dSBarry Smith PetscFunctionBegin; 1221e7e72b3dSBarry Smith if (A->rmap->N != A->cmap->N) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block"); 1222e7e72b3dSBarry Smith if (A->rmap->rstart != A->cmap->rstart || A->rmap->rend != A->cmap->rend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"row partition must equal col partition"); 12233a40ed3dSBarry Smith ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr); 12243a40ed3dSBarry Smith PetscFunctionReturn(0); 12251eb62cbbSBarry Smith } 12261eb62cbbSBarry Smith 12274a2ae208SSatish Balay #undef __FUNCT__ 12284a2ae208SSatish Balay #define __FUNCT__ "MatScale_MPIAIJ" 1229f4df32b1SMatthew Knepley PetscErrorCode MatScale_MPIAIJ(Mat A,PetscScalar aa) 1230052efed2SBarry Smith { 1231052efed2SBarry Smith Mat_MPIAIJ *a = 
(Mat_MPIAIJ*)A->data; 1232dfbe8321SBarry Smith PetscErrorCode ierr; 12333a40ed3dSBarry Smith 12343a40ed3dSBarry Smith PetscFunctionBegin; 1235f4df32b1SMatthew Knepley ierr = MatScale(a->A,aa);CHKERRQ(ierr); 1236f4df32b1SMatthew Knepley ierr = MatScale(a->B,aa);CHKERRQ(ierr); 12373a40ed3dSBarry Smith PetscFunctionReturn(0); 1238052efed2SBarry Smith } 1239052efed2SBarry Smith 12404a2ae208SSatish Balay #undef __FUNCT__ 12414a2ae208SSatish Balay #define __FUNCT__ "MatDestroy_MPIAIJ" 1242dfbe8321SBarry Smith PetscErrorCode MatDestroy_MPIAIJ(Mat mat) 12431eb62cbbSBarry Smith { 124444a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 1245dfbe8321SBarry Smith PetscErrorCode ierr; 124683e2fdc7SBarry Smith 12473a40ed3dSBarry Smith PetscFunctionBegin; 1248aa482453SBarry Smith #if defined(PETSC_USE_LOG) 1249d0f46423SBarry Smith PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D",mat->rmap->N,mat->cmap->N); 1250a5a9c739SBarry Smith #endif 12518798bf22SSatish Balay ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr); 12526bf464f9SBarry Smith ierr = VecDestroy(&aij->diag);CHKERRQ(ierr); 12536bf464f9SBarry Smith ierr = MatDestroy(&aij->A);CHKERRQ(ierr); 12546bf464f9SBarry Smith ierr = MatDestroy(&aij->B);CHKERRQ(ierr); 1255aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 12566bc0bbbfSBarry Smith ierr = PetscTableDestroy(&aij->colmap);CHKERRQ(ierr); 1257b1fc9764SSatish Balay #else 125805b42c5fSBarry Smith ierr = PetscFree(aij->colmap);CHKERRQ(ierr); 1259b1fc9764SSatish Balay #endif 126005b42c5fSBarry Smith ierr = PetscFree(aij->garray);CHKERRQ(ierr); 12616bf464f9SBarry Smith ierr = VecDestroy(&aij->lvec);CHKERRQ(ierr); 12626bf464f9SBarry Smith ierr = VecScatterDestroy(&aij->Mvctx);CHKERRQ(ierr); 126303095fedSBarry Smith ierr = PetscFree2(aij->rowvalues,aij->rowindices);CHKERRQ(ierr); 12648aa348c1SBarry Smith ierr = PetscFree(aij->ld);CHKERRQ(ierr); 1265bf0cc555SLisandro Dalcin ierr = PetscFree(mat->data);CHKERRQ(ierr); 1266901853e0SKris Buschelman 
1267dbd8c25aSHong Zhang ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr); 1268901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C","",PETSC_NULL);CHKERRQ(ierr); 1269901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C","",PETSC_NULL);CHKERRQ(ierr); 1270901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C","",PETSC_NULL);CHKERRQ(ierr); 1271901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatIsTranspose_C","",PETSC_NULL);CHKERRQ(ierr); 1272901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr); 1273ff69c46cSKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocationCSR_C","",PETSC_NULL);CHKERRQ(ierr); 1274901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C","",PETSC_NULL);CHKERRQ(ierr); 1275471cc821SHong Zhang ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpiaij_mpisbaij_C","",PETSC_NULL);CHKERRQ(ierr); 12763a40ed3dSBarry Smith PetscFunctionReturn(0); 12771eb62cbbSBarry Smith } 1278ee50ffe9SBarry Smith 12794a2ae208SSatish Balay #undef __FUNCT__ 12808e2fed03SBarry Smith #define __FUNCT__ "MatView_MPIAIJ_Binary" 1281dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_Binary(Mat mat,PetscViewer viewer) 12828e2fed03SBarry Smith { 12838e2fed03SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 12848e2fed03SBarry Smith Mat_SeqAIJ* A = (Mat_SeqAIJ*)aij->A->data; 12858e2fed03SBarry Smith Mat_SeqAIJ* B = (Mat_SeqAIJ*)aij->B->data; 12866849ba73SBarry Smith PetscErrorCode ierr; 128732dcc486SBarry Smith PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag; 12886f69ff64SBarry Smith int fd; 1289a788621eSSatish Balay PetscInt nz,header[4],*row_lengths,*range=0,rlen,i; 1290d0f46423SBarry Smith PetscInt 
nzmax,*column_indices,j,k,col,*garray = aij->garray,cnt,cstart = mat->cmap->rstart,rnz; 12918e2fed03SBarry Smith PetscScalar *column_values; 129285ebf7a4SBarry Smith PetscInt message_count,flowcontrolcount; 12938e2fed03SBarry Smith 12948e2fed03SBarry Smith PetscFunctionBegin; 12957adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr); 12967adad957SLisandro Dalcin ierr = MPI_Comm_size(((PetscObject)mat)->comm,&size);CHKERRQ(ierr); 12978e2fed03SBarry Smith nz = A->nz + B->nz; 1298958c9bccSBarry Smith if (!rank) { 12990700a824SBarry Smith header[0] = MAT_FILE_CLASSID; 1300d0f46423SBarry Smith header[1] = mat->rmap->N; 1301d0f46423SBarry Smith header[2] = mat->cmap->N; 13027adad957SLisandro Dalcin ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr); 13038e2fed03SBarry Smith ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr); 13046f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 13058e2fed03SBarry Smith /* get largest number of rows any processor has */ 1306d0f46423SBarry Smith rlen = mat->rmap->n; 1307d0f46423SBarry Smith range = mat->rmap->range; 13088e2fed03SBarry Smith for (i=1; i<size; i++) { 13098e2fed03SBarry Smith rlen = PetscMax(rlen,range[i+1] - range[i]); 13108e2fed03SBarry Smith } 13118e2fed03SBarry Smith } else { 13127adad957SLisandro Dalcin ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr); 1313d0f46423SBarry Smith rlen = mat->rmap->n; 13148e2fed03SBarry Smith } 13158e2fed03SBarry Smith 13168e2fed03SBarry Smith /* load up the local row counts */ 1317b1d57f15SBarry Smith ierr = PetscMalloc((rlen+1)*sizeof(PetscInt),&row_lengths);CHKERRQ(ierr); 1318d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 13198e2fed03SBarry Smith row_lengths[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i]; 13208e2fed03SBarry Smith } 13218e2fed03SBarry Smith 13228e2fed03SBarry Smith /* store the row lengths to 
the file */ 132385ebf7a4SBarry Smith ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr); 1324958c9bccSBarry Smith if (!rank) { 13258e2fed03SBarry Smith MPI_Status status; 1326d0f46423SBarry Smith ierr = PetscBinaryWrite(fd,row_lengths,mat->rmap->n,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 13278e2fed03SBarry Smith for (i=1; i<size; i++) { 132885ebf7a4SBarry Smith ierr = PetscViewerFlowControlStepMaster(viewer,i,message_count,flowcontrolcount);CHKERRQ(ierr); 13298e2fed03SBarry Smith rlen = range[i+1] - range[i]; 1330a1319256SJed Brown ierr = MPI_Recv(row_lengths,rlen,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 13316f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,row_lengths,rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 13328e2fed03SBarry Smith } 133385ebf7a4SBarry Smith ierr = PetscViewerFlowControlEndMaster(viewer,message_count);CHKERRQ(ierr); 13348e2fed03SBarry Smith } else { 133585ebf7a4SBarry Smith ierr = PetscViewerFlowControlStepWorker(viewer,rank,message_count);CHKERRQ(ierr); 1336d0f46423SBarry Smith ierr = MPI_Send(row_lengths,mat->rmap->n,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 133785ebf7a4SBarry Smith ierr = PetscViewerFlowControlEndWorker(viewer,message_count);CHKERRQ(ierr); 13388e2fed03SBarry Smith } 13398e2fed03SBarry Smith ierr = PetscFree(row_lengths);CHKERRQ(ierr); 13408e2fed03SBarry Smith 13418e2fed03SBarry Smith /* load up the local column indices */ 13428e2fed03SBarry Smith nzmax = nz; /* )th processor needs space a largest processor needs */ 13437adad957SLisandro Dalcin ierr = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,((PetscObject)mat)->comm);CHKERRQ(ierr); 1344b1d57f15SBarry Smith ierr = PetscMalloc((nzmax+1)*sizeof(PetscInt),&column_indices);CHKERRQ(ierr); 13458e2fed03SBarry Smith cnt = 0; 1346d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 13478e2fed03SBarry Smith for (j=B->i[i]; j<B->i[i+1]; j++) { 13488e2fed03SBarry Smith if ( (col = garray[B->j[j]]) > cstart) break; 
13498e2fed03SBarry Smith column_indices[cnt++] = col; 13508e2fed03SBarry Smith } 13518e2fed03SBarry Smith for (k=A->i[i]; k<A->i[i+1]; k++) { 13528e2fed03SBarry Smith column_indices[cnt++] = A->j[k] + cstart; 13538e2fed03SBarry Smith } 13548e2fed03SBarry Smith for (; j<B->i[i+1]; j++) { 13558e2fed03SBarry Smith column_indices[cnt++] = garray[B->j[j]]; 13568e2fed03SBarry Smith } 13578e2fed03SBarry Smith } 1358e32f2f54SBarry Smith if (cnt != A->nz + B->nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz); 13598e2fed03SBarry Smith 13608e2fed03SBarry Smith /* store the column indices to the file */ 136185ebf7a4SBarry Smith ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr); 1362958c9bccSBarry Smith if (!rank) { 13638e2fed03SBarry Smith MPI_Status status; 13646f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 13658e2fed03SBarry Smith for (i=1; i<size; i++) { 136685ebf7a4SBarry Smith ierr = PetscViewerFlowControlStepMaster(viewer,i,message_count,flowcontrolcount);CHKERRQ(ierr); 13677adad957SLisandro Dalcin ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 1368e32f2f54SBarry Smith if (rnz > nzmax) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax); 13697adad957SLisandro Dalcin ierr = MPI_Recv(column_indices,rnz,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 13706f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_indices,rnz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 13718e2fed03SBarry Smith } 137285ebf7a4SBarry Smith ierr = PetscViewerFlowControlEndMaster(viewer,message_count);CHKERRQ(ierr); 13738e2fed03SBarry Smith } else { 137485ebf7a4SBarry Smith ierr = PetscViewerFlowControlStepWorker(viewer,rank,message_count);CHKERRQ(ierr); 13757adad957SLisandro Dalcin ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 
13767adad957SLisandro Dalcin ierr = MPI_Send(column_indices,nz,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 137785ebf7a4SBarry Smith ierr = PetscViewerFlowControlEndWorker(viewer,message_count);CHKERRQ(ierr); 13788e2fed03SBarry Smith } 13798e2fed03SBarry Smith ierr = PetscFree(column_indices);CHKERRQ(ierr); 13808e2fed03SBarry Smith 13818e2fed03SBarry Smith /* load up the local column values */ 13828e2fed03SBarry Smith ierr = PetscMalloc((nzmax+1)*sizeof(PetscScalar),&column_values);CHKERRQ(ierr); 13838e2fed03SBarry Smith cnt = 0; 1384d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 13858e2fed03SBarry Smith for (j=B->i[i]; j<B->i[i+1]; j++) { 13868e2fed03SBarry Smith if ( garray[B->j[j]] > cstart) break; 13878e2fed03SBarry Smith column_values[cnt++] = B->a[j]; 13888e2fed03SBarry Smith } 13898e2fed03SBarry Smith for (k=A->i[i]; k<A->i[i+1]; k++) { 13908e2fed03SBarry Smith column_values[cnt++] = A->a[k]; 13918e2fed03SBarry Smith } 13928e2fed03SBarry Smith for (; j<B->i[i+1]; j++) { 13938e2fed03SBarry Smith column_values[cnt++] = B->a[j]; 13948e2fed03SBarry Smith } 13958e2fed03SBarry Smith } 1396e32f2f54SBarry Smith if (cnt != A->nz + B->nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz); 13978e2fed03SBarry Smith 13988e2fed03SBarry Smith /* store the column values to the file */ 139985ebf7a4SBarry Smith ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr); 1400958c9bccSBarry Smith if (!rank) { 14018e2fed03SBarry Smith MPI_Status status; 14026f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr); 14038e2fed03SBarry Smith for (i=1; i<size; i++) { 140485ebf7a4SBarry Smith ierr = PetscViewerFlowControlStepMaster(viewer,i,message_count,flowcontrolcount);CHKERRQ(ierr); 14057adad957SLisandro Dalcin ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 1406e32f2f54SBarry Smith if (rnz > nzmax) 
SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax); 14077adad957SLisandro Dalcin ierr = MPI_Recv(column_values,rnz,MPIU_SCALAR,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 14086f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_values,rnz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr); 14098e2fed03SBarry Smith } 141085ebf7a4SBarry Smith ierr = PetscViewerFlowControlEndMaster(viewer,message_count);CHKERRQ(ierr); 14118e2fed03SBarry Smith } else { 141285ebf7a4SBarry Smith ierr = PetscViewerFlowControlStepWorker(viewer,rank,message_count);CHKERRQ(ierr); 14137adad957SLisandro Dalcin ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 14147adad957SLisandro Dalcin ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 141585ebf7a4SBarry Smith ierr = PetscViewerFlowControlEndWorker(viewer,message_count);CHKERRQ(ierr); 14168e2fed03SBarry Smith } 14178e2fed03SBarry Smith ierr = PetscFree(column_values);CHKERRQ(ierr); 14188e2fed03SBarry Smith PetscFunctionReturn(0); 14198e2fed03SBarry Smith } 14208e2fed03SBarry Smith 14218e2fed03SBarry Smith #undef __FUNCT__ 14224a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ_ASCIIorDraworSocket" 1423dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer) 1424416022c9SBarry Smith { 142544a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 1426dfbe8321SBarry Smith PetscErrorCode ierr; 142732dcc486SBarry Smith PetscMPIInt rank = aij->rank,size = aij->size; 1428ace3abfcSBarry Smith PetscBool isdraw,iascii,isbinary; 1429b0a32e0cSBarry Smith PetscViewer sviewer; 1430f3ef73ceSBarry Smith PetscViewerFormat format; 1431416022c9SBarry Smith 14323a40ed3dSBarry Smith PetscFunctionBegin; 14332692d6eeSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 14342692d6eeSBarry Smith ierr = 
PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 14352692d6eeSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 143632077d6dSBarry Smith if (iascii) { 1437b0a32e0cSBarry Smith ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 1438456192e2SBarry Smith if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) { 14394e220ebcSLois Curfman McInnes MatInfo info; 1440ace3abfcSBarry Smith PetscBool inodes; 1441923f20ffSKris Buschelman 14427adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr); 1443888f2ed8SSatish Balay ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr); 1444923f20ffSKris Buschelman ierr = MatInodeGetInodeSizes(aij->A,PETSC_NULL,(PetscInt **)&inodes,PETSC_NULL);CHKERRQ(ierr); 14457b23a99aSBarry Smith ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr); 1446923f20ffSKris Buschelman if (!inodes) { 144777431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, not using I-node routines\n", 1448d0f46423SBarry Smith rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr); 14496831982aSBarry Smith } else { 145077431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, using I-node routines\n", 1451d0f46423SBarry Smith rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr); 14526831982aSBarry Smith } 1453888f2ed8SSatish Balay ierr = MatGetInfo(aij->A,MAT_LOCAL,&info);CHKERRQ(ierr); 145477431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr); 1455888f2ed8SSatish Balay ierr = MatGetInfo(aij->B,MAT_LOCAL,&info);CHKERRQ(ierr); 145677431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D 
\n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr); 1457b0a32e0cSBarry Smith ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 14587b23a99aSBarry Smith ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr); 145907d81ca4SBarry Smith ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr); 1460a40aa06bSLois Curfman McInnes ierr = VecScatterView(aij->Mvctx,viewer);CHKERRQ(ierr); 14613a40ed3dSBarry Smith PetscFunctionReturn(0); 1462fb9695e5SSatish Balay } else if (format == PETSC_VIEWER_ASCII_INFO) { 1463923f20ffSKris Buschelman PetscInt inodecount,inodelimit,*inodes; 1464923f20ffSKris Buschelman ierr = MatInodeGetInodeSizes(aij->A,&inodecount,&inodes,&inodelimit);CHKERRQ(ierr); 1465923f20ffSKris Buschelman if (inodes) { 1466923f20ffSKris Buschelman ierr = PetscViewerASCIIPrintf(viewer,"using I-node (on process 0) routines: found %D nodes, limit used is %D\n",inodecount,inodelimit);CHKERRQ(ierr); 1467d38fa0fbSBarry Smith } else { 1468d38fa0fbSBarry Smith ierr = PetscViewerASCIIPrintf(viewer,"not using I-node (on process 0) routines\n");CHKERRQ(ierr); 1469d38fa0fbSBarry Smith } 14703a40ed3dSBarry Smith PetscFunctionReturn(0); 14714aedb280SBarry Smith } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) { 14724aedb280SBarry Smith PetscFunctionReturn(0); 147308480c60SBarry Smith } 14748e2fed03SBarry Smith } else if (isbinary) { 14758e2fed03SBarry Smith if (size == 1) { 14767adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr); 14778e2fed03SBarry Smith ierr = MatView(aij->A,viewer);CHKERRQ(ierr); 14788e2fed03SBarry Smith } else { 14798e2fed03SBarry Smith ierr = MatView_MPIAIJ_Binary(mat,viewer);CHKERRQ(ierr); 14808e2fed03SBarry Smith } 14818e2fed03SBarry Smith PetscFunctionReturn(0); 14820f5bd95cSBarry Smith } else if (isdraw) { 1483b0a32e0cSBarry Smith PetscDraw draw; 1484ace3abfcSBarry Smith PetscBool isnull; 1485b0a32e0cSBarry Smith ierr = 
PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr); 1486b0a32e0cSBarry Smith ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0); 148719bcc07fSBarry Smith } 148819bcc07fSBarry Smith 148917699dbbSLois Curfman McInnes if (size == 1) { 14907adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr); 149178b31e54SBarry Smith ierr = MatView(aij->A,viewer);CHKERRQ(ierr); 14923a40ed3dSBarry Smith } else { 149395373324SBarry Smith /* assemble the entire matrix onto first processor. */ 149495373324SBarry Smith Mat A; 1495ec8511deSBarry Smith Mat_SeqAIJ *Aloc; 1496d0f46423SBarry Smith PetscInt M = mat->rmap->N,N = mat->cmap->N,m,*ai,*aj,row,*cols,i,*ct; 1497dd6ea824SBarry Smith MatScalar *a; 14982ee70a88SLois Curfman McInnes 149932a366e4SMatthew Knepley if (mat->rmap->N > 1024) { 1500ace3abfcSBarry Smith PetscBool flg = PETSC_FALSE; 150132a366e4SMatthew Knepley 1502acfcf0e5SJed Brown ierr = PetscOptionsGetBool(((PetscObject) mat)->prefix, "-mat_ascii_output_large", &flg,PETSC_NULL);CHKERRQ(ierr); 150332a366e4SMatthew Knepley if (!flg) { 1504e7e72b3dSBarry Smith SETERRQ(((PetscObject)mat)->comm,PETSC_ERR_ARG_OUTOFRANGE,"ASCII matrix output not allowed for matrices with more than 1024 rows, use binary format instead.\nYou can override this restriction using -mat_ascii_output_large."); 150532a366e4SMatthew Knepley } 150632a366e4SMatthew Knepley } 15070805154bSBarry Smith 15087adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)mat)->comm,&A);CHKERRQ(ierr); 150917699dbbSLois Curfman McInnes if (!rank) { 1510f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr); 15113a40ed3dSBarry Smith } else { 1512f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr); 151395373324SBarry Smith } 1514f204ca49SKris Buschelman /* This is just a temporary matrix, so explicitly using MATMPIAIJ is probably best */ 1515f204ca49SKris Buschelman ierr = 
MatSetType(A,MATMPIAIJ);CHKERRQ(ierr); 1516f204ca49SKris Buschelman ierr = MatMPIAIJSetPreallocation(A,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr); 151752e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,A);CHKERRQ(ierr); 1518416022c9SBarry Smith 151995373324SBarry Smith /* copy over the A part */ 1520ec8511deSBarry Smith Aloc = (Mat_SeqAIJ*)aij->A->data; 1521d0f46423SBarry Smith m = aij->A->rmap->n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a; 1522d0f46423SBarry Smith row = mat->rmap->rstart; 1523d0f46423SBarry Smith for (i=0; i<ai[m]; i++) {aj[i] += mat->cmap->rstart ;} 152495373324SBarry Smith for (i=0; i<m; i++) { 1525416022c9SBarry Smith ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],aj,a,INSERT_VALUES);CHKERRQ(ierr); 152695373324SBarry Smith row++; a += ai[i+1]-ai[i]; aj += ai[i+1]-ai[i]; 152795373324SBarry Smith } 15282ee70a88SLois Curfman McInnes aj = Aloc->j; 1529d0f46423SBarry Smith for (i=0; i<ai[m]; i++) {aj[i] -= mat->cmap->rstart;} 153095373324SBarry Smith 153195373324SBarry Smith /* copy over the B part */ 1532ec8511deSBarry Smith Aloc = (Mat_SeqAIJ*)aij->B->data; 1533d0f46423SBarry Smith m = aij->B->rmap->n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a; 1534d0f46423SBarry Smith row = mat->rmap->rstart; 1535b1d57f15SBarry Smith ierr = PetscMalloc((ai[m]+1)*sizeof(PetscInt),&cols);CHKERRQ(ierr); 1536b0a32e0cSBarry Smith ct = cols; 1537bfec09a0SHong Zhang for (i=0; i<ai[m]; i++) {cols[i] = aij->garray[aj[i]];} 153895373324SBarry Smith for (i=0; i<m; i++) { 1539416022c9SBarry Smith ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],cols,a,INSERT_VALUES);CHKERRQ(ierr); 154095373324SBarry Smith row++; a += ai[i+1]-ai[i]; cols += ai[i+1]-ai[i]; 154195373324SBarry Smith } 1542606d414cSSatish Balay ierr = PetscFree(ct);CHKERRQ(ierr); 15436d4a8577SBarry Smith ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 15446d4a8577SBarry Smith ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 154555843e3eSBarry Smith /* 154655843e3eSBarry Smith Everyone has to call to draw 
the matrix since the graphics waits are 1547b0a32e0cSBarry Smith synchronized across all processors that share the PetscDraw object 154855843e3eSBarry Smith */ 1549b0a32e0cSBarry Smith ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr); 1550e03a110bSBarry Smith if (!rank) { 15517adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)((Mat_MPIAIJ*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr); 15527566de4bSShri Abhyankar /* Set the type name to MATMPIAIJ so that the correct type can be printed out by PetscObjectPrintClassNamePrefixType() in MatView_SeqAIJ_ASCII()*/ 15537566de4bSShri Abhyankar PetscStrcpy(((PetscObject)((Mat_MPIAIJ*)(A->data))->A)->type_name,MATMPIAIJ); 15546831982aSBarry Smith ierr = MatView(((Mat_MPIAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr); 155595373324SBarry Smith } 1556b0a32e0cSBarry Smith ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr); 15576bf464f9SBarry Smith ierr = MatDestroy(&A);CHKERRQ(ierr); 155895373324SBarry Smith } 15593a40ed3dSBarry Smith PetscFunctionReturn(0); 15601eb62cbbSBarry Smith } 15611eb62cbbSBarry Smith 15624a2ae208SSatish Balay #undef __FUNCT__ 15634a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ" 1564dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ(Mat mat,PetscViewer viewer) 1565416022c9SBarry Smith { 1566dfbe8321SBarry Smith PetscErrorCode ierr; 1567ace3abfcSBarry Smith PetscBool iascii,isdraw,issocket,isbinary; 1568416022c9SBarry Smith 15693a40ed3dSBarry Smith PetscFunctionBegin; 15702692d6eeSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 15712692d6eeSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 15722692d6eeSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 15732692d6eeSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr); 157432077d6dSBarry Smith if (iascii || isdraw || 
isbinary || issocket) { 15757b2a1423SBarry Smith ierr = MatView_MPIAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr); 15765cd90555SBarry Smith } else { 1577e32f2f54SBarry Smith SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported by MPIAIJ matrices",((PetscObject)viewer)->type_name); 1578416022c9SBarry Smith } 15793a40ed3dSBarry Smith PetscFunctionReturn(0); 1580416022c9SBarry Smith } 1581416022c9SBarry Smith 15824a2ae208SSatish Balay #undef __FUNCT__ 158341f059aeSBarry Smith #define __FUNCT__ "MatSOR_MPIAIJ" 158441f059aeSBarry Smith PetscErrorCode MatSOR_MPIAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx) 15858a729477SBarry Smith { 158644a69424SLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 1587dfbe8321SBarry Smith PetscErrorCode ierr; 15886987fefcSBarry Smith Vec bb1 = 0; 1589ace3abfcSBarry Smith PetscBool hasop; 15908a729477SBarry Smith 15913a40ed3dSBarry Smith PetscFunctionBegin; 159285911e72SJed Brown if (its > 1 || ~flag & SOR_ZERO_INITIAL_GUESS || flag & SOR_EISENSTAT) { 159385911e72SJed Brown ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr); 159485911e72SJed Brown } 15952798e883SHong Zhang 1596a2b30743SBarry Smith if (flag == SOR_APPLY_UPPER) { 159741f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 1598a2b30743SBarry Smith PetscFunctionReturn(0); 1599a2b30743SBarry Smith } 1600a2b30743SBarry Smith 1601c16cb8f2SBarry Smith if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP){ 1602da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 160341f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 16042798e883SHong Zhang its--; 1605da3a660dSBarry Smith } 16062798e883SHong Zhang 16072798e883SHong Zhang while (its--) { 1608ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1609ca9f406cSSatish Balay ierr = 
VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 16102798e883SHong Zhang 1611c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1612efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1613c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 16142798e883SHong Zhang 1615c14dc6b6SHong Zhang /* local sweep */ 161641f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr); 16172798e883SHong Zhang } 16183a40ed3dSBarry Smith } else if (flag & SOR_LOCAL_FORWARD_SWEEP){ 1619da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 162041f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 16212798e883SHong Zhang its--; 1622da3a660dSBarry Smith } 16232798e883SHong Zhang while (its--) { 1624ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1625ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 16262798e883SHong Zhang 1627c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1628efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1629c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 1630c14dc6b6SHong Zhang 1631c14dc6b6SHong Zhang /* local sweep */ 163241f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr); 16332798e883SHong Zhang } 16343a40ed3dSBarry Smith } else if (flag & SOR_LOCAL_BACKWARD_SWEEP){ 1635da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 163641f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 16372798e883SHong Zhang its--; 1638da3a660dSBarry Smith } 16392798e883SHong Zhang while (its--) { 1640ca9f406cSSatish Balay ierr = 
VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1641ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 16422798e883SHong Zhang 1643c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1644efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1645c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 16462798e883SHong Zhang 1647c14dc6b6SHong Zhang /* local sweep */ 164841f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr); 16492798e883SHong Zhang } 1650a7420bb7SBarry Smith } else if (flag & SOR_EISENSTAT) { 1651a7420bb7SBarry Smith Vec xx1; 1652a7420bb7SBarry Smith 1653a7420bb7SBarry Smith ierr = VecDuplicate(bb,&xx1);CHKERRQ(ierr); 165441f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_BACKWARD_SWEEP),fshift,lits,1,xx);CHKERRQ(ierr); 1655a7420bb7SBarry Smith 1656a7420bb7SBarry Smith ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1657a7420bb7SBarry Smith ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1658a7420bb7SBarry Smith if (!mat->diag) { 1659a7420bb7SBarry Smith ierr = MatGetVecs(matin,&mat->diag,PETSC_NULL);CHKERRQ(ierr); 1660a7420bb7SBarry Smith ierr = MatGetDiagonal(matin,mat->diag);CHKERRQ(ierr); 1661a7420bb7SBarry Smith } 1662bd0c2dcbSBarry Smith ierr = MatHasOperation(matin,MATOP_MULT_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr); 1663bd0c2dcbSBarry Smith if (hasop) { 1664bd0c2dcbSBarry Smith ierr = MatMultDiagonalBlock(matin,xx,bb1);CHKERRQ(ierr); 1665bd0c2dcbSBarry Smith } else { 1666a7420bb7SBarry Smith ierr = VecPointwiseMult(bb1,mat->diag,xx);CHKERRQ(ierr); 1667bd0c2dcbSBarry Smith } 1668887ee2caSBarry Smith ierr = VecAYPX(bb1,(omega-2.0)/omega,bb);CHKERRQ(ierr); 1669887ee2caSBarry Smith 
1670a7420bb7SBarry Smith ierr = MatMultAdd(mat->B,mat->lvec,bb1,bb1);CHKERRQ(ierr); 1671a7420bb7SBarry Smith 1672a7420bb7SBarry Smith /* local sweep */ 167341f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_FORWARD_SWEEP),fshift,lits,1,xx1);CHKERRQ(ierr); 1674a7420bb7SBarry Smith ierr = VecAXPY(xx,1.0,xx1);CHKERRQ(ierr); 16756bf464f9SBarry Smith ierr = VecDestroy(&xx1);CHKERRQ(ierr); 16766bf464f9SBarry Smith } else SETERRQ(((PetscObject)mat)->comm,PETSC_ERR_SUP,"Parallel SOR not supported"); 1677c14dc6b6SHong Zhang 16786bf464f9SBarry Smith ierr = VecDestroy(&bb1);CHKERRQ(ierr); 16793a40ed3dSBarry Smith PetscFunctionReturn(0); 16808a729477SBarry Smith } 1681a66be287SLois Curfman McInnes 16824a2ae208SSatish Balay #undef __FUNCT__ 168342e855d1Svictor #define __FUNCT__ "MatPermute_MPIAIJ" 168442e855d1Svictor PetscErrorCode MatPermute_MPIAIJ(Mat A,IS rowp,IS colp,Mat *B) 168542e855d1Svictor { 168642e855d1Svictor MPI_Comm comm,pcomm; 16875d0c19d7SBarry Smith PetscInt first,local_size,nrows; 16885d0c19d7SBarry Smith const PetscInt *rows; 1689dbf0e21dSBarry Smith PetscMPIInt size; 169042e855d1Svictor IS crowp,growp,irowp,lrowp,lcolp,icolp; 169142e855d1Svictor PetscErrorCode ierr; 169242e855d1Svictor 169342e855d1Svictor PetscFunctionBegin; 169442e855d1Svictor ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr); 169542e855d1Svictor /* make a collective version of 'rowp' */ 169642e855d1Svictor ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr); 169742e855d1Svictor if (pcomm==comm) { 169842e855d1Svictor crowp = rowp; 169942e855d1Svictor } else { 170042e855d1Svictor ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr); 170142e855d1Svictor ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr); 170270b3c8c7SBarry Smith ierr = ISCreateGeneral(comm,nrows,rows,PETSC_COPY_VALUES,&crowp);CHKERRQ(ierr); 170342e855d1Svictor ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr); 170442e855d1Svictor } 170542e855d1Svictor /* 
collect the global row permutation and invert it */ 170642e855d1Svictor ierr = ISAllGather(crowp,&growp);CHKERRQ(ierr); 170742e855d1Svictor ierr = ISSetPermutation(growp);CHKERRQ(ierr); 170842e855d1Svictor if (pcomm!=comm) { 17096bf464f9SBarry Smith ierr = ISDestroy(&crowp);CHKERRQ(ierr); 171042e855d1Svictor } 171142e855d1Svictor ierr = ISInvertPermutation(growp,PETSC_DECIDE,&irowp);CHKERRQ(ierr); 171242e855d1Svictor /* get the local target indices */ 171342e855d1Svictor ierr = MatGetOwnershipRange(A,&first,PETSC_NULL);CHKERRQ(ierr); 171442e855d1Svictor ierr = MatGetLocalSize(A,&local_size,PETSC_NULL);CHKERRQ(ierr); 171542e855d1Svictor ierr = ISGetIndices(irowp,&rows);CHKERRQ(ierr); 171670b3c8c7SBarry Smith ierr = ISCreateGeneral(MPI_COMM_SELF,local_size,rows+first,PETSC_COPY_VALUES,&lrowp);CHKERRQ(ierr); 171742e855d1Svictor ierr = ISRestoreIndices(irowp,&rows);CHKERRQ(ierr); 17186bf464f9SBarry Smith ierr = ISDestroy(&irowp);CHKERRQ(ierr); 171942e855d1Svictor /* the column permutation is so much easier; 172042e855d1Svictor make a local version of 'colp' and invert it */ 172142e855d1Svictor ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr); 1722dbf0e21dSBarry Smith ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr); 1723dbf0e21dSBarry Smith if (size==1) { 172442e855d1Svictor lcolp = colp; 172542e855d1Svictor } else { 172642e855d1Svictor ierr = ISGetSize(colp,&nrows);CHKERRQ(ierr); 172742e855d1Svictor ierr = ISGetIndices(colp,&rows);CHKERRQ(ierr); 172870b3c8c7SBarry Smith ierr = ISCreateGeneral(MPI_COMM_SELF,nrows,rows,PETSC_COPY_VALUES,&lcolp);CHKERRQ(ierr); 172942e855d1Svictor } 1730dbf0e21dSBarry Smith ierr = ISSetPermutation(lcolp);CHKERRQ(ierr); 173142e855d1Svictor ierr = ISInvertPermutation(lcolp,PETSC_DECIDE,&icolp);CHKERRQ(ierr); 17324aa3045dSJed Brown ierr = ISSetPermutation(icolp);CHKERRQ(ierr); 1733dbf0e21dSBarry Smith if (size>1) { 173442e855d1Svictor ierr = ISRestoreIndices(colp,&rows);CHKERRQ(ierr); 17356bf464f9SBarry Smith ierr = 
ISDestroy(&lcolp);CHKERRQ(ierr); 173642e855d1Svictor } 173742e855d1Svictor /* now we just get the submatrix */ 17384aa3045dSJed Brown ierr = MatGetSubMatrix_MPIAIJ_Private(A,lrowp,icolp,local_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr); 173942e855d1Svictor /* clean up */ 17406bf464f9SBarry Smith ierr = ISDestroy(&lrowp);CHKERRQ(ierr); 17416bf464f9SBarry Smith ierr = ISDestroy(&icolp);CHKERRQ(ierr); 174242e855d1Svictor PetscFunctionReturn(0); 174342e855d1Svictor } 174442e855d1Svictor 174542e855d1Svictor #undef __FUNCT__ 17464a2ae208SSatish Balay #define __FUNCT__ "MatGetInfo_MPIAIJ" 1747dfbe8321SBarry Smith PetscErrorCode MatGetInfo_MPIAIJ(Mat matin,MatInfoType flag,MatInfo *info) 1748a66be287SLois Curfman McInnes { 1749a66be287SLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 1750a66be287SLois Curfman McInnes Mat A = mat->A,B = mat->B; 1751dfbe8321SBarry Smith PetscErrorCode ierr; 1752329f5518SBarry Smith PetscReal isend[5],irecv[5]; 1753a66be287SLois Curfman McInnes 17543a40ed3dSBarry Smith PetscFunctionBegin; 17554e220ebcSLois Curfman McInnes info->block_size = 1.0; 17564e220ebcSLois Curfman McInnes ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr); 17574e220ebcSLois Curfman McInnes isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded; 17584e220ebcSLois Curfman McInnes isend[3] = info->memory; isend[4] = info->mallocs; 17594e220ebcSLois Curfman McInnes ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr); 17604e220ebcSLois Curfman McInnes isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded; 17614e220ebcSLois Curfman McInnes isend[3] += info->memory; isend[4] += info->mallocs; 1762a66be287SLois Curfman McInnes if (flag == MAT_LOCAL) { 17634e220ebcSLois Curfman McInnes info->nz_used = isend[0]; 17644e220ebcSLois Curfman McInnes info->nz_allocated = isend[1]; 17654e220ebcSLois Curfman McInnes info->nz_unneeded = isend[2]; 17664e220ebcSLois Curfman McInnes info->memory = isend[3]; 
17674e220ebcSLois Curfman McInnes info->mallocs = isend[4]; 1768a66be287SLois Curfman McInnes } else if (flag == MAT_GLOBAL_MAX) { 1769d9822059SBarry Smith ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,((PetscObject)matin)->comm);CHKERRQ(ierr); 17704e220ebcSLois Curfman McInnes info->nz_used = irecv[0]; 17714e220ebcSLois Curfman McInnes info->nz_allocated = irecv[1]; 17724e220ebcSLois Curfman McInnes info->nz_unneeded = irecv[2]; 17734e220ebcSLois Curfman McInnes info->memory = irecv[3]; 17744e220ebcSLois Curfman McInnes info->mallocs = irecv[4]; 1775a66be287SLois Curfman McInnes } else if (flag == MAT_GLOBAL_SUM) { 1776d9822059SBarry Smith ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,((PetscObject)matin)->comm);CHKERRQ(ierr); 17774e220ebcSLois Curfman McInnes info->nz_used = irecv[0]; 17784e220ebcSLois Curfman McInnes info->nz_allocated = irecv[1]; 17794e220ebcSLois Curfman McInnes info->nz_unneeded = irecv[2]; 17804e220ebcSLois Curfman McInnes info->memory = irecv[3]; 17814e220ebcSLois Curfman McInnes info->mallocs = irecv[4]; 1782a66be287SLois Curfman McInnes } 17834e220ebcSLois Curfman McInnes info->fill_ratio_given = 0; /* no parallel LU/ILU/Cholesky */ 17844e220ebcSLois Curfman McInnes info->fill_ratio_needed = 0; 17854e220ebcSLois Curfman McInnes info->factor_mallocs = 0; 17864e220ebcSLois Curfman McInnes 17873a40ed3dSBarry Smith PetscFunctionReturn(0); 1788a66be287SLois Curfman McInnes } 1789a66be287SLois Curfman McInnes 17904a2ae208SSatish Balay #undef __FUNCT__ 17914a2ae208SSatish Balay #define __FUNCT__ "MatSetOption_MPIAIJ" 1792ace3abfcSBarry Smith PetscErrorCode MatSetOption_MPIAIJ(Mat A,MatOption op,PetscBool flg) 1793c74985f6SBarry Smith { 1794c0bbcb79SLois Curfman McInnes Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1795dfbe8321SBarry Smith PetscErrorCode ierr; 1796c74985f6SBarry Smith 17973a40ed3dSBarry Smith PetscFunctionBegin; 179812c028f9SKris Buschelman switch (op) { 1799512a5fc5SBarry Smith case MAT_NEW_NONZERO_LOCATIONS: 
180012c028f9SKris Buschelman case MAT_NEW_NONZERO_ALLOCATION_ERR: 180128b2fa4aSMatthew Knepley case MAT_UNUSED_NONZERO_LOCATION_ERR: 1802a9817697SBarry Smith case MAT_KEEP_NONZERO_PATTERN: 180312c028f9SKris Buschelman case MAT_NEW_NONZERO_LOCATION_ERR: 180412c028f9SKris Buschelman case MAT_USE_INODES: 180512c028f9SKris Buschelman case MAT_IGNORE_ZERO_ENTRIES: 18064e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 18074e0d8c25SBarry Smith ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr); 180812c028f9SKris Buschelman break; 180912c028f9SKris Buschelman case MAT_ROW_ORIENTED: 18104e0d8c25SBarry Smith a->roworiented = flg; 18114e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 18124e0d8c25SBarry Smith ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr); 181312c028f9SKris Buschelman break; 18144e0d8c25SBarry Smith case MAT_NEW_DIAGONALS: 1815290bbb0aSBarry Smith ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr); 181612c028f9SKris Buschelman break; 181712c028f9SKris Buschelman case MAT_IGNORE_OFF_PROC_ENTRIES: 18185c0f0b64SBarry Smith a->donotstash = flg; 181912c028f9SKris Buschelman break; 1820ffa07934SHong Zhang case MAT_SPD: 1821ffa07934SHong Zhang A->spd_set = PETSC_TRUE; 1822ffa07934SHong Zhang A->spd = flg; 1823ffa07934SHong Zhang if (flg) { 1824ffa07934SHong Zhang A->symmetric = PETSC_TRUE; 1825ffa07934SHong Zhang A->structurally_symmetric = PETSC_TRUE; 1826ffa07934SHong Zhang A->symmetric_set = PETSC_TRUE; 1827ffa07934SHong Zhang A->structurally_symmetric_set = PETSC_TRUE; 1828ffa07934SHong Zhang } 1829ffa07934SHong Zhang break; 183077e54ba9SKris Buschelman case MAT_SYMMETRIC: 18314e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 183225f421beSHong Zhang break; 183377e54ba9SKris Buschelman case MAT_STRUCTURALLY_SYMMETRIC: 1834eeffb40dSHong Zhang ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 1835eeffb40dSHong Zhang break; 1836bf108f30SBarry Smith case MAT_HERMITIAN: 1837eeffb40dSHong Zhang ierr 
#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAIJ"
/*
   MatGetRow_MPIAIJ - Return one locally owned row of the parallel matrix,
   merging the diagonal (A) and off-diagonal (B) block entries into a single
   list sorted by global column number.

   The merged values/indices are written into per-matrix work arrays
   (mat->rowvalues / mat->rowindices), valid until MatRestoreRow_MPIAIJ().
   Only one row may be "active" at a time (guarded by mat->getrowactive).
*/
PetscErrorCode MatGetRow_MPIAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
  PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
  PetscErrorCode ierr;
  PetscInt       i,*cworkA,*cworkB,**pcA,**pcB,cstart = matin->cmap->rstart;
  PetscInt       nztot,nzA,nzB,lrow,rstart = matin->rmap->rstart,rend = matin->rmap->rend;
  PetscInt       *cmap,*idx_p;

  PetscFunctionBegin;
  if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active");
  mat->getrowactive = PETSC_TRUE;

  if (!mat->rowvalues && (idx || v)) {
    /*
        allocate enough space to hold information from the longest row.
    */
    Mat_SeqAIJ *Aa = (Mat_SeqAIJ*)mat->A->data,*Ba = (Mat_SeqAIJ*)mat->B->data;
    PetscInt   max = 1,tmp;
    for (i=0; i<matin->rmap->n; i++) {
      tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
      if (max < tmp) { max = tmp; }
    }
    ierr = PetscMalloc2(max,PetscScalar,&mat->rowvalues,max,PetscInt,&mat->rowindices);CHKERRQ(ierr);
  }

  if (row < rstart || row >= rend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only local rows");
  lrow = row - rstart;

  /* only request from the blocks the arrays the caller actually wants */
  pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
  if (!v)   {pvA = 0; pvB = 0;}
  if (!idx) {pcA = 0; if (!v) pcB = 0;}
  ierr  = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
  ierr  = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
  nztot = nzA + nzB;

  /* cmap maps B's compressed local column indices to global columns */
  cmap = mat->garray;
  if (v  || idx) {
    if (nztot) {
      /* Sort by increasing column numbers, assuming A and B already sorted */
      /* imark = number of B entries whose global column lies left of A's
         column range [cstart, cend); the merged layout is
         B-left | all of A | B-right */
      PetscInt imark = -1;
      if (v) {
        *v = v_p = mat->rowvalues;
        for (i=0; i<nzB; i++) {
          if (cmap[cworkB[i]] < cstart)   v_p[i] = vworkB[i];
          else break;
        }
        imark = i;
        for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
        for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
      }
      if (idx) {
        *idx = idx_p = mat->rowindices;
        if (imark > -1) {
          /* split point already computed in the values pass above */
          for (i=0; i<imark; i++) {
            idx_p[i] = cmap[cworkB[i]];
          }
        } else {
          for (i=0; i<nzB; i++) {
            if (cmap[cworkB[i]] < cstart)   idx_p[i] = cmap[cworkB[i]];
            else break;
          }
          imark = i;
        }
        for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart + cworkA[i];
        for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]];
      }
    } else {
      if (idx) *idx = 0;
      if (v)   *v   = 0;
    }
  }
  *nz  = nztot;
  ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
  ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow() must be called first"); 19367a0afa10SBarry Smith aij->getrowactive = PETSC_FALSE; 19373a40ed3dSBarry Smith PetscFunctionReturn(0); 193839e00950SLois Curfman McInnes } 193939e00950SLois Curfman McInnes 19404a2ae208SSatish Balay #undef __FUNCT__ 19414a2ae208SSatish Balay #define __FUNCT__ "MatNorm_MPIAIJ" 1942dfbe8321SBarry Smith PetscErrorCode MatNorm_MPIAIJ(Mat mat,NormType type,PetscReal *norm) 1943855ac2c5SLois Curfman McInnes { 1944855ac2c5SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 1945ec8511deSBarry Smith Mat_SeqAIJ *amat = (Mat_SeqAIJ*)aij->A->data,*bmat = (Mat_SeqAIJ*)aij->B->data; 1946dfbe8321SBarry Smith PetscErrorCode ierr; 1947d0f46423SBarry Smith PetscInt i,j,cstart = mat->cmap->rstart; 1948329f5518SBarry Smith PetscReal sum = 0.0; 1949a77337e4SBarry Smith MatScalar *v; 195004ca555eSLois Curfman McInnes 19513a40ed3dSBarry Smith PetscFunctionBegin; 195217699dbbSLois Curfman McInnes if (aij->size == 1) { 195314183eadSLois Curfman McInnes ierr = MatNorm(aij->A,type,norm);CHKERRQ(ierr); 195437fa93a5SLois Curfman McInnes } else { 195504ca555eSLois Curfman McInnes if (type == NORM_FROBENIUS) { 195604ca555eSLois Curfman McInnes v = amat->a; 195704ca555eSLois Curfman McInnes for (i=0; i<amat->nz; i++) { 1958aa482453SBarry Smith #if defined(PETSC_USE_COMPLEX) 1959329f5518SBarry Smith sum += PetscRealPart(PetscConj(*v)*(*v)); v++; 196004ca555eSLois Curfman McInnes #else 196104ca555eSLois Curfman McInnes sum += (*v)*(*v); v++; 196204ca555eSLois Curfman McInnes #endif 196304ca555eSLois Curfman McInnes } 196404ca555eSLois Curfman McInnes v = bmat->a; 196504ca555eSLois Curfman McInnes for (i=0; i<bmat->nz; i++) { 1966aa482453SBarry Smith #if defined(PETSC_USE_COMPLEX) 1967329f5518SBarry Smith sum += PetscRealPart(PetscConj(*v)*(*v)); v++; 196804ca555eSLois Curfman McInnes #else 196904ca555eSLois Curfman McInnes sum += (*v)*(*v); v++; 197004ca555eSLois Curfman McInnes #endif 
197104ca555eSLois Curfman McInnes } 1972d9822059SBarry Smith ierr = MPI_Allreduce(&sum,norm,1,MPIU_REAL,MPIU_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr); 197304ca555eSLois Curfman McInnes *norm = sqrt(*norm); 19743a40ed3dSBarry Smith } else if (type == NORM_1) { /* max column norm */ 1975329f5518SBarry Smith PetscReal *tmp,*tmp2; 1976b1d57f15SBarry Smith PetscInt *jj,*garray = aij->garray; 1977d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp);CHKERRQ(ierr); 1978d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp2);CHKERRQ(ierr); 1979d0f46423SBarry Smith ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr); 198004ca555eSLois Curfman McInnes *norm = 0.0; 198104ca555eSLois Curfman McInnes v = amat->a; jj = amat->j; 198204ca555eSLois Curfman McInnes for (j=0; j<amat->nz; j++) { 1983bfec09a0SHong Zhang tmp[cstart + *jj++ ] += PetscAbsScalar(*v); v++; 198404ca555eSLois Curfman McInnes } 198504ca555eSLois Curfman McInnes v = bmat->a; jj = bmat->j; 198604ca555eSLois Curfman McInnes for (j=0; j<bmat->nz; j++) { 1987bfec09a0SHong Zhang tmp[garray[*jj++]] += PetscAbsScalar(*v); v++; 198804ca555eSLois Curfman McInnes } 1989d9822059SBarry Smith ierr = MPI_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPIU_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr); 1990d0f46423SBarry Smith for (j=0; j<mat->cmap->N; j++) { 199104ca555eSLois Curfman McInnes if (tmp2[j] > *norm) *norm = tmp2[j]; 199204ca555eSLois Curfman McInnes } 1993606d414cSSatish Balay ierr = PetscFree(tmp);CHKERRQ(ierr); 1994606d414cSSatish Balay ierr = PetscFree(tmp2);CHKERRQ(ierr); 19953a40ed3dSBarry Smith } else if (type == NORM_INFINITY) { /* max row norm */ 1996329f5518SBarry Smith PetscReal ntemp = 0.0; 1997d0f46423SBarry Smith for (j=0; j<aij->A->rmap->n; j++) { 1998bfec09a0SHong Zhang v = amat->a + amat->i[j]; 199904ca555eSLois Curfman McInnes sum = 0.0; 200004ca555eSLois Curfman McInnes for (i=0; i<amat->i[j+1]-amat->i[j]; i++) { 
#undef __FUNCT__
#define __FUNCT__ "MatTranspose_MPIAIJ"
/*
   MatTranspose_MPIAIJ - Form the transpose of a parallel AIJ matrix by
   inserting each local row of A (diagonal block) and B (off-diagonal block)
   as a column of the result via MatSetValues(); the parallel assembly takes
   care of routing the off-process entries.

   reuse == MAT_INITIAL_MATRIX (or in-place on A) creates the destination
   with a d_nnz preallocation computed from A's column counts; o_nnz is
   approximated by d_nnz to avoid a round of communication.

   NOTE(review): the A-block column indices aj[] are temporarily shifted to
   global numbering for MatSetValues() and shifted back afterwards, so this
   routine briefly mutates the input matrix's index arrays.
*/
PetscErrorCode MatTranspose_MPIAIJ(Mat A,MatReuse reuse,Mat *matout)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ     *Aloc=(Mat_SeqAIJ*)a->A->data,*Bloc=(Mat_SeqAIJ*)a->B->data;
  PetscErrorCode ierr;
  PetscInt       M = A->rmap->N,N = A->cmap->N,ma,na,mb,*ai,*aj,*bi,*bj,row,*cols,*cols_tmp,i,*d_nnz;
  PetscInt       cstart=A->cmap->rstart,ncol;
  Mat            B;
  MatScalar      *array;

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");

  ma = A->rmap->n; na = A->cmap->n; mb = a->B->rmap->n;
  ai = Aloc->i; aj = Aloc->j;
  bi = Bloc->i; bj = Bloc->j;
  if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
    /* compute d_nnz for preallocation; o_nnz is approximated by d_nnz to avoid communication */
    ierr = PetscMalloc((1+na)*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr);
    ierr = PetscMemzero(d_nnz,(1+na)*sizeof(PetscInt));CHKERRQ(ierr);
    for (i=0; i<ai[ma]; i++){
      d_nnz[aj[i]] ++;
      aj[i] += cstart; /* global col index to be used by MatSetValues() */
    }

    ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
    /* transposed sizes: local/global rows and columns swap roles */
    ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
    ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,d_nnz);CHKERRQ(ierr);
    ierr = PetscFree(d_nnz);CHKERRQ(ierr);
  } else {
    B = *matout;
  }

  /* copy over the A part: row i of A becomes column (rstart+i) of B */
  array = Aloc->a;
  row = A->rmap->rstart;
  for (i=0; i<ma; i++) {
    ncol = ai[i+1]-ai[i];
    ierr = MatSetValues(B,ncol,aj,1,&row,array,INSERT_VALUES);CHKERRQ(ierr);
    row++; array += ncol; aj += ncol;
  }
  aj = Aloc->j;
  for (i=0; i<ai[ma]; i++) aj[i] -= cstart; /* resume local col index */

  /* copy over the B part; translate compressed column indices via garray */
  ierr = PetscMalloc(bi[mb]*sizeof(PetscInt),&cols);CHKERRQ(ierr);
  ierr = PetscMemzero(cols,bi[mb]*sizeof(PetscInt));CHKERRQ(ierr);
  array = Bloc->a;
  row = A->rmap->rstart;
  for (i=0; i<bi[mb]; i++) {cols[i] = a->garray[bj[i]];}
  cols_tmp = cols;
  for (i=0; i<mb; i++) {
    ncol = bi[i+1]-bi[i];
    ierr = MatSetValues(B,ncol,cols_tmp,1,&row,array,INSERT_VALUES);CHKERRQ(ierr);
    row++; array += ncol; cols_tmp += ncol;
  }
  ierr = PetscFree(cols);CHKERRQ(ierr);

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX || *matout != A) {
    *matout = B;
  } else {
    /* in-place: steal B's innards into A */
    ierr = MatHeaderMerge(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
Bloc->a; 2068d0f46423SBarry Smith row = A->rmap->rstart; 2069da668accSHong Zhang for (i=0; i<bi[mb]; i++) {cols[i] = a->garray[bj[i]];} 207061a2fbbaSHong Zhang cols_tmp = cols; 2071da668accSHong Zhang for (i=0; i<mb; i++) { 2072da668accSHong Zhang ncol = bi[i+1]-bi[i]; 207361a2fbbaSHong Zhang ierr = MatSetValues(B,ncol,cols_tmp,1,&row,array,INSERT_VALUES);CHKERRQ(ierr); 207461a2fbbaSHong Zhang row++; array += ncol; cols_tmp += ncol; 2075b7c46309SBarry Smith } 2076fc73b1b3SBarry Smith ierr = PetscFree(cols);CHKERRQ(ierr); 2077fc73b1b3SBarry Smith 20786d4a8577SBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 20796d4a8577SBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2080815cbec1SBarry Smith if (reuse == MAT_INITIAL_MATRIX || *matout != A) { 20810de55854SLois Curfman McInnes *matout = B; 20820de55854SLois Curfman McInnes } else { 2083eb6b5d47SBarry Smith ierr = MatHeaderMerge(A,B);CHKERRQ(ierr); 20840de55854SLois Curfman McInnes } 20853a40ed3dSBarry Smith PetscFunctionReturn(0); 2086b7c46309SBarry Smith } 2087b7c46309SBarry Smith 20884a2ae208SSatish Balay #undef __FUNCT__ 20894a2ae208SSatish Balay #define __FUNCT__ "MatDiagonalScale_MPIAIJ" 2090dfbe8321SBarry Smith PetscErrorCode MatDiagonalScale_MPIAIJ(Mat mat,Vec ll,Vec rr) 2091a008b906SSatish Balay { 20924b967eb1SSatish Balay Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 20934b967eb1SSatish Balay Mat a = aij->A,b = aij->B; 2094dfbe8321SBarry Smith PetscErrorCode ierr; 2095b1d57f15SBarry Smith PetscInt s1,s2,s3; 2096a008b906SSatish Balay 20973a40ed3dSBarry Smith PetscFunctionBegin; 20984b967eb1SSatish Balay ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr); 20994b967eb1SSatish Balay if (rr) { 2100e1311b90SBarry Smith ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr); 2101e32f2f54SBarry Smith if (s1!=s3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"right vector non-conforming local size"); 21024b967eb1SSatish Balay /* Overlap communication with computation. 
*/ 2103ca9f406cSSatish Balay ierr = VecScatterBegin(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 2104a008b906SSatish Balay } 21054b967eb1SSatish Balay if (ll) { 2106e1311b90SBarry Smith ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr); 2107e32f2f54SBarry Smith if (s1!=s2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"left vector non-conforming local size"); 2108f830108cSBarry Smith ierr = (*b->ops->diagonalscale)(b,ll,0);CHKERRQ(ierr); 21094b967eb1SSatish Balay } 21104b967eb1SSatish Balay /* scale the diagonal block */ 2111f830108cSBarry Smith ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr); 21124b967eb1SSatish Balay 21134b967eb1SSatish Balay if (rr) { 21144b967eb1SSatish Balay /* Do a scatter end and then right scale the off-diagonal block */ 2115ca9f406cSSatish Balay ierr = VecScatterEnd(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 2116f830108cSBarry Smith ierr = (*b->ops->diagonalscale)(b,0,aij->lvec);CHKERRQ(ierr); 21174b967eb1SSatish Balay } 21184b967eb1SSatish Balay 21193a40ed3dSBarry Smith PetscFunctionReturn(0); 2120a008b906SSatish Balay } 2121a008b906SSatish Balay 21224a2ae208SSatish Balay #undef __FUNCT__ 2123521d7252SBarry Smith #define __FUNCT__ "MatSetBlockSize_MPIAIJ" 2124521d7252SBarry Smith PetscErrorCode MatSetBlockSize_MPIAIJ(Mat A,PetscInt bs) 21255a838052SSatish Balay { 2126521d7252SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2127521d7252SBarry Smith PetscErrorCode ierr; 2128521d7252SBarry Smith 21293a40ed3dSBarry Smith PetscFunctionBegin; 2130521d7252SBarry Smith ierr = MatSetBlockSize(a->A,bs);CHKERRQ(ierr); 2131521d7252SBarry Smith ierr = MatSetBlockSize(a->B,bs);CHKERRQ(ierr); 2132829b6ff0SJed Brown ierr = PetscLayoutSetBlockSize(A->rmap,bs);CHKERRQ(ierr); 2133829b6ff0SJed Brown ierr = PetscLayoutSetBlockSize(A->cmap,bs);CHKERRQ(ierr); 21343a40ed3dSBarry Smith PetscFunctionReturn(0); 21355a838052SSatish Balay } 21364a2ae208SSatish Balay #undef __FUNCT__ 21374a2ae208SSatish Balay #define 
__FUNCT__ "MatSetUnfactored_MPIAIJ" 2138dfbe8321SBarry Smith PetscErrorCode MatSetUnfactored_MPIAIJ(Mat A) 2139bb5a7306SBarry Smith { 2140bb5a7306SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2141dfbe8321SBarry Smith PetscErrorCode ierr; 21423a40ed3dSBarry Smith 21433a40ed3dSBarry Smith PetscFunctionBegin; 2144bb5a7306SBarry Smith ierr = MatSetUnfactored(a->A);CHKERRQ(ierr); 21453a40ed3dSBarry Smith PetscFunctionReturn(0); 2146bb5a7306SBarry Smith } 2147bb5a7306SBarry Smith 21484a2ae208SSatish Balay #undef __FUNCT__ 21494a2ae208SSatish Balay #define __FUNCT__ "MatEqual_MPIAIJ" 2150ace3abfcSBarry Smith PetscErrorCode MatEqual_MPIAIJ(Mat A,Mat B,PetscBool *flag) 2151d4bb536fSBarry Smith { 2152d4bb536fSBarry Smith Mat_MPIAIJ *matB = (Mat_MPIAIJ*)B->data,*matA = (Mat_MPIAIJ*)A->data; 2153d4bb536fSBarry Smith Mat a,b,c,d; 2154ace3abfcSBarry Smith PetscBool flg; 2155dfbe8321SBarry Smith PetscErrorCode ierr; 2156d4bb536fSBarry Smith 21573a40ed3dSBarry Smith PetscFunctionBegin; 2158d4bb536fSBarry Smith a = matA->A; b = matA->B; 2159d4bb536fSBarry Smith c = matB->A; d = matB->B; 2160d4bb536fSBarry Smith 2161d4bb536fSBarry Smith ierr = MatEqual(a,c,&flg);CHKERRQ(ierr); 2162abc0a331SBarry Smith if (flg) { 2163d4bb536fSBarry Smith ierr = MatEqual(b,d,&flg);CHKERRQ(ierr); 2164d4bb536fSBarry Smith } 21657adad957SLisandro Dalcin ierr = MPI_Allreduce(&flg,flag,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr); 21663a40ed3dSBarry Smith PetscFunctionReturn(0); 2167d4bb536fSBarry Smith } 2168d4bb536fSBarry Smith 21694a2ae208SSatish Balay #undef __FUNCT__ 21704a2ae208SSatish Balay #define __FUNCT__ "MatCopy_MPIAIJ" 2171dfbe8321SBarry Smith PetscErrorCode MatCopy_MPIAIJ(Mat A,Mat B,MatStructure str) 2172cb5b572fSBarry Smith { 2173dfbe8321SBarry Smith PetscErrorCode ierr; 2174cb5b572fSBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data; 2175cb5b572fSBarry Smith Mat_MPIAIJ *b = (Mat_MPIAIJ *)B->data; 2176cb5b572fSBarry Smith 2177cb5b572fSBarry Smith PetscFunctionBegin; 
217833f4a19fSKris Buschelman /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */ 217933f4a19fSKris Buschelman if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) { 2180cb5b572fSBarry Smith /* because of the column compression in the off-processor part of the matrix a->B, 2181cb5b572fSBarry Smith the number of columns in a->B and b->B may be different, hence we cannot call 2182cb5b572fSBarry Smith the MatCopy() directly on the two parts. If need be, we can provide a more 2183cb5b572fSBarry Smith efficient copy than the MatCopy_Basic() by first uncompressing the a->B matrices 2184cb5b572fSBarry Smith then copying the submatrices */ 2185cb5b572fSBarry Smith ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr); 2186cb5b572fSBarry Smith } else { 2187cb5b572fSBarry Smith ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr); 2188cb5b572fSBarry Smith ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr); 2189cb5b572fSBarry Smith } 2190cb5b572fSBarry Smith PetscFunctionReturn(0); 2191cb5b572fSBarry Smith } 2192cb5b572fSBarry Smith 21934a2ae208SSatish Balay #undef __FUNCT__ 21944a2ae208SSatish Balay #define __FUNCT__ "MatSetUpPreallocation_MPIAIJ" 2195dfbe8321SBarry Smith PetscErrorCode MatSetUpPreallocation_MPIAIJ(Mat A) 2196273d9f13SBarry Smith { 2197dfbe8321SBarry Smith PetscErrorCode ierr; 2198273d9f13SBarry Smith 2199273d9f13SBarry Smith PetscFunctionBegin; 2200273d9f13SBarry Smith ierr = MatMPIAIJSetPreallocation(A,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr); 2201273d9f13SBarry Smith PetscFunctionReturn(0); 2202273d9f13SBarry Smith } 2203273d9f13SBarry Smith 2204ac90fabeSBarry Smith #undef __FUNCT__ 2205ac90fabeSBarry Smith #define __FUNCT__ "MatAXPY_MPIAIJ" 2206f4df32b1SMatthew Knepley PetscErrorCode MatAXPY_MPIAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str) 2207ac90fabeSBarry Smith { 2208dfbe8321SBarry Smith PetscErrorCode ierr; 2209b1d57f15SBarry Smith PetscInt i; 2210ac90fabeSBarry Smith Mat_MPIAIJ *xx = (Mat_MPIAIJ 
*)X->data,*yy = (Mat_MPIAIJ *)Y->data; 22114ce68768SBarry Smith PetscBLASInt bnz,one=1; 2212ac90fabeSBarry Smith Mat_SeqAIJ *x,*y; 2213ac90fabeSBarry Smith 2214ac90fabeSBarry Smith PetscFunctionBegin; 2215ac90fabeSBarry Smith if (str == SAME_NONZERO_PATTERN) { 2216f4df32b1SMatthew Knepley PetscScalar alpha = a; 2217ac90fabeSBarry Smith x = (Mat_SeqAIJ *)xx->A->data; 2218ac90fabeSBarry Smith y = (Mat_SeqAIJ *)yy->A->data; 22190805154bSBarry Smith bnz = PetscBLASIntCast(x->nz); 2220f4df32b1SMatthew Knepley BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one); 2221ac90fabeSBarry Smith x = (Mat_SeqAIJ *)xx->B->data; 2222ac90fabeSBarry Smith y = (Mat_SeqAIJ *)yy->B->data; 22230805154bSBarry Smith bnz = PetscBLASIntCast(x->nz); 2224f4df32b1SMatthew Knepley BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one); 2225a30b2313SHong Zhang } else if (str == SUBSET_NONZERO_PATTERN) { 2226f4df32b1SMatthew Knepley ierr = MatAXPY_SeqAIJ(yy->A,a,xx->A,str);CHKERRQ(ierr); 2227c537a176SHong Zhang 2228c537a176SHong Zhang x = (Mat_SeqAIJ *)xx->B->data; 2229a30b2313SHong Zhang y = (Mat_SeqAIJ *)yy->B->data; 2230a30b2313SHong Zhang if (y->xtoy && y->XtoY != xx->B) { 2231a30b2313SHong Zhang ierr = PetscFree(y->xtoy);CHKERRQ(ierr); 22326bf464f9SBarry Smith ierr = MatDestroy(&y->XtoY);CHKERRQ(ierr); 2233c537a176SHong Zhang } 2234a30b2313SHong Zhang if (!y->xtoy) { /* get xtoy */ 2235d0f46423SBarry Smith ierr = MatAXPYGetxtoy_Private(xx->B->rmap->n,x->i,x->j,xx->garray,y->i,y->j,yy->garray,&y->xtoy);CHKERRQ(ierr); 2236a30b2313SHong Zhang y->XtoY = xx->B; 2237407f6b05SHong Zhang ierr = PetscObjectReference((PetscObject)xx->B);CHKERRQ(ierr); 2238c537a176SHong Zhang } 2239f4df32b1SMatthew Knepley for (i=0; i<x->nz; i++) y->a[y->xtoy[i]] += a*(x->a[i]); 2240ac90fabeSBarry Smith } else { 22419f5f6813SShri Abhyankar Mat B; 22429f5f6813SShri Abhyankar PetscInt *nnz_d,*nnz_o; 22439f5f6813SShri Abhyankar ierr = PetscMalloc(yy->A->rmap->N*sizeof(PetscInt),&nnz_d);CHKERRQ(ierr); 22449f5f6813SShri Abhyankar ierr = 
PetscMalloc(yy->B->rmap->N*sizeof(PetscInt),&nnz_o);CHKERRQ(ierr); 22459f5f6813SShri Abhyankar ierr = MatCreate(((PetscObject)Y)->comm,&B);CHKERRQ(ierr); 2246bc5a2726SShri Abhyankar ierr = PetscObjectSetName((PetscObject)B,((PetscObject)Y)->name);CHKERRQ(ierr); 22479f5f6813SShri Abhyankar ierr = MatSetSizes(B,Y->rmap->n,Y->cmap->n,Y->rmap->N,Y->cmap->N);CHKERRQ(ierr); 22489f5f6813SShri Abhyankar ierr = MatSetType(B,MATMPIAIJ);CHKERRQ(ierr); 22499f5f6813SShri Abhyankar ierr = MatAXPYGetPreallocation_SeqAIJ(yy->A,xx->A,nnz_d);CHKERRQ(ierr); 22509f5f6813SShri Abhyankar ierr = MatAXPYGetPreallocation_SeqAIJ(yy->B,xx->B,nnz_o);CHKERRQ(ierr); 22519f5f6813SShri Abhyankar ierr = MatMPIAIJSetPreallocation(B,PETSC_NULL,nnz_d,PETSC_NULL,nnz_o);CHKERRQ(ierr); 22529f5f6813SShri Abhyankar ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr); 22539f5f6813SShri Abhyankar ierr = MatHeaderReplace(Y,B); 22549f5f6813SShri Abhyankar ierr = PetscFree(nnz_d);CHKERRQ(ierr); 22559f5f6813SShri Abhyankar ierr = PetscFree(nnz_o);CHKERRQ(ierr); 2256ac90fabeSBarry Smith } 2257ac90fabeSBarry Smith PetscFunctionReturn(0); 2258ac90fabeSBarry Smith } 2259ac90fabeSBarry Smith 22607087cfbeSBarry Smith extern PetscErrorCode MatConjugate_SeqAIJ(Mat); 2261354c94deSBarry Smith 2262354c94deSBarry Smith #undef __FUNCT__ 2263354c94deSBarry Smith #define __FUNCT__ "MatConjugate_MPIAIJ" 22647087cfbeSBarry Smith PetscErrorCode MatConjugate_MPIAIJ(Mat mat) 2265354c94deSBarry Smith { 2266354c94deSBarry Smith #if defined(PETSC_USE_COMPLEX) 2267354c94deSBarry Smith PetscErrorCode ierr; 2268354c94deSBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ *)mat->data; 2269354c94deSBarry Smith 2270354c94deSBarry Smith PetscFunctionBegin; 2271354c94deSBarry Smith ierr = MatConjugate_SeqAIJ(aij->A);CHKERRQ(ierr); 2272354c94deSBarry Smith ierr = MatConjugate_SeqAIJ(aij->B);CHKERRQ(ierr); 2273354c94deSBarry Smith #else 2274354c94deSBarry Smith PetscFunctionBegin; 2275354c94deSBarry Smith #endif 2276354c94deSBarry Smith 
PetscFunctionReturn(0); 2277354c94deSBarry Smith } 2278354c94deSBarry Smith 227999cafbc1SBarry Smith #undef __FUNCT__ 228099cafbc1SBarry Smith #define __FUNCT__ "MatRealPart_MPIAIJ" 228199cafbc1SBarry Smith PetscErrorCode MatRealPart_MPIAIJ(Mat A) 228299cafbc1SBarry Smith { 228399cafbc1SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 228499cafbc1SBarry Smith PetscErrorCode ierr; 228599cafbc1SBarry Smith 228699cafbc1SBarry Smith PetscFunctionBegin; 228799cafbc1SBarry Smith ierr = MatRealPart(a->A);CHKERRQ(ierr); 228899cafbc1SBarry Smith ierr = MatRealPart(a->B);CHKERRQ(ierr); 228999cafbc1SBarry Smith PetscFunctionReturn(0); 229099cafbc1SBarry Smith } 229199cafbc1SBarry Smith 229299cafbc1SBarry Smith #undef __FUNCT__ 229399cafbc1SBarry Smith #define __FUNCT__ "MatImaginaryPart_MPIAIJ" 229499cafbc1SBarry Smith PetscErrorCode MatImaginaryPart_MPIAIJ(Mat A) 229599cafbc1SBarry Smith { 229699cafbc1SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 229799cafbc1SBarry Smith PetscErrorCode ierr; 229899cafbc1SBarry Smith 229999cafbc1SBarry Smith PetscFunctionBegin; 230099cafbc1SBarry Smith ierr = MatImaginaryPart(a->A);CHKERRQ(ierr); 230199cafbc1SBarry Smith ierr = MatImaginaryPart(a->B);CHKERRQ(ierr); 230299cafbc1SBarry Smith PetscFunctionReturn(0); 230399cafbc1SBarry Smith } 230499cafbc1SBarry Smith 2305103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL 2306103bf8bdSMatthew Knepley 2307103bf8bdSMatthew Knepley #include <boost/parallel/mpi/bsp_process_group.hpp> 2308a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_default_graph.hpp> 2309a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_0_block.hpp> 2310a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_preconditioner.hpp> 2311103bf8bdSMatthew Knepley #include <boost/graph/distributed/petsc/interface.hpp> 2312a2c909beSMatthew Knepley #include <boost/multi_array.hpp> 2313d0f46423SBarry Smith #include <boost/parallel/distributed_property_map->hpp> 2314103bf8bdSMatthew Knepley 
2315103bf8bdSMatthew Knepley #undef __FUNCT__ 2316103bf8bdSMatthew Knepley #define __FUNCT__ "MatILUFactorSymbolic_MPIAIJ" 2317103bf8bdSMatthew Knepley /* 2318103bf8bdSMatthew Knepley This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu> 2319103bf8bdSMatthew Knepley */ 23200481f469SBarry Smith PetscErrorCode MatILUFactorSymbolic_MPIAIJ(Mat fact,Mat A, IS isrow, IS iscol, const MatFactorInfo *info) 2321103bf8bdSMatthew Knepley { 2322a2c909beSMatthew Knepley namespace petsc = boost::distributed::petsc; 2323a2c909beSMatthew Knepley 2324a2c909beSMatthew Knepley namespace graph_dist = boost::graph::distributed; 2325a2c909beSMatthew Knepley using boost::graph::distributed::ilu_default::process_group_type; 2326a2c909beSMatthew Knepley using boost::graph::ilu_permuted; 2327a2c909beSMatthew Knepley 2328ace3abfcSBarry Smith PetscBool row_identity, col_identity; 2329776b82aeSLisandro Dalcin PetscContainer c; 2330103bf8bdSMatthew Knepley PetscInt m, n, M, N; 2331103bf8bdSMatthew Knepley PetscErrorCode ierr; 2332103bf8bdSMatthew Knepley 2333103bf8bdSMatthew Knepley PetscFunctionBegin; 2334e32f2f54SBarry Smith if (info->levels != 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only levels = 0 supported for parallel ilu"); 2335103bf8bdSMatthew Knepley ierr = ISIdentity(isrow, &row_identity);CHKERRQ(ierr); 2336103bf8bdSMatthew Knepley ierr = ISIdentity(iscol, &col_identity);CHKERRQ(ierr); 2337103bf8bdSMatthew Knepley if (!row_identity || !col_identity) { 2338e32f2f54SBarry Smith SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Row and column permutations must be identity for parallel ILU"); 2339103bf8bdSMatthew Knepley } 2340103bf8bdSMatthew Knepley 2341103bf8bdSMatthew Knepley process_group_type pg; 2342a2c909beSMatthew Knepley typedef graph_dist::ilu_default::ilu_level_graph_type lgraph_type; 2343a2c909beSMatthew Knepley lgraph_type* lgraph_p = new lgraph_type(petsc::num_global_vertices(A), pg, petsc::matrix_distribution(A, pg)); 2344a2c909beSMatthew Knepley 
lgraph_type& level_graph = *lgraph_p; 2345a2c909beSMatthew Knepley graph_dist::ilu_default::graph_type& graph(level_graph.graph); 2346a2c909beSMatthew Knepley 2347103bf8bdSMatthew Knepley petsc::read_matrix(A, graph, get(boost::edge_weight, graph)); 2348a2c909beSMatthew Knepley ilu_permuted(level_graph); 2349103bf8bdSMatthew Knepley 2350103bf8bdSMatthew Knepley /* put together the new matrix */ 23517adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)A)->comm, fact);CHKERRQ(ierr); 2352103bf8bdSMatthew Knepley ierr = MatGetLocalSize(A, &m, &n);CHKERRQ(ierr); 2353103bf8bdSMatthew Knepley ierr = MatGetSize(A, &M, &N);CHKERRQ(ierr); 2354719d5645SBarry Smith ierr = MatSetSizes(fact, m, n, M, N);CHKERRQ(ierr); 2355719d5645SBarry Smith ierr = MatSetType(fact, ((PetscObject)A)->type_name);CHKERRQ(ierr); 2356719d5645SBarry Smith ierr = MatAssemblyBegin(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2357719d5645SBarry Smith ierr = MatAssemblyEnd(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2358103bf8bdSMatthew Knepley 23597adad957SLisandro Dalcin ierr = PetscContainerCreate(((PetscObject)A)->comm, &c); 2360776b82aeSLisandro Dalcin ierr = PetscContainerSetPointer(c, lgraph_p); 2361719d5645SBarry Smith ierr = PetscObjectCompose((PetscObject) (fact), "graph", (PetscObject) c); 2362bf0cc555SLisandro Dalcin ierr = PetscContainerDestroy(&c); 2363103bf8bdSMatthew Knepley PetscFunctionReturn(0); 2364103bf8bdSMatthew Knepley } 2365103bf8bdSMatthew Knepley 2366103bf8bdSMatthew Knepley #undef __FUNCT__ 2367103bf8bdSMatthew Knepley #define __FUNCT__ "MatLUFactorNumeric_MPIAIJ" 23680481f469SBarry Smith PetscErrorCode MatLUFactorNumeric_MPIAIJ(Mat B,Mat A, const MatFactorInfo *info) 2369103bf8bdSMatthew Knepley { 2370103bf8bdSMatthew Knepley PetscFunctionBegin; 2371103bf8bdSMatthew Knepley PetscFunctionReturn(0); 2372103bf8bdSMatthew Knepley } 2373103bf8bdSMatthew Knepley 2374103bf8bdSMatthew Knepley #undef __FUNCT__ 2375103bf8bdSMatthew Knepley #define __FUNCT__ "MatSolve_MPIAIJ" 
2376103bf8bdSMatthew Knepley /* 2377103bf8bdSMatthew Knepley This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu> 2378103bf8bdSMatthew Knepley */ 2379103bf8bdSMatthew Knepley PetscErrorCode MatSolve_MPIAIJ(Mat A, Vec b, Vec x) 2380103bf8bdSMatthew Knepley { 2381a2c909beSMatthew Knepley namespace graph_dist = boost::graph::distributed; 2382a2c909beSMatthew Knepley 2383a2c909beSMatthew Knepley typedef graph_dist::ilu_default::ilu_level_graph_type lgraph_type; 2384a2c909beSMatthew Knepley lgraph_type* lgraph_p; 2385776b82aeSLisandro Dalcin PetscContainer c; 2386103bf8bdSMatthew Knepley PetscErrorCode ierr; 2387103bf8bdSMatthew Knepley 2388103bf8bdSMatthew Knepley PetscFunctionBegin; 2389103bf8bdSMatthew Knepley ierr = PetscObjectQuery((PetscObject) A, "graph", (PetscObject *) &c);CHKERRQ(ierr); 2390776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(c, (void **) &lgraph_p);CHKERRQ(ierr); 2391103bf8bdSMatthew Knepley ierr = VecCopy(b, x);CHKERRQ(ierr); 2392a2c909beSMatthew Knepley 2393a2c909beSMatthew Knepley PetscScalar* array_x; 2394a2c909beSMatthew Knepley ierr = VecGetArray(x, &array_x);CHKERRQ(ierr); 2395a2c909beSMatthew Knepley PetscInt sx; 2396a2c909beSMatthew Knepley ierr = VecGetSize(x, &sx);CHKERRQ(ierr); 2397a2c909beSMatthew Knepley 2398a2c909beSMatthew Knepley PetscScalar* array_b; 2399a2c909beSMatthew Knepley ierr = VecGetArray(b, &array_b);CHKERRQ(ierr); 2400a2c909beSMatthew Knepley PetscInt sb; 2401a2c909beSMatthew Knepley ierr = VecGetSize(b, &sb);CHKERRQ(ierr); 2402a2c909beSMatthew Knepley 2403a2c909beSMatthew Knepley lgraph_type& level_graph = *lgraph_p; 2404a2c909beSMatthew Knepley graph_dist::ilu_default::graph_type& graph(level_graph.graph); 2405a2c909beSMatthew Knepley 2406a2c909beSMatthew Knepley typedef boost::multi_array_ref<PetscScalar, 1> array_ref_type; 2407a2c909beSMatthew Knepley array_ref_type ref_b(array_b, boost::extents[num_vertices(graph)]), 2408a2c909beSMatthew Knepley ref_x(array_x, 
boost::extents[num_vertices(graph)]); 2409a2c909beSMatthew Knepley 2410a2c909beSMatthew Knepley typedef boost::iterator_property_map<array_ref_type::iterator, 2411a2c909beSMatthew Knepley boost::property_map<graph_dist::ilu_default::graph_type, boost::vertex_index_t>::type> gvector_type; 2412a2c909beSMatthew Knepley gvector_type vector_b(ref_b.begin(), get(boost::vertex_index, graph)), 2413a2c909beSMatthew Knepley vector_x(ref_x.begin(), get(boost::vertex_index, graph)); 2414a2c909beSMatthew Knepley 2415a2c909beSMatthew Knepley ilu_set_solve(*lgraph_p, vector_b, vector_x); 2416a2c909beSMatthew Knepley 2417103bf8bdSMatthew Knepley PetscFunctionReturn(0); 2418103bf8bdSMatthew Knepley } 2419103bf8bdSMatthew Knepley #endif 2420103bf8bdSMatthew Knepley 242169db28dcSHong Zhang typedef struct { /* used by MatGetRedundantMatrix() for reusing matredundant */ 242269db28dcSHong Zhang PetscInt nzlocal,nsends,nrecvs; 24231d79065fSBarry Smith PetscMPIInt *send_rank,*recv_rank; 24241d79065fSBarry Smith PetscInt *sbuf_nz,*rbuf_nz,*sbuf_j,**rbuf_j; 242569db28dcSHong Zhang PetscScalar *sbuf_a,**rbuf_a; 2426bf0cc555SLisandro Dalcin PetscErrorCode (*Destroy)(Mat); 242769db28dcSHong Zhang } Mat_Redundant; 242869db28dcSHong Zhang 242969db28dcSHong Zhang #undef __FUNCT__ 243069db28dcSHong Zhang #define __FUNCT__ "PetscContainerDestroy_MatRedundant" 243169db28dcSHong Zhang PetscErrorCode PetscContainerDestroy_MatRedundant(void *ptr) 243269db28dcSHong Zhang { 243369db28dcSHong Zhang PetscErrorCode ierr; 243469db28dcSHong Zhang Mat_Redundant *redund=(Mat_Redundant*)ptr; 243569db28dcSHong Zhang PetscInt i; 243669db28dcSHong Zhang 243769db28dcSHong Zhang PetscFunctionBegin; 24381d79065fSBarry Smith ierr = PetscFree2(redund->send_rank,redund->recv_rank);CHKERRQ(ierr); 243969db28dcSHong Zhang ierr = PetscFree(redund->sbuf_j);CHKERRQ(ierr); 244069db28dcSHong Zhang ierr = PetscFree(redund->sbuf_a);CHKERRQ(ierr); 244169db28dcSHong Zhang for (i=0; i<redund->nrecvs; i++){ 244269db28dcSHong Zhang 
ierr = PetscFree(redund->rbuf_j[i]);CHKERRQ(ierr); 244369db28dcSHong Zhang ierr = PetscFree(redund->rbuf_a[i]);CHKERRQ(ierr); 244469db28dcSHong Zhang } 24451d79065fSBarry Smith ierr = PetscFree4(redund->sbuf_nz,redund->rbuf_nz,redund->rbuf_j,redund->rbuf_a);CHKERRQ(ierr); 244669db28dcSHong Zhang ierr = PetscFree(redund);CHKERRQ(ierr); 244769db28dcSHong Zhang PetscFunctionReturn(0); 244869db28dcSHong Zhang } 244969db28dcSHong Zhang 245069db28dcSHong Zhang #undef __FUNCT__ 245169db28dcSHong Zhang #define __FUNCT__ "MatDestroy_MatRedundant" 245269db28dcSHong Zhang PetscErrorCode MatDestroy_MatRedundant(Mat A) 245369db28dcSHong Zhang { 245469db28dcSHong Zhang PetscErrorCode ierr; 245569db28dcSHong Zhang PetscContainer container; 245669db28dcSHong Zhang Mat_Redundant *redund=PETSC_NULL; 245769db28dcSHong Zhang 245869db28dcSHong Zhang PetscFunctionBegin; 245969db28dcSHong Zhang ierr = PetscObjectQuery((PetscObject)A,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr); 2460bf0cc555SLisandro Dalcin if (!container) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Container does not exit"); 246169db28dcSHong Zhang ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr); 2462bf0cc555SLisandro Dalcin A->ops->destroy = redund->Destroy; 246369db28dcSHong Zhang ierr = PetscObjectCompose((PetscObject)A,"Mat_Redundant",0);CHKERRQ(ierr); 2464bf0cc555SLisandro Dalcin if (A->ops->destroy) { 246569db28dcSHong Zhang ierr = (*A->ops->destroy)(A);CHKERRQ(ierr); 2466bf0cc555SLisandro Dalcin } 246769db28dcSHong Zhang PetscFunctionReturn(0); 246869db28dcSHong Zhang } 246969db28dcSHong Zhang 247069db28dcSHong Zhang #undef __FUNCT__ 247169db28dcSHong Zhang #define __FUNCT__ "MatGetRedundantMatrix_MPIAIJ" 247269db28dcSHong Zhang PetscErrorCode MatGetRedundantMatrix_MPIAIJ(Mat mat,PetscInt nsubcomm,MPI_Comm subcomm,PetscInt mlocal_sub,MatReuse reuse,Mat *matredundant) 247369db28dcSHong Zhang { 247469db28dcSHong Zhang PetscMPIInt rank,size; 24757adad957SLisandro Dalcin MPI_Comm 
comm=((PetscObject)mat)->comm; 247669db28dcSHong Zhang PetscErrorCode ierr; 247769db28dcSHong Zhang PetscInt nsends=0,nrecvs=0,i,rownz_max=0; 247869db28dcSHong Zhang PetscMPIInt *send_rank=PETSC_NULL,*recv_rank=PETSC_NULL; 2479d0f46423SBarry Smith PetscInt *rowrange=mat->rmap->range; 248069db28dcSHong Zhang Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 248169db28dcSHong Zhang Mat A=aij->A,B=aij->B,C=*matredundant; 248269db28dcSHong Zhang Mat_SeqAIJ *a=(Mat_SeqAIJ*)A->data,*b=(Mat_SeqAIJ*)B->data; 248369db28dcSHong Zhang PetscScalar *sbuf_a; 248469db28dcSHong Zhang PetscInt nzlocal=a->nz+b->nz; 2485d0f46423SBarry Smith PetscInt j,cstart=mat->cmap->rstart,cend=mat->cmap->rend,row,nzA,nzB,ncols,*cworkA,*cworkB; 2486d0f46423SBarry Smith PetscInt rstart=mat->rmap->rstart,rend=mat->rmap->rend,*bmap=aij->garray,M,N; 248769db28dcSHong Zhang PetscInt *cols,ctmp,lwrite,*rptr,l,*sbuf_j; 2488a77337e4SBarry Smith MatScalar *aworkA,*aworkB; 2489a77337e4SBarry Smith PetscScalar *vals; 249069db28dcSHong Zhang PetscMPIInt tag1,tag2,tag3,imdex; 249169db28dcSHong Zhang MPI_Request *s_waits1=PETSC_NULL,*s_waits2=PETSC_NULL,*s_waits3=PETSC_NULL, 249269db28dcSHong Zhang *r_waits1=PETSC_NULL,*r_waits2=PETSC_NULL,*r_waits3=PETSC_NULL; 249369db28dcSHong Zhang MPI_Status recv_status,*send_status; 249469db28dcSHong Zhang PetscInt *sbuf_nz=PETSC_NULL,*rbuf_nz=PETSC_NULL,count; 249569db28dcSHong Zhang PetscInt **rbuf_j=PETSC_NULL; 249669db28dcSHong Zhang PetscScalar **rbuf_a=PETSC_NULL; 249769db28dcSHong Zhang Mat_Redundant *redund=PETSC_NULL; 249869db28dcSHong Zhang PetscContainer container; 249969db28dcSHong Zhang 250069db28dcSHong Zhang PetscFunctionBegin; 250169db28dcSHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 250269db28dcSHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 250369db28dcSHong Zhang 250469db28dcSHong Zhang if (reuse == MAT_REUSE_MATRIX) { 250569db28dcSHong Zhang ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr); 2506e32f2f54SBarry Smith if (M != N || M != 
mat->rmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong global size"); 250769db28dcSHong Zhang ierr = MatGetLocalSize(C,&M,&N);CHKERRQ(ierr); 2508e32f2f54SBarry Smith if (M != N || M != mlocal_sub) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong local size"); 250969db28dcSHong Zhang ierr = PetscObjectQuery((PetscObject)C,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr); 2510bf0cc555SLisandro Dalcin if (!container) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Container does not exit"); 251169db28dcSHong Zhang ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr); 2512e32f2f54SBarry Smith if (nzlocal != redund->nzlocal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong nzlocal"); 251369db28dcSHong Zhang 251469db28dcSHong Zhang nsends = redund->nsends; 251569db28dcSHong Zhang nrecvs = redund->nrecvs; 25161d79065fSBarry Smith send_rank = redund->send_rank; 25171d79065fSBarry Smith recv_rank = redund->recv_rank; 25181d79065fSBarry Smith sbuf_nz = redund->sbuf_nz; 25191d79065fSBarry Smith rbuf_nz = redund->rbuf_nz; 252069db28dcSHong Zhang sbuf_j = redund->sbuf_j; 252169db28dcSHong Zhang sbuf_a = redund->sbuf_a; 252269db28dcSHong Zhang rbuf_j = redund->rbuf_j; 252369db28dcSHong Zhang rbuf_a = redund->rbuf_a; 252469db28dcSHong Zhang } 252569db28dcSHong Zhang 252669db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 252769db28dcSHong Zhang PetscMPIInt subrank,subsize; 252869db28dcSHong Zhang PetscInt nleftover,np_subcomm; 252969db28dcSHong Zhang /* get the destination processors' id send_rank, nsends and nrecvs */ 253069db28dcSHong Zhang ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr); 253169db28dcSHong Zhang ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr); 25321d79065fSBarry Smith ierr = PetscMalloc2(size,PetscMPIInt,&send_rank,size,PetscMPIInt,&recv_rank); 253369db28dcSHong Zhang np_subcomm = size/nsubcomm; 253469db28dcSHong Zhang nleftover = size - nsubcomm*np_subcomm; 
253569db28dcSHong Zhang nsends = 0; nrecvs = 0; 253669db28dcSHong Zhang for (i=0; i<size; i++){ /* i=rank*/ 253769db28dcSHong Zhang if (subrank == i/nsubcomm && rank != i){ /* my_subrank == other's subrank */ 253869db28dcSHong Zhang send_rank[nsends] = i; nsends++; 253969db28dcSHong Zhang recv_rank[nrecvs++] = i; 254069db28dcSHong Zhang } 254169db28dcSHong Zhang } 254269db28dcSHong Zhang if (rank >= size - nleftover){/* this proc is a leftover processor */ 254369db28dcSHong Zhang i = size-nleftover-1; 254469db28dcSHong Zhang j = 0; 254569db28dcSHong Zhang while (j < nsubcomm - nleftover){ 254669db28dcSHong Zhang send_rank[nsends++] = i; 254769db28dcSHong Zhang i--; j++; 254869db28dcSHong Zhang } 254969db28dcSHong Zhang } 255069db28dcSHong Zhang 255169db28dcSHong Zhang if (nleftover && subsize == size/nsubcomm && subrank==subsize-1){ /* this proc recvs from leftover processors */ 255269db28dcSHong Zhang for (i=0; i<nleftover; i++){ 255369db28dcSHong Zhang recv_rank[nrecvs++] = size-nleftover+i; 255469db28dcSHong Zhang } 255569db28dcSHong Zhang } 255669db28dcSHong Zhang 255769db28dcSHong Zhang /* allocate sbuf_j, sbuf_a */ 255869db28dcSHong Zhang i = nzlocal + rowrange[rank+1] - rowrange[rank] + 2; 255969db28dcSHong Zhang ierr = PetscMalloc(i*sizeof(PetscInt),&sbuf_j);CHKERRQ(ierr); 256069db28dcSHong Zhang ierr = PetscMalloc((nzlocal+1)*sizeof(PetscScalar),&sbuf_a);CHKERRQ(ierr); 256169db28dcSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 256269db28dcSHong Zhang 256369db28dcSHong Zhang /* copy mat's local entries into the buffers */ 256469db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 256569db28dcSHong Zhang rownz_max = 0; 256669db28dcSHong Zhang rptr = sbuf_j; 256769db28dcSHong Zhang cols = sbuf_j + rend-rstart + 1; 256869db28dcSHong Zhang vals = sbuf_a; 256969db28dcSHong Zhang rptr[0] = 0; 257069db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 257169db28dcSHong Zhang row = i + rstart; 257269db28dcSHong Zhang nzA = a->i[i+1] - a->i[i]; nzB = 
b->i[i+1] - b->i[i]; 257369db28dcSHong Zhang ncols = nzA + nzB; 257469db28dcSHong Zhang cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i]; 257569db28dcSHong Zhang aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i]; 257669db28dcSHong Zhang /* load the column indices for this row into cols */ 257769db28dcSHong Zhang lwrite = 0; 257869db28dcSHong Zhang for (l=0; l<nzB; l++) { 257969db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) < cstart){ 258069db28dcSHong Zhang vals[lwrite] = aworkB[l]; 258169db28dcSHong Zhang cols[lwrite++] = ctmp; 258269db28dcSHong Zhang } 258369db28dcSHong Zhang } 258469db28dcSHong Zhang for (l=0; l<nzA; l++){ 258569db28dcSHong Zhang vals[lwrite] = aworkA[l]; 258669db28dcSHong Zhang cols[lwrite++] = cstart + cworkA[l]; 258769db28dcSHong Zhang } 258869db28dcSHong Zhang for (l=0; l<nzB; l++) { 258969db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) >= cend){ 259069db28dcSHong Zhang vals[lwrite] = aworkB[l]; 259169db28dcSHong Zhang cols[lwrite++] = ctmp; 259269db28dcSHong Zhang } 259369db28dcSHong Zhang } 259469db28dcSHong Zhang vals += ncols; 259569db28dcSHong Zhang cols += ncols; 259669db28dcSHong Zhang rptr[i+1] = rptr[i] + ncols; 259769db28dcSHong Zhang if (rownz_max < ncols) rownz_max = ncols; 259869db28dcSHong Zhang } 2599e32f2f54SBarry Smith if (rptr[rend-rstart] != a->nz + b->nz) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB, "rptr[%d] %d != %d + %d",rend-rstart,rptr[rend-rstart+1],a->nz,b->nz); 260069db28dcSHong Zhang } else { /* only copy matrix values into sbuf_a */ 260169db28dcSHong Zhang rptr = sbuf_j; 260269db28dcSHong Zhang vals = sbuf_a; 260369db28dcSHong Zhang rptr[0] = 0; 260469db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 260569db28dcSHong Zhang row = i + rstart; 260669db28dcSHong Zhang nzA = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i]; 260769db28dcSHong Zhang ncols = nzA + nzB; 260869db28dcSHong Zhang cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i]; 260969db28dcSHong Zhang aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i]; 
261069db28dcSHong Zhang lwrite = 0; 261169db28dcSHong Zhang for (l=0; l<nzB; l++) { 261269db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) < cstart) vals[lwrite++] = aworkB[l]; 261369db28dcSHong Zhang } 261469db28dcSHong Zhang for (l=0; l<nzA; l++) vals[lwrite++] = aworkA[l]; 261569db28dcSHong Zhang for (l=0; l<nzB; l++) { 261669db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) >= cend) vals[lwrite++] = aworkB[l]; 261769db28dcSHong Zhang } 261869db28dcSHong Zhang vals += ncols; 261969db28dcSHong Zhang rptr[i+1] = rptr[i] + ncols; 262069db28dcSHong Zhang } 262169db28dcSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 262269db28dcSHong Zhang 262369db28dcSHong Zhang /* send nzlocal to others, and recv other's nzlocal */ 262469db28dcSHong Zhang /*--------------------------------------------------*/ 262569db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 262669db28dcSHong Zhang ierr = PetscMalloc2(3*(nsends + nrecvs)+1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr); 262769db28dcSHong Zhang s_waits2 = s_waits3 + nsends; 262869db28dcSHong Zhang s_waits1 = s_waits2 + nsends; 262969db28dcSHong Zhang r_waits1 = s_waits1 + nsends; 263069db28dcSHong Zhang r_waits2 = r_waits1 + nrecvs; 263169db28dcSHong Zhang r_waits3 = r_waits2 + nrecvs; 263269db28dcSHong Zhang } else { 263369db28dcSHong Zhang ierr = PetscMalloc2(nsends + nrecvs +1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr); 263469db28dcSHong Zhang r_waits3 = s_waits3 + nsends; 263569db28dcSHong Zhang } 263669db28dcSHong Zhang 263769db28dcSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag3);CHKERRQ(ierr); 263869db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 263969db28dcSHong Zhang /* get new tags to keep the communication clean */ 264069db28dcSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag1);CHKERRQ(ierr); 264169db28dcSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag2);CHKERRQ(ierr); 26421d79065fSBarry Smith ierr = 
PetscMalloc4(nsends,PetscInt,&sbuf_nz,nrecvs,PetscInt,&rbuf_nz,nrecvs,PetscInt*,&rbuf_j,nrecvs,PetscScalar*,&rbuf_a);CHKERRQ(ierr); 264369db28dcSHong Zhang 264469db28dcSHong Zhang /* post receives of other's nzlocal */ 264569db28dcSHong Zhang for (i=0; i<nrecvs; i++){ 264669db28dcSHong Zhang ierr = MPI_Irecv(rbuf_nz+i,1,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,r_waits1+i);CHKERRQ(ierr); 264769db28dcSHong Zhang } 264869db28dcSHong Zhang /* send nzlocal to others */ 264969db28dcSHong Zhang for (i=0; i<nsends; i++){ 265069db28dcSHong Zhang sbuf_nz[i] = nzlocal; 265169db28dcSHong Zhang ierr = MPI_Isend(sbuf_nz+i,1,MPIU_INT,send_rank[i],tag1,comm,s_waits1+i);CHKERRQ(ierr); 265269db28dcSHong Zhang } 265369db28dcSHong Zhang /* wait on receives of nzlocal; allocate space for rbuf_j, rbuf_a */ 265469db28dcSHong Zhang count = nrecvs; 265569db28dcSHong Zhang while (count) { 265669db28dcSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits1,&imdex,&recv_status);CHKERRQ(ierr); 265769db28dcSHong Zhang recv_rank[imdex] = recv_status.MPI_SOURCE; 265869db28dcSHong Zhang /* allocate rbuf_a and rbuf_j; then post receives of rbuf_j */ 265969db28dcSHong Zhang ierr = PetscMalloc((rbuf_nz[imdex]+1)*sizeof(PetscScalar),&rbuf_a[imdex]);CHKERRQ(ierr); 266069db28dcSHong Zhang 266169db28dcSHong Zhang i = rowrange[recv_status.MPI_SOURCE+1] - rowrange[recv_status.MPI_SOURCE]; /* number of expected mat->i */ 266269db28dcSHong Zhang rbuf_nz[imdex] += i + 2; 266369db28dcSHong Zhang ierr = PetscMalloc(rbuf_nz[imdex]*sizeof(PetscInt),&rbuf_j[imdex]);CHKERRQ(ierr); 266469db28dcSHong Zhang ierr = MPI_Irecv(rbuf_j[imdex],rbuf_nz[imdex],MPIU_INT,recv_status.MPI_SOURCE,tag2,comm,r_waits2+imdex);CHKERRQ(ierr); 266569db28dcSHong Zhang count--; 266669db28dcSHong Zhang } 266769db28dcSHong Zhang /* wait on sends of nzlocal */ 266869db28dcSHong Zhang if (nsends) {ierr = MPI_Waitall(nsends,s_waits1,send_status);CHKERRQ(ierr);} 266969db28dcSHong Zhang /* send mat->i,j to others, and recv from other's */ 267069db28dcSHong Zhang 
/*------------------------------------------------*/ 267169db28dcSHong Zhang for (i=0; i<nsends; i++){ 267269db28dcSHong Zhang j = nzlocal + rowrange[rank+1] - rowrange[rank] + 1; 267369db28dcSHong Zhang ierr = MPI_Isend(sbuf_j,j,MPIU_INT,send_rank[i],tag2,comm,s_waits2+i);CHKERRQ(ierr); 267469db28dcSHong Zhang } 267569db28dcSHong Zhang /* wait on receives of mat->i,j */ 267669db28dcSHong Zhang /*------------------------------*/ 267769db28dcSHong Zhang count = nrecvs; 267869db28dcSHong Zhang while (count) { 267969db28dcSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits2,&imdex,&recv_status);CHKERRQ(ierr); 2680e32f2f54SBarry Smith if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(PETSC_COMM_SELF,1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE); 268169db28dcSHong Zhang count--; 268269db28dcSHong Zhang } 268369db28dcSHong Zhang /* wait on sends of mat->i,j */ 268469db28dcSHong Zhang /*---------------------------*/ 268569db28dcSHong Zhang if (nsends) { 268669db28dcSHong Zhang ierr = MPI_Waitall(nsends,s_waits2,send_status);CHKERRQ(ierr); 268769db28dcSHong Zhang } 268869db28dcSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 268969db28dcSHong Zhang 269069db28dcSHong Zhang /* post receives, send and receive mat->a */ 269169db28dcSHong Zhang /*----------------------------------------*/ 269269db28dcSHong Zhang for (imdex=0; imdex<nrecvs; imdex++) { 269369db28dcSHong Zhang ierr = MPI_Irecv(rbuf_a[imdex],rbuf_nz[imdex],MPIU_SCALAR,recv_rank[imdex],tag3,comm,r_waits3+imdex);CHKERRQ(ierr); 269469db28dcSHong Zhang } 269569db28dcSHong Zhang for (i=0; i<nsends; i++){ 269669db28dcSHong Zhang ierr = MPI_Isend(sbuf_a,nzlocal,MPIU_SCALAR,send_rank[i],tag3,comm,s_waits3+i);CHKERRQ(ierr); 269769db28dcSHong Zhang } 269869db28dcSHong Zhang count = nrecvs; 269969db28dcSHong Zhang while (count) { 270069db28dcSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits3,&imdex,&recv_status);CHKERRQ(ierr); 2701e32f2f54SBarry Smith if (recv_rank[imdex] != 
recv_status.MPI_SOURCE) SETERRQ2(PETSC_COMM_SELF,1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE); 270269db28dcSHong Zhang count--; 270369db28dcSHong Zhang } 270469db28dcSHong Zhang if (nsends) { 270569db28dcSHong Zhang ierr = MPI_Waitall(nsends,s_waits3,send_status);CHKERRQ(ierr); 270669db28dcSHong Zhang } 270769db28dcSHong Zhang 270869db28dcSHong Zhang ierr = PetscFree2(s_waits3,send_status);CHKERRQ(ierr); 270969db28dcSHong Zhang 271069db28dcSHong Zhang /* create redundant matrix */ 271169db28dcSHong Zhang /*-------------------------*/ 271269db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 271369db28dcSHong Zhang /* compute rownz_max for preallocation */ 271469db28dcSHong Zhang for (imdex=0; imdex<nrecvs; imdex++){ 271569db28dcSHong Zhang j = rowrange[recv_rank[imdex]+1] - rowrange[recv_rank[imdex]]; 271669db28dcSHong Zhang rptr = rbuf_j[imdex]; 271769db28dcSHong Zhang for (i=0; i<j; i++){ 271869db28dcSHong Zhang ncols = rptr[i+1] - rptr[i]; 271969db28dcSHong Zhang if (rownz_max < ncols) rownz_max = ncols; 272069db28dcSHong Zhang } 272169db28dcSHong Zhang } 272269db28dcSHong Zhang 272369db28dcSHong Zhang ierr = MatCreate(subcomm,&C);CHKERRQ(ierr); 272469db28dcSHong Zhang ierr = MatSetSizes(C,mlocal_sub,mlocal_sub,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr); 272569db28dcSHong Zhang ierr = MatSetFromOptions(C);CHKERRQ(ierr); 272669db28dcSHong Zhang ierr = MatSeqAIJSetPreallocation(C,rownz_max,PETSC_NULL);CHKERRQ(ierr); 272769db28dcSHong Zhang ierr = MatMPIAIJSetPreallocation(C,rownz_max,PETSC_NULL,rownz_max,PETSC_NULL);CHKERRQ(ierr); 272869db28dcSHong Zhang } else { 272969db28dcSHong Zhang C = *matredundant; 273069db28dcSHong Zhang } 273169db28dcSHong Zhang 273269db28dcSHong Zhang /* insert local matrix entries */ 273369db28dcSHong Zhang rptr = sbuf_j; 273469db28dcSHong Zhang cols = sbuf_j + rend-rstart + 1; 273569db28dcSHong Zhang vals = sbuf_a; 273669db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 273769db28dcSHong Zhang row = i + 
rstart; 273869db28dcSHong Zhang ncols = rptr[i+1] - rptr[i]; 273969db28dcSHong Zhang ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr); 274069db28dcSHong Zhang vals += ncols; 274169db28dcSHong Zhang cols += ncols; 274269db28dcSHong Zhang } 274369db28dcSHong Zhang /* insert received matrix entries */ 274469db28dcSHong Zhang for (imdex=0; imdex<nrecvs; imdex++){ 274569db28dcSHong Zhang rstart = rowrange[recv_rank[imdex]]; 274669db28dcSHong Zhang rend = rowrange[recv_rank[imdex]+1]; 274769db28dcSHong Zhang rptr = rbuf_j[imdex]; 274869db28dcSHong Zhang cols = rbuf_j[imdex] + rend-rstart + 1; 274969db28dcSHong Zhang vals = rbuf_a[imdex]; 275069db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 275169db28dcSHong Zhang row = i + rstart; 275269db28dcSHong Zhang ncols = rptr[i+1] - rptr[i]; 275369db28dcSHong Zhang ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr); 275469db28dcSHong Zhang vals += ncols; 275569db28dcSHong Zhang cols += ncols; 275669db28dcSHong Zhang } 275769db28dcSHong Zhang } 275869db28dcSHong Zhang ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 275969db28dcSHong Zhang ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 276069db28dcSHong Zhang ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr); 2761e32f2f54SBarry Smith if (M != mat->rmap->N || N != mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"redundant mat size %d != input mat size %d",M,mat->rmap->N); 276269db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX) { 276369db28dcSHong Zhang PetscContainer container; 276469db28dcSHong Zhang *matredundant = C; 276569db28dcSHong Zhang /* create a supporting struct and attach it to C for reuse */ 276638f2d2fdSLisandro Dalcin ierr = PetscNewLog(C,Mat_Redundant,&redund);CHKERRQ(ierr); 276769db28dcSHong Zhang ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr); 276869db28dcSHong Zhang ierr = PetscContainerSetPointer(container,redund);CHKERRQ(ierr); 276969db28dcSHong Zhang ierr = 
PetscContainerSetUserDestroy(container,PetscContainerDestroy_MatRedundant);CHKERRQ(ierr); 2770bf0cc555SLisandro Dalcin ierr = PetscObjectCompose((PetscObject)C,"Mat_Redundant",(PetscObject)container);CHKERRQ(ierr); 2771bf0cc555SLisandro Dalcin ierr = PetscContainerDestroy(&container);CHKERRQ(ierr); 277269db28dcSHong Zhang 277369db28dcSHong Zhang redund->nzlocal = nzlocal; 277469db28dcSHong Zhang redund->nsends = nsends; 277569db28dcSHong Zhang redund->nrecvs = nrecvs; 277669db28dcSHong Zhang redund->send_rank = send_rank; 27771d79065fSBarry Smith redund->recv_rank = recv_rank; 277869db28dcSHong Zhang redund->sbuf_nz = sbuf_nz; 27791d79065fSBarry Smith redund->rbuf_nz = rbuf_nz; 278069db28dcSHong Zhang redund->sbuf_j = sbuf_j; 278169db28dcSHong Zhang redund->sbuf_a = sbuf_a; 278269db28dcSHong Zhang redund->rbuf_j = rbuf_j; 278369db28dcSHong Zhang redund->rbuf_a = rbuf_a; 278469db28dcSHong Zhang 2785bf0cc555SLisandro Dalcin redund->Destroy = C->ops->destroy; 278669db28dcSHong Zhang C->ops->destroy = MatDestroy_MatRedundant; 278769db28dcSHong Zhang } 278869db28dcSHong Zhang PetscFunctionReturn(0); 278969db28dcSHong Zhang } 279069db28dcSHong Zhang 279103bc72f1SMatthew Knepley #undef __FUNCT__ 2792c91732d9SHong Zhang #define __FUNCT__ "MatGetRowMaxAbs_MPIAIJ" 2793c91732d9SHong Zhang PetscErrorCode MatGetRowMaxAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2794c91732d9SHong Zhang { 2795c91732d9SHong Zhang Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2796c91732d9SHong Zhang PetscErrorCode ierr; 2797c91732d9SHong Zhang PetscInt i,*idxb = 0; 2798c91732d9SHong Zhang PetscScalar *va,*vb; 2799c91732d9SHong Zhang Vec vtmp; 2800c91732d9SHong Zhang 2801c91732d9SHong Zhang PetscFunctionBegin; 2802c91732d9SHong Zhang ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr); 2803c91732d9SHong Zhang ierr = VecGetArray(v,&va);CHKERRQ(ierr); 2804c91732d9SHong Zhang if (idx) { 2805192daf7cSBarry Smith for (i=0; i<A->rmap->n; i++) { 2806d0f46423SBarry Smith if (PetscAbsScalar(va[i])) idx[i] += 
A->cmap->rstart; 2807c91732d9SHong Zhang } 2808c91732d9SHong Zhang } 2809c91732d9SHong Zhang 2810d0f46423SBarry Smith ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr); 2811c91732d9SHong Zhang if (idx) { 2812d0f46423SBarry Smith ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr); 2813c91732d9SHong Zhang } 2814c91732d9SHong Zhang ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr); 2815c91732d9SHong Zhang ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr); 2816c91732d9SHong Zhang 2817d0f46423SBarry Smith for (i=0; i<A->rmap->n; i++){ 2818c91732d9SHong Zhang if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) { 2819c91732d9SHong Zhang va[i] = vb[i]; 2820c91732d9SHong Zhang if (idx) idx[i] = a->garray[idxb[i]]; 2821c91732d9SHong Zhang } 2822c91732d9SHong Zhang } 2823c91732d9SHong Zhang 2824c91732d9SHong Zhang ierr = VecRestoreArray(v,&va);CHKERRQ(ierr); 2825c91732d9SHong Zhang ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr); 2826c91732d9SHong Zhang ierr = PetscFree(idxb);CHKERRQ(ierr); 28276bf464f9SBarry Smith ierr = VecDestroy(&vtmp);CHKERRQ(ierr); 2828c91732d9SHong Zhang PetscFunctionReturn(0); 2829c91732d9SHong Zhang } 2830c91732d9SHong Zhang 2831c91732d9SHong Zhang #undef __FUNCT__ 2832c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMinAbs_MPIAIJ" 2833c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMinAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2834c87e5d42SMatthew Knepley { 2835c87e5d42SMatthew Knepley Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2836c87e5d42SMatthew Knepley PetscErrorCode ierr; 2837c87e5d42SMatthew Knepley PetscInt i,*idxb = 0; 2838c87e5d42SMatthew Knepley PetscScalar *va,*vb; 2839c87e5d42SMatthew Knepley Vec vtmp; 2840c87e5d42SMatthew Knepley 2841c87e5d42SMatthew Knepley PetscFunctionBegin; 2842c87e5d42SMatthew Knepley ierr = MatGetRowMinAbs(a->A,v,idx);CHKERRQ(ierr); 2843c87e5d42SMatthew Knepley ierr = VecGetArray(v,&va);CHKERRQ(ierr); 2844c87e5d42SMatthew Knepley if (idx) { 2845c87e5d42SMatthew Knepley for (i=0; 
i<A->cmap->n; i++) { 2846c87e5d42SMatthew Knepley if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart; 2847c87e5d42SMatthew Knepley } 2848c87e5d42SMatthew Knepley } 2849c87e5d42SMatthew Knepley 2850c87e5d42SMatthew Knepley ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr); 2851c87e5d42SMatthew Knepley if (idx) { 2852c87e5d42SMatthew Knepley ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr); 2853c87e5d42SMatthew Knepley } 2854c87e5d42SMatthew Knepley ierr = MatGetRowMinAbs(a->B,vtmp,idxb);CHKERRQ(ierr); 2855c87e5d42SMatthew Knepley ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr); 2856c87e5d42SMatthew Knepley 2857c87e5d42SMatthew Knepley for (i=0; i<A->rmap->n; i++){ 2858c87e5d42SMatthew Knepley if (PetscAbsScalar(va[i]) > PetscAbsScalar(vb[i])) { 2859c87e5d42SMatthew Knepley va[i] = vb[i]; 2860c87e5d42SMatthew Knepley if (idx) idx[i] = a->garray[idxb[i]]; 2861c87e5d42SMatthew Knepley } 2862c87e5d42SMatthew Knepley } 2863c87e5d42SMatthew Knepley 2864c87e5d42SMatthew Knepley ierr = VecRestoreArray(v,&va);CHKERRQ(ierr); 2865c87e5d42SMatthew Knepley ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr); 2866c87e5d42SMatthew Knepley ierr = PetscFree(idxb);CHKERRQ(ierr); 28676bf464f9SBarry Smith ierr = VecDestroy(&vtmp);CHKERRQ(ierr); 2868c87e5d42SMatthew Knepley PetscFunctionReturn(0); 2869c87e5d42SMatthew Knepley } 2870c87e5d42SMatthew Knepley 2871c87e5d42SMatthew Knepley #undef __FUNCT__ 287203bc72f1SMatthew Knepley #define __FUNCT__ "MatGetRowMin_MPIAIJ" 287303bc72f1SMatthew Knepley PetscErrorCode MatGetRowMin_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 287403bc72f1SMatthew Knepley { 287503bc72f1SMatthew Knepley Mat_MPIAIJ *mat = (Mat_MPIAIJ *) A->data; 2876d0f46423SBarry Smith PetscInt n = A->rmap->n; 2877d0f46423SBarry Smith PetscInt cstart = A->cmap->rstart; 287803bc72f1SMatthew Knepley PetscInt *cmap = mat->garray; 287903bc72f1SMatthew Knepley PetscInt *diagIdx, *offdiagIdx; 288003bc72f1SMatthew Knepley Vec diagV, offdiagV; 288103bc72f1SMatthew 
Knepley PetscScalar *a, *diagA, *offdiagA; 288203bc72f1SMatthew Knepley PetscInt r; 288303bc72f1SMatthew Knepley PetscErrorCode ierr; 288403bc72f1SMatthew Knepley 288503bc72f1SMatthew Knepley PetscFunctionBegin; 288603bc72f1SMatthew Knepley ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr); 2887e64afeacSLisandro Dalcin ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr); 2888e64afeacSLisandro Dalcin ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr); 288903bc72f1SMatthew Knepley ierr = MatGetRowMin(mat->A, diagV, diagIdx);CHKERRQ(ierr); 289003bc72f1SMatthew Knepley ierr = MatGetRowMin(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr); 289103bc72f1SMatthew Knepley ierr = VecGetArray(v, &a);CHKERRQ(ierr); 289203bc72f1SMatthew Knepley ierr = VecGetArray(diagV, &diagA);CHKERRQ(ierr); 289303bc72f1SMatthew Knepley ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr); 289403bc72f1SMatthew Knepley for(r = 0; r < n; ++r) { 2895028cd4eaSSatish Balay if (PetscAbsScalar(diagA[r]) <= PetscAbsScalar(offdiagA[r])) { 289603bc72f1SMatthew Knepley a[r] = diagA[r]; 289703bc72f1SMatthew Knepley idx[r] = cstart + diagIdx[r]; 289803bc72f1SMatthew Knepley } else { 289903bc72f1SMatthew Knepley a[r] = offdiagA[r]; 290003bc72f1SMatthew Knepley idx[r] = cmap[offdiagIdx[r]]; 290103bc72f1SMatthew Knepley } 290203bc72f1SMatthew Knepley } 290303bc72f1SMatthew Knepley ierr = VecRestoreArray(v, &a);CHKERRQ(ierr); 290403bc72f1SMatthew Knepley ierr = VecRestoreArray(diagV, &diagA);CHKERRQ(ierr); 290503bc72f1SMatthew Knepley ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr); 29066bf464f9SBarry Smith ierr = VecDestroy(&diagV);CHKERRQ(ierr); 29076bf464f9SBarry Smith ierr = VecDestroy(&offdiagV);CHKERRQ(ierr); 290803bc72f1SMatthew Knepley ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr); 290903bc72f1SMatthew Knepley PetscFunctionReturn(0); 291003bc72f1SMatthew Knepley } 291103bc72f1SMatthew Knepley 29125494a064SHong Zhang #undef 
__FUNCT__ 2913c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMax_MPIAIJ" 2914c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMax_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2915c87e5d42SMatthew Knepley { 2916c87e5d42SMatthew Knepley Mat_MPIAIJ *mat = (Mat_MPIAIJ *) A->data; 2917c87e5d42SMatthew Knepley PetscInt n = A->rmap->n; 2918c87e5d42SMatthew Knepley PetscInt cstart = A->cmap->rstart; 2919c87e5d42SMatthew Knepley PetscInt *cmap = mat->garray; 2920c87e5d42SMatthew Knepley PetscInt *diagIdx, *offdiagIdx; 2921c87e5d42SMatthew Knepley Vec diagV, offdiagV; 2922c87e5d42SMatthew Knepley PetscScalar *a, *diagA, *offdiagA; 2923c87e5d42SMatthew Knepley PetscInt r; 2924c87e5d42SMatthew Knepley PetscErrorCode ierr; 2925c87e5d42SMatthew Knepley 2926c87e5d42SMatthew Knepley PetscFunctionBegin; 2927c87e5d42SMatthew Knepley ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr); 2928c87e5d42SMatthew Knepley ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr); 2929c87e5d42SMatthew Knepley ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr); 2930c87e5d42SMatthew Knepley ierr = MatGetRowMax(mat->A, diagV, diagIdx);CHKERRQ(ierr); 2931c87e5d42SMatthew Knepley ierr = MatGetRowMax(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr); 2932c87e5d42SMatthew Knepley ierr = VecGetArray(v, &a);CHKERRQ(ierr); 2933c87e5d42SMatthew Knepley ierr = VecGetArray(diagV, &diagA);CHKERRQ(ierr); 2934c87e5d42SMatthew Knepley ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr); 2935c87e5d42SMatthew Knepley for(r = 0; r < n; ++r) { 2936c87e5d42SMatthew Knepley if (PetscAbsScalar(diagA[r]) >= PetscAbsScalar(offdiagA[r])) { 2937c87e5d42SMatthew Knepley a[r] = diagA[r]; 2938c87e5d42SMatthew Knepley idx[r] = cstart + diagIdx[r]; 2939c87e5d42SMatthew Knepley } else { 2940c87e5d42SMatthew Knepley a[r] = offdiagA[r]; 2941c87e5d42SMatthew Knepley idx[r] = cmap[offdiagIdx[r]]; 2942c87e5d42SMatthew Knepley } 2943c87e5d42SMatthew Knepley } 2944c87e5d42SMatthew 
Knepley ierr = VecRestoreArray(v, &a);CHKERRQ(ierr); 2945c87e5d42SMatthew Knepley ierr = VecRestoreArray(diagV, &diagA);CHKERRQ(ierr); 2946c87e5d42SMatthew Knepley ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr); 29476bf464f9SBarry Smith ierr = VecDestroy(&diagV);CHKERRQ(ierr); 29486bf464f9SBarry Smith ierr = VecDestroy(&offdiagV);CHKERRQ(ierr); 2949c87e5d42SMatthew Knepley ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr); 2950c87e5d42SMatthew Knepley PetscFunctionReturn(0); 2951c87e5d42SMatthew Knepley } 2952c87e5d42SMatthew Knepley 2953c87e5d42SMatthew Knepley #undef __FUNCT__ 2954829201f2SHong Zhang #define __FUNCT__ "MatGetSeqNonzerostructure_MPIAIJ" 2955f6d58c54SBarry Smith PetscErrorCode MatGetSeqNonzerostructure_MPIAIJ(Mat mat,Mat *newmat) 29565494a064SHong Zhang { 29575494a064SHong Zhang PetscErrorCode ierr; 2958f6d58c54SBarry Smith Mat *dummy; 29595494a064SHong Zhang 29605494a064SHong Zhang PetscFunctionBegin; 2961f6d58c54SBarry Smith ierr = MatGetSubMatrix_MPIAIJ_All(mat,MAT_DO_NOT_GET_VALUES,MAT_INITIAL_MATRIX,&dummy);CHKERRQ(ierr); 2962f6d58c54SBarry Smith *newmat = *dummy; 2963f6d58c54SBarry Smith ierr = PetscFree(dummy);CHKERRQ(ierr); 29645494a064SHong Zhang PetscFunctionReturn(0); 29655494a064SHong Zhang } 29665494a064SHong Zhang 29677087cfbeSBarry Smith extern PetscErrorCode MatFDColoringApply_AIJ(Mat,MatFDColoring,Vec,MatStructure*,void*); 2968*bbead8a2SBarry Smith 2969*bbead8a2SBarry Smith #undef __FUNCT__ 2970*bbead8a2SBarry Smith #define __FUNCT__ "MatInvertBlockDiagonal_MPIAIJ" 2971*bbead8a2SBarry Smith PetscErrorCode MatInvertBlockDiagonal_MPIAIJ(Mat A,PetscScalar **values) 2972*bbead8a2SBarry Smith { 2973*bbead8a2SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*) A->data; 2974*bbead8a2SBarry Smith PetscErrorCode ierr; 2975*bbead8a2SBarry Smith 2976*bbead8a2SBarry Smith PetscFunctionBegin; 2977*bbead8a2SBarry Smith ierr = MatInvertBlockDiagonal(a->A,values);CHKERRQ(ierr); 2978*bbead8a2SBarry Smith PetscFunctionReturn(0); 
2979*bbead8a2SBarry Smith } 2980*bbead8a2SBarry Smith 2981*bbead8a2SBarry Smith 29828a729477SBarry Smith /* -------------------------------------------------------------------*/ 2983cda55fadSBarry Smith static struct _MatOps MatOps_Values = {MatSetValues_MPIAIJ, 2984cda55fadSBarry Smith MatGetRow_MPIAIJ, 2985cda55fadSBarry Smith MatRestoreRow_MPIAIJ, 2986cda55fadSBarry Smith MatMult_MPIAIJ, 298797304618SKris Buschelman /* 4*/ MatMultAdd_MPIAIJ, 29887c922b88SBarry Smith MatMultTranspose_MPIAIJ, 29897c922b88SBarry Smith MatMultTransposeAdd_MPIAIJ, 2990103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL 2991103bf8bdSMatthew Knepley MatSolve_MPIAIJ, 2992103bf8bdSMatthew Knepley #else 2993cda55fadSBarry Smith 0, 2994103bf8bdSMatthew Knepley #endif 2995cda55fadSBarry Smith 0, 2996cda55fadSBarry Smith 0, 299797304618SKris Buschelman /*10*/ 0, 2998cda55fadSBarry Smith 0, 2999cda55fadSBarry Smith 0, 300041f059aeSBarry Smith MatSOR_MPIAIJ, 3001b7c46309SBarry Smith MatTranspose_MPIAIJ, 300297304618SKris Buschelman /*15*/ MatGetInfo_MPIAIJ, 3003cda55fadSBarry Smith MatEqual_MPIAIJ, 3004cda55fadSBarry Smith MatGetDiagonal_MPIAIJ, 3005cda55fadSBarry Smith MatDiagonalScale_MPIAIJ, 3006cda55fadSBarry Smith MatNorm_MPIAIJ, 300797304618SKris Buschelman /*20*/ MatAssemblyBegin_MPIAIJ, 3008cda55fadSBarry Smith MatAssemblyEnd_MPIAIJ, 3009cda55fadSBarry Smith MatSetOption_MPIAIJ, 3010cda55fadSBarry Smith MatZeroEntries_MPIAIJ, 3011d519adbfSMatthew Knepley /*24*/ MatZeroRows_MPIAIJ, 3012cda55fadSBarry Smith 0, 3013103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL 3014719d5645SBarry Smith 0, 3015103bf8bdSMatthew Knepley #else 3016cda55fadSBarry Smith 0, 3017103bf8bdSMatthew Knepley #endif 3018cda55fadSBarry Smith 0, 3019cda55fadSBarry Smith 0, 3020d519adbfSMatthew Knepley /*29*/ MatSetUpPreallocation_MPIAIJ, 3021103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL 3022719d5645SBarry Smith 0, 3023103bf8bdSMatthew Knepley #else 3024cda55fadSBarry Smith 0, 3025103bf8bdSMatthew Knepley #endif 
3026cda55fadSBarry Smith 0, 3027cda55fadSBarry Smith 0, 3028cda55fadSBarry Smith 0, 3029d519adbfSMatthew Knepley /*34*/ MatDuplicate_MPIAIJ, 3030cda55fadSBarry Smith 0, 3031cda55fadSBarry Smith 0, 3032cda55fadSBarry Smith 0, 3033cda55fadSBarry Smith 0, 3034d519adbfSMatthew Knepley /*39*/ MatAXPY_MPIAIJ, 3035cda55fadSBarry Smith MatGetSubMatrices_MPIAIJ, 3036cda55fadSBarry Smith MatIncreaseOverlap_MPIAIJ, 3037cda55fadSBarry Smith MatGetValues_MPIAIJ, 3038cb5b572fSBarry Smith MatCopy_MPIAIJ, 3039d519adbfSMatthew Knepley /*44*/ MatGetRowMax_MPIAIJ, 3040cda55fadSBarry Smith MatScale_MPIAIJ, 3041cda55fadSBarry Smith 0, 3042cda55fadSBarry Smith 0, 3043564f14d6SBarry Smith MatZeroRowsColumns_MPIAIJ, 3044d519adbfSMatthew Knepley /*49*/ MatSetBlockSize_MPIAIJ, 3045cda55fadSBarry Smith 0, 3046cda55fadSBarry Smith 0, 3047cda55fadSBarry Smith 0, 3048cda55fadSBarry Smith 0, 3049d519adbfSMatthew Knepley /*54*/ MatFDColoringCreate_MPIAIJ, 3050cda55fadSBarry Smith 0, 3051cda55fadSBarry Smith MatSetUnfactored_MPIAIJ, 305242e855d1Svictor MatPermute_MPIAIJ, 3053cda55fadSBarry Smith 0, 3054d519adbfSMatthew Knepley /*59*/ MatGetSubMatrix_MPIAIJ, 3055e03a110bSBarry Smith MatDestroy_MPIAIJ, 3056e03a110bSBarry Smith MatView_MPIAIJ, 3057357abbc8SBarry Smith 0, 3058a2243be0SBarry Smith 0, 3059d519adbfSMatthew Knepley /*64*/ 0, 3060a2243be0SBarry Smith 0, 3061a2243be0SBarry Smith 0, 3062a2243be0SBarry Smith 0, 3063a2243be0SBarry Smith 0, 3064d519adbfSMatthew Knepley /*69*/ MatGetRowMaxAbs_MPIAIJ, 3065c87e5d42SMatthew Knepley MatGetRowMinAbs_MPIAIJ, 3066a2243be0SBarry Smith 0, 3067a2243be0SBarry Smith MatSetColoring_MPIAIJ, 3068dcf5cc72SBarry Smith #if defined(PETSC_HAVE_ADIC) 3069779c1a83SBarry Smith MatSetValuesAdic_MPIAIJ, 3070dcf5cc72SBarry Smith #else 3071dcf5cc72SBarry Smith 0, 3072dcf5cc72SBarry Smith #endif 307397304618SKris Buschelman MatSetValuesAdifor_MPIAIJ, 30743acb8795SBarry Smith /*75*/ MatFDColoringApply_AIJ, 307597304618SKris Buschelman 0, 307697304618SKris Buschelman 0, 
307797304618SKris Buschelman 0, 307897304618SKris Buschelman 0, 307997304618SKris Buschelman /*80*/ 0, 308097304618SKris Buschelman 0, 308197304618SKris Buschelman 0, 30825bba2384SShri Abhyankar /*83*/ MatLoad_MPIAIJ, 30836284ec50SHong Zhang 0, 30846284ec50SHong Zhang 0, 30856284ec50SHong Zhang 0, 30866284ec50SHong Zhang 0, 3087865e5f61SKris Buschelman 0, 3088d519adbfSMatthew Knepley /*89*/ MatMatMult_MPIAIJ_MPIAIJ, 308926be0446SHong Zhang MatMatMultSymbolic_MPIAIJ_MPIAIJ, 309026be0446SHong Zhang MatMatMultNumeric_MPIAIJ_MPIAIJ, 30917a7894deSKris Buschelman MatPtAP_Basic, 30927a7894deSKris Buschelman MatPtAPSymbolic_MPIAIJ, 3093d519adbfSMatthew Knepley /*94*/ MatPtAPNumeric_MPIAIJ, 30947a7894deSKris Buschelman 0, 30957a7894deSKris Buschelman 0, 30967a7894deSKris Buschelman 0, 30977a7894deSKris Buschelman 0, 3098d519adbfSMatthew Knepley /*99*/ 0, 3099865e5f61SKris Buschelman MatPtAPSymbolic_MPIAIJ_MPIAIJ, 31007a7894deSKris Buschelman MatPtAPNumeric_MPIAIJ_MPIAIJ, 31012fd7e33dSBarry Smith MatConjugate_MPIAIJ, 31022fd7e33dSBarry Smith 0, 3103d519adbfSMatthew Knepley /*104*/MatSetValuesRow_MPIAIJ, 310499cafbc1SBarry Smith MatRealPart_MPIAIJ, 310569db28dcSHong Zhang MatImaginaryPart_MPIAIJ, 310669db28dcSHong Zhang 0, 310769db28dcSHong Zhang 0, 3108d519adbfSMatthew Knepley /*109*/0, 310903bc72f1SMatthew Knepley MatGetRedundantMatrix_MPIAIJ, 31105494a064SHong Zhang MatGetRowMin_MPIAIJ, 31115494a064SHong Zhang 0, 31125494a064SHong Zhang 0, 3113bd0c2dcbSBarry Smith /*114*/MatGetSeqNonzerostructure_MPIAIJ, 3114bd0c2dcbSBarry Smith 0, 3115bd0c2dcbSBarry Smith 0, 3116bd0c2dcbSBarry Smith 0, 3117bd0c2dcbSBarry Smith 0, 31188fb81238SShri Abhyankar /*119*/0, 31198fb81238SShri Abhyankar 0, 31208fb81238SShri Abhyankar 0, 3121d6037b41SHong Zhang 0, 3122b9614d88SDmitry Karpeev MatGetMultiProcBlock_MPIAIJ, 312327d4218bSShri Abhyankar /*124*/MatFindNonZeroRows_MPIAIJ, 31240716a85fSBarry Smith MatGetColumnNorms_MPIAIJ, 3125*bbead8a2SBarry Smith MatInvertBlockDiagonal_MPIAIJ, 
3126b9614d88SDmitry Karpeev 0, 3127b9614d88SDmitry Karpeev MatGetSubMatricesParallel_MPIAIJ 3128bd0c2dcbSBarry Smith }; 312936ce4990SBarry Smith 31302e8a6d31SBarry Smith /* ----------------------------------------------------------------------------------------*/ 31312e8a6d31SBarry Smith 3132fb2e594dSBarry Smith EXTERN_C_BEGIN 31334a2ae208SSatish Balay #undef __FUNCT__ 31344a2ae208SSatish Balay #define __FUNCT__ "MatStoreValues_MPIAIJ" 31357087cfbeSBarry Smith PetscErrorCode MatStoreValues_MPIAIJ(Mat mat) 31362e8a6d31SBarry Smith { 31372e8a6d31SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ *)mat->data; 3138dfbe8321SBarry Smith PetscErrorCode ierr; 31392e8a6d31SBarry Smith 31402e8a6d31SBarry Smith PetscFunctionBegin; 31412e8a6d31SBarry Smith ierr = MatStoreValues(aij->A);CHKERRQ(ierr); 31422e8a6d31SBarry Smith ierr = MatStoreValues(aij->B);CHKERRQ(ierr); 31432e8a6d31SBarry Smith PetscFunctionReturn(0); 31442e8a6d31SBarry Smith } 3145fb2e594dSBarry Smith EXTERN_C_END 31462e8a6d31SBarry Smith 3147fb2e594dSBarry Smith EXTERN_C_BEGIN 31484a2ae208SSatish Balay #undef __FUNCT__ 31494a2ae208SSatish Balay #define __FUNCT__ "MatRetrieveValues_MPIAIJ" 31507087cfbeSBarry Smith PetscErrorCode MatRetrieveValues_MPIAIJ(Mat mat) 31512e8a6d31SBarry Smith { 31522e8a6d31SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ *)mat->data; 3153dfbe8321SBarry Smith PetscErrorCode ierr; 31542e8a6d31SBarry Smith 31552e8a6d31SBarry Smith PetscFunctionBegin; 31562e8a6d31SBarry Smith ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr); 31572e8a6d31SBarry Smith ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr); 31582e8a6d31SBarry Smith PetscFunctionReturn(0); 31592e8a6d31SBarry Smith } 3160fb2e594dSBarry Smith EXTERN_C_END 31618a729477SBarry Smith 316227508adbSBarry Smith EXTERN_C_BEGIN 31634a2ae208SSatish Balay #undef __FUNCT__ 3164a23d5eceSKris Buschelman #define __FUNCT__ "MatMPIAIJSetPreallocation_MPIAIJ" 31657087cfbeSBarry Smith PetscErrorCode MatMPIAIJSetPreallocation_MPIAIJ(Mat B,PetscInt d_nz,const PetscInt 
d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 3166a23d5eceSKris Buschelman { 3167a23d5eceSKris Buschelman Mat_MPIAIJ *b; 3168dfbe8321SBarry Smith PetscErrorCode ierr; 3169b1d57f15SBarry Smith PetscInt i; 3170a23d5eceSKris Buschelman 3171a23d5eceSKris Buschelman PetscFunctionBegin; 3172a23d5eceSKris Buschelman if (d_nz == PETSC_DEFAULT || d_nz == PETSC_DECIDE) d_nz = 5; 3173a23d5eceSKris Buschelman if (o_nz == PETSC_DEFAULT || o_nz == PETSC_DECIDE) o_nz = 2; 3174e32f2f54SBarry Smith if (d_nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nz cannot be less than 0: value %D",d_nz); 3175e32f2f54SBarry Smith if (o_nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nz cannot be less than 0: value %D",o_nz); 3176899cda47SBarry Smith 317726283091SBarry Smith ierr = PetscLayoutSetBlockSize(B->rmap,1);CHKERRQ(ierr); 317826283091SBarry Smith ierr = PetscLayoutSetBlockSize(B->cmap,1);CHKERRQ(ierr); 317926283091SBarry Smith ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr); 318026283091SBarry Smith ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr); 3181a23d5eceSKris Buschelman if (d_nnz) { 3182d0f46423SBarry Smith for (i=0; i<B->rmap->n; i++) { 3183e32f2f54SBarry Smith if (d_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than 0: local row %D value %D",i,d_nnz[i]); 3184a23d5eceSKris Buschelman } 3185a23d5eceSKris Buschelman } 3186a23d5eceSKris Buschelman if (o_nnz) { 3187d0f46423SBarry Smith for (i=0; i<B->rmap->n; i++) { 3188e32f2f54SBarry Smith if (o_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than 0: local row %D value %D",i,o_nnz[i]); 3189a23d5eceSKris Buschelman } 3190a23d5eceSKris Buschelman } 3191a23d5eceSKris Buschelman b = (Mat_MPIAIJ*)B->data; 3192899cda47SBarry Smith 3193526dfc15SBarry Smith if (!B->preallocated) { 3194899cda47SBarry Smith /* Explicitly create 2 MATSEQAIJ matrices. 
*/ 3195899cda47SBarry Smith ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr); 3196d0f46423SBarry Smith ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr); 3197899cda47SBarry Smith ierr = MatSetType(b->A,MATSEQAIJ);CHKERRQ(ierr); 3198899cda47SBarry Smith ierr = PetscLogObjectParent(B,b->A);CHKERRQ(ierr); 3199899cda47SBarry Smith ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr); 3200d0f46423SBarry Smith ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr); 3201899cda47SBarry Smith ierr = MatSetType(b->B,MATSEQAIJ);CHKERRQ(ierr); 3202899cda47SBarry Smith ierr = PetscLogObjectParent(B,b->B);CHKERRQ(ierr); 3203526dfc15SBarry Smith } 3204899cda47SBarry Smith 3205c60e587dSKris Buschelman ierr = MatSeqAIJSetPreallocation(b->A,d_nz,d_nnz);CHKERRQ(ierr); 3206c60e587dSKris Buschelman ierr = MatSeqAIJSetPreallocation(b->B,o_nz,o_nnz);CHKERRQ(ierr); 3207526dfc15SBarry Smith B->preallocated = PETSC_TRUE; 3208a23d5eceSKris Buschelman PetscFunctionReturn(0); 3209a23d5eceSKris Buschelman } 3210a23d5eceSKris Buschelman EXTERN_C_END 3211a23d5eceSKris Buschelman 32124a2ae208SSatish Balay #undef __FUNCT__ 32134a2ae208SSatish Balay #define __FUNCT__ "MatDuplicate_MPIAIJ" 3214dfbe8321SBarry Smith PetscErrorCode MatDuplicate_MPIAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat) 3215d6dfbf8fSBarry Smith { 3216d6dfbf8fSBarry Smith Mat mat; 3217416022c9SBarry Smith Mat_MPIAIJ *a,*oldmat = (Mat_MPIAIJ*)matin->data; 3218dfbe8321SBarry Smith PetscErrorCode ierr; 3219d6dfbf8fSBarry Smith 32203a40ed3dSBarry Smith PetscFunctionBegin; 3221416022c9SBarry Smith *newmat = 0; 32227adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)matin)->comm,&mat);CHKERRQ(ierr); 3223d0f46423SBarry Smith ierr = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr); 32247adad957SLisandro Dalcin ierr = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr); 32251d5dac46SHong Zhang ierr = 
PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr); 3226273d9f13SBarry Smith a = (Mat_MPIAIJ*)mat->data; 3227e1b6402fSHong Zhang 3228d5f3da31SBarry Smith mat->factortype = matin->factortype; 3229d0f46423SBarry Smith mat->rmap->bs = matin->rmap->bs; 3230c456f294SBarry Smith mat->assembled = PETSC_TRUE; 3231e7641de0SSatish Balay mat->insertmode = NOT_SET_VALUES; 3232273d9f13SBarry Smith mat->preallocated = PETSC_TRUE; 3233d6dfbf8fSBarry Smith 323417699dbbSLois Curfman McInnes a->size = oldmat->size; 323517699dbbSLois Curfman McInnes a->rank = oldmat->rank; 3236e7641de0SSatish Balay a->donotstash = oldmat->donotstash; 3237e7641de0SSatish Balay a->roworiented = oldmat->roworiented; 3238e7641de0SSatish Balay a->rowindices = 0; 3239bcd2baecSBarry Smith a->rowvalues = 0; 3240bcd2baecSBarry Smith a->getrowactive = PETSC_FALSE; 3241d6dfbf8fSBarry Smith 32421e1e43feSBarry Smith ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr); 32431e1e43feSBarry Smith ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr); 3244899cda47SBarry Smith 32452ee70a88SLois Curfman McInnes if (oldmat->colmap) { 3246aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 32470f5bd95cSBarry Smith ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr); 3248b1fc9764SSatish Balay #else 3249d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N)*sizeof(PetscInt),&a->colmap);CHKERRQ(ierr); 3250d0f46423SBarry Smith ierr = PetscLogObjectMemory(mat,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr); 3251d0f46423SBarry Smith ierr = PetscMemcpy(a->colmap,oldmat->colmap,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr); 3252b1fc9764SSatish Balay #endif 3253416022c9SBarry Smith } else a->colmap = 0; 32543f41c07dSBarry Smith if (oldmat->garray) { 3255b1d57f15SBarry Smith PetscInt len; 3256d0f46423SBarry Smith len = oldmat->B->cmap->n; 3257b1d57f15SBarry Smith ierr = PetscMalloc((len+1)*sizeof(PetscInt),&a->garray);CHKERRQ(ierr); 325852e6d16bSBarry Smith ierr = 
PetscLogObjectMemory(mat,len*sizeof(PetscInt));CHKERRQ(ierr); 3259b1d57f15SBarry Smith if (len) { ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr); } 3260416022c9SBarry Smith } else a->garray = 0; 3261d6dfbf8fSBarry Smith 3262416022c9SBarry Smith ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr); 326352e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->lvec);CHKERRQ(ierr); 3264a56f8943SBarry Smith ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr); 326552e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->Mvctx);CHKERRQ(ierr); 32662e8a6d31SBarry Smith ierr = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr); 326752e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->A);CHKERRQ(ierr); 32682e8a6d31SBarry Smith ierr = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr); 326952e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->B);CHKERRQ(ierr); 32707adad957SLisandro Dalcin ierr = PetscFListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr); 32718a729477SBarry Smith *newmat = mat; 32723a40ed3dSBarry Smith PetscFunctionReturn(0); 32738a729477SBarry Smith } 3274416022c9SBarry Smith 32751a4ee126SBarry Smith 32761a4ee126SBarry Smith 32774a2ae208SSatish Balay #undef __FUNCT__ 32785bba2384SShri Abhyankar #define __FUNCT__ "MatLoad_MPIAIJ" 3279112444f4SShri Abhyankar PetscErrorCode MatLoad_MPIAIJ(Mat newMat, PetscViewer viewer) 32808fb81238SShri Abhyankar { 32818fb81238SShri Abhyankar PetscScalar *vals,*svals; 32828fb81238SShri Abhyankar MPI_Comm comm = ((PetscObject)viewer)->comm; 32838fb81238SShri Abhyankar PetscErrorCode ierr; 32841a4ee126SBarry Smith PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag; 32858fb81238SShri Abhyankar PetscInt i,nz,j,rstart,rend,mmax,maxnz = 0,grows,gcols; 32868fb81238SShri Abhyankar PetscInt header[4],*rowlengths = 0,M,N,m,*cols; 32878fb81238SShri Abhyankar PetscInt *ourlens = PETSC_NULL,*procsnz = PETSC_NULL,*offlens = PETSC_NULL,jj,*mycols,*smycols; 
32888fb81238SShri Abhyankar PetscInt cend,cstart,n,*rowners,sizesset=1; 32898fb81238SShri Abhyankar int fd; 32908fb81238SShri Abhyankar 32918fb81238SShri Abhyankar PetscFunctionBegin; 32928fb81238SShri Abhyankar ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 32938fb81238SShri Abhyankar ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 32948fb81238SShri Abhyankar if (!rank) { 32958fb81238SShri Abhyankar ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr); 32968fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,(char *)header,4,PETSC_INT);CHKERRQ(ierr); 32978fb81238SShri Abhyankar if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"not matrix object"); 32988fb81238SShri Abhyankar } 32998fb81238SShri Abhyankar 33008fb81238SShri Abhyankar if (newMat->rmap->n < 0 && newMat->rmap->N < 0 && newMat->cmap->n < 0 && newMat->cmap->N < 0) sizesset = 0; 33018fb81238SShri Abhyankar 33028fb81238SShri Abhyankar ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr); 33038fb81238SShri Abhyankar M = header[1]; N = header[2]; 33048fb81238SShri Abhyankar /* If global rows/cols are set to PETSC_DECIDE, set it to the sizes given in the file */ 33058fb81238SShri Abhyankar if (sizesset && newMat->rmap->N < 0) newMat->rmap->N = M; 33068fb81238SShri Abhyankar if (sizesset && newMat->cmap->N < 0) newMat->cmap->N = N; 33078fb81238SShri Abhyankar 33088fb81238SShri Abhyankar /* If global sizes are set, check if they are consistent with that given in the file */ 33098fb81238SShri Abhyankar if (sizesset) { 33108fb81238SShri Abhyankar ierr = MatGetSize(newMat,&grows,&gcols);CHKERRQ(ierr); 33118fb81238SShri Abhyankar } 3312abd38a8fSBarry Smith if (sizesset && newMat->rmap->N != grows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of rows:Matrix in file has (%d) and input matrix has (%d)",M,grows); 3313abd38a8fSBarry Smith if (sizesset && newMat->cmap->N != gcols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # 
of cols:Matrix in file has (%d) and input matrix has (%d)",N,gcols); 33148fb81238SShri Abhyankar 33158fb81238SShri Abhyankar /* determine ownership of all rows */ 33168fb81238SShri Abhyankar if (newMat->rmap->n < 0 ) m = M/size + ((M % size) > rank); /* PETSC_DECIDE */ 33174683f7a4SShri Abhyankar else m = newMat->rmap->n; /* Set by user */ 33188fb81238SShri Abhyankar 33198fb81238SShri Abhyankar ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr); 33208fb81238SShri Abhyankar ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr); 33218fb81238SShri Abhyankar 33228fb81238SShri Abhyankar /* First process needs enough room for process with most rows */ 33238fb81238SShri Abhyankar if (!rank) { 33248fb81238SShri Abhyankar mmax = rowners[1]; 33258fb81238SShri Abhyankar for (i=2; i<size; i++) { 33268fb81238SShri Abhyankar mmax = PetscMax(mmax,rowners[i]); 33278fb81238SShri Abhyankar } 33288fb81238SShri Abhyankar } else mmax = m; 33298fb81238SShri Abhyankar 33308fb81238SShri Abhyankar rowners[0] = 0; 33318fb81238SShri Abhyankar for (i=2; i<=size; i++) { 33328fb81238SShri Abhyankar rowners[i] += rowners[i-1]; 33338fb81238SShri Abhyankar } 33348fb81238SShri Abhyankar rstart = rowners[rank]; 33358fb81238SShri Abhyankar rend = rowners[rank+1]; 33368fb81238SShri Abhyankar 33378fb81238SShri Abhyankar /* distribute row lengths to all processors */ 33388fb81238SShri Abhyankar ierr = PetscMalloc2(mmax,PetscInt,&ourlens,mmax,PetscInt,&offlens);CHKERRQ(ierr); 33398fb81238SShri Abhyankar if (!rank) { 33408fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,ourlens,m,PETSC_INT);CHKERRQ(ierr); 33418fb81238SShri Abhyankar ierr = PetscMalloc(m*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr); 33428fb81238SShri Abhyankar ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr); 33438fb81238SShri Abhyankar ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr); 33448fb81238SShri Abhyankar for (j=0; j<m; j++) { 33458fb81238SShri Abhyankar 
procsnz[0] += ourlens[j]; 33468fb81238SShri Abhyankar } 33478fb81238SShri Abhyankar for (i=1; i<size; i++) { 33488fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,rowlengths,rowners[i+1]-rowners[i],PETSC_INT);CHKERRQ(ierr); 33498fb81238SShri Abhyankar /* calculate the number of nonzeros on each processor */ 33508fb81238SShri Abhyankar for (j=0; j<rowners[i+1]-rowners[i]; j++) { 33518fb81238SShri Abhyankar procsnz[i] += rowlengths[j]; 33528fb81238SShri Abhyankar } 33531a4ee126SBarry Smith ierr = MPILong_Send(rowlengths,rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr); 33548fb81238SShri Abhyankar } 33558fb81238SShri Abhyankar ierr = PetscFree(rowlengths);CHKERRQ(ierr); 33568fb81238SShri Abhyankar } else { 33571a4ee126SBarry Smith ierr = MPILong_Recv(ourlens,m,MPIU_INT,0,tag,comm);CHKERRQ(ierr); 33588fb81238SShri Abhyankar } 33598fb81238SShri Abhyankar 33608fb81238SShri Abhyankar if (!rank) { 33618fb81238SShri Abhyankar /* determine max buffer needed and allocate it */ 33628fb81238SShri Abhyankar maxnz = 0; 33638fb81238SShri Abhyankar for (i=0; i<size; i++) { 33648fb81238SShri Abhyankar maxnz = PetscMax(maxnz,procsnz[i]); 33658fb81238SShri Abhyankar } 33668fb81238SShri Abhyankar ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr); 33678fb81238SShri Abhyankar 33688fb81238SShri Abhyankar /* read in my part of the matrix column indices */ 33698fb81238SShri Abhyankar nz = procsnz[0]; 33708fb81238SShri Abhyankar ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr); 33718fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr); 33728fb81238SShri Abhyankar 33738fb81238SShri Abhyankar /* read in every one elses and ship off */ 33748fb81238SShri Abhyankar for (i=1; i<size; i++) { 33758fb81238SShri Abhyankar nz = procsnz[i]; 33768fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr); 33771a4ee126SBarry Smith ierr = MPILong_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 33788fb81238SShri 
Abhyankar } 33798fb81238SShri Abhyankar ierr = PetscFree(cols);CHKERRQ(ierr); 33808fb81238SShri Abhyankar } else { 33818fb81238SShri Abhyankar /* determine buffer space needed for message */ 33828fb81238SShri Abhyankar nz = 0; 33838fb81238SShri Abhyankar for (i=0; i<m; i++) { 33848fb81238SShri Abhyankar nz += ourlens[i]; 33858fb81238SShri Abhyankar } 33868fb81238SShri Abhyankar ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr); 33878fb81238SShri Abhyankar 33888fb81238SShri Abhyankar /* receive message of column indices*/ 33891a4ee126SBarry Smith ierr = MPILong_Recv(mycols,nz,MPIU_INT,0,tag,comm);CHKERRQ(ierr); 33908fb81238SShri Abhyankar } 33918fb81238SShri Abhyankar 33928fb81238SShri Abhyankar /* determine column ownership if matrix is not square */ 33938fb81238SShri Abhyankar if (N != M) { 33948fb81238SShri Abhyankar if (newMat->cmap->n < 0) n = N/size + ((N % size) > rank); 33958fb81238SShri Abhyankar else n = newMat->cmap->n; 33968fb81238SShri Abhyankar ierr = MPI_Scan(&n,&cend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 33978fb81238SShri Abhyankar cstart = cend - n; 33988fb81238SShri Abhyankar } else { 33998fb81238SShri Abhyankar cstart = rstart; 34008fb81238SShri Abhyankar cend = rend; 34018fb81238SShri Abhyankar n = cend - cstart; 34028fb81238SShri Abhyankar } 34038fb81238SShri Abhyankar 34048fb81238SShri Abhyankar /* loop over local rows, determining number of off diagonal entries */ 34058fb81238SShri Abhyankar ierr = PetscMemzero(offlens,m*sizeof(PetscInt));CHKERRQ(ierr); 34068fb81238SShri Abhyankar jj = 0; 34078fb81238SShri Abhyankar for (i=0; i<m; i++) { 34088fb81238SShri Abhyankar for (j=0; j<ourlens[i]; j++) { 34098fb81238SShri Abhyankar if (mycols[jj] < cstart || mycols[jj] >= cend) offlens[i]++; 34108fb81238SShri Abhyankar jj++; 34118fb81238SShri Abhyankar } 34128fb81238SShri Abhyankar } 34138fb81238SShri Abhyankar 34148fb81238SShri Abhyankar for (i=0; i<m; i++) { 34158fb81238SShri Abhyankar ourlens[i] -= offlens[i]; 34168fb81238SShri 
Abhyankar } 34178fb81238SShri Abhyankar if (!sizesset) { 34188fb81238SShri Abhyankar ierr = MatSetSizes(newMat,m,n,M,N);CHKERRQ(ierr); 34198fb81238SShri Abhyankar } 34208fb81238SShri Abhyankar ierr = MatMPIAIJSetPreallocation(newMat,0,ourlens,0,offlens);CHKERRQ(ierr); 34218fb81238SShri Abhyankar 34228fb81238SShri Abhyankar for (i=0; i<m; i++) { 34238fb81238SShri Abhyankar ourlens[i] += offlens[i]; 34248fb81238SShri Abhyankar } 34258fb81238SShri Abhyankar 34268fb81238SShri Abhyankar if (!rank) { 34278fb81238SShri Abhyankar ierr = PetscMalloc((maxnz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr); 34288fb81238SShri Abhyankar 34298fb81238SShri Abhyankar /* read in my part of the matrix numerical values */ 34308fb81238SShri Abhyankar nz = procsnz[0]; 34318fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr); 34328fb81238SShri Abhyankar 34338fb81238SShri Abhyankar /* insert into matrix */ 34348fb81238SShri Abhyankar jj = rstart; 34358fb81238SShri Abhyankar smycols = mycols; 34368fb81238SShri Abhyankar svals = vals; 34378fb81238SShri Abhyankar for (i=0; i<m; i++) { 34388fb81238SShri Abhyankar ierr = MatSetValues_MPIAIJ(newMat,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr); 34398fb81238SShri Abhyankar smycols += ourlens[i]; 34408fb81238SShri Abhyankar svals += ourlens[i]; 34418fb81238SShri Abhyankar jj++; 34428fb81238SShri Abhyankar } 34438fb81238SShri Abhyankar 34448fb81238SShri Abhyankar /* read in other processors and ship out */ 34458fb81238SShri Abhyankar for (i=1; i<size; i++) { 34468fb81238SShri Abhyankar nz = procsnz[i]; 34478fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr); 34481a4ee126SBarry Smith ierr = MPILong_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newMat)->tag,comm);CHKERRQ(ierr); 34498fb81238SShri Abhyankar } 34508fb81238SShri Abhyankar ierr = PetscFree(procsnz);CHKERRQ(ierr); 34518fb81238SShri Abhyankar } else { 34528fb81238SShri Abhyankar /* receive numeric values */ 
34538fb81238SShri Abhyankar ierr = PetscMalloc((nz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr); 34548fb81238SShri Abhyankar 34558fb81238SShri Abhyankar /* receive message of values*/ 34561a4ee126SBarry Smith ierr = MPILong_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newMat)->tag,comm);CHKERRQ(ierr); 34578fb81238SShri Abhyankar 34588fb81238SShri Abhyankar /* insert into matrix */ 34598fb81238SShri Abhyankar jj = rstart; 34608fb81238SShri Abhyankar smycols = mycols; 34618fb81238SShri Abhyankar svals = vals; 34628fb81238SShri Abhyankar for (i=0; i<m; i++) { 34638fb81238SShri Abhyankar ierr = MatSetValues_MPIAIJ(newMat,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr); 34648fb81238SShri Abhyankar smycols += ourlens[i]; 34658fb81238SShri Abhyankar svals += ourlens[i]; 34668fb81238SShri Abhyankar jj++; 34678fb81238SShri Abhyankar } 34688fb81238SShri Abhyankar } 34698fb81238SShri Abhyankar ierr = PetscFree2(ourlens,offlens);CHKERRQ(ierr); 34708fb81238SShri Abhyankar ierr = PetscFree(vals);CHKERRQ(ierr); 34718fb81238SShri Abhyankar ierr = PetscFree(mycols);CHKERRQ(ierr); 34728fb81238SShri Abhyankar ierr = PetscFree(rowners);CHKERRQ(ierr); 34738fb81238SShri Abhyankar 34748fb81238SShri Abhyankar ierr = MatAssemblyBegin(newMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 34758fb81238SShri Abhyankar ierr = MatAssemblyEnd(newMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 34768fb81238SShri Abhyankar PetscFunctionReturn(0); 34778fb81238SShri Abhyankar } 34788fb81238SShri Abhyankar 34798fb81238SShri Abhyankar #undef __FUNCT__ 34804a2ae208SSatish Balay #define __FUNCT__ "MatGetSubMatrix_MPIAIJ" 34814aa3045dSJed Brown PetscErrorCode MatGetSubMatrix_MPIAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat) 34824aa3045dSJed Brown { 34834aa3045dSJed Brown PetscErrorCode ierr; 34844aa3045dSJed Brown IS iscol_local; 34854aa3045dSJed Brown PetscInt csize; 34864aa3045dSJed Brown 34874aa3045dSJed Brown PetscFunctionBegin; 34884aa3045dSJed Brown ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr); 
3489b79d0421SJed Brown if (call == MAT_REUSE_MATRIX) { 3490b79d0421SJed Brown ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr); 3491e32f2f54SBarry Smith if (!iscol_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse"); 3492b79d0421SJed Brown } else { 34934aa3045dSJed Brown ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr); 3494b79d0421SJed Brown } 34954aa3045dSJed Brown ierr = MatGetSubMatrix_MPIAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr); 3496b79d0421SJed Brown if (call == MAT_INITIAL_MATRIX) { 3497b79d0421SJed Brown ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr); 34986bf464f9SBarry Smith ierr = ISDestroy(&iscol_local);CHKERRQ(ierr); 3499b79d0421SJed Brown } 35004aa3045dSJed Brown PetscFunctionReturn(0); 35014aa3045dSJed Brown } 35024aa3045dSJed Brown 35034aa3045dSJed Brown #undef __FUNCT__ 35044aa3045dSJed Brown #define __FUNCT__ "MatGetSubMatrix_MPIAIJ_Private" 3505a0ff6018SBarry Smith /* 350629da9460SBarry Smith Not great since it makes two copies of the submatrix, first an SeqAIJ 350729da9460SBarry Smith in local and then by concatenating the local matrices the end result. 350829da9460SBarry Smith Writing it directly would be much like MatGetSubMatrices_MPIAIJ() 35094aa3045dSJed Brown 35104aa3045dSJed Brown Note: This requires a sequential iscol with all indices. 
*/
PetscErrorCode MatGetSubMatrix_MPIAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j;
  PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal;
  Mat            *local,M,Mreuse;
  MatScalar      *vwork,*aa;
  MPI_Comm       comm = ((PetscObject)mat)->comm;
  Mat_SeqAIJ     *aij;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* extract the requested rows/columns as a sequential (local) matrix Mreuse */
  if (call == MAT_REUSE_MATRIX) {
    /* the local submatrix from the initial call was composed on *newmat under "SubMatrix" */
    ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject *)&Mreuse);CHKERRQ(ierr);
    if (!Mreuse) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
    local = &Mreuse;
    ierr  = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_REUSE_MATRIX,&local);CHKERRQ(ierr);
  } else {
    ierr   = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&local);CHKERRQ(ierr);
    Mreuse = *local;
    /* only the array holder is freed here; the matrix itself is kept as Mreuse */
    ierr   = PetscFree(local);CHKERRQ(ierr);
  }

  /*
      m - number of local rows
      n - number of columns (same on all processors)
      rstart - first row in new global matrix generated
  */
  ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
  if (call == MAT_INITIAL_MATRIX) {
    aij = (Mat_SeqAIJ*)(Mreuse)->data;
    ii  = aij->i;
    jj  = aij->j;

    /*
        Determine the number of non-zeros in the diagonal and off-diagonal
        portions of the matrix in order to do correct preallocation
    */

    /* first get start and end of "diagonal" columns */
    if (csize == PETSC_DECIDE) {
      ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
      if (mglobal == n) { /* square matrix */
        nlocal = m;
      } else {
        /* split the columns as evenly as possible across processes */
        nlocal = n/size + ((n % size) > rank);
      }
    } else {
      nlocal = csize;
    }
    /* prefix-sum of local column counts gives this process's column ownership range */
    ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    rstart = rend - nlocal;
    if (rank == size - 1 && rend != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);

    /* next, compute all the lengths */
    ierr  = PetscMalloc((2*m+1)*sizeof(PetscInt),&dlens);CHKERRQ(ierr);
    olens = dlens + m;   /* olens shares the single allocation with dlens */
    for (i=0; i<m; i++) {
      jend = ii[i+1] - ii[i];
      olen = 0;
      dlen = 0;
      /* classify each nonzero of row i as diagonal-block or off-diagonal-block */
      for (j=0; j<jend; j++) {
        if (*jj < rstart || *jj >= rend) olen++;
        else dlen++;
        jj++;
      }
      olens[i] = olen;
      dlens[i] = dlen;
    }
    ierr = MatCreate(comm,&M);CHKERRQ(ierr);
    ierr = MatSetSizes(M,m,nlocal,PETSC_DECIDE,n);CHKERRQ(ierr);
    ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(M,0,dlens,0,olens);CHKERRQ(ierr);
    ierr = PetscFree(dlens);CHKERRQ(ierr);
  } else {
    PetscInt ml,nl;

    M    = *newmat;
    ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
    if (ml != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
    ierr = MatZeroEntries(M);CHKERRQ(ierr);
    /*
         The next two lines are needed so we may call MatSetValues_MPIAIJ() below directly,
       rather than the slower MatSetValues().
    */
    M->was_assembled = PETSC_TRUE;
    M->assembled     = PETSC_FALSE;
  }
  ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
  aij  = (Mat_SeqAIJ*)(Mreuse)->data;
  ii   = aij->i;
  jj   = aij->j;
  aa   = aij->a;
  /* copy the local submatrix row-by-row into the parallel matrix; jj/aa walk the CSR arrays */
  for (i=0; i<m; i++) {
    row   = rstart + i;
    nz    = ii[i+1] - ii[i];
    cwork = jj;     jj += nz;
    vwork = aa;     aa += nz;
    ierr  = MatSetValues_MPIAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
  }

  ierr = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *newmat = M;

  /* save submatrix used in processor for next request */
  if (call == MAT_INITIAL_MATRIX) {
    /* composing takes a reference, so destroying here leaves Mreuse alive on M */
    ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
    ierr = MatDestroy(&Mreuse);CHKERRQ(ierr);
  }

  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatMPIAIJSetPreallocationCSR_MPIAIJ"
PetscErrorCode  MatMPIAIJSetPreallocationCSR_MPIAIJ(Mat B,const PetscInt Ii[],const PetscInt J[],const PetscScalar v[])
{
  PetscInt       m,cstart, cend,j,nnz,i,d;
  PetscInt       *d_nnz,*o_nnz,nnz_max = 0,rstart,ii;
3637ccd8e176SBarry Smith const PetscInt *JJ; 3638ccd8e176SBarry Smith PetscScalar *values; 3639ccd8e176SBarry Smith PetscErrorCode ierr; 3640ccd8e176SBarry Smith 3641ccd8e176SBarry Smith PetscFunctionBegin; 3642e32f2f54SBarry Smith if (Ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ii[0] must be 0 it is %D",Ii[0]); 3643899cda47SBarry Smith 364426283091SBarry Smith ierr = PetscLayoutSetBlockSize(B->rmap,1);CHKERRQ(ierr); 364526283091SBarry Smith ierr = PetscLayoutSetBlockSize(B->cmap,1);CHKERRQ(ierr); 364626283091SBarry Smith ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr); 364726283091SBarry Smith ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr); 3648d0f46423SBarry Smith m = B->rmap->n; 3649d0f46423SBarry Smith cstart = B->cmap->rstart; 3650d0f46423SBarry Smith cend = B->cmap->rend; 3651d0f46423SBarry Smith rstart = B->rmap->rstart; 3652899cda47SBarry Smith 36531d79065fSBarry Smith ierr = PetscMalloc2(m,PetscInt,&d_nnz,m,PetscInt,&o_nnz);CHKERRQ(ierr); 3654ccd8e176SBarry Smith 3655ecc77c7aSBarry Smith #if defined(PETSC_USE_DEBUGGING) 3656ecc77c7aSBarry Smith for (i=0; i<m; i++) { 3657ecc77c7aSBarry Smith nnz = Ii[i+1]- Ii[i]; 3658ecc77c7aSBarry Smith JJ = J + Ii[i]; 3659e32f2f54SBarry Smith if (nnz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative %D number of columns",i,nnz); 3660ecc77c7aSBarry Smith if (nnz && (JJ[0] < 0)) SETERRRQ1(PETSC_ERR_ARG_WRONGSTATE,"Row %D starts with negative column index",i,j); 3661d0f46423SBarry Smith if (nnz && (JJ[nnz-1] >= B->cmap->N) SETERRRQ3(PETSC_ERR_ARG_WRONGSTATE,"Row %D ends with too large a column index %D (max allowed %D)",i,JJ[nnz-1],B->cmap->N); 3662ecc77c7aSBarry Smith } 3663ecc77c7aSBarry Smith #endif 3664ecc77c7aSBarry Smith 3665ccd8e176SBarry Smith for (i=0; i<m; i++) { 3666b7940d39SSatish Balay nnz = Ii[i+1]- Ii[i]; 3667b7940d39SSatish Balay JJ = J + Ii[i]; 3668ccd8e176SBarry Smith nnz_max = PetscMax(nnz_max,nnz); 3669ccd8e176SBarry Smith d = 0; 36700daa03b5SJed Brown for 
(j=0; j<nnz; j++) { 36710daa03b5SJed Brown if (cstart <= JJ[j] && JJ[j] < cend) d++; 3672ccd8e176SBarry Smith } 3673ccd8e176SBarry Smith d_nnz[i] = d; 3674ccd8e176SBarry Smith o_nnz[i] = nnz - d; 3675ccd8e176SBarry Smith } 3676ccd8e176SBarry Smith ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,o_nnz);CHKERRQ(ierr); 36771d79065fSBarry Smith ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr); 3678ccd8e176SBarry Smith 3679ccd8e176SBarry Smith if (v) values = (PetscScalar*)v; 3680ccd8e176SBarry Smith else { 3681ccd8e176SBarry Smith ierr = PetscMalloc((nnz_max+1)*sizeof(PetscScalar),&values);CHKERRQ(ierr); 3682ccd8e176SBarry Smith ierr = PetscMemzero(values,nnz_max*sizeof(PetscScalar));CHKERRQ(ierr); 3683ccd8e176SBarry Smith } 3684ccd8e176SBarry Smith 3685ccd8e176SBarry Smith for (i=0; i<m; i++) { 3686ccd8e176SBarry Smith ii = i + rstart; 3687b7940d39SSatish Balay nnz = Ii[i+1]- Ii[i]; 3688b7940d39SSatish Balay ierr = MatSetValues_MPIAIJ(B,1,&ii,nnz,J+Ii[i],values+(v ? Ii[i] : 0),INSERT_VALUES);CHKERRQ(ierr); 3689ccd8e176SBarry Smith } 3690ccd8e176SBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3691ccd8e176SBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3692ccd8e176SBarry Smith 3693ccd8e176SBarry Smith if (!v) { 3694ccd8e176SBarry Smith ierr = PetscFree(values);CHKERRQ(ierr); 3695ccd8e176SBarry Smith } 3696ccd8e176SBarry Smith PetscFunctionReturn(0); 3697ccd8e176SBarry Smith } 3698e2e86b8fSSatish Balay EXTERN_C_END 3699ccd8e176SBarry Smith 3700ccd8e176SBarry Smith #undef __FUNCT__ 3701ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR" 37021eea217eSSatish Balay /*@ 3703ccd8e176SBarry Smith MatMPIAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in AIJ format 3704ccd8e176SBarry Smith (the default parallel PETSc format). 
   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each local row (starts with zero)
.  j - the column indices for each local row (starts with zero)
-  v - optional values in the matrix

   Level: developer

   Notes:
       The i, j, and v arrays ARE copied by this routine into the internal format used by PETSc;
     thus you CANNOT change the matrix entries by changing the values of v[] after you have
     called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.

     The i and j indices are 0 based, and i indices are indices corresponding to the local j array.

       The format which is used for the sparse matrix input, is equivalent to a
    row-major ordering, i.e. for the following matrix, the input data expected is
    as shown:

        1 0 0
        2 0 3     P0
       -------
        4 5 6     P1

     Process0 [P0]: rows_owned=[0,1]
        i =  {0,1,3}  [size = nrow+1  = 2+1]
        j =  {0,0,2}  [size = nz = 6]
        v =  {1,2,3}  [size = nz = 6]

     Process1 [P1]: rows_owned=[2]
        i =  {0,3}    [size = nrow+1  = 1+1]
        j =  {0,1,2}  [size = nz = 6]
        v =  {4,5,6}  [size = nz = 6]

.keywords: matrix, aij, compressed row, sparse, parallel

.seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatCreateMPIAIJ(), MPIAIJ,
          MatCreateSeqAIJWithArrays(), MatCreateMPIAIJWithSplitArrays()
@*/
PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat B,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* dispatch to the type-specific implementation registered as "MatMPIAIJSetPreallocationCSR_C";
     PetscTryMethod is a no-op if the matrix type does not provide one */
  ierr = PetscTryMethod(B,"MatMPIAIJSetPreallocationCSR_C",(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,i,j,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAIJSetPreallocation"
/*@C
   MatMPIAIJSetPreallocation - Preallocates memory for a sparse parallel matrix in AIJ format
   (the default parallel
   PETSc format). For good matrix assembly performance
   the user should preallocate the matrix storage by setting the parameters
   d_nz (or d_nnz) and o_nz (or o_nnz). By setting these parameters accurately,
   performance can be increased by more than a factor of 50.

   Collective on MPI_Comm

   Input Parameters:
+  A - the matrix
.  d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or PETSC_NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           You must leave room for the diagonal entry even if it is zero.
.  o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or PETSC_NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   The AIJ format (also called the Yale sparse matrix format or
   compressed row storage (CSR)), is fully compatible with standard Fortran 77
   storage.  The stored row and column indices begin with zero.
   See the <A href="../../docs/manual.pdf#nameddest=ch_mat">Mat chapter of the users manual</A> for details.

   The parallel matrix is partitioned such that the first m0 rows belong to
   process 0, the next m1 rows belong to process 1, the next m2 rows belong
   to process 2 etc.. where m0,m1,m2... are the input parameter 'm'.

   The DIAGONAL portion of the local submatrix of a processor can be defined
   as the submatrix which is obtained by extraction the part corresponding to
   the rows r1-r2 and columns c1-c2 of the global matrix, where r1 is the
   first row that belongs to the processor, r2 is the last row belonging to
   the this processor, and c1-c2 is range of indices of the local part of a
   vector suitable for applying the matrix to. This is an mxn matrix. In the
   common case of a square matrix, the row and column ranges are the same and
   the DIAGONAL part is also square. The remaining portion of the local
   submatrix (mxN) constitute the OFF-DIAGONAL portion.

   If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored.

   You can call MatGetInfo() to get information on how effective the preallocation was;
   for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
   You can also run with the option -info and look for messages with the string
   malloc in them to see if additional memory allocation was needed.

   Example usage:

   Consider the following 8x8 matrix with 34 non-zero values, that is
   assembled across 3 processors. Lets assume that proc0 owns 3 rows,
   proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
   as follows:

.vb
            1  2  0  |  0  3  0  |  0  4
    Proc0   0  5  6  |  7  0  0  |  8  0
            9  0 10  | 11  0  0  | 12  0
    -------------------------------------
           13  0 14  | 15 16 17  |  0  0
    Proc1   0 18  0  | 19 20 21  |  0  0
            0  0  0  | 22 23  0  | 24  0
    -------------------------------------
    Proc2  25 26 27  |  0  0 28  | 29  0
           30  0  0  | 31 32 33  |  0 34
.ve

   This can be represented as a collection of submatrices as:

.vb
      A B C
      D E F
      G H I
.ve

   Where the submatrices A,B,C are owned by proc0, D,E,F are
   owned by proc1, G,H,I are owned by proc2.

   The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
   The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
   The 'M','N' parameters are 8,8, and have the same values on all procs.

   The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are
   submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices
   corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively.
   Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL
   part as SeqAIJ matrices. for eg: proc1 will store [E] as a SeqAIJ
   matrix, and [DF] as another SeqAIJ matrix.

   When d_nz, o_nz parameters are specified, d_nz storage elements are
   allocated for every row of the local diagonal submatrix, and o_nz
   storage locations are allocated for every row of the OFF-DIAGONAL submat.
   One way to choose d_nz and o_nz is to use the max nonzeros per local
   rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices.
   In this case, the values of d_nz,o_nz are:
.vb
     proc0 : dnz = 2, o_nz = 2
     proc1 : dnz = 3, o_nz = 2
     proc2 : dnz = 1, o_nz = 4
.ve
   We are allocating m*(d_nz+o_nz) storage locations for every proc. This
   translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10
   for proc2. i.e we are using 12+15+10=37 storage locations to store
   34 values.

   When d_nnz, o_nnz parameters are specified, the storage is specified
   for every row, corresponding to both DIAGONAL and OFF-DIAGONAL submatrices.
   In the above case the values for d_nnz,o_nnz are:
.vb
     proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2]
     proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1]
     proc2: d_nnz = [1,1] and o_nnz = [4,4]
.ve
   Here the space allocated is sum of all the above values i.e 34, and
   hence pre-allocation is perfect.

   Level: intermediate

.keywords: matrix, aij, compressed row, sparse, parallel

.seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatCreateMPIAIJ(), MatMPIAIJSetPreallocationCSR(),
          MPIAIJ, MatGetInfo()
@*/
PetscErrorCode  MatMPIAIJSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* dispatch to the type-specific implementation registered as "MatMPIAIJSetPreallocation_C";
     PetscTryMethod is a no-op if the matrix type does not provide one */
  ierr = PetscTryMethod(B,"MatMPIAIJSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAIJWithArrays"
/*@
   MatCreateMPIAIJWithArrays - creates a MPI AIJ matrix using arrays that contain in standard
   CSR format the local rows.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows (Cannot be PETSC_DECIDE)
.  n - This value should be the same as the local size used in creating the
       x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
       calculated if N is given) For square matrices n is almost always m.
.  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
.  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
.  i - row indices
.  j - column indices
-  a - matrix values

   Output Parameter:
.  mat - the matrix

   Level: intermediate

   Notes:
       The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
     thus you CANNOT change the matrix entries by changing the values of a[] after you have
     called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.

     The i and j indices are 0 based, and i indices are indices corresponding to the local j array.

       The format which is used for the sparse matrix input, is equivalent to a
    row-major ordering, i.e. for the following matrix, the input data expected is
    as shown:

        1 0 0
        2 0 3     P0
       -------
        4 5 6     P1

     Process0 [P0]: rows_owned=[0,1]
        i =  {0,1,3}  [size = nrow+1  = 2+1]
        j =  {0,0,2}  [size = nz = 6]
        v =  {1,2,3}  [size = nz = 6]

     Process1 [P1]: rows_owned=[2]
        i =  {0,3}    [size = nrow+1  = 1+1]
        j =  {0,1,2}  [size = nz = 6]
        v =  {4,5,6}  [size = nz = 6]

.keywords: matrix, aij, compressed row, sparse, parallel

.seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
          MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithSplitArrays()
@*/
PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* validate the CSR input before creating anything */
  if (i[0]) {
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
  }
  if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
  ierr = MatCreate(comm,mat);CHKERRQ(ierr);
  ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr);
  /* the CSR preallocation routine also copies in the values (or zeros if a is NULL) and assembles */
  ierr = MatMPIAIJSetPreallocationCSR(*mat,i,j,a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAIJ"
/*@C
   MatCreateMPIAIJ - Creates a sparse parallel matrix in AIJ format
   (the default parallel PETSc format).  For good matrix assembly performance
   the user should preallocate the matrix storage by setting the parameters
   d_nz (or d_nnz) and o_nz (or o_nnz). By setting these parameters accurately,
   performance can be increased by more than a factor of 50.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
       This value should be the same as the local size used in creating the
       y vector for the matrix-vector product y = Ax.
.  n - This value should be the same as the local size used in creating the
       x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
       calculated if N is given) For square matrices n is almost always m.
.  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
.  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
.  d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
.
d_nnz - array containing the number of nonzeros in the various rows of the 3993273d9f13SBarry Smith DIAGONAL portion of the local submatrix (possibly different for each row) 3994273d9f13SBarry Smith or PETSC_NULL, if d_nz is used to specify the nonzero structure. 3995273d9f13SBarry Smith The size of this array is equal to the number of local rows, i.e 'm'. 3996273d9f13SBarry Smith You must leave room for the diagonal entry even if it is zero. 3997273d9f13SBarry Smith . o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local 3998273d9f13SBarry Smith submatrix (same value is used for all local rows). 3999273d9f13SBarry Smith - o_nnz - array containing the number of nonzeros in the various rows of the 4000273d9f13SBarry Smith OFF-DIAGONAL portion of the local submatrix (possibly different for 4001273d9f13SBarry Smith each row) or PETSC_NULL, if o_nz is used to specify the nonzero 4002273d9f13SBarry Smith structure. The size of this array is equal to the number 4003273d9f13SBarry Smith of local rows, i.e 'm'. 4004273d9f13SBarry Smith 4005273d9f13SBarry Smith Output Parameter: 4006273d9f13SBarry Smith . A - the matrix 4007273d9f13SBarry Smith 4008175b88e8SBarry Smith It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(), 4009ae1d86c5SBarry Smith MatXXXXSetPreallocation() paradgm instead of this routine directly. 4010175b88e8SBarry Smith [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation] 4011175b88e8SBarry Smith 4012273d9f13SBarry Smith Notes: 401349a6f317SBarry Smith If the *_nnz parameter is given then the *_nz parameter is ignored 401449a6f317SBarry Smith 4015273d9f13SBarry Smith m,n,M,N parameters specify the size of the matrix, and its partitioning across 4016273d9f13SBarry Smith processors, while d_nz,d_nnz,o_nz,o_nnz parameters specify the approximate 4017273d9f13SBarry Smith storage requirements for this matrix. 
4018273d9f13SBarry Smith 4019273d9f13SBarry Smith If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one 4020273d9f13SBarry Smith processor than it must be used on all processors that share the object for 4021273d9f13SBarry Smith that argument. 4022273d9f13SBarry Smith 4023273d9f13SBarry Smith The user MUST specify either the local or global matrix dimensions 4024273d9f13SBarry Smith (possibly both). 4025273d9f13SBarry Smith 402633a7c187SSatish Balay The parallel matrix is partitioned across processors such that the 402733a7c187SSatish Balay first m0 rows belong to process 0, the next m1 rows belong to 402833a7c187SSatish Balay process 1, the next m2 rows belong to process 2 etc.. where 402933a7c187SSatish Balay m0,m1,m2,.. are the input parameter 'm'. i.e each processor stores 403033a7c187SSatish Balay values corresponding to [m x N] submatrix. 4031273d9f13SBarry Smith 403233a7c187SSatish Balay The columns are logically partitioned with the n0 columns belonging 403333a7c187SSatish Balay to 0th partition, the next n1 columns belonging to the next 403433a7c187SSatish Balay partition etc.. where n0,n1,n2... are the the input parameter 'n'. 403533a7c187SSatish Balay 403633a7c187SSatish Balay The DIAGONAL portion of the local submatrix on any given processor 403733a7c187SSatish Balay is the submatrix corresponding to the rows and columns m,n 403833a7c187SSatish Balay corresponding to the given processor. i.e diagonal matrix on 403933a7c187SSatish Balay process 0 is [m0 x n0], diagonal matrix on process 1 is [m1 x n1] 404033a7c187SSatish Balay etc. The remaining portion of the local submatrix [m x (N-n)] 404133a7c187SSatish Balay constitute the OFF-DIAGONAL portion. The example below better 404233a7c187SSatish Balay illustrates this concept. 
404333a7c187SSatish Balay 404433a7c187SSatish Balay For a square global matrix we define each processor's diagonal portion 404533a7c187SSatish Balay to be its local rows and the corresponding columns (a square submatrix); 404633a7c187SSatish Balay each processor's off-diagonal portion encompasses the remainder of the 404733a7c187SSatish Balay local matrix (a rectangular submatrix). 4048273d9f13SBarry Smith 4049273d9f13SBarry Smith If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored. 4050273d9f13SBarry Smith 405197d05335SKris Buschelman When calling this routine with a single process communicator, a matrix of 405297d05335SKris Buschelman type SEQAIJ is returned. If a matrix of type MPIAIJ is desired for this 405397d05335SKris Buschelman type of communicator, use the construction mechanism: 405478102f6cSMatthew Knepley MatCreate(...,&A); MatSetType(A,MATMPIAIJ); MatSetSizes(A, m,n,M,N); MatMPIAIJSetPreallocation(A,...); 405597d05335SKris Buschelman 4056273d9f13SBarry Smith By default, this format uses inodes (identical nodes) when possible. 4057273d9f13SBarry Smith We search for consecutive rows with the same nonzero structure, thereby 4058273d9f13SBarry Smith reusing matrix information to achieve increased efficiency. 4059273d9f13SBarry Smith 4060273d9f13SBarry Smith Options Database Keys: 4061923f20ffSKris Buschelman + -mat_no_inode - Do not use inodes 4062923f20ffSKris Buschelman . -mat_inode_limit <limit> - Sets inode limit (max limit=5) 4063273d9f13SBarry Smith - -mat_aij_oneindex - Internally use indexing starting at 1 4064273d9f13SBarry Smith rather than 0. Note that when calling MatSetValues(), 4065273d9f13SBarry Smith the user still MUST index entries starting at 0! 4066273d9f13SBarry Smith 4067273d9f13SBarry Smith 4068273d9f13SBarry Smith Example usage: 4069273d9f13SBarry Smith 4070273d9f13SBarry Smith Consider the following 8x8 matrix with 34 non-zero values, that is 4071273d9f13SBarry Smith assembled across 3 processors. 
Lets assume that proc0 owns 3 rows, 4072273d9f13SBarry Smith proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown 4073273d9f13SBarry Smith as follows: 4074273d9f13SBarry Smith 4075273d9f13SBarry Smith .vb 4076273d9f13SBarry Smith 1 2 0 | 0 3 0 | 0 4 4077273d9f13SBarry Smith Proc0 0 5 6 | 7 0 0 | 8 0 4078273d9f13SBarry Smith 9 0 10 | 11 0 0 | 12 0 4079273d9f13SBarry Smith ------------------------------------- 4080273d9f13SBarry Smith 13 0 14 | 15 16 17 | 0 0 4081273d9f13SBarry Smith Proc1 0 18 0 | 19 20 21 | 0 0 4082273d9f13SBarry Smith 0 0 0 | 22 23 0 | 24 0 4083273d9f13SBarry Smith ------------------------------------- 4084273d9f13SBarry Smith Proc2 25 26 27 | 0 0 28 | 29 0 4085273d9f13SBarry Smith 30 0 0 | 31 32 33 | 0 34 4086273d9f13SBarry Smith .ve 4087273d9f13SBarry Smith 4088273d9f13SBarry Smith This can be represented as a collection of submatrices as: 4089273d9f13SBarry Smith 4090273d9f13SBarry Smith .vb 4091273d9f13SBarry Smith A B C 4092273d9f13SBarry Smith D E F 4093273d9f13SBarry Smith G H I 4094273d9f13SBarry Smith .ve 4095273d9f13SBarry Smith 4096273d9f13SBarry Smith Where the submatrices A,B,C are owned by proc0, D,E,F are 4097273d9f13SBarry Smith owned by proc1, G,H,I are owned by proc2. 4098273d9f13SBarry Smith 4099273d9f13SBarry Smith The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 4100273d9f13SBarry Smith The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 4101273d9f13SBarry Smith The 'M','N' parameters are 8,8, and have the same values on all procs. 4102273d9f13SBarry Smith 4103273d9f13SBarry Smith The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are 4104273d9f13SBarry Smith submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices 4105273d9f13SBarry Smith corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively. 4106273d9f13SBarry Smith Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL 4107273d9f13SBarry Smith part as SeqAIJ matrices. 
for eg: proc1 will store [E] as a SeqAIJ 4108273d9f13SBarry Smith matrix, ans [DF] as another SeqAIJ matrix. 4109273d9f13SBarry Smith 4110273d9f13SBarry Smith When d_nz, o_nz parameters are specified, d_nz storage elements are 4111273d9f13SBarry Smith allocated for every row of the local diagonal submatrix, and o_nz 4112273d9f13SBarry Smith storage locations are allocated for every row of the OFF-DIAGONAL submat. 4113273d9f13SBarry Smith One way to choose d_nz and o_nz is to use the max nonzerors per local 4114273d9f13SBarry Smith rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices. 4115273d9f13SBarry Smith In this case, the values of d_nz,o_nz are: 4116273d9f13SBarry Smith .vb 4117273d9f13SBarry Smith proc0 : dnz = 2, o_nz = 2 4118273d9f13SBarry Smith proc1 : dnz = 3, o_nz = 2 4119273d9f13SBarry Smith proc2 : dnz = 1, o_nz = 4 4120273d9f13SBarry Smith .ve 4121273d9f13SBarry Smith We are allocating m*(d_nz+o_nz) storage locations for every proc. This 4122273d9f13SBarry Smith translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10 4123273d9f13SBarry Smith for proc3. i.e we are using 12+15+10=37 storage locations to store 4124273d9f13SBarry Smith 34 values. 4125273d9f13SBarry Smith 4126273d9f13SBarry Smith When d_nnz, o_nnz parameters are specified, the storage is specified 4127273d9f13SBarry Smith for every row, coresponding to both DIAGONAL and OFF-DIAGONAL submatrices. 4128273d9f13SBarry Smith In the above case the values for d_nnz,o_nnz are: 4129273d9f13SBarry Smith .vb 4130273d9f13SBarry Smith proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2] 4131273d9f13SBarry Smith proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1] 4132273d9f13SBarry Smith proc2: d_nnz = [1,1] and o_nnz = [4,4] 4133273d9f13SBarry Smith .ve 4134273d9f13SBarry Smith Here the space allocated is sum of all the above values i.e 34, and 4135273d9f13SBarry Smith hence pre-allocation is perfect. 
4136273d9f13SBarry Smith 4137273d9f13SBarry Smith Level: intermediate 4138273d9f13SBarry Smith 4139273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 4140273d9f13SBarry Smith 4141ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 41422fb0ec9aSBarry Smith MPIAIJ, MatCreateMPIAIJWithArrays() 4143273d9f13SBarry Smith @*/ 41447087cfbeSBarry Smith PetscErrorCode MatCreateMPIAIJ(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A) 4145273d9f13SBarry Smith { 41466849ba73SBarry Smith PetscErrorCode ierr; 4147b1d57f15SBarry Smith PetscMPIInt size; 4148273d9f13SBarry Smith 4149273d9f13SBarry Smith PetscFunctionBegin; 4150f69a0ea3SMatthew Knepley ierr = MatCreate(comm,A);CHKERRQ(ierr); 4151f69a0ea3SMatthew Knepley ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr); 4152273d9f13SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 4153273d9f13SBarry Smith if (size > 1) { 4154273d9f13SBarry Smith ierr = MatSetType(*A,MATMPIAIJ);CHKERRQ(ierr); 4155273d9f13SBarry Smith ierr = MatMPIAIJSetPreallocation(*A,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr); 4156273d9f13SBarry Smith } else { 4157273d9f13SBarry Smith ierr = MatSetType(*A,MATSEQAIJ);CHKERRQ(ierr); 4158273d9f13SBarry Smith ierr = MatSeqAIJSetPreallocation(*A,d_nz,d_nnz);CHKERRQ(ierr); 4159273d9f13SBarry Smith } 4160273d9f13SBarry Smith PetscFunctionReturn(0); 4161273d9f13SBarry Smith } 4162195d93cdSBarry Smith 41634a2ae208SSatish Balay #undef __FUNCT__ 41644a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJGetSeqAIJ" 41657087cfbeSBarry Smith PetscErrorCode MatMPIAIJGetSeqAIJ(Mat A,Mat *Ad,Mat *Ao,PetscInt *colmap[]) 4166195d93cdSBarry Smith { 4167195d93cdSBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data; 4168b1d57f15SBarry Smith 4169195d93cdSBarry Smith PetscFunctionBegin; 4170195d93cdSBarry Smith *Ad = a->A; 4171195d93cdSBarry 
Smith *Ao = a->B; 4172195d93cdSBarry Smith *colmap = a->garray; 4173195d93cdSBarry Smith PetscFunctionReturn(0); 4174195d93cdSBarry Smith } 4175a2243be0SBarry Smith 4176a2243be0SBarry Smith #undef __FUNCT__ 4177a2243be0SBarry Smith #define __FUNCT__ "MatSetColoring_MPIAIJ" 4178dfbe8321SBarry Smith PetscErrorCode MatSetColoring_MPIAIJ(Mat A,ISColoring coloring) 4179a2243be0SBarry Smith { 4180dfbe8321SBarry Smith PetscErrorCode ierr; 4181b1d57f15SBarry Smith PetscInt i; 4182a2243be0SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 4183a2243be0SBarry Smith 4184a2243be0SBarry Smith PetscFunctionBegin; 41858ee2e534SBarry Smith if (coloring->ctype == IS_COLORING_GLOBAL) { 418608b6dcc0SBarry Smith ISColoringValue *allcolors,*colors; 4187a2243be0SBarry Smith ISColoring ocoloring; 4188a2243be0SBarry Smith 4189a2243be0SBarry Smith /* set coloring for diagonal portion */ 4190a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->A,coloring);CHKERRQ(ierr); 4191a2243be0SBarry Smith 4192a2243be0SBarry Smith /* set coloring for off-diagonal portion */ 41937adad957SLisandro Dalcin ierr = ISAllGatherColors(((PetscObject)A)->comm,coloring->n,coloring->colors,PETSC_NULL,&allcolors);CHKERRQ(ierr); 4194d0f46423SBarry Smith ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); 4195d0f46423SBarry Smith for (i=0; i<a->B->cmap->n; i++) { 4196a2243be0SBarry Smith colors[i] = allcolors[a->garray[i]]; 4197a2243be0SBarry Smith } 4198a2243be0SBarry Smith ierr = PetscFree(allcolors);CHKERRQ(ierr); 4199d0f46423SBarry Smith ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr); 4200a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr); 42016bf464f9SBarry Smith ierr = ISColoringDestroy(&ocoloring);CHKERRQ(ierr); 4202a2243be0SBarry Smith } else if (coloring->ctype == IS_COLORING_GHOSTED) { 420308b6dcc0SBarry Smith ISColoringValue *colors; 4204b1d57f15SBarry Smith PetscInt *larray; 4205a2243be0SBarry 
Smith ISColoring ocoloring; 4206a2243be0SBarry Smith 4207a2243be0SBarry Smith /* set coloring for diagonal portion */ 4208d0f46423SBarry Smith ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr); 4209d0f46423SBarry Smith for (i=0; i<a->A->cmap->n; i++) { 4210d0f46423SBarry Smith larray[i] = i + A->cmap->rstart; 4211a2243be0SBarry Smith } 4212992144d0SBarry Smith ierr = ISGlobalToLocalMappingApply(A->cmap->mapping,IS_GTOLM_MASK,a->A->cmap->n,larray,PETSC_NULL,larray);CHKERRQ(ierr); 4213d0f46423SBarry Smith ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); 4214d0f46423SBarry Smith for (i=0; i<a->A->cmap->n; i++) { 4215a2243be0SBarry Smith colors[i] = coloring->colors[larray[i]]; 4216a2243be0SBarry Smith } 4217a2243be0SBarry Smith ierr = PetscFree(larray);CHKERRQ(ierr); 4218d0f46423SBarry Smith ierr = ISColoringCreate(PETSC_COMM_SELF,coloring->n,a->A->cmap->n,colors,&ocoloring);CHKERRQ(ierr); 4219a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->A,ocoloring);CHKERRQ(ierr); 42206bf464f9SBarry Smith ierr = ISColoringDestroy(&ocoloring);CHKERRQ(ierr); 4221a2243be0SBarry Smith 4222a2243be0SBarry Smith /* set coloring for off-diagonal portion */ 4223d0f46423SBarry Smith ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr); 4224992144d0SBarry Smith ierr = ISGlobalToLocalMappingApply(A->cmap->mapping,IS_GTOLM_MASK,a->B->cmap->n,a->garray,PETSC_NULL,larray);CHKERRQ(ierr); 4225d0f46423SBarry Smith ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); 4226d0f46423SBarry Smith for (i=0; i<a->B->cmap->n; i++) { 4227a2243be0SBarry Smith colors[i] = coloring->colors[larray[i]]; 4228a2243be0SBarry Smith } 4229a2243be0SBarry Smith ierr = PetscFree(larray);CHKERRQ(ierr); 4230d0f46423SBarry Smith ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr); 4231a2243be0SBarry Smith ierr = 
MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr); 42326bf464f9SBarry Smith ierr = ISColoringDestroy(&ocoloring);CHKERRQ(ierr); 42336bf464f9SBarry Smith } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support ISColoringType %d",(int)coloring->ctype); 4234a2243be0SBarry Smith 4235a2243be0SBarry Smith PetscFunctionReturn(0); 4236a2243be0SBarry Smith } 4237a2243be0SBarry Smith 4238dcf5cc72SBarry Smith #if defined(PETSC_HAVE_ADIC) 4239a2243be0SBarry Smith #undef __FUNCT__ 4240779c1a83SBarry Smith #define __FUNCT__ "MatSetValuesAdic_MPIAIJ" 4241dfbe8321SBarry Smith PetscErrorCode MatSetValuesAdic_MPIAIJ(Mat A,void *advalues) 4242a2243be0SBarry Smith { 4243a2243be0SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 4244dfbe8321SBarry Smith PetscErrorCode ierr; 4245a2243be0SBarry Smith 4246a2243be0SBarry Smith PetscFunctionBegin; 4247779c1a83SBarry Smith ierr = MatSetValuesAdic_SeqAIJ(a->A,advalues);CHKERRQ(ierr); 4248779c1a83SBarry Smith ierr = MatSetValuesAdic_SeqAIJ(a->B,advalues);CHKERRQ(ierr); 4249779c1a83SBarry Smith PetscFunctionReturn(0); 4250779c1a83SBarry Smith } 4251dcf5cc72SBarry Smith #endif 4252779c1a83SBarry Smith 4253779c1a83SBarry Smith #undef __FUNCT__ 4254779c1a83SBarry Smith #define __FUNCT__ "MatSetValuesAdifor_MPIAIJ" 4255b1d57f15SBarry Smith PetscErrorCode MatSetValuesAdifor_MPIAIJ(Mat A,PetscInt nl,void *advalues) 4256779c1a83SBarry Smith { 4257779c1a83SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 4258dfbe8321SBarry Smith PetscErrorCode ierr; 4259779c1a83SBarry Smith 4260779c1a83SBarry Smith PetscFunctionBegin; 4261779c1a83SBarry Smith ierr = MatSetValuesAdifor_SeqAIJ(a->A,nl,advalues);CHKERRQ(ierr); 4262779c1a83SBarry Smith ierr = MatSetValuesAdifor_SeqAIJ(a->B,nl,advalues);CHKERRQ(ierr); 4263a2243be0SBarry Smith PetscFunctionReturn(0); 4264a2243be0SBarry Smith } 4265c5d6d63eSBarry Smith 4266c5d6d63eSBarry Smith #undef __FUNCT__ 426751dd7536SBarry Smith #define __FUNCT__ "MatMerge" 4268bc08b0f1SBarry Smith /*@ 426951dd7536SBarry Smith 
MatMerge - Creates a single large PETSc matrix by concatinating sequential 427051dd7536SBarry Smith matrices from each processor 4271c5d6d63eSBarry Smith 4272c5d6d63eSBarry Smith Collective on MPI_Comm 4273c5d6d63eSBarry Smith 4274c5d6d63eSBarry Smith Input Parameters: 427551dd7536SBarry Smith + comm - the communicators the parallel matrix will live on 4276d6bb3c2dSHong Zhang . inmat - the input sequential matrices 42770e36024fSHong Zhang . n - number of local columns (or PETSC_DECIDE) 4278d6bb3c2dSHong Zhang - scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 427951dd7536SBarry Smith 428051dd7536SBarry Smith Output Parameter: 428151dd7536SBarry Smith . outmat - the parallel matrix generated 4282c5d6d63eSBarry Smith 42837e25d530SSatish Balay Level: advanced 42847e25d530SSatish Balay 4285f08fae4eSHong Zhang Notes: The number of columns of the matrix in EACH processor MUST be the same. 4286c5d6d63eSBarry Smith 4287c5d6d63eSBarry Smith @*/ 42887087cfbeSBarry Smith PetscErrorCode MatMerge(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat) 4289c5d6d63eSBarry Smith { 4290dfbe8321SBarry Smith PetscErrorCode ierr; 4291b7940d39SSatish Balay PetscInt m,N,i,rstart,nnz,Ii,*dnz,*onz; 4292ba8c8a56SBarry Smith PetscInt *indx; 4293ba8c8a56SBarry Smith PetscScalar *values; 4294c5d6d63eSBarry Smith 4295c5d6d63eSBarry Smith PetscFunctionBegin; 42960e36024fSHong Zhang ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr); 4297d6bb3c2dSHong Zhang if (scall == MAT_INITIAL_MATRIX){ 4298d6bb3c2dSHong Zhang /* count nonzeros in each row, for diagonal and off diagonal portion of matrix */ 42990e36024fSHong Zhang if (n == PETSC_DECIDE){ 4300357abbc8SBarry Smith ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr); 43010e36024fSHong Zhang } 4302357abbc8SBarry Smith ierr = MPI_Scan(&m, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 4303357abbc8SBarry Smith rstart -= m; 4304d6bb3c2dSHong Zhang 4305d6bb3c2dSHong Zhang ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr); 
4306d6bb3c2dSHong Zhang for (i=0;i<m;i++) { 4307ba8c8a56SBarry Smith ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr); 4308d6bb3c2dSHong Zhang ierr = MatPreallocateSet(i+rstart,nnz,indx,dnz,onz);CHKERRQ(ierr); 4309ba8c8a56SBarry Smith ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr); 4310d6bb3c2dSHong Zhang } 4311d6bb3c2dSHong Zhang /* This routine will ONLY return MPIAIJ type matrix */ 4312f69a0ea3SMatthew Knepley ierr = MatCreate(comm,outmat);CHKERRQ(ierr); 4313f69a0ea3SMatthew Knepley ierr = MatSetSizes(*outmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 4314d6bb3c2dSHong Zhang ierr = MatSetType(*outmat,MATMPIAIJ);CHKERRQ(ierr); 4315d6bb3c2dSHong Zhang ierr = MatMPIAIJSetPreallocation(*outmat,0,dnz,0,onz);CHKERRQ(ierr); 4316d6bb3c2dSHong Zhang ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 4317d6bb3c2dSHong Zhang 4318d6bb3c2dSHong Zhang } else if (scall == MAT_REUSE_MATRIX){ 4319d6bb3c2dSHong Zhang ierr = MatGetOwnershipRange(*outmat,&rstart,PETSC_NULL);CHKERRQ(ierr); 4320d6bb3c2dSHong Zhang } else { 4321e32f2f54SBarry Smith SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall); 4322d6bb3c2dSHong Zhang } 4323d6bb3c2dSHong Zhang 4324d6bb3c2dSHong Zhang for (i=0;i<m;i++) { 4325ba8c8a56SBarry Smith ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr); 4326b7940d39SSatish Balay Ii = i + rstart; 4327b7940d39SSatish Balay ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr); 4328ba8c8a56SBarry Smith ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr); 4329d6bb3c2dSHong Zhang } 43306bf464f9SBarry Smith ierr = MatDestroy(&inmat);CHKERRQ(ierr); 4331d6bb3c2dSHong Zhang ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 4332d6bb3c2dSHong Zhang ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 433351dd7536SBarry Smith 4334c5d6d63eSBarry Smith PetscFunctionReturn(0); 4335c5d6d63eSBarry Smith } 
4336c5d6d63eSBarry Smith 4337c5d6d63eSBarry Smith #undef __FUNCT__ 4338c5d6d63eSBarry Smith #define __FUNCT__ "MatFileSplit" 4339dfbe8321SBarry Smith PetscErrorCode MatFileSplit(Mat A,char *outfile) 4340c5d6d63eSBarry Smith { 4341dfbe8321SBarry Smith PetscErrorCode ierr; 434232dcc486SBarry Smith PetscMPIInt rank; 4343b1d57f15SBarry Smith PetscInt m,N,i,rstart,nnz; 4344de4209c5SBarry Smith size_t len; 4345b1d57f15SBarry Smith const PetscInt *indx; 4346c5d6d63eSBarry Smith PetscViewer out; 4347c5d6d63eSBarry Smith char *name; 4348c5d6d63eSBarry Smith Mat B; 4349b3cc6726SBarry Smith const PetscScalar *values; 4350c5d6d63eSBarry Smith 4351c5d6d63eSBarry Smith PetscFunctionBegin; 4352c5d6d63eSBarry Smith ierr = MatGetLocalSize(A,&m,0);CHKERRQ(ierr); 4353c5d6d63eSBarry Smith ierr = MatGetSize(A,0,&N);CHKERRQ(ierr); 4354f204ca49SKris Buschelman /* Should this be the type of the diagonal block of A? */ 4355f69a0ea3SMatthew Knepley ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr); 4356f69a0ea3SMatthew Knepley ierr = MatSetSizes(B,m,N,m,N);CHKERRQ(ierr); 4357f204ca49SKris Buschelman ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr); 4358f204ca49SKris Buschelman ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr); 4359c5d6d63eSBarry Smith ierr = MatGetOwnershipRange(A,&rstart,0);CHKERRQ(ierr); 4360c5d6d63eSBarry Smith for (i=0;i<m;i++) { 4361c5d6d63eSBarry Smith ierr = MatGetRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr); 4362c5d6d63eSBarry Smith ierr = MatSetValues(B,1,&i,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr); 4363c5d6d63eSBarry Smith ierr = MatRestoreRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr); 4364c5d6d63eSBarry Smith } 4365c5d6d63eSBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 4366c5d6d63eSBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 4367c5d6d63eSBarry Smith 43687adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)A)->comm,&rank);CHKERRQ(ierr); 4369c5d6d63eSBarry Smith ierr = 
PetscStrlen(outfile,&len);CHKERRQ(ierr); 4370c5d6d63eSBarry Smith ierr = PetscMalloc((len+5)*sizeof(char),&name);CHKERRQ(ierr); 4371c5d6d63eSBarry Smith sprintf(name,"%s.%d",outfile,rank); 4372852598b0SBarry Smith ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_APPEND,&out);CHKERRQ(ierr); 4373c5d6d63eSBarry Smith ierr = PetscFree(name); 4374c5d6d63eSBarry Smith ierr = MatView(B,out);CHKERRQ(ierr); 43756bf464f9SBarry Smith ierr = PetscViewerDestroy(&out);CHKERRQ(ierr); 43766bf464f9SBarry Smith ierr = MatDestroy(&B);CHKERRQ(ierr); 4377c5d6d63eSBarry Smith PetscFunctionReturn(0); 4378c5d6d63eSBarry Smith } 4379e5f2cdd8SHong Zhang 438009573ac7SBarry Smith extern PetscErrorCode MatDestroy_MPIAIJ(Mat); 438151a7d1a8SHong Zhang #undef __FUNCT__ 438251a7d1a8SHong Zhang #define __FUNCT__ "MatDestroy_MPIAIJ_SeqsToMPI" 43837087cfbeSBarry Smith PetscErrorCode MatDestroy_MPIAIJ_SeqsToMPI(Mat A) 438451a7d1a8SHong Zhang { 438551a7d1a8SHong Zhang PetscErrorCode ierr; 4386671beff6SHong Zhang Mat_Merge_SeqsToMPI *merge; 4387776b82aeSLisandro Dalcin PetscContainer container; 438851a7d1a8SHong Zhang 438951a7d1a8SHong Zhang PetscFunctionBegin; 4390671beff6SHong Zhang ierr = PetscObjectQuery((PetscObject)A,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr); 4391671beff6SHong Zhang if (container) { 4392776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr); 439351a7d1a8SHong Zhang ierr = PetscFree(merge->id_r);CHKERRQ(ierr); 43943e06a4e6SHong Zhang ierr = PetscFree(merge->len_s);CHKERRQ(ierr); 43953e06a4e6SHong Zhang ierr = PetscFree(merge->len_r);CHKERRQ(ierr); 439651a7d1a8SHong Zhang ierr = PetscFree(merge->bi);CHKERRQ(ierr); 439751a7d1a8SHong Zhang ierr = PetscFree(merge->bj);CHKERRQ(ierr); 4398533163c2SBarry Smith ierr = PetscFree(merge->buf_ri[0]);CHKERRQ(ierr); 439902c68681SHong Zhang ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr); 4400533163c2SBarry Smith ierr = PetscFree(merge->buf_rj[0]);CHKERRQ(ierr); 
440102c68681SHong Zhang ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr); 440205b42c5fSBarry Smith ierr = PetscFree(merge->coi);CHKERRQ(ierr); 440305b42c5fSBarry Smith ierr = PetscFree(merge->coj);CHKERRQ(ierr); 440405b42c5fSBarry Smith ierr = PetscFree(merge->owners_co);CHKERRQ(ierr); 44056bf464f9SBarry Smith ierr = PetscLayoutDestroy(&merge->rowmap);CHKERRQ(ierr); 4406bf0cc555SLisandro Dalcin ierr = PetscFree(merge);CHKERRQ(ierr); 4407671beff6SHong Zhang ierr = PetscObjectCompose((PetscObject)A,"MatMergeSeqsToMPI",0);CHKERRQ(ierr); 4408671beff6SHong Zhang } 440951a7d1a8SHong Zhang ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr); 441051a7d1a8SHong Zhang PetscFunctionReturn(0); 441151a7d1a8SHong Zhang } 441251a7d1a8SHong Zhang 4413c6db04a5SJed Brown #include <../src/mat/utils/freespace.h> 4414c6db04a5SJed Brown #include <petscbt.h> 44154ebed01fSBarry Smith 4416e5f2cdd8SHong Zhang #undef __FUNCT__ 441738f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPINumeric" 4418e5f2cdd8SHong Zhang /*@C 4419f08fae4eSHong Zhang MatMerge_SeqsToMPI - Creates a MPIAIJ matrix by adding sequential 4420e5f2cdd8SHong Zhang matrices from each processor 4421e5f2cdd8SHong Zhang 4422e5f2cdd8SHong Zhang Collective on MPI_Comm 4423e5f2cdd8SHong Zhang 4424e5f2cdd8SHong Zhang Input Parameters: 4425e5f2cdd8SHong Zhang + comm - the communicators the parallel matrix will live on 4426f08fae4eSHong Zhang . seqmat - the input sequential matrices 44270e36024fSHong Zhang . m - number of local rows (or PETSC_DECIDE) 44280e36024fSHong Zhang . n - number of local columns (or PETSC_DECIDE) 4429e5f2cdd8SHong Zhang - scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 4430e5f2cdd8SHong Zhang 4431e5f2cdd8SHong Zhang Output Parameter: 4432f08fae4eSHong Zhang . 
   mpimat - the parallel matrix generated

   Level: advanced

   Notes:
     The dimensions of the sequential matrix in each processor MUST be the same.
     The input seqmat is included into the container "Mat_Merge_SeqsToMPI", and will be
     destroyed when mpimat is destroyed. Call PetscObjectQuery() to access seqmat.
@*/
PetscErrorCode MatMerge_SeqsToMPINumeric(Mat seqmat,Mat mpimat)
{
  PetscErrorCode      ierr;
  MPI_Comm            comm=((PetscObject)mpimat)->comm;
  Mat_SeqAIJ          *a=(Mat_SeqAIJ*)seqmat->data;
  PetscMPIInt         size,rank,taga,*len_s;
  PetscInt            N=mpimat->cmap->N,i,j,*owners,*ai=a->i,*aj=a->j;
  PetscInt            proc,m;
  PetscInt            **buf_ri,**buf_rj;
  PetscInt            k,anzi,*bj_i,*bi,*bj,arow,bnzi,nextaj;
  PetscInt            nrows,**buf_ri_k,**nextrow,**nextai;
  MPI_Request         *s_waits,*r_waits;
  MPI_Status          *status;
  MatScalar           *aa=a->a;
  MatScalar           **abuf_r,*ba_i;
  Mat_Merge_SeqsToMPI *merge;
  PetscContainer      container;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr);

  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  /* retrieve the merge context created by MatMerge_SeqsToMPISymbolic(); it holds
     the symbolic ij-structure (bi,bj) and the communication plan (len_s, id_r, ...) */
  ierr = PetscObjectQuery((PetscObject)mpimat,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr);
  ierr = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr);

  bi     = merge->bi;
  bj     = merge->bj;
  buf_ri = merge->buf_ri;
  buf_rj = merge->buf_rj;

  ierr   = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr);
  owners = merge->rowmap->range;
  len_s  = merge->len_s;

  /* send and recv matrix values */
  /*-----------------------------*/
  ierr = PetscObjectGetNewTag((PetscObject)mpimat,&taga);CHKERRQ(ierr);
  ierr = PetscPostIrecvScalar(comm,taga,merge->nrecv,merge->id_r,merge->len_r,&abuf_r,&r_waits);CHKERRQ(ierr);

  ierr = PetscMalloc((merge->nsend+1)*sizeof(MPI_Request),&s_waits);CHKERRQ(ierr);
  for (proc=0,k=0; proc<size; proc++){
    if (!len_s[proc]) continue;
    /* values of all rows owned by [proc] are contiguous in aa, starting at ai[owners[proc]] */
    i = owners[proc];
    ierr = MPI_Isend(aa+ai[i],len_s[proc],MPIU_MATSCALAR,proc,taga,comm,s_waits+k);CHKERRQ(ierr);
    k++;
  }

  if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,r_waits,status);CHKERRQ(ierr);}
  if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,s_waits,status);CHKERRQ(ierr);}
  ierr = PetscFree(status);CHKERRQ(ierr);

  ierr = PetscFree(s_waits);CHKERRQ(ierr);
  ierr = PetscFree(r_waits);CHKERRQ(ierr);

  /* insert mat values of mpimat */
  /*----------------------------*/
  /* NOTE(review): ba_i is MatScalar* but is allocated/zeroed with sizeof(PetscScalar);
     these types coincide in standard builds — confirm for builds where they differ */
  ierr = PetscMalloc(N*sizeof(PetscScalar),&ba_i);CHKERRQ(ierr);
  ierr = PetscMalloc3(merge->nrecv,PetscInt*,&buf_ri_k,merge->nrecv,PetscInt*,&nextrow,merge->nrecv,PetscInt*,&nextai);CHKERRQ(ierr);

  for (k=0; k<merge->nrecv; k++){
    buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
    nrows       = *(buf_ri_k[k]);
    nextrow[k]  = buf_ri_k[k]+1;  /* next row number of k-th recved i-structure */
    nextai[k]   = buf_ri_k[k] + (nrows + 1);/* points to the next i-structure of k-th recved i-structure */
  }

  /* set values of ba */
  m = merge->rowmap->n;
  for (i=0; i<m; i++) {
    arow = owners[rank] + i;  /* global row index */
    bj_i = bj+bi[i];  /* col indices of the i-th row of mpimat */
    bnzi = bi[i+1] - bi[i];
    ierr = PetscMemzero(ba_i,bnzi*sizeof(PetscScalar));CHKERRQ(ierr);

    /* add local non-zero vals of this proc's seqmat into ba */
    anzi   = ai[arow+1] - ai[arow];
    aj     = a->j + ai[arow];
    aa     = a->a + ai[arow];
    nextaj = 0;
    /* bj_i is a superset of aj (both sorted), so walk bj_i and advance nextaj on matches */
    for (j=0; nextaj<anzi; j++){
      if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */
        ba_i[j] += aa[nextaj++];
      }
    }

    /* add received vals into ba */
    for (k=0; k<merge->nrecv; k++){ /* k-th received message */
      /* i-th row */
      if (i == *nextrow[k]) {
        anzi   = *(nextai[k]+1) - *nextai[k];
        aj     = buf_rj[k] + *(nextai[k]);
        aa     = abuf_r[k] + *(nextai[k]);
        nextaj = 0;
        for (j=0; nextaj<anzi; j++){
          if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */
            ba_i[j] += aa[nextaj++];
          }
        }
        nextrow[k]++; nextai[k]++;
      }
    }
    ierr = MatSetValues(mpimat,1,&arow,bnzi,bj_i,ba_i,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  ierr = PetscFree(abuf_r[0]);CHKERRQ(ierr);
  ierr = PetscFree(abuf_r);CHKERRQ(ierr);
  ierr = PetscFree(ba_i);CHKERRQ(ierr);
  ierr = PetscFree3(buf_ri_k,nextrow,nextai);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

extern PetscErrorCode MatDestroy_MPIAIJ_SeqsToMPI(Mat);

#undef __FUNCT__
#define __FUNCT__ "MatMerge_SeqsToMPISymbolic"
/*
   MatMerge_SeqsToMPISymbolic - builds the parallel ij-structure of the merged matrix:
   exchanges i- and j-structures between processes, merges them row-by-row through a
   linked list, preallocates the MPIAIJ result, and stashes the plan in a
   Mat_Merge_SeqsToMPI container on the result for MatMerge_SeqsToMPINumeric() to reuse.
   The returned matrix is NOT assembled.
*/
PetscErrorCode MatMerge_SeqsToMPISymbolic(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,Mat *mpimat)
{
  PetscErrorCode      ierr;
  Mat                 B_mpi;
  Mat_SeqAIJ          *a=(Mat_SeqAIJ*)seqmat->data;
  PetscMPIInt         size,rank,tagi,tagj,*len_s,*len_si,*len_ri;
  PetscInt            **buf_rj,**buf_ri,**buf_ri_k;
  PetscInt            M=seqmat->rmap->n,N=seqmat->cmap->n,i,*owners,*ai=a->i,*aj=a->j;
  PetscInt            len,proc,*dnz,*onz;
  PetscInt            k,anzi,*bi,*bj,*lnk,nlnk,arow,bnzi,nspacedouble=0;
  PetscInt            nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextai;
  MPI_Request         *si_waits,*sj_waits,*ri_waits,*rj_waits;
  MPI_Status          *status;
  PetscFreeSpaceList  free_space=PETSC_NULL,current_space=PETSC_NULL;
  PetscBT             lnkbt;
  Mat_Merge_SeqsToMPI *merge;
  PetscContainer      container;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr);

  /* make sure it is a PETSc comm */
  ierr = PetscCommDuplicate(comm,&comm,PETSC_NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  ierr = PetscNew(Mat_Merge_SeqsToMPI,&merge);CHKERRQ(ierr);
  ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr);

  /* determine row ownership */
  /*---------------------------------------------------------*/
  ierr = PetscLayoutCreate(comm,&merge->rowmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(merge->rowmap,m);CHKERRQ(ierr);
  ierr = PetscLayoutSetSize(merge->rowmap,M);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(merge->rowmap,1);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(merge->rowmap);CHKERRQ(ierr);
  ierr = PetscMalloc(size*sizeof(PetscMPIInt),&len_si);CHKERRQ(ierr);
  ierr = PetscMalloc(size*sizeof(PetscMPIInt),&merge->len_s);CHKERRQ(ierr);

  m      = merge->rowmap->n;
  M      = merge->rowmap->N;
  owners = merge->rowmap->range;

  /* determine the number of messages to send, their lengths */
  /*---------------------------------------------------------*/
  len_s = merge->len_s;

  len          = 0;  /* length of buf_si[] */
  merge->nsend = 0;
  for (proc=0; proc<size; proc++){
    len_si[proc] = 0;
    if (proc == rank){
      len_s[proc] = 0;  /* nothing to send to self */
    } else {
      len_si[proc] = owners[proc+1] - owners[proc] + 1;
      len_s[proc]  = ai[owners[proc+1]] - ai[owners[proc]]; /* num of rows to be sent to [proc] */
    }
    if (len_s[proc]) {
      merge->nsend++;
      /* only nonempty rows are shipped in the i-structure */
      nrows = 0;
      for (i=owners[proc]; i<owners[proc+1]; i++){
        if (ai[i+1] > ai[i]) nrows++;
      }
      len_si[proc] = 2*(nrows+1);  /* count + row indices + row offsets */
      len         += len_si[proc];
    }
  }

  /* determine the number and length of messages to receive for ij-structure */
  /*-------------------------------------------------------------------------*/
  ierr = PetscGatherNumberOfMessages(comm,PETSC_NULL,len_s,&merge->nrecv);CHKERRQ(ierr);
  ierr = PetscGatherMessageLengths2(comm,merge->nsend,merge->nrecv,len_s,len_si,&merge->id_r,&merge->len_r,&len_ri);CHKERRQ(ierr);

  /* post the Irecv of j-structure */
  /*-------------------------------*/
  ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
  ierr = PetscPostIrecvInt(comm,tagj,merge->nrecv,merge->id_r,merge->len_r,&buf_rj,&rj_waits);CHKERRQ(ierr);

  /* post the Isend of j-structure */
  /*--------------------------------*/
  ierr = PetscMalloc2(merge->nsend,MPI_Request,&si_waits,merge->nsend,MPI_Request,&sj_waits);CHKERRQ(ierr);

  for (proc=0, k=0; proc<size; proc++){
    if (!len_s[proc]) continue;
    i = owners[proc];
    ierr = MPI_Isend(aj+ai[i],len_s[proc],MPIU_INT,proc,tagj,comm,sj_waits+k);CHKERRQ(ierr);
    k++;
  }

  /* receives and sends of j-structure are complete */
  /*------------------------------------------------*/
  if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,rj_waits,status);CHKERRQ(ierr);}
  if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,sj_waits,status);CHKERRQ(ierr);}

  /* send and recv i-structure */
  /*---------------------------*/
  ierr = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
  ierr = PetscPostIrecvInt(comm,tagi,merge->nrecv,merge->id_r,len_ri,&buf_ri,&ri_waits);CHKERRQ(ierr);

  ierr   = PetscMalloc((len+1)*sizeof(PetscInt),&buf_s);CHKERRQ(ierr);
  buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
  for (proc=0,k=0; proc<size; proc++){
    if (!len_s[proc]) continue;
    /* form outgoing message for i-structure:
         buf_si[0]:                 nrows to be sent
               [1:nrows]:           row index (global)
               [nrows+1:2*nrows+1]: i-structure index
    */
    /*-------------------------------------------*/
    nrows       = len_si[proc]/2 - 1;
    buf_si_i    = buf_si + nrows+1;
    buf_si[0]   = nrows;
    buf_si_i[0] = 0;
    nrows       = 0;
    for (i=owners[proc]; i<owners[proc+1]; i++){
      anzi = ai[i+1] - ai[i];
      if (anzi) {
        buf_si_i[nrows+1] = buf_si_i[nrows] + anzi; /* i-structure */
        buf_si[nrows+1]   = i-owners[proc]; /* local row index */
        nrows++;
      }
    }
    ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,si_waits+k);CHKERRQ(ierr);
    k++;
    buf_si += len_si[proc];
  }

  if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,ri_waits,status);CHKERRQ(ierr);}
  if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,si_waits,status);CHKERRQ(ierr);}

  ierr = PetscInfo2(seqmat,"nsend: %D, nrecv: %D\n",merge->nsend,merge->nrecv);CHKERRQ(ierr);
  for (i=0; i<merge->nrecv; i++){
    ierr = PetscInfo3(seqmat,"recv len_ri=%D, len_rj=%D from [%D]\n",len_ri[i],merge->len_r[i],merge->id_r[i]);CHKERRQ(ierr);
  }

  ierr = PetscFree(len_si);CHKERRQ(ierr);
  ierr = PetscFree(len_ri);CHKERRQ(ierr);
  ierr = PetscFree(rj_waits);CHKERRQ(ierr);
  ierr = PetscFree2(si_waits,sj_waits);CHKERRQ(ierr);
  ierr = PetscFree(ri_waits);CHKERRQ(ierr);
  ierr = PetscFree(buf_s);CHKERRQ(ierr);
  ierr = PetscFree(status);CHKERRQ(ierr);

  /* compute a local seq matrix in each processor */
  /*----------------------------------------------*/
  /* allocate bi array and free space for accumulating nonzero column info */
  ierr  = PetscMalloc((m+1)*sizeof(PetscInt),&bi);CHKERRQ(ierr);
  bi[0] = 0;

  /* create and initialize a linked list */
  nlnk = N+1;
  ierr = PetscLLCreate(N,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);

  /* initial FreeSpace size is 2*(num of local nnz(seqmat)) */
  len  = 0;
  len  = ai[owners[rank+1]] - ai[owners[rank]];
  ierr = PetscFreeSpaceGet((PetscInt)(2*len+1),&free_space);CHKERRQ(ierr);
  current_space = free_space;

  /* determine symbolic info for each local row */
  ierr = PetscMalloc3(merge->nrecv,PetscInt*,&buf_ri_k,merge->nrecv,PetscInt*,&nextrow,merge->nrecv,PetscInt*,&nextai);CHKERRQ(ierr);

  for (k=0; k<merge->nrecv; k++){
    buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
    nrows       = *buf_ri_k[k];
    nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th recved i-structure */
    nextai[k]   = buf_ri_k[k] + (nrows + 1);/* points to the next i-structure of k-th recved i-structure */
  }

  ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr);
  len  = 0;
  for (i=0;i<m;i++) {
    bnzi = 0;
    /* add local non-zero cols of this proc's seqmat into lnk */
    arow = owners[rank] + i;
    anzi = ai[arow+1] - ai[arow];
    aj   = a->j + ai[arow];
    ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
    bnzi += nlnk;
    /* add received col data into lnk */
    for (k=0; k<merge->nrecv; k++){ /* k-th received message */
      if (i == *nextrow[k]) { /* i-th row */
        anzi  = *(nextai[k]+1) - *nextai[k];
        aj    = buf_rj[k] + *nextai[k];
        ierr  = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
        bnzi += nlnk;
        nextrow[k]++; nextai[k]++;
      }
    }
    if (len < bnzi) len = bnzi; /* =max(bnzi) */

    /* if free space is not available, make more free space */
    if (current_space->local_remaining<bnzi) {
      ierr = PetscFreeSpaceGet(bnzi+current_space->total_array_size,&current_space);CHKERRQ(ierr);
      nspacedouble++;
    }
    /* copy data into free space, then initialize lnk */
    ierr = PetscLLClean(N,N,bnzi,lnk,current_space->array,lnkbt);CHKERRQ(ierr);
    ierr = MatPreallocateSet(i+owners[rank],bnzi,current_space->array,dnz,onz);CHKERRQ(ierr);

    current_space->array           += bnzi;
    current_space->local_used      += bnzi;
    current_space->local_remaining -= bnzi;

    bi[i+1] = bi[i] + bnzi;
  }

  ierr = PetscFree3(buf_ri_k,nextrow,nextai);CHKERRQ(ierr);

  ierr = PetscMalloc((bi[m]+1)*sizeof(PetscInt),&bj);CHKERRQ(ierr);
  ierr = PetscFreeSpaceContiguous(&free_space,bj);CHKERRQ(ierr);
  ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);

  /* create symbolic parallel matrix B_mpi */
  /*---------------------------------------*/
  ierr = MatCreate(comm,&B_mpi);CHKERRQ(ierr);
  if (n==PETSC_DECIDE) {
    ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,N);CHKERRQ(ierr);
  } else {
    ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  }
  ierr = MatSetType(B_mpi,MATMPIAIJ);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(B_mpi,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);

  /* B_mpi is not ready for use - assembly will be done by MatMerge_SeqsToMPINumeric() */
  B_mpi->assembled    = PETSC_FALSE;
  B_mpi->ops->destroy = MatDestroy_MPIAIJ_SeqsToMPI;
  merge->bi           = bi;
  merge->bj           = bj;
  merge->buf_ri       = buf_ri;
  merge->buf_rj       = buf_rj;
  merge->coi          = PETSC_NULL;
  merge->coj          = PETSC_NULL;
  merge->owners_co    = PETSC_NULL;

  /* drop the inner reference taken by PetscCommDuplicate() above */
  ierr = PetscCommDestroy(&comm);CHKERRQ(ierr);

  /* attach the supporting struct to B_mpi for reuse */
  ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr);
  ierr = PetscContainerSetPointer(container,merge);CHKERRQ(ierr);
  ierr = PetscObjectCompose((PetscObject)B_mpi,"MatMergeSeqsToMPI",(PetscObject)container);CHKERRQ(ierr);
  ierr = PetscContainerDestroy(&container);CHKERRQ(ierr);
  *mpimat = B_mpi;

  ierr = PetscLogEventEnd(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMerge_SeqsToMPI"
/* Convenience driver: symbolic phase (for MAT_INITIAL_MATRIX only) followed by the numeric phase */
PetscErrorCode MatMerge_SeqsToMPI(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,MatReuse scall,Mat *mpimat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX){
    ierr = MatMerge_SeqsToMPISymbolic(comm,seqmat,m,n,mpimat);CHKERRQ(ierr);
  }
  ierr = MatMerge_SeqsToMPINumeric(seqmat,*mpimat);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAIJGetLocalMat"
/*@
   MatMPIAIJGetLocalMat - Creates a SeqAIJ from a MPIAIJ matrix by taking all its local rows and putting them into a sequential matrix
with
   mlocal rows and n columns. Where mlocal is the row count obtained with MatGetLocalSize() and n is the global column count obtained
   with MatGetSize()

   Not Collective

   Input Parameters:
+  A - the matrix
-  scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

   Output Parameter:
.  A_loc - the local sequential matrix generated

   Level: developer

.seealso: MatGetOwnershipRange(), MatMPIAIJGetLocalMatCondensed()

@*/
PetscErrorCode MatMPIAIJGetLocalMat(Mat A,MatReuse scall,Mat *A_loc)
{
  PetscErrorCode ierr;
  Mat_MPIAIJ     *mpimat=(Mat_MPIAIJ*)A->data;
  /* a = diagonal block, b = off-diagonal block of the MPIAIJ storage */
  Mat_SeqAIJ     *mat,*a=(Mat_SeqAIJ*)(mpimat->A)->data,*b=(Mat_SeqAIJ*)(mpimat->B)->data;
  PetscInt       *ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j,*cmap=mpimat->garray;
  MatScalar      *aa=a->a,*ba=b->a,*cam;
  PetscScalar    *ca;
  PetscInt       am=A->rmap->n,i,j,k,cstart=A->cmap->rstart;
  PetscInt       *ci,*cj,col,ncols_d,ncols_o,jo;
  PetscBool      match;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)A,MATMPIAIJ,&match);CHKERRQ(ierr);
  if (!match) SETERRQ(((PetscObject)A)->comm, PETSC_ERR_SUP,"Requires MPIAIJ matrix as input");
  ierr = PetscLogEventBegin(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX){
    /* row pointers of the merged matrix: local nnz = diagonal nnz + off-diagonal nnz */
    ierr  = PetscMalloc((1+am)*sizeof(PetscInt),&ci);CHKERRQ(ierr);
    ci[0] = 0;
    for (i=0; i<am; i++){
      ci[i+1] = ci[i] + (ai[i+1] - ai[i]) + (bi[i+1] - bi[i]);
    }
    ierr = PetscMalloc((1+ci[am])*sizeof(PetscInt),&cj);CHKERRQ(ierr);
    ierr = PetscMalloc((1+ci[am])*sizeof(PetscScalar),&ca);CHKERRQ(ierr);
    k    = 0;
    /* each row is emitted in ascending global column order:
       off-diagonal cols left of the diagonal block, diagonal block, remaining off-diagonal cols */
    for (i=0; i<am; i++) {
      ncols_o = bi[i+1] - bi[i];
      ncols_d = ai[i+1] - ai[i];
      /* off-diagonal portion of A */
      for (jo=0; jo<ncols_o; jo++) {
        col = cmap[*bj];  /* map local off-diagonal col to global col */
        if (col >= cstart) break;
        cj[k]   = col; bj++;
        ca[k++] = *ba++;
      }
      /* diagonal portion of A */
      for (j=0; j<ncols_d; j++) {
        cj[k]   = cstart + *aj++;
        ca[k++] = *aa++;
      }
      /* off-diagonal portion of A */
      for (j=jo; j<ncols_o; j++) {
        cj[k]   = cmap[*bj++];
        ca[k++] = *ba++;
      }
    }
    /* put together the new matrix */
    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,A->cmap->N,ci,cj,ca,A_loc);CHKERRQ(ierr);
    /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */
    /* Since these are PETSc arrays, change flags to free them as necessary. */
    mat          = (Mat_SeqAIJ*)(*A_loc)->data;
    mat->free_a  = PETSC_TRUE;
    mat->free_ij = PETSC_TRUE;
    mat->nonew   = 0;
  } else if (scall == MAT_REUSE_MATRIX){
    /* structure is unchanged: only refresh the values, walking in the same order as above */
    mat = (Mat_SeqAIJ*)(*A_loc)->data;
    ci  = mat->i; cj = mat->j; cam = mat->a;
    for (i=0; i<am; i++) {
      /* off-diagonal portion of A */
      ncols_o = bi[i+1] - bi[i];
      for (jo=0; jo<ncols_o; jo++) {
        col = cmap[*bj];
        if (col >= cstart) break;
        *cam++ = *ba++; bj++;
      }
      /* diagonal portion of A */
      ncols_d = ai[i+1] - ai[i];
      for (j=0; j<ncols_d; j++) *cam++ = *aa++;
      /* off-diagonal portion of A */
      for (j=jo; j<ncols_o; j++) {
        *cam++ = *ba++; bj++;
      }
    }
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall);
  ierr = PetscLogEventEnd(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAIJGetLocalMatCondensed"
/*@C
   MatMPIAIJGetLocalMatCondensed - Creates a SeqAIJ matrix from an MPIAIJ matrix by taking all its local rows and NON-ZERO columns

   Not Collective

   Input Parameters:
+  A - the matrix
.
scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 493532fba14fSHong Zhang - row, col - index sets of rows and columns to extract (or PETSC_NULL) 493632fba14fSHong Zhang 493732fba14fSHong Zhang Output Parameter: 493832fba14fSHong Zhang . A_loc - the local sequential matrix generated 493932fba14fSHong Zhang 494032fba14fSHong Zhang Level: developer 494132fba14fSHong Zhang 4942ba264940SBarry Smith .seealso: MatGetOwnershipRange(), MatMPIAIJGetLocalMat() 4943ba264940SBarry Smith 494432fba14fSHong Zhang @*/ 49454a2b5492SBarry Smith PetscErrorCode MatMPIAIJGetLocalMatCondensed(Mat A,MatReuse scall,IS *row,IS *col,Mat *A_loc) 494632fba14fSHong Zhang { 494732fba14fSHong Zhang Mat_MPIAIJ *a=(Mat_MPIAIJ*)A->data; 494832fba14fSHong Zhang PetscErrorCode ierr; 494932fba14fSHong Zhang PetscInt i,start,end,ncols,nzA,nzB,*cmap,imark,*idx; 495032fba14fSHong Zhang IS isrowa,iscola; 495132fba14fSHong Zhang Mat *aloc; 49524a2b5492SBarry Smith PetscBool match; 495332fba14fSHong Zhang 495432fba14fSHong Zhang PetscFunctionBegin; 49554a2b5492SBarry Smith ierr = PetscTypeCompare((PetscObject)A,MATMPIAIJ,&match);CHKERRQ(ierr); 49564a2b5492SBarry Smith if (!match) SETERRQ(((PetscObject)A)->comm, PETSC_ERR_SUP,"Requires MPIAIJ matrix as input"); 49574ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr); 495832fba14fSHong Zhang if (!row){ 4959d0f46423SBarry Smith start = A->rmap->rstart; end = A->rmap->rend; 496032fba14fSHong Zhang ierr = ISCreateStride(PETSC_COMM_SELF,end-start,start,1,&isrowa);CHKERRQ(ierr); 496132fba14fSHong Zhang } else { 496232fba14fSHong Zhang isrowa = *row; 496332fba14fSHong Zhang } 496432fba14fSHong Zhang if (!col){ 4965d0f46423SBarry Smith start = A->cmap->rstart; 496632fba14fSHong Zhang cmap = a->garray; 4967d0f46423SBarry Smith nzA = a->A->cmap->n; 4968d0f46423SBarry Smith nzB = a->B->cmap->n; 496932fba14fSHong Zhang ierr = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr); 497032fba14fSHong Zhang ncols = 0; 
497132fba14fSHong Zhang for (i=0; i<nzB; i++) { 497232fba14fSHong Zhang if (cmap[i] < start) idx[ncols++] = cmap[i]; 497332fba14fSHong Zhang else break; 497432fba14fSHong Zhang } 497532fba14fSHong Zhang imark = i; 497632fba14fSHong Zhang for (i=0; i<nzA; i++) idx[ncols++] = start + i; 497732fba14fSHong Zhang for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i]; 4978d67e408aSBarry Smith ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,PETSC_OWN_POINTER,&iscola);CHKERRQ(ierr); 497932fba14fSHong Zhang } else { 498032fba14fSHong Zhang iscola = *col; 498132fba14fSHong Zhang } 498232fba14fSHong Zhang if (scall != MAT_INITIAL_MATRIX){ 498332fba14fSHong Zhang ierr = PetscMalloc(sizeof(Mat),&aloc);CHKERRQ(ierr); 498432fba14fSHong Zhang aloc[0] = *A_loc; 498532fba14fSHong Zhang } 498632fba14fSHong Zhang ierr = MatGetSubMatrices(A,1,&isrowa,&iscola,scall,&aloc);CHKERRQ(ierr); 498732fba14fSHong Zhang *A_loc = aloc[0]; 498832fba14fSHong Zhang ierr = PetscFree(aloc);CHKERRQ(ierr); 498932fba14fSHong Zhang if (!row){ 49906bf464f9SBarry Smith ierr = ISDestroy(&isrowa);CHKERRQ(ierr); 499132fba14fSHong Zhang } 499232fba14fSHong Zhang if (!col){ 49936bf464f9SBarry Smith ierr = ISDestroy(&iscola);CHKERRQ(ierr); 499432fba14fSHong Zhang } 49954ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr); 499632fba14fSHong Zhang PetscFunctionReturn(0); 499732fba14fSHong Zhang } 499832fba14fSHong Zhang 499925616d81SHong Zhang #undef __FUNCT__ 500025616d81SHong Zhang #define __FUNCT__ "MatGetBrowsOfAcols" 500125616d81SHong Zhang /*@C 500232fba14fSHong Zhang MatGetBrowsOfAcols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns of local A 500325616d81SHong Zhang 500425616d81SHong Zhang Collective on Mat 500525616d81SHong Zhang 500625616d81SHong Zhang Input Parameters: 5007e240928fSHong Zhang + A,B - the matrices in mpiaij format 500825616d81SHong Zhang . 
scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
-  rowb, colb - index sets of rows and columns of B to extract (or PETSC_NULL)

   Output Parameter:
+  rowb, colb - index sets of rows and columns of B to extract
.  brstart - row index of B_seq from which next B->rmap->n rows are taken from B's local rows
-  B_seq - the sequential matrix generated

   Level: developer

@*/
PetscErrorCode MatGetBrowsOfAcols(Mat A,Mat B,MatReuse scall,IS *rowb,IS *colb,PetscInt *brstart,Mat *B_seq)
{
  Mat_MPIAIJ     *a=(Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       *idx,i,start,ncols,nzA,nzB,*cmap,imark;
  IS             isrowb,iscolb;
  Mat            *bseq;

  PetscFunctionBegin;
  /* A's column layout must match B's row layout for A*B-style products */
  if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){
    SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%D, %D) != (%D,%D)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend);
  }
  ierr = PetscLogEventBegin(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr);

  if (scall == MAT_INITIAL_MATRIX){
    /* build the row IS for B: the nonzero columns of local A, in ascending global order */
    start = A->cmap->rstart;
    cmap  = a->garray;
    nzA   = a->A->cmap->n;
    nzB   = a->B->cmap->n;
    ierr  = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr);
    ncols = 0;
    for (i=0; i<nzB; i++) {  /* row < local row index */
      if (cmap[i] < start) idx[ncols++] = cmap[i];
      else break;
    }
    imark = i;
    for (i=0; i<nzA; i++) idx[ncols++] = start + i;  /* local rows */
    for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i];  /* row > local row index */
    /* isrowb takes ownership of idx */
    ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,PETSC_OWN_POINTER,&isrowb);CHKERRQ(ierr);
    *brstart = imark;  /* offset in B_seq of the first row owned locally by B */
    ierr = ISCreateStride(PETSC_COMM_SELF,B->cmap->N,0,1,&iscolb);CHKERRQ(ierr);  /* all columns of B */
  } else {
    /* reuse: caller must hand back the index sets produced by the initial call */
    if (!rowb || !colb) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"IS rowb and colb must be provided for MAT_REUSE_MATRIX");
    isrowb = *rowb; iscolb = *colb;
    ierr = PetscMalloc(sizeof(Mat),&bseq);CHKERRQ(ierr);
    bseq[0] = *B_seq;
  }
  ierr   = MatGetSubMatrices(B,1,&isrowb,&iscolb,scall,&bseq);CHKERRQ(ierr);
  *B_seq = bseq[0];
  ierr   = PetscFree(bseq);CHKERRQ(ierr);
  /* hand the index sets back to the caller when requested, otherwise clean them up */
  if (!rowb){
    ierr = ISDestroy(&isrowb);CHKERRQ(ierr);
  } else {
    *rowb = isrowb;
  }
  if (!colb){
    ierr = ISDestroy(&iscolb);CHKERRQ(ierr);
  } else {
    *colb = iscolb;
  }
  ierr = PetscLogEventEnd(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetBrowsOfAoCols"
/*@C
   MatGetBrowsOfAoCols - Creates a SeqAIJ matrix by taking rows of B that
equal to nonzero columns 507701b7ae99SHong Zhang of the OFF-DIAGONAL portion of local A 5078429d309bSHong Zhang 5079429d309bSHong Zhang Collective on Mat 5080429d309bSHong Zhang 5081429d309bSHong Zhang Input Parameters: 5082429d309bSHong Zhang + A,B - the matrices in mpiaij format 508387025532SHong Zhang . scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 508487025532SHong Zhang . startsj - starting point in B's sending and receiving j-arrays, saved for MAT_REUSE (or PETSC_NULL) 50851d79065fSBarry Smith . startsj_r - similar to startsj for receives 508687025532SHong Zhang - bufa_ptr - array for sending matrix values, saved for MAT_REUSE (or PETSC_NULL) 5087429d309bSHong Zhang 5088429d309bSHong Zhang Output Parameter: 508987025532SHong Zhang + B_oth - the sequential matrix generated 5090429d309bSHong Zhang 5091429d309bSHong Zhang Level: developer 5092429d309bSHong Zhang 5093429d309bSHong Zhang @*/ 50947087cfbeSBarry Smith PetscErrorCode MatGetBrowsOfAoCols(Mat A,Mat B,MatReuse scall,PetscInt **startsj,PetscInt **startsj_r,MatScalar **bufa_ptr,Mat *B_oth) 5095429d309bSHong Zhang { 5096a6b2eed2SHong Zhang VecScatter_MPI_General *gen_to,*gen_from; 5097429d309bSHong Zhang PetscErrorCode ierr; 5098899cda47SBarry Smith Mat_MPIAIJ *a=(Mat_MPIAIJ*)A->data; 509987025532SHong Zhang Mat_SeqAIJ *b_oth; 5100a6b2eed2SHong Zhang VecScatter ctx=a->Mvctx; 51017adad957SLisandro Dalcin MPI_Comm comm=((PetscObject)ctx)->comm; 51027adad957SLisandro Dalcin PetscMPIInt *rprocs,*sprocs,tag=((PetscObject)ctx)->tag,rank; 5103d0f46423SBarry Smith PetscInt *rowlen,*bufj,*bufJ,ncols,aBn=a->B->cmap->n,row,*b_othi,*b_othj; 5104dd6ea824SBarry Smith PetscScalar *rvalues,*svalues; 5105dd6ea824SBarry Smith MatScalar *b_otha,*bufa,*bufA; 5106e42f35eeSHong Zhang PetscInt i,j,k,l,ll,nrecvs,nsends,nrows,*srow,*rstarts,*rstartsj = 0,*sstarts,*sstartsj,len; 5107910ba992SMatthew Knepley MPI_Request *rwaits = PETSC_NULL,*swaits = PETSC_NULL; 510887025532SHong Zhang MPI_Status *sstatus,rstatus; 
5109aa5bb8c0SSatish Balay PetscMPIInt jj; 5110e42f35eeSHong Zhang PetscInt *cols,sbs,rbs; 5111ba8c8a56SBarry Smith PetscScalar *vals; 5112429d309bSHong Zhang 5113429d309bSHong Zhang PetscFunctionBegin; 5114d0f46423SBarry Smith if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){ 5115e32f2f54SBarry Smith SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%d, %d) != (%d,%d)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend); 5116429d309bSHong Zhang } 51174ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr); 5118a6b2eed2SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 5119a6b2eed2SHong Zhang 5120a6b2eed2SHong Zhang gen_to = (VecScatter_MPI_General*)ctx->todata; 5121a6b2eed2SHong Zhang gen_from = (VecScatter_MPI_General*)ctx->fromdata; 5122e42f35eeSHong Zhang rvalues = gen_from->values; /* holds the length of receiving row */ 5123e42f35eeSHong Zhang svalues = gen_to->values; /* holds the length of sending row */ 5124a6b2eed2SHong Zhang nrecvs = gen_from->n; 5125a6b2eed2SHong Zhang nsends = gen_to->n; 5126d7ee0231SBarry Smith 5127d7ee0231SBarry Smith ierr = PetscMalloc2(nrecvs,MPI_Request,&rwaits,nsends,MPI_Request,&swaits);CHKERRQ(ierr); 5128a6b2eed2SHong Zhang srow = gen_to->indices; /* local row index to be sent */ 5129a6b2eed2SHong Zhang sstarts = gen_to->starts; 5130a6b2eed2SHong Zhang sprocs = gen_to->procs; 5131a6b2eed2SHong Zhang sstatus = gen_to->sstatus; 5132e42f35eeSHong Zhang sbs = gen_to->bs; 5133e42f35eeSHong Zhang rstarts = gen_from->starts; 5134e42f35eeSHong Zhang rprocs = gen_from->procs; 5135e42f35eeSHong Zhang rbs = gen_from->bs; 5136429d309bSHong Zhang 5137dea91ad1SHong Zhang if (!startsj || !bufa_ptr) scall = MAT_INITIAL_MATRIX; 5138429d309bSHong Zhang if (scall == MAT_INITIAL_MATRIX){ 5139a6b2eed2SHong Zhang /* i-array */ 5140a6b2eed2SHong Zhang /*---------*/ 5141a6b2eed2SHong Zhang /* post receives */ 5142a6b2eed2SHong Zhang for 
(i=0; i<nrecvs; i++){ 5143e42f35eeSHong Zhang rowlen = (PetscInt*)rvalues + rstarts[i]*rbs; 5144e42f35eeSHong Zhang nrows = (rstarts[i+1]-rstarts[i])*rbs; /* num of indices to be received */ 514587025532SHong Zhang ierr = MPI_Irecv(rowlen,nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 5146429d309bSHong Zhang } 5147a6b2eed2SHong Zhang 5148a6b2eed2SHong Zhang /* pack the outgoing message */ 51491d79065fSBarry Smith ierr = PetscMalloc2(nsends+1,PetscInt,&sstartsj,nrecvs+1,PetscInt,&rstartsj);CHKERRQ(ierr); 5150a6b2eed2SHong Zhang sstartsj[0] = 0; rstartsj[0] = 0; 5151a6b2eed2SHong Zhang len = 0; /* total length of j or a array to be sent */ 5152a6b2eed2SHong Zhang k = 0; 5153a6b2eed2SHong Zhang for (i=0; i<nsends; i++){ 5154e42f35eeSHong Zhang rowlen = (PetscInt*)svalues + sstarts[i]*sbs; 5155e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 515687025532SHong Zhang for (j=0; j<nrows; j++) { 5157d0f46423SBarry Smith row = srow[k] + B->rmap->range[rank]; /* global row idx */ 5158e42f35eeSHong Zhang for (l=0; l<sbs; l++){ 5159e42f35eeSHong Zhang ierr = MatGetRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); /* rowlength */ 5160e42f35eeSHong Zhang rowlen[j*sbs+l] = ncols; 5161e42f35eeSHong Zhang len += ncols; 5162e42f35eeSHong Zhang ierr = MatRestoreRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); 5163e42f35eeSHong Zhang } 5164a6b2eed2SHong Zhang k++; 5165429d309bSHong Zhang } 5166e42f35eeSHong Zhang ierr = MPI_Isend(rowlen,nrows*sbs,MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 5167dea91ad1SHong Zhang sstartsj[i+1] = len; /* starting point of (i+1)-th outgoing msg in bufj and bufa */ 5168429d309bSHong Zhang } 516987025532SHong Zhang /* recvs and sends of i-array are completed */ 517087025532SHong Zhang i = nrecvs; 517187025532SHong Zhang while (i--) { 5172aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 517387025532SHong Zhang } 51740c468ba9SBarry Smith if 
(nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 5175e42f35eeSHong Zhang 5176a6b2eed2SHong Zhang /* allocate buffers for sending j and a arrays */ 5177a6b2eed2SHong Zhang ierr = PetscMalloc((len+1)*sizeof(PetscInt),&bufj);CHKERRQ(ierr); 5178a6b2eed2SHong Zhang ierr = PetscMalloc((len+1)*sizeof(PetscScalar),&bufa);CHKERRQ(ierr); 5179a6b2eed2SHong Zhang 518087025532SHong Zhang /* create i-array of B_oth */ 518187025532SHong Zhang ierr = PetscMalloc((aBn+2)*sizeof(PetscInt),&b_othi);CHKERRQ(ierr); 518287025532SHong Zhang b_othi[0] = 0; 5183a6b2eed2SHong Zhang len = 0; /* total length of j or a array to be received */ 5184a6b2eed2SHong Zhang k = 0; 5185a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++){ 5186fd0ff01cSHong Zhang rowlen = (PetscInt*)rvalues + rstarts[i]*rbs; 5187e42f35eeSHong Zhang nrows = rbs*(rstarts[i+1]-rstarts[i]); /* num of rows to be recieved */ 518887025532SHong Zhang for (j=0; j<nrows; j++) { 518987025532SHong Zhang b_othi[k+1] = b_othi[k] + rowlen[j]; 5190a6b2eed2SHong Zhang len += rowlen[j]; k++; 5191a6b2eed2SHong Zhang } 5192dea91ad1SHong Zhang rstartsj[i+1] = len; /* starting point of (i+1)-th incoming msg in bufj and bufa */ 5193a6b2eed2SHong Zhang } 5194a6b2eed2SHong Zhang 519587025532SHong Zhang /* allocate space for j and a arrrays of B_oth */ 519687025532SHong Zhang ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(PetscInt),&b_othj);CHKERRQ(ierr); 5197dd6ea824SBarry Smith ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(MatScalar),&b_otha);CHKERRQ(ierr); 5198a6b2eed2SHong Zhang 519987025532SHong Zhang /* j-array */ 520087025532SHong Zhang /*---------*/ 5201a6b2eed2SHong Zhang /* post receives of j-array */ 5202a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++){ 520387025532SHong Zhang nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */ 520487025532SHong Zhang ierr = MPI_Irecv(b_othj+rstartsj[i],nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 5205a6b2eed2SHong Zhang } 5206e42f35eeSHong Zhang 5207e42f35eeSHong Zhang /* 
pack the outgoing message j-array */ 5208a6b2eed2SHong Zhang k = 0; 5209a6b2eed2SHong Zhang for (i=0; i<nsends; i++){ 5210e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 5211a6b2eed2SHong Zhang bufJ = bufj+sstartsj[i]; 521287025532SHong Zhang for (j=0; j<nrows; j++) { 5213d0f46423SBarry Smith row = srow[k++] + B->rmap->range[rank]; /* global row idx */ 5214e42f35eeSHong Zhang for (ll=0; ll<sbs; ll++){ 5215e42f35eeSHong Zhang ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr); 5216a6b2eed2SHong Zhang for (l=0; l<ncols; l++){ 5217a6b2eed2SHong Zhang *bufJ++ = cols[l]; 521887025532SHong Zhang } 5219e42f35eeSHong Zhang ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr); 5220e42f35eeSHong Zhang } 522187025532SHong Zhang } 522287025532SHong Zhang ierr = MPI_Isend(bufj+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 522387025532SHong Zhang } 522487025532SHong Zhang 522587025532SHong Zhang /* recvs and sends of j-array are completed */ 522687025532SHong Zhang i = nrecvs; 522787025532SHong Zhang while (i--) { 5228aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 522987025532SHong Zhang } 52300c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 523187025532SHong Zhang } else if (scall == MAT_REUSE_MATRIX){ 523287025532SHong Zhang sstartsj = *startsj; 52331d79065fSBarry Smith rstartsj = *startsj_r; 523487025532SHong Zhang bufa = *bufa_ptr; 523587025532SHong Zhang b_oth = (Mat_SeqAIJ*)(*B_oth)->data; 523687025532SHong Zhang b_otha = b_oth->a; 523787025532SHong Zhang } else { 5238e32f2f54SBarry Smith SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Matrix P does not posses an object container"); 523987025532SHong Zhang } 524087025532SHong Zhang 524187025532SHong Zhang /* a-array */ 524287025532SHong Zhang /*---------*/ 524387025532SHong Zhang /* post receives of a-array */ 
524487025532SHong Zhang for (i=0; i<nrecvs; i++){ 524587025532SHong Zhang nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */ 524687025532SHong Zhang ierr = MPI_Irecv(b_otha+rstartsj[i],nrows,MPIU_SCALAR,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 524787025532SHong Zhang } 5248e42f35eeSHong Zhang 5249e42f35eeSHong Zhang /* pack the outgoing message a-array */ 525087025532SHong Zhang k = 0; 525187025532SHong Zhang for (i=0; i<nsends; i++){ 5252e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 525387025532SHong Zhang bufA = bufa+sstartsj[i]; 525487025532SHong Zhang for (j=0; j<nrows; j++) { 5255d0f46423SBarry Smith row = srow[k++] + B->rmap->range[rank]; /* global row idx */ 5256e42f35eeSHong Zhang for (ll=0; ll<sbs; ll++){ 5257e42f35eeSHong Zhang ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr); 525887025532SHong Zhang for (l=0; l<ncols; l++){ 5259a6b2eed2SHong Zhang *bufA++ = vals[l]; 5260a6b2eed2SHong Zhang } 5261e42f35eeSHong Zhang ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr); 5262e42f35eeSHong Zhang } 5263a6b2eed2SHong Zhang } 526487025532SHong Zhang ierr = MPI_Isend(bufa+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_SCALAR,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 5265a6b2eed2SHong Zhang } 526687025532SHong Zhang /* recvs and sends of a-array are completed */ 526787025532SHong Zhang i = nrecvs; 526887025532SHong Zhang while (i--) { 5269aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 527087025532SHong Zhang } 52710c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 5272d7ee0231SBarry Smith ierr = PetscFree2(rwaits,swaits);CHKERRQ(ierr); 5273a6b2eed2SHong Zhang 527487025532SHong Zhang if (scall == MAT_INITIAL_MATRIX){ 5275a6b2eed2SHong Zhang /* put together the new matrix */ 5276d0f46423SBarry Smith ierr = 
MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,aBn,B->cmap->N,b_othi,b_othj,b_otha,B_oth);CHKERRQ(ierr); 5277a6b2eed2SHong Zhang 5278a6b2eed2SHong Zhang /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */ 5279a6b2eed2SHong Zhang /* Since these are PETSc arrays, change flags to free them as necessary. */ 528087025532SHong Zhang b_oth = (Mat_SeqAIJ *)(*B_oth)->data; 5281e6b907acSBarry Smith b_oth->free_a = PETSC_TRUE; 5282e6b907acSBarry Smith b_oth->free_ij = PETSC_TRUE; 528387025532SHong Zhang b_oth->nonew = 0; 5284a6b2eed2SHong Zhang 5285a6b2eed2SHong Zhang ierr = PetscFree(bufj);CHKERRQ(ierr); 5286dea91ad1SHong Zhang if (!startsj || !bufa_ptr){ 52871d79065fSBarry Smith ierr = PetscFree2(sstartsj,rstartsj);CHKERRQ(ierr); 5288dea91ad1SHong Zhang ierr = PetscFree(bufa_ptr);CHKERRQ(ierr); 5289dea91ad1SHong Zhang } else { 529087025532SHong Zhang *startsj = sstartsj; 52911d79065fSBarry Smith *startsj_r = rstartsj; 529287025532SHong Zhang *bufa_ptr = bufa; 529387025532SHong Zhang } 5294dea91ad1SHong Zhang } 52954ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr); 5296429d309bSHong Zhang PetscFunctionReturn(0); 5297429d309bSHong Zhang } 5298ccd8e176SBarry Smith 529943eb5e2fSMatthew Knepley #undef __FUNCT__ 530043eb5e2fSMatthew Knepley #define __FUNCT__ "MatGetCommunicationStructs" 530143eb5e2fSMatthew Knepley /*@C 530243eb5e2fSMatthew Knepley MatGetCommunicationStructs - Provides access to the communication structures used in matrix-vector multiplication. 530343eb5e2fSMatthew Knepley 530443eb5e2fSMatthew Knepley Not Collective 530543eb5e2fSMatthew Knepley 530643eb5e2fSMatthew Knepley Input Parameters: 530743eb5e2fSMatthew Knepley . A - The matrix in mpiaij format 530843eb5e2fSMatthew Knepley 530943eb5e2fSMatthew Knepley Output Parameter: 531043eb5e2fSMatthew Knepley + lvec - The local vector holding off-process values from the argument to a matrix-vector product 531143eb5e2fSMatthew Knepley . 
colmap - A map from global column index to local index into lvec 531243eb5e2fSMatthew Knepley - multScatter - A scatter from the argument of a matrix-vector product to lvec 531343eb5e2fSMatthew Knepley 531443eb5e2fSMatthew Knepley Level: developer 531543eb5e2fSMatthew Knepley 531643eb5e2fSMatthew Knepley @*/ 531743eb5e2fSMatthew Knepley #if defined (PETSC_USE_CTABLE) 53187087cfbeSBarry Smith PetscErrorCode MatGetCommunicationStructs(Mat A, Vec *lvec, PetscTable *colmap, VecScatter *multScatter) 531943eb5e2fSMatthew Knepley #else 53207087cfbeSBarry Smith PetscErrorCode MatGetCommunicationStructs(Mat A, Vec *lvec, PetscInt *colmap[], VecScatter *multScatter) 532143eb5e2fSMatthew Knepley #endif 532243eb5e2fSMatthew Knepley { 532343eb5e2fSMatthew Knepley Mat_MPIAIJ *a; 532443eb5e2fSMatthew Knepley 532543eb5e2fSMatthew Knepley PetscFunctionBegin; 53260700a824SBarry Smith PetscValidHeaderSpecific(A, MAT_CLASSID, 1); 5327e414b56bSJed Brown PetscValidPointer(lvec, 2); 5328e414b56bSJed Brown PetscValidPointer(colmap, 3); 5329e414b56bSJed Brown PetscValidPointer(multScatter, 4); 533043eb5e2fSMatthew Knepley a = (Mat_MPIAIJ *) A->data; 533143eb5e2fSMatthew Knepley if (lvec) *lvec = a->lvec; 533243eb5e2fSMatthew Knepley if (colmap) *colmap = a->colmap; 533343eb5e2fSMatthew Knepley if (multScatter) *multScatter = a->Mvctx; 533443eb5e2fSMatthew Knepley PetscFunctionReturn(0); 533543eb5e2fSMatthew Knepley } 533643eb5e2fSMatthew Knepley 533717667f90SBarry Smith EXTERN_C_BEGIN 53387087cfbeSBarry Smith extern PetscErrorCode MatConvert_MPIAIJ_MPIAIJCRL(Mat,const MatType,MatReuse,Mat*); 53397087cfbeSBarry Smith extern PetscErrorCode MatConvert_MPIAIJ_MPIAIJPERM(Mat,const MatType,MatReuse,Mat*); 53407087cfbeSBarry Smith extern PetscErrorCode MatConvert_MPIAIJ_MPISBAIJ(Mat,const MatType,MatReuse,Mat*); 534117667f90SBarry Smith EXTERN_C_END 534217667f90SBarry Smith 5343fc4dec0aSBarry Smith #undef __FUNCT__ 5344fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultNumeric_MPIDense_MPIAIJ" 
5345fc4dec0aSBarry Smith /* 5346fc4dec0aSBarry Smith Computes (B'*A')' since computing B*A directly is untenable 5347fc4dec0aSBarry Smith 5348fc4dec0aSBarry Smith n p p 5349fc4dec0aSBarry Smith ( ) ( ) ( ) 5350fc4dec0aSBarry Smith m ( A ) * n ( B ) = m ( C ) 5351fc4dec0aSBarry Smith ( ) ( ) ( ) 5352fc4dec0aSBarry Smith 5353fc4dec0aSBarry Smith */ 5354fc4dec0aSBarry Smith PetscErrorCode MatMatMultNumeric_MPIDense_MPIAIJ(Mat A,Mat B,Mat C) 5355fc4dec0aSBarry Smith { 5356fc4dec0aSBarry Smith PetscErrorCode ierr; 5357fc4dec0aSBarry Smith Mat At,Bt,Ct; 5358fc4dec0aSBarry Smith 5359fc4dec0aSBarry Smith PetscFunctionBegin; 5360fc4dec0aSBarry Smith ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr); 5361fc4dec0aSBarry Smith ierr = MatTranspose(B,MAT_INITIAL_MATRIX,&Bt);CHKERRQ(ierr); 5362fc4dec0aSBarry Smith ierr = MatMatMult(Bt,At,MAT_INITIAL_MATRIX,1.0,&Ct);CHKERRQ(ierr); 53636bf464f9SBarry Smith ierr = MatDestroy(&At);CHKERRQ(ierr); 53646bf464f9SBarry Smith ierr = MatDestroy(&Bt);CHKERRQ(ierr); 5365fc4dec0aSBarry Smith ierr = MatTranspose(Ct,MAT_REUSE_MATRIX,&C);CHKERRQ(ierr); 53666bf464f9SBarry Smith ierr = MatDestroy(&Ct);CHKERRQ(ierr); 5367fc4dec0aSBarry Smith PetscFunctionReturn(0); 5368fc4dec0aSBarry Smith } 5369fc4dec0aSBarry Smith 5370fc4dec0aSBarry Smith #undef __FUNCT__ 5371fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultSymbolic_MPIDense_MPIAIJ" 5372fc4dec0aSBarry Smith PetscErrorCode MatMatMultSymbolic_MPIDense_MPIAIJ(Mat A,Mat B,PetscReal fill,Mat *C) 5373fc4dec0aSBarry Smith { 5374fc4dec0aSBarry Smith PetscErrorCode ierr; 5375d0f46423SBarry Smith PetscInt m=A->rmap->n,n=B->cmap->n; 5376fc4dec0aSBarry Smith Mat Cmat; 5377fc4dec0aSBarry Smith 5378fc4dec0aSBarry Smith PetscFunctionBegin; 5379e32f2f54SBarry Smith if (A->cmap->n != B->rmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"A->cmap->n %d != B->rmap->n %d\n",A->cmap->n,B->rmap->n); 538039804f7cSBarry Smith ierr = MatCreate(((PetscObject)A)->comm,&Cmat);CHKERRQ(ierr); 5381fc4dec0aSBarry 
Smith ierr = MatSetSizes(Cmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 5382fc4dec0aSBarry Smith ierr = MatSetType(Cmat,MATMPIDENSE);CHKERRQ(ierr); 5383fc4dec0aSBarry Smith ierr = MatMPIDenseSetPreallocation(Cmat,PETSC_NULL);CHKERRQ(ierr); 538438556019SBarry Smith ierr = MatAssemblyBegin(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 538538556019SBarry Smith ierr = MatAssemblyEnd(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 5386fc4dec0aSBarry Smith *C = Cmat; 5387fc4dec0aSBarry Smith PetscFunctionReturn(0); 5388fc4dec0aSBarry Smith } 5389fc4dec0aSBarry Smith 5390fc4dec0aSBarry Smith /* ----------------------------------------------------------------*/ 5391fc4dec0aSBarry Smith #undef __FUNCT__ 5392fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMult_MPIDense_MPIAIJ" 5393fc4dec0aSBarry Smith PetscErrorCode MatMatMult_MPIDense_MPIAIJ(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C) 5394fc4dec0aSBarry Smith { 5395fc4dec0aSBarry Smith PetscErrorCode ierr; 5396fc4dec0aSBarry Smith 5397fc4dec0aSBarry Smith PetscFunctionBegin; 5398fc4dec0aSBarry Smith if (scall == MAT_INITIAL_MATRIX){ 5399fc4dec0aSBarry Smith ierr = MatMatMultSymbolic_MPIDense_MPIAIJ(A,B,fill,C);CHKERRQ(ierr); 5400fc4dec0aSBarry Smith } 5401fc4dec0aSBarry Smith ierr = MatMatMultNumeric_MPIDense_MPIAIJ(A,B,*C);CHKERRQ(ierr); 5402fc4dec0aSBarry Smith PetscFunctionReturn(0); 5403fc4dec0aSBarry Smith } 5404fc4dec0aSBarry Smith 54055c9eb25fSBarry Smith EXTERN_C_BEGIN 5406611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS) 5407bccb9932SShri Abhyankar extern PetscErrorCode MatGetFactor_aij_mumps(Mat,MatFactorType,Mat*); 5408611f576cSBarry Smith #endif 54093bf14a46SMatthew Knepley #if defined(PETSC_HAVE_PASTIX) 54103bf14a46SMatthew Knepley extern PetscErrorCode MatGetFactor_mpiaij_pastix(Mat,MatFactorType,Mat*); 54113bf14a46SMatthew Knepley #endif 5412611f576cSBarry Smith #if defined(PETSC_HAVE_SUPERLU_DIST) 54135c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat,MatFactorType,Mat*); 
#endif
#if defined(PETSC_HAVE_SPOOLES)
extern PetscErrorCode MatGetFactor_mpiaij_spooles(Mat,MatFactorType,Mat*);
#endif
EXTERN_C_END

/*MC
   MATMPIAIJ - MATMPIAIJ = "mpiaij" - A matrix type to be used for parallel sparse matrices.

   Options Database Keys:
. -mat_type mpiaij - sets the matrix type to "mpiaij" during a call to MatSetFromOptions()

  Level: beginner

.seealso: MatCreateMPIAIJ()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAIJ"
/* Constructor for the MPIAIJ matrix type: allocates the Mat_MPIAIJ data structure,
   installs the function table, and registers the type-specific operations
   (external factorizations, preallocation, conversions, MatMatMult variants). */
PetscErrorCode  MatCreate_MPIAIJ(Mat B)
{
  Mat_MPIAIJ     *b;
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)B)->comm,&size);CHKERRQ(ierr);

  /* allocate the implementation-specific data and install the operations table */
  ierr = PetscNewLog(B,Mat_MPIAIJ,&b);CHKERRQ(ierr);
  B->data       = (void*)b;
  ierr = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->rmap->bs   = 1;
  B->assembled  = PETSC_FALSE;

  B->insertmode = NOT_SET_VALUES;
  b->size       = size;
  ierr = MPI_Comm_rank(((PetscObject)B)->comm,&b->rank);CHKERRQ(ierr);

  /* build cache for off array entries formed */
  ierr = MatStashCreate_Private(((PetscObject)B)->comm,1,&B->stash);CHKERRQ(ierr);
  b->donotstash  = PETSC_FALSE;
  b->colmap      = 0;
  b->garray      = 0;
  b->roworiented = PETSC_TRUE;

  /* stuff used for matrix vector multiply */
  b->lvec  = PETSC_NULL;
  b->Mvctx = PETSC_NULL;

  /* stuff for MatGetRow() */
  b->rowindices   = 0;
  b->rowvalues    = 0;
  b->getrowactive = PETSC_FALSE;

  /* register external direct solvers when PETSc was configured with them */
#if defined(PETSC_HAVE_SPOOLES)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_spooles_C",
                                     "MatGetFactor_mpiaij_spooles",
                                     MatGetFactor_mpiaij_spooles);CHKERRQ(ierr);
#endif
#if defined(PETSC_HAVE_MUMPS)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mumps_C",
                                     "MatGetFactor_aij_mumps",
                                     MatGetFactor_aij_mumps);CHKERRQ(ierr);
#endif
#if defined(PETSC_HAVE_PASTIX)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_pastix_C",
                                     "MatGetFactor_mpiaij_pastix",
                                     MatGetFactor_mpiaij_pastix);CHKERRQ(ierr);
#endif
#if defined(PETSC_HAVE_SUPERLU_DIST)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_superlu_dist_C",
                                     "MatGetFactor_mpiaij_superlu_dist",
                                     MatGetFactor_mpiaij_superlu_dist);CHKERRQ(ierr);
#endif
  /* type-specific operations looked up by name at runtime */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatStoreValues_C",
                                     "MatStoreValues_MPIAIJ",
                                     MatStoreValues_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatRetrieveValues_C",
                                     "MatRetrieveValues_MPIAIJ",
                                     MatRetrieveValues_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetDiagonalBlock_C",
                                     "MatGetDiagonalBlock_MPIAIJ",
                                     MatGetDiagonalBlock_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatIsTranspose_C",
                                     "MatIsTranspose_MPIAIJ",
                                     MatIsTranspose_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocation_C",
                                     "MatMPIAIJSetPreallocation_MPIAIJ",
                                     MatMPIAIJSetPreallocation_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C",
                                     "MatMPIAIJSetPreallocationCSR_MPIAIJ",
                                     MatMPIAIJSetPreallocationCSR_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatDiagonalScaleLocal_C",
                                     "MatDiagonalScaleLocal_MPIAIJ",
                                     MatDiagonalScaleLocal_MPIAIJ);CHKERRQ(ierr);
  /* conversions to the related AIJ subtypes */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpiaijperm_C",
                                     "MatConvert_MPIAIJ_MPIAIJPERM",
                                     MatConvert_MPIAIJ_MPIAIJPERM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpiaijcrl_C",
                                     "MatConvert_MPIAIJ_MPIAIJCRL",
                                     MatConvert_MPIAIJ_MPIAIJCRL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpisbaij_C",
                                     "MatConvert_MPIAIJ_MPISBAIJ",
                                     MatConvert_MPIAIJ_MPISBAIJ);CHKERRQ(ierr);
  /* mixed-type matrix-matrix products */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMult_mpidense_mpiaij_C",
                                     "MatMatMult_MPIDense_MPIAIJ",
                                     MatMatMult_MPIDense_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultSymbolic_mpidense_mpiaij_C",
                                     "MatMatMultSymbolic_MPIDense_MPIAIJ",
                                     MatMatMultSymbolic_MPIDense_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultNumeric_mpidense_mpiaij_C",
                                     "MatMatMultNumeric_MPIDense_MPIAIJ",
                                     MatMatMultNumeric_MPIDense_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAIJWithSplitArrays"
/*@
     MatCreateMPIAIJWithSplitArrays - creates a MPI AIJ matrix using arrays that contain the "diagonal"
       and "off-diagonal" part of the matrix in CSR format.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.
m - number of local rows (Cannot be PETSC_DECIDE) 554403bfb495SBarry Smith . n - This value should be the same as the local size used in creating the 554503bfb495SBarry Smith x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have 554603bfb495SBarry Smith calculated if N is given) For square matrices n is almost always m. 554703bfb495SBarry Smith . M - number of global rows (or PETSC_DETERMINE to have calculated if m is given) 554803bfb495SBarry Smith . N - number of global columns (or PETSC_DETERMINE to have calculated if n is given) 554903bfb495SBarry Smith . i - row indices for "diagonal" portion of matrix 555003bfb495SBarry Smith . j - column indices 555103bfb495SBarry Smith . a - matrix values 555203bfb495SBarry Smith . oi - row indices for "off-diagonal" portion of matrix 555303bfb495SBarry Smith . oj - column indices 555403bfb495SBarry Smith - oa - matrix values 555503bfb495SBarry Smith 555603bfb495SBarry Smith Output Parameter: 555703bfb495SBarry Smith . mat - the matrix 555803bfb495SBarry Smith 555903bfb495SBarry Smith Level: advanced 556003bfb495SBarry Smith 556103bfb495SBarry Smith Notes: 5562292fb18eSBarry Smith The i, j, and a arrays ARE NOT copied by this routine into the internal format used by PETSc. The user 5563292fb18eSBarry Smith must free the arrays once the matrix has been destroyed and not before. 556403bfb495SBarry Smith 556503bfb495SBarry Smith The i and j indices are 0 based 556603bfb495SBarry Smith 556703bfb495SBarry Smith See MatCreateMPIAIJ() for the definition of "diagonal" and "off-diagonal" portion of the matrix 556803bfb495SBarry Smith 55697b55108eSBarry Smith This sets local rows and cannot be used to set off-processor values. 55707b55108eSBarry Smith 55717b55108eSBarry Smith You cannot later use MatSetValues() to change values in this matrix. 
557203bfb495SBarry Smith 557303bfb495SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 557403bfb495SBarry Smith 557503bfb495SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 55768d7a6e47SBarry Smith MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithArrays() 557703bfb495SBarry Smith @*/ 55787087cfbeSBarry Smith PetscErrorCode MatCreateMPIAIJWithSplitArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt i[],PetscInt j[],PetscScalar a[], 557903bfb495SBarry Smith PetscInt oi[], PetscInt oj[],PetscScalar oa[],Mat *mat) 558003bfb495SBarry Smith { 558103bfb495SBarry Smith PetscErrorCode ierr; 558203bfb495SBarry Smith Mat_MPIAIJ *maij; 558303bfb495SBarry Smith 558403bfb495SBarry Smith PetscFunctionBegin; 5585e32f2f54SBarry Smith if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative"); 5586ea345e14SBarry Smith if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0"); 5587ea345e14SBarry Smith if (oi[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"oi (row indices) must start with 0"); 558803bfb495SBarry Smith ierr = MatCreate(comm,mat);CHKERRQ(ierr); 558903bfb495SBarry Smith ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr); 559003bfb495SBarry Smith ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr); 559103bfb495SBarry Smith maij = (Mat_MPIAIJ*) (*mat)->data; 55928d7a6e47SBarry Smith maij->donotstash = PETSC_TRUE; 55938d7a6e47SBarry Smith (*mat)->preallocated = PETSC_TRUE; 559403bfb495SBarry Smith 559526283091SBarry Smith ierr = PetscLayoutSetBlockSize((*mat)->rmap,1);CHKERRQ(ierr); 559626283091SBarry Smith ierr = PetscLayoutSetBlockSize((*mat)->cmap,1);CHKERRQ(ierr); 559726283091SBarry Smith ierr = PetscLayoutSetUp((*mat)->rmap);CHKERRQ(ierr); 559826283091SBarry Smith ierr = PetscLayoutSetUp((*mat)->cmap);CHKERRQ(ierr); 559903bfb495SBarry Smith 
560003bfb495SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,n,i,j,a,&maij->A);CHKERRQ(ierr); 5601d0f46423SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,(*mat)->cmap->N,oi,oj,oa,&maij->B);CHKERRQ(ierr); 560203bfb495SBarry Smith 56038d7a6e47SBarry Smith ierr = MatAssemblyBegin(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 56048d7a6e47SBarry Smith ierr = MatAssemblyEnd(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 56058d7a6e47SBarry Smith ierr = MatAssemblyBegin(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 56068d7a6e47SBarry Smith ierr = MatAssemblyEnd(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 56078d7a6e47SBarry Smith 560803bfb495SBarry Smith ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 560903bfb495SBarry Smith ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 561003bfb495SBarry Smith PetscFunctionReturn(0); 561103bfb495SBarry Smith } 561203bfb495SBarry Smith 561381824310SBarry Smith /* 561481824310SBarry Smith Special version for direct calls from Fortran 561581824310SBarry Smith */ 5616c6db04a5SJed Brown #include <private/fortranimpl.h> 56177087cfbeSBarry Smith 561881824310SBarry Smith #if defined(PETSC_HAVE_FORTRAN_CAPS) 561981824310SBarry Smith #define matsetvaluesmpiaij_ MATSETVALUESMPIAIJ 562081824310SBarry Smith #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) 562181824310SBarry Smith #define matsetvaluesmpiaij_ matsetvaluesmpiaij 562281824310SBarry Smith #endif 562381824310SBarry Smith 562481824310SBarry Smith /* Change these macros so can be used in void function */ 562581824310SBarry Smith #undef CHKERRQ 5626e32f2f54SBarry Smith #define CHKERRQ(ierr) CHKERRABORT(PETSC_COMM_WORLD,ierr) 562781824310SBarry Smith #undef SETERRQ2 5628e32f2f54SBarry Smith #define SETERRQ2(comm,ierr,b,c,d) CHKERRABORT(comm,ierr) 562981824310SBarry Smith #undef SETERRQ 5630e32f2f54SBarry Smith #define SETERRQ(c,ierr,b) CHKERRABORT(c,ierr) 563181824310SBarry Smith 563281824310SBarry Smith EXTERN_C_BEGIN 563381824310SBarry Smith 
#undef __FUNCT__ 563481824310SBarry Smith #define __FUNCT__ "matsetvaluesmpiaij_" 56351f6cc5b2SSatish Balay void PETSC_STDCALL matsetvaluesmpiaij_(Mat *mmat,PetscInt *mm,const PetscInt im[],PetscInt *mn,const PetscInt in[],const PetscScalar v[],InsertMode *maddv,PetscErrorCode *_ierr) 563681824310SBarry Smith { 563781824310SBarry Smith Mat mat = *mmat; 563881824310SBarry Smith PetscInt m = *mm, n = *mn; 563981824310SBarry Smith InsertMode addv = *maddv; 564081824310SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 564181824310SBarry Smith PetscScalar value; 564281824310SBarry Smith PetscErrorCode ierr; 5643899cda47SBarry Smith 5644d9e2c085SLisandro Dalcin ierr = MatPreallocated(mat);CHKERRQ(ierr); 564581824310SBarry Smith if (mat->insertmode == NOT_SET_VALUES) { 564681824310SBarry Smith mat->insertmode = addv; 564781824310SBarry Smith } 564881824310SBarry Smith #if defined(PETSC_USE_DEBUG) 564981824310SBarry Smith else if (mat->insertmode != addv) { 5650e32f2f54SBarry Smith SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values"); 565181824310SBarry Smith } 565281824310SBarry Smith #endif 565381824310SBarry Smith { 5654d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend; 5655d0f46423SBarry Smith PetscInt cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col; 5656ace3abfcSBarry Smith PetscBool roworiented = aij->roworiented; 565781824310SBarry Smith 565881824310SBarry Smith /* Some Variables required in the macro */ 565981824310SBarry Smith Mat A = aij->A; 566081824310SBarry Smith Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data; 566181824310SBarry Smith PetscInt *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j; 5662dd6ea824SBarry Smith MatScalar *aa = a->a; 5663ace3abfcSBarry Smith PetscBool ignorezeroentries = (((a->ignorezeroentries)&&(addv==ADD_VALUES))?PETSC_TRUE:PETSC_FALSE); 566481824310SBarry Smith Mat B = aij->B; 566581824310SBarry Smith Mat_SeqAIJ *b = (Mat_SeqAIJ*)B->data; 
5666d0f46423SBarry Smith PetscInt *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n; 5667dd6ea824SBarry Smith MatScalar *ba = b->a; 566881824310SBarry Smith 566981824310SBarry Smith PetscInt *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2; 567081824310SBarry Smith PetscInt nonew = a->nonew; 5671dd6ea824SBarry Smith MatScalar *ap1,*ap2; 567281824310SBarry Smith 567381824310SBarry Smith PetscFunctionBegin; 567481824310SBarry Smith for (i=0; i<m; i++) { 567581824310SBarry Smith if (im[i] < 0) continue; 567681824310SBarry Smith #if defined(PETSC_USE_DEBUG) 5677e32f2f54SBarry Smith if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1); 567881824310SBarry Smith #endif 567981824310SBarry Smith if (im[i] >= rstart && im[i] < rend) { 568081824310SBarry Smith row = im[i] - rstart; 568181824310SBarry Smith lastcol1 = -1; 568281824310SBarry Smith rp1 = aj + ai[row]; 568381824310SBarry Smith ap1 = aa + ai[row]; 568481824310SBarry Smith rmax1 = aimax[row]; 568581824310SBarry Smith nrow1 = ailen[row]; 568681824310SBarry Smith low1 = 0; 568781824310SBarry Smith high1 = nrow1; 568881824310SBarry Smith lastcol2 = -1; 568981824310SBarry Smith rp2 = bj + bi[row]; 569081824310SBarry Smith ap2 = ba + bi[row]; 569181824310SBarry Smith rmax2 = bimax[row]; 569281824310SBarry Smith nrow2 = bilen[row]; 569381824310SBarry Smith low2 = 0; 569481824310SBarry Smith high2 = nrow2; 569581824310SBarry Smith 569681824310SBarry Smith for (j=0; j<n; j++) { 569781824310SBarry Smith if (roworiented) value = v[i*n+j]; else value = v[i+j*m]; 569881824310SBarry Smith if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue; 569981824310SBarry Smith if (in[j] >= cstart && in[j] < cend){ 570081824310SBarry Smith col = in[j] - cstart; 570181824310SBarry Smith MatSetValues_SeqAIJ_A_Private(row,col,value,addv); 570281824310SBarry Smith } 
else if (in[j] < 0) continue; 570381824310SBarry Smith #if defined(PETSC_USE_DEBUG) 5704cb9801acSJed Brown else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1); 570581824310SBarry Smith #endif 570681824310SBarry Smith else { 570781824310SBarry Smith if (mat->was_assembled) { 570881824310SBarry Smith if (!aij->colmap) { 570981824310SBarry Smith ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 571081824310SBarry Smith } 571181824310SBarry Smith #if defined (PETSC_USE_CTABLE) 571281824310SBarry Smith ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr); 571381824310SBarry Smith col--; 571481824310SBarry Smith #else 571581824310SBarry Smith col = aij->colmap[in[j]] - 1; 571681824310SBarry Smith #endif 571781824310SBarry Smith if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) { 571881824310SBarry Smith ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr); 571981824310SBarry Smith col = in[j]; 572081824310SBarry Smith /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */ 572181824310SBarry Smith B = aij->B; 572281824310SBarry Smith b = (Mat_SeqAIJ*)B->data; 572381824310SBarry Smith bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; 572481824310SBarry Smith rp2 = bj + bi[row]; 572581824310SBarry Smith ap2 = ba + bi[row]; 572681824310SBarry Smith rmax2 = bimax[row]; 572781824310SBarry Smith nrow2 = bilen[row]; 572881824310SBarry Smith low2 = 0; 572981824310SBarry Smith high2 = nrow2; 5730d0f46423SBarry Smith bm = aij->B->rmap->n; 573181824310SBarry Smith ba = b->a; 573281824310SBarry Smith } 573381824310SBarry Smith } else col = in[j]; 573481824310SBarry Smith MatSetValues_SeqAIJ_B_Private(row,col,value,addv); 573581824310SBarry Smith } 573681824310SBarry Smith } 573781824310SBarry Smith } else { 573881824310SBarry Smith if (!aij->donotstash) { 573981824310SBarry Smith if (roworiented) { 5740ace3abfcSBarry Smith ierr = 
MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 574181824310SBarry Smith } else { 5742ace3abfcSBarry Smith ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 574381824310SBarry Smith } 574481824310SBarry Smith } 574581824310SBarry Smith } 574681824310SBarry Smith }} 574781824310SBarry Smith PetscFunctionReturnVoid(); 574881824310SBarry Smith } 574981824310SBarry Smith EXTERN_C_END 575003bfb495SBarry Smith 5751