#define PETSCMAT_DLL

/*
   Support for the parallel AIJ matrix vector multiply
*/
#include "../src/mat/impls/aij/mpi/mpiaij.h"

#undef __FUNCT__
#define __FUNCT__ "MatSetUpMultiply_MPIAIJ"
PetscErrorCode MatSetUpMultiply_MPIAIJ(Mat mat)
{
  Mat_MPIAIJ         *aij = (Mat_MPIAIJ*)mat->data;
  Mat_SeqAIJ         *B   = (Mat_SeqAIJ*)(aij->B->data);
  PetscErrorCode     ierr;
  PetscInt           i,j,*aj = B->j,ec = 0,*garray;
  IS                 from,to;
  Vec                gvec;
  PetscBool          useblockis;
#if defined (PETSC_USE_CTABLE)
  PetscTable         gid1_lid1;
  PetscTablePosition tpos;
  PetscInt           gid,lid;
#else
  PetscInt           N = mat->cmap->N,*indices;
#endif

  PetscFunctionBegin;

#if defined (PETSC_USE_CTABLE)
  /* use a table - Mark Adams */
  ierr = PetscTableCreate(aij->B->rmap->n,&gid1_lid1);CHKERRQ(ierr);
  for (i=0; i<aij->B->rmap->n; i++) {
    for (j=0; j<B->ilen[i]; j++) {
      PetscInt data,gid1 = aj[B->i[i] + j] + 1;
      ierr = PetscTableFind(gid1_lid1,gid1,&data);CHKERRQ(ierr);
      if (!data) {
        /* one based table */
        ierr = PetscTableAdd(gid1_lid1,gid1,++ec);CHKERRQ(ierr);
      }
    }
  }
  /* form array of columns we need */
  ierr = PetscMalloc((ec+1)*sizeof(PetscInt),&garray);CHKERRQ(ierr);
  ierr = PetscTableGetHeadPosition(gid1_lid1,&tpos);CHKERRQ(ierr);
  while (tpos) {
    ierr = PetscTableGetNext(gid1_lid1,&tpos,&gid,&lid);CHKERRQ(ierr);
    gid--;
    lid--;
    garray[lid] = gid;
  }
  ierr = PetscSortInt(ec,garray);CHKERRQ(ierr); /* sort, and rebuild */
  ierr = PetscTableRemoveAll(gid1_lid1);CHKERRQ(ierr);
  for (i=0; i<ec; i++) {
    ierr = PetscTableAdd(gid1_lid1,garray[i]+1,i+1);CHKERRQ(ierr);
  }
  /* compact out the extra columns in B */
  for (i=0; i<aij->B->rmap->n; i++) {
    for (j=0; j<B->ilen[i]; j++) {
      PetscInt gid1 = aj[B->i[i] + j] + 1;
      ierr = PetscTableFind(gid1_lid1,gid1,&lid);CHKERRQ(ierr);
      lid--;
      aj[B->i[i] + j] = lid;
    }
  }
  aij->B->cmap->n = aij->B->cmap->N = ec;
  ierr = PetscLayoutSetUp((aij->B->cmap));CHKERRQ(ierr);
  ierr = PetscTableDestroy(gid1_lid1);CHKERRQ(ierr);
  /* Mark Adams */
#else
  /* Make an array as long as the number of columns */
  /* mark those columns that are in aij->B */
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = PetscMemzero(indices,N*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<aij->B->rmap->n; i++) {
    for (j=0; j<B->ilen[i]; j++) {
      if (!indices[aj[B->i[i] + j]]) ec++;
      indices[aj[B->i[i] + j]] = 1;
    }
  }

  /* form array of columns we need */
  ierr = PetscMalloc((ec+1)*sizeof(PetscInt),&garray);CHKERRQ(ierr);
  ec   = 0;
  for (i=0; i<N; i++) {
    if (indices[i]) garray[ec++] = i;
  }

  /* make indices now point into garray */
  for (i=0; i<ec; i++) {
    indices[garray[i]] = i;
  }

  /* compact out the extra columns in B */
  for (i=0; i<aij->B->rmap->n; i++) {
    for (j=0; j<B->ilen[i]; j++) {
      aj[B->i[i] + j] = indices[aj[B->i[i] + j]];
    }
  }
  aij->B->cmap->n = aij->B->cmap->N = ec;
  ierr = PetscLayoutSetUp((aij->B->cmap));CHKERRQ(ierr);
  ierr = PetscFree(indices);CHKERRQ(ierr);
#endif
  /* create local vector that is used to scatter into */
  ierr = VecCreateSeq(PETSC_COMM_SELF,ec,&aij->lvec);CHKERRQ(ierr);

  /* create two temporary index sets used to build the scatter-gather */
  /* check for the special case where blocks are communicated for faster VecScatterXXX */
  useblockis = PETSC_TRUE;
  if (mat->rmap->bs > 1) {
    PetscInt bs = mat->rmap->bs,ibs,ga;
    if (!(ec % bs)) {
      for (i=0; i<ec/bs; i++) {
        if ((ga = garray[ibs = i*bs]) % bs) {
          useblockis = PETSC_FALSE;
          break;
        }
        for (j=1; j<bs; j++) {
          if (garray[ibs+j] != ga+j) {
            useblockis = PETSC_FALSE;
            break;
          }
        }
        if (!useblockis) break;
      }
    }
  }

  if (useblockis) {
    PetscInt *ga,bs = mat->rmap->bs,iec = ec/bs;
    ierr = PetscInfo(mat,"Using block index set to define scatter\n");CHKERRQ(ierr);
    ierr = PetscMalloc((ec/mat->rmap->bs)*sizeof(PetscInt),&ga);CHKERRQ(ierr);
    for (i=0; i<iec; i++) ga[i] = garray[i*bs]/bs;
    ierr = ISCreateBlock(((PetscObject)mat)->comm,bs,iec,ga,PETSC_OWN_POINTER,&from);CHKERRQ(ierr);
  } else {
    ierr = ISCreateGeneral(((PetscObject)mat)->comm,ec,garray,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  }
  ierr = ISCreateStride(PETSC_COMM_SELF,ec,0,1,&to);CHKERRQ(ierr);

  /* create temporary global vector to generate scatter context */
  /* This does not allocate the array's memory so it is efficient */
  ierr = VecCreateMPIWithArray(((PetscObject)mat)->comm,mat->cmap->n,mat->cmap->N,PETSC_NULL,&gvec);CHKERRQ(ierr);

  /* generate the scatter context */
  ierr = VecScatterCreate(gvec,from,aij->lvec,to,&aij->Mvctx);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,aij->Mvctx);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,aij->lvec);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,from);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,to);CHKERRQ(ierr);
  aij->garray = garray;
  ierr = PetscLogObjectMemory(mat,(ec+1)*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = VecDestroy(gvec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DisAssemble_MPIAIJ"
/*
     Takes the local part of an already assembled MPIAIJ matrix
   and disassembles it. This is to allow new nonzeros into the matrix
   that require more communication in the matrix vector multiply.
   Thus certain data-structures must be rebuilt.

   Kind of slow! But that's what application programmers get when
   they are sloppy.
*/
PetscErrorCode DisAssemble_MPIAIJ(Mat A)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)A->data;
  Mat            B = aij->B,Bnew;
  Mat_SeqAIJ     *Baij = (Mat_SeqAIJ*)B->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = B->rmap->n,n = A->cmap->N,col,ct = 0,*garray = aij->garray,*nz,ec;
  PetscScalar    v;

  PetscFunctionBegin;
  /* free stuff related to matrix-vec multiply */
  ierr = VecGetSize(aij->lvec,&ec);CHKERRQ(ierr); /* needed for PetscLogObjectMemory below */
  ierr = VecDestroy(aij->lvec);CHKERRQ(ierr); aij->lvec = 0;
  ierr = VecScatterDestroy(aij->Mvctx);CHKERRQ(ierr); aij->Mvctx = 0;
  if (aij->colmap) {
#if defined (PETSC_USE_CTABLE)
    ierr = PetscTableDestroy(aij->colmap);CHKERRQ(ierr);
    aij->colmap = 0;
#else
    ierr = PetscFree(aij->colmap);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(A,-aij->B->cmap->n*sizeof(PetscInt));CHKERRQ(ierr);
#endif
  }

  /* make sure that B is assembled so we can access its values */
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* invent new B and copy stuff over */
  ierr = PetscMalloc((m+1)*sizeof(PetscInt),&nz);CHKERRQ(ierr);
  for (i=0; i<m; i++) {
    nz[i] = Baij->i[i+1] - Baij->i[i];
  }
  ierr = MatCreate(PETSC_COMM_SELF,&Bnew);CHKERRQ(ierr);
  ierr = MatSetSizes(Bnew,m,n,m,n);CHKERRQ(ierr);
  ierr = MatSetType(Bnew,((PetscObject)B)->type_name);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(Bnew,0,nz);CHKERRQ(ierr);
  ierr = PetscFree(nz);CHKERRQ(ierr);
  for (i=0; i<m; i++) {
    for (j=Baij->i[i]; j<Baij->i[i+1]; j++) {
      col  = garray[Baij->j[ct]];
      v    = Baij->a[ct++];
      ierr = MatSetValues(Bnew,1,&i,1,&col,&v,B->insertmode);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(aij->garray);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(A,-ec*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = MatDestroy(B);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(A,Bnew);CHKERRQ(ierr);
  aij->B = Bnew;
  A->was_assembled = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/* ugly stuff added for Glenn; someday we should fix this up */

static PetscInt *auglyrmapd = 0,*auglyrmapo = 0; /* mapping from the local ordering to the "diagonal" and "off-diagonal" parts of the local matrix */
static Vec      auglydd = 0,auglyoo = 0;         /* work vectors used to scale the two parts of the local matrix */

#undef __FUNCT__
#define __FUNCT__ "MatMPIAIJDiagonalScaleLocalSetUp"
/*
    Builds the mappings auglyrmapd/auglyrmapo from the local ordering into the "diagonal"
  and "off-diagonal" parts of the local matrix, and creates the work vectors auglydd/auglyoo
  used by MatDiagonalScaleLocal_MPIAIJ() below.
*/
PetscErrorCode MatMPIAIJDiagonalScaleLocalSetUp(Mat inA,Vec scale)
{
  Mat_MPIAIJ     *ina = (Mat_MPIAIJ*) inA->data; /* access private part of matrix */
  PetscErrorCode ierr;
  PetscInt       i,n,nt,cstart,cend,no,*garray = ina->garray,*lindices;
  PetscInt       *r_rmapd,*r_rmapo;

  PetscFunctionBegin;
  ierr = MatGetOwnershipRange(inA,&cstart,&cend);CHKERRQ(ierr);
  ierr = MatGetSize(ina->A,PETSC_NULL,&n);CHKERRQ(ierr);
  ierr = PetscMalloc((inA->rmapping->n+1)*sizeof(PetscInt),&r_rmapd);CHKERRQ(ierr);
  ierr = PetscMemzero(r_rmapd,inA->rmapping->n*sizeof(PetscInt));CHKERRQ(ierr);
  nt   = 0;
  for (i=0; i<inA->rmapping->n; i++) {
    if (inA->rmapping->indices[i] >= cstart && inA->rmapping->indices[i] < cend) {
      nt++;
      r_rmapd[i] = inA->rmapping->indices[i] + 1;
    }
  }
  if (nt != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hmm nt %D n %D",nt,n);
  ierr = PetscMalloc((n+1)*sizeof(PetscInt),&auglyrmapd);CHKERRQ(ierr);
  for (i=0; i<inA->rmapping->n; i++) {
    if (r_rmapd[i]) {
      auglyrmapd[(r_rmapd[i]-1)-cstart] = i;
    }
  }
  ierr = PetscFree(r_rmapd);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,n,&auglydd);CHKERRQ(ierr);

  ierr = PetscMalloc((inA->cmap->N+1)*sizeof(PetscInt),&lindices);CHKERRQ(ierr);
  ierr = PetscMemzero(lindices,inA->cmap->N*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<ina->B->cmap->n; i++) {
    lindices[garray[i]] = i+1;
  }
  no   = inA->rmapping->n - nt;
  ierr = PetscMalloc((inA->rmapping->n+1)*sizeof(PetscInt),&r_rmapo);CHKERRQ(ierr);
  ierr = PetscMemzero(r_rmapo,inA->rmapping->n*sizeof(PetscInt));CHKERRQ(ierr);
  nt   = 0;
  for (i=0; i<inA->rmapping->n; i++) {
    if (lindices[inA->rmapping->indices[i]]) {
      nt++;
      r_rmapo[i] = lindices[inA->rmapping->indices[i]];
    }
  }
  if (nt > no) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hmm nt %D no %D",nt,no);
  ierr = PetscFree(lindices);CHKERRQ(ierr);
  ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&auglyrmapo);CHKERRQ(ierr);
  for (i=0; i<inA->rmapping->n; i++) {
    if (r_rmapo[i]) {
      auglyrmapo[(r_rmapo[i]-1)] = i;
    }
  }
  ierr = PetscFree(r_rmapo);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,nt,&auglyoo);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAIJDiagonalScaleLocal"
PetscErrorCode MatMPIAIJDiagonalScaleLocal(Mat A,Vec scale)
{
  /* This routine should really be abandoned as it duplicates MatDiagonalScaleLocal */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(A,"MatDiagonalScaleLocal_C",(Mat,Vec),(A,scale));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatDiagonalScaleLocal_MPIAIJ"
PetscErrorCode MatDiagonalScaleLocal_MPIAIJ(Mat A,Vec scale)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*) A->data; /* access private part of matrix */
  PetscErrorCode ierr;
  PetscInt       n,i;
  PetscScalar    *d,*o,*s;

  PetscFunctionBegin;
  if (!auglyrmapd) {
    ierr = MatMPIAIJDiagonalScaleLocalSetUp(A,scale);CHKERRQ(ierr);
  }

  ierr = VecGetArray(scale,&s);CHKERRQ(ierr);

  ierr = VecGetLocalSize(auglydd,&n);CHKERRQ(ierr);
  ierr = VecGetArray(auglydd,&d);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    d[i] = s[auglyrmapd[i]]; /* copy "diagonal" (true local) portion of scale into dd vector */
  }
  ierr = VecRestoreArray(auglydd,&d);CHKERRQ(ierr);
  /* column scale "diagonal" portion of local matrix */
  ierr = MatDiagonalScale(a->A,PETSC_NULL,auglydd);CHKERRQ(ierr);

  ierr = VecGetLocalSize(auglyoo,&n);CHKERRQ(ierr);
  ierr = VecGetArray(auglyoo,&o);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    o[i] = s[auglyrmapo[i]]; /* copy "off-diagonal" portion of scale into oo vector */
  }
  ierr = VecRestoreArray(scale,&s);CHKERRQ(ierr);
  ierr = VecRestoreArray(auglyoo,&o);CHKERRQ(ierr);
  /* column scale "off-diagonal" portion of local matrix */
  ierr = MatDiagonalScale(a->B,PETSC_NULL,auglyoo);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END
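
/*
   Illustrative sketch (not part of this file): how the scatter context (aij->Mvctx) and the
   local work vector (aij->lvec) built by MatSetUpMultiply_MPIAIJ() are typically consumed in
   a parallel matrix-vector product y = A*x.  The names xx, yy and the Mat_MPIAIJ pointer a
   are assumed for illustration only; this sketches the idea, it is not the library's
   MatMult_MPIAIJ() implementation.

      VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);
      MatMult(a->A,xx,yy);                   ("diagonal" block times the locally owned part of xx)
      VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);
      MatMultAdd(a->B,a->lvec,yy,yy);        ("off-diagonal" block times the gathered ghost values)
*/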