/*
  Defines projective product routines where A is an MPIAIJ matrix:
          C = P^T * A * P
*/

#include <../src/mat/impls/aij/seq/aij.h>   /*I "petscmat.h" I*/
#include <../src/mat/utils/freespace.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <petscbt.h>
#include <petsctime.h>

/* #define PTAP_PROFILE */

PetscErrorCode MatView_MPIAIJ_PtAP(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data;
  Mat_APMPI         *ptap=a->ap;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      if (ptap->algType == 0) {
        ierr = PetscViewerASCIIPrintf(viewer,"using scalable MatPtAP() implementation\n");CHKERRQ(ierr);
      } else if (ptap->algType == 1) {
        ierr = PetscViewerASCIIPrintf(viewer,"using nonscalable MatPtAP() implementation\n");CHKERRQ(ierr);
      }
    }
  }
  ierr = (ptap->view)(A,viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode MatFreeIntermediateDataStructures_MPIAIJ_AP(Mat A)
{
  PetscErrorCode      ierr;
  Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data;
  Mat_APMPI           *ptap=a->ap;
  Mat_Merge_SeqsToMPI *merge;

  PetscFunctionBegin;
  if (!ptap) PetscFunctionReturn(0);
  merge = ptap->merge; /* do not dereference ptap before the NULL check above */

  ierr = PetscFree2(ptap->startsj_s,ptap->startsj_r);CHKERRQ(ierr);
  ierr = PetscFree(ptap->bufa);CHKERRQ(ierr);
  ierr = MatDestroy(&ptap->P_loc);CHKERRQ(ierr);
  ierr = MatDestroy(&ptap->P_oth);CHKERRQ(ierr);
  ierr = MatDestroy(&ptap->A_loc);CHKERRQ(ierr); /* used by MatTransposeMatMult() */
  ierr = MatDestroy(&ptap->Rd);CHKERRQ(ierr);
  ierr = MatDestroy(&ptap->Ro);CHKERRQ(ierr);
  if (ptap->AP_loc) { /* used by alg_rap */
    Mat_SeqAIJ *ap = (Mat_SeqAIJ*)(ptap->AP_loc)->data;
    ierr = PetscFree(ap->i);CHKERRQ(ierr);
    ierr = PetscFree2(ap->j,ap->a);CHKERRQ(ierr);
    ierr = MatDestroy(&ptap->AP_loc);CHKERRQ(ierr);
  } else { /* used by alg_ptap */
    ierr = PetscFree(ptap->api);CHKERRQ(ierr);
    ierr = PetscFree(ptap->apj);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&ptap->C_loc);CHKERRQ(ierr);
  ierr = MatDestroy(&ptap->C_oth);CHKERRQ(ierr);
  if (ptap->apa) {ierr = PetscFree(ptap->apa);CHKERRQ(ierr);}

  if (merge) { /* used by alg_ptap */
    ierr = PetscFree(merge->id_r);CHKERRQ(ierr);
    ierr = PetscFree(merge->len_s);CHKERRQ(ierr);
    ierr = PetscFree(merge->len_r);CHKERRQ(ierr);
    ierr = PetscFree(merge->bi);CHKERRQ(ierr);
    ierr = PetscFree(merge->bj);CHKERRQ(ierr);
    ierr = PetscFree(merge->buf_ri[0]);CHKERRQ(ierr);
    ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr);
    ierr = PetscFree(merge->buf_rj[0]);CHKERRQ(ierr);
    ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr);
    ierr = PetscFree(merge->coi);CHKERRQ(ierr);
    ierr = PetscFree(merge->coj);CHKERRQ(ierr);
    ierr = PetscFree(merge->owners_co);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(&merge->rowmap);CHKERRQ(ierr);
    ierr = PetscFree(ptap->merge);CHKERRQ(ierr);
  }

  A->ops->destroy = ptap->destroy;
  ierr = PetscFree(a->ap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode MatDestroy_MPIAIJ_PtAP(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = (*A->ops->freeintermediatedatastructures)(A);CHKERRQ(ierr);
  ierr = (*A->ops->destroy)(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
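/*
   Usage sketch (illustrative, not part of this file): from application code the
   routines below are reached through MatPtAP().  Assuming A and P are assembled
   MPIAIJ matrices with compatible layouts,

     Mat C;
     ierr = MatPtAP(A,P,MAT_INITIAL_MATRIX,2.0,&C);CHKERRQ(ierr); // symbolic + numeric
     // ... change numerical values of A, keeping its nonzero pattern ...
     ierr = MatPtAP(A,P,MAT_REUSE_MATRIX,2.0,&C);CHKERRQ(ierr);   // numeric only, reuses the ptap struct
     ierr = MatDestroy(&C);CHKERRQ(ierr);

   The implementation is chosen with the runtime option
   -matptap_via <scalable,nonscalable[,hypre]>; see MatPtAP_MPIAIJ_MPIAIJ() below.
*/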
PETSC_INTERN PetscErrorCode MatPtAP_MPIAIJ_MPIAIJ(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;
  PetscBool      flg;
  MPI_Comm       comm;
#if !defined(PETSC_HAVE_HYPRE)
  const char     *algTypes[2] = {"scalable","nonscalable"};
  PetscInt       nalg=2;
#else
  const char     *algTypes[3] = {"scalable","nonscalable","hypre"};
  PetscInt       nalg=3;
#endif
  PetscInt       pN=P->cmap->N,alg=1; /* set default algorithm */

  PetscFunctionBegin;
  /* check that the matrix local sizes are compatible */
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  if (A->rmap->rstart != P->rmap->rstart || A->rmap->rend != P->rmap->rend) SETERRQ4(comm,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, Arow (%D,%D) != Prow (%D,%D)",A->rmap->rstart,A->rmap->rend,P->rmap->rstart,P->rmap->rend);
  if (A->cmap->rstart != P->rmap->rstart || A->cmap->rend != P->rmap->rend) SETERRQ4(comm,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, Acol (%D,%D) != Prow (%D,%D)",A->cmap->rstart,A->cmap->rend,P->rmap->rstart,P->rmap->rend);

  if (scall == MAT_INITIAL_MATRIX) {
    /* pick an algorithm */
    ierr = PetscObjectOptionsBegin((PetscObject)A);CHKERRQ(ierr);
    PetscOptionsObject->alreadyprinted = PETSC_FALSE; /* a hack to ensure the option shows in '-help' */
    ierr = PetscOptionsEList("-matptap_via","Algorithmic approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);

    if (!flg && pN > 100000) { /* may switch to the scalable algorithm as default */
      MatInfo   Ainfo,Pinfo;
      PetscInt  nz_local;
      PetscBool alg_scalable_loc=PETSC_FALSE,alg_scalable;

      ierr = MatGetInfo(A,MAT_LOCAL,&Ainfo);CHKERRQ(ierr);
      ierr = MatGetInfo(P,MAT_LOCAL,&Pinfo);CHKERRQ(ierr);
      nz_local = (PetscInt)(Ainfo.nz_allocated + Pinfo.nz_allocated);

      if (pN > fill*nz_local) alg_scalable_loc = PETSC_TRUE;
      ierr = MPIU_Allreduce(&alg_scalable_loc,&alg_scalable,1,MPIU_BOOL,MPI_LOR,comm);CHKERRQ(ierr);

      if (alg_scalable) {
        alg = 0; /* the scalable algorithm can be ~50% slower than the nonscalable one */
      }
    }

    switch (alg) {
    case 1:
      /* do R=P^T locally, then C=R*A*P -- nonscalable */
      ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
      ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ(A,P,fill,C);CHKERRQ(ierr);
      ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
      break;
#if defined(PETSC_HAVE_HYPRE)
    case 2:
      /* use boomerAMGBuildCoarseOperator */
      ierr = MatPtAPSymbolic_AIJ_AIJ_wHYPRE(A,P,fill,C);CHKERRQ(ierr);
      PetscFunctionReturn(0);
#endif
    default:
      /* do R=P^T locally, then C=R*A*P -- scalable */
      ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
      ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(A,P,fill,C);CHKERRQ(ierr);
      ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
      break;
    }
  }
  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
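/*
   Algebra used by both implementations below (a sketch, following the MPIAIJ
   storage convention): each process stores its local rows of P as a diagonal
   block Pd and an off-diagonal block Po, so with R = P^T, Rd = Pd^T and
   Ro = Po^T,

     AP_loc = A_loc*P = Ad*P_loc + Ao*P_oth   (P_oth = remote rows of P needed locally)
     C      = R*AP    = Rd*AP_loc + Ro*AP_loc

   Rd*AP_loc produces locally owned rows of C (C_loc); Ro*AP_loc produces
   contributions to rows owned by other processes (C_oth), which are added to
   their owners using the column map p->garray.
*/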
PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable(Mat A,Mat P,Mat C)
{
  PetscErrorCode    ierr;
  Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
  Mat_SeqAIJ        *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
  Mat_SeqAIJ        *ap,*p_loc,*p_oth,*c_seq;
  Mat_APMPI         *ptap = c->ap;
  Mat               AP_loc,C_loc,C_oth;
  PetscInt          i,rstart,rend,cm,ncols,row,*api,*apj,am = A->rmap->n,apnz;
  PetscScalar       *apa;
  const PetscInt    *cols;
  const PetscScalar *vals;
  PetscBool         freestruct;

  PetscFunctionBegin;
  if (!ptap) {
    MPI_Comm comm;
    ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
    SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
  }

  ierr = MatZeroEntries(C);CHKERRQ(ierr);

  /* 1) get Rd = Pd^T, Ro = Po^T */
  if (ptap->reuse == MAT_REUSE_MATRIX) {
    ierr = MatTranspose(p->A,MAT_REUSE_MATRIX,&ptap->Rd);CHKERRQ(ierr);
    ierr = MatTranspose(p->B,MAT_REUSE_MATRIX,&ptap->Ro);CHKERRQ(ierr);
  }

  /* 2) get AP_loc */
  AP_loc = ptap->AP_loc;
  ap     = (Mat_SeqAIJ*)AP_loc->data;

  /* 2-1) get P_oth = ptap->P_oth and P_loc = ptap->P_loc */
  /*-----------------------------------------------------*/
  if (ptap->reuse == MAT_REUSE_MATRIX) {
    /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
    ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
    ierr = MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
  }

  /* 2-2) compute numeric A_loc*P - the dominating part */
  /* -------------------------------------------------- */
  /* get data from the symbolic products */
  p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
  if (ptap->P_oth) p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;
  else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ptap->P_oth is NULL. Cannot proceed!");

  api = ap->i;
  apj = ap->j;
  for (i=0; i<am; i++) {
    /* AP[i,:] = A[i,:]*P = Ad[i,:]*P_loc + Ao[i,:]*P_oth */
    apnz = api[i+1] - api[i];
    apa  = ap->a + api[i];
    ierr = PetscMemzero(apa,sizeof(PetscScalar)*apnz);CHKERRQ(ierr);
    AProw_scalable(i,ad,ao,p_loc,p_oth,api,apj,apa);
    ierr = PetscLogFlops(2.0*apnz);CHKERRQ(ierr);
  }

  /* 3) C_loc = Rd*AP_loc, C_oth = Ro*AP_loc */
  ierr = MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(ptap->Rd,AP_loc,ptap->C_loc);CHKERRQ(ierr);
  ierr = MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(ptap->Ro,AP_loc,ptap->C_oth);CHKERRQ(ierr);

  C_loc = ptap->C_loc;
  C_oth = ptap->C_oth;

  /* add C_loc and C_oth to C */
  ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);

  /* C_loc -> C */
  cm    = C_loc->rmap->N;
  c_seq = (Mat_SeqAIJ*)C_loc->data;
  cols  = c_seq->j;
  vals  = c_seq->a;

  /* The (fast) MatSetValues_MPIAIJ_CopyFromCSRFormat function can only be used when C->was_assembled is PETSC_FALSE and
     there are no off-processor parts.  If was_assembled is true, the statement
     aj[rowstart_diag+dnz_row] = mat_j[col] - cstart; in MatSetValues_MPIAIJ_CopyFromCSRFormat no longer holds, and the
     more general (and slower) MatSetValues_MPIAIJ() must be used, which looks the column index up in a table. */
  if (C->assembled) {
    C->was_assembled = PETSC_TRUE;
    C->assembled     = PETSC_FALSE;
  }
  if (C->was_assembled) {
    for (i=0; i<cm; i++) {
      ncols = c_seq->i[i+1] - c_seq->i[i];
      row   = rstart + i;
      ierr  = MatSetValues_MPIAIJ(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
      cols += ncols; vals += ncols;
    }
  } else {
    ierr = MatSetValues_MPIAIJ_CopyFromCSRFormat(C,c_seq->j,c_seq->i,c_seq->a);CHKERRQ(ierr);
  }

  /* C_oth -> C, off-processor part */
  cm    = C_oth->rmap->N;
  c_seq = (Mat_SeqAIJ*)C_oth->data;
  cols  = c_seq->j;
  vals  = c_seq->a;
  for (i=0; i<cm; i++) {
    ncols = c_seq->i[i+1] - c_seq->i[i];
    row   = p->garray[i];
    ierr  = MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
    cols += ncols; vals += ncols;
  }
  ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  ptap->reuse = MAT_REUSE_MATRIX;

  ierr = PetscObjectOptionsBegin((PetscObject)C);CHKERRQ(ierr);
  PetscOptionsObject->alreadyprinted = PETSC_FALSE; /* a hack to ensure the option shows in '-help' */
  /* the supporting struct ptap consumes almost the same amount of memory as C=PtAP; release it if C will not be updated by A and P */
  freestruct = PETSC_FALSE;
  ierr = PetscOptionsBool("-mat_freeintermediatedatastructures","Free intermediate data structures","MatFreeIntermediateDataStructures",freestruct,&freestruct,NULL);CHKERRQ(ierr);
  if (freestruct) {
    ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
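/*
   Conceptual sketch of the row kernel used above: AProw_scalable() (a macro
   defined with the MPIAIJ implementation headers) performs, for row i, a
   sparse merged axpy with the column pattern already fixed by the symbolic
   phase.  Roughly (pseudocode, not the actual macro):

     for each nonzero (row,v) in Ad[i,:]:
       for each nonzero (col,pv) in P_loc[row,:]:
         find col in apj[api[i]..api[i+1]) and do apa[pos] += v*pv;
     ... and the same for Ao[i,:] against P_oth ...
*/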
PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(Mat A,Mat P,PetscReal fill,Mat *C)
{
  PetscErrorCode     ierr;
  Mat_APMPI          *ptap;
  Mat_MPIAIJ         *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c;
  MPI_Comm           comm;
  PetscMPIInt        size,rank;
  Mat                Cmpi,P_loc,P_oth;
  PetscFreeSpaceList free_space=NULL,current_space=NULL;
  PetscInt           am=A->rmap->n,pm=P->rmap->n,pN=P->cmap->N,pn=P->cmap->n;
  PetscInt           *lnk,i,k,pnz,row,nsend;
  PetscMPIInt        tagi,tagj,*len_si,*len_s,*len_ri,icompleted=0,nrecv;
  PetscInt           **buf_rj,**buf_ri,**buf_ri_k;
  PetscInt           len,proc,*dnz,*onz,*owners,nzi,nspacedouble;
  PetscInt           nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
  MPI_Request        *swaits,*rwaits;
  MPI_Status         *sstatus,rstatus;
  PetscLayout        rowmap;
  PetscInt           *owners_co,*coi,*coj; /* i and j arrays of (p->B)^T*A*P - used in the communication */
  PetscMPIInt        *len_r,*id_r;         /* arrays of length comm->size, store lengths/ids of send/recv messages */
  PetscInt           *api,*apj,*Jptr,apnz,*prmap=p->garray,con,j,Crmax,*aj,*ai,*pi;
  Mat_SeqAIJ         *p_loc,*p_oth=NULL,*ad=(Mat_SeqAIJ*)(a->A)->data,*ao=NULL,*c_loc,*c_oth;
  PetscScalar        *apv;
  PetscTable         ta;
  MatType            mtype;
#if defined(PETSC_USE_INFO)
  PetscReal          apfill;
#endif

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  if (size > 1) ao = (Mat_SeqAIJ*)(a->B)->data;

  /* create the symbolic parallel matrix Cmpi */
  ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
  ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
  ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);

  /* create the struct Mat_APMPI and attach it to C later */
  ierr = PetscNew(&ptap);CHKERRQ(ierr);
  ptap->reuse   = MAT_INITIAL_MATRIX;
  ptap->algType = 0;

  /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
  ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&P_oth);CHKERRQ(ierr);
  /* get P_loc by taking all local rows of P */
  ierr = MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&P_loc);CHKERRQ(ierr);

  ptap->P_loc = P_loc;
  ptap->P_oth = P_oth;

  /* (0) compute Rd = Pd^T, Ro = Po^T */
  /* --------------------------------- */
  ierr = MatTranspose(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);CHKERRQ(ierr);
  ierr = MatTranspose(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);CHKERRQ(ierr);

  /* (1) compute symbolic AP = A_loc*P = Ad*P_loc + Ao*P_oth (api,apj) */
  /* ----------------------------------------------------------------- */
  p_loc = (Mat_SeqAIJ*)P_loc->data;
  if (P_oth) p_oth = (Mat_SeqAIJ*)P_oth->data;

  /* create and initialize a linked list */
  ierr = PetscTableCreate(pn,pN,&ta);CHKERRQ(ierr); /* for computing AP_loc and Cmpi */
  MatRowMergeMax_SeqAIJ(p_loc,P_loc->rmap->N,ta);
  MatRowMergeMax_SeqAIJ(p_oth,P_oth->rmap->N,ta);
  ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr); /* Crmax = nnz(sum of Prows) */

  ierr = PetscLLCondensedCreate_Scalable(Crmax,&lnk);CHKERRQ(ierr);

  /* initial FreeSpace size is fill*(nnz(A) + nnz(P)) */
  if (ao) {
    ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],PetscIntSumTruncate(ao->i[am],p_loc->i[pm]))),&free_space);CHKERRQ(ierr);
  } else {
    ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],p_loc->i[pm])),&free_space);CHKERRQ(ierr);
  }
  current_space = free_space;
  nspacedouble  = 0;

  ierr = PetscMalloc1(am+1,&api);CHKERRQ(ierr);
  api[0] = 0;
  for (i=0; i<am; i++) {
    /* diagonal portion: Ad[i,:]*P */
    ai  = ad->i; pi = p_loc->i;
    nzi = ai[i+1] - ai[i];
    aj  = ad->j + ai[i];
    for (j=0; j<nzi; j++) {
      row  = aj[j];
      pnz  = pi[row+1] - pi[row];
      Jptr = p_loc->j + pi[row];
      /* add non-zero cols of P into the sorted linked list lnk */
      ierr = PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);CHKERRQ(ierr);
    }
    /* off-diagonal portion: Ao[i,:]*P */
    if (ao) {
      ai  = ao->i; pi = p_oth->i;
      nzi = ai[i+1] - ai[i];
      aj  = ao->j + ai[i];
      for (j=0; j<nzi; j++) {
        row  = aj[j];
        pnz  = pi[row+1] - pi[row];
        Jptr = p_oth->j + pi[row];
        ierr = PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);CHKERRQ(ierr);
      }
    }
    apnz     = lnk[0];
    api[i+1] = api[i] + apnz;

    /* if free space is not available, double the total space in the list */
    if (current_space->local_remaining<apnz) {
      ierr = PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);CHKERRQ(ierr);
      nspacedouble++;
    }

    /* copy data into free space, then initialize lnk */
    ierr = PetscLLCondensedClean_Scalable(apnz,current_space->array,lnk);CHKERRQ(ierr);

    current_space->array           += apnz;
    current_space->local_used      += apnz;
    current_space->local_remaining -= apnz;
  }
  /* allocate space for apj and apv, initialize apj, and
     destroy the list of free space and other temporary array(s) */
  ierr = PetscMalloc2(api[am],&apj,api[am],&apv);CHKERRQ(ierr);
  ierr = PetscFreeSpaceContiguous(&free_space,apj);CHKERRQ(ierr);
  ierr = PetscLLCondensedDestroy_Scalable(lnk);CHKERRQ(ierr);
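  /*
     At this point api/apj form the CSR pattern of AP_loc.  Illustrative
     example: if the three local rows of AP have 3, 0 and 2 nonzeros, then

       api = {0, 3, 3, 5}

     and apj stores the 5 column indices row by row, each row sorted ascending
     by the linked-list merge above.
  */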
  /* create AP_loc for reuse */
  ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,pN,api,apj,apv,&ptap->AP_loc);CHKERRQ(ierr);

#if defined(PETSC_USE_INFO)
  if (ao) {
    apfill = (PetscReal)api[am]/(ad->i[am]+ao->i[am]+p_loc->i[pm]+1);
  } else {
    apfill = (PetscReal)api[am]/(ad->i[am]+p_loc->i[pm]+1);
  }
  ptap->AP_loc->info.mallocs           = nspacedouble;
  ptap->AP_loc->info.fill_ratio_given  = fill;
  ptap->AP_loc->info.fill_ratio_needed = apfill;

  if (api[am]) {
    ierr = PetscInfo3(ptap->AP_loc,"Scalable algorithm, AP_loc reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)apfill);CHKERRQ(ierr);
    ierr = PetscInfo1(ptap->AP_loc,"Use MatPtAP(A,B,MatReuse,%g,&C) for best AP_loc performance.\n",(double)apfill);CHKERRQ(ierr);
  } else {
    ierr = PetscInfo(ptap->AP_loc,"Scalable algorithm, AP_loc is empty.\n");CHKERRQ(ierr);
  }
#endif

  /* (2-1) compute symbolic Co = Ro*AP_loc */
  /* ------------------------------------- */
  ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Ro,ptap->AP_loc,fill,&ptap->C_oth);CHKERRQ(ierr);

  /* (3) send coj of C_oth to other processors */
  /* ------------------------------------------ */
  /* determine row ownership */
  ierr = PetscLayoutCreate(comm,&rowmap);CHKERRQ(ierr);
  rowmap->n  = pn;
  rowmap->bs = 1;
  ierr   = PetscLayoutSetUp(rowmap);CHKERRQ(ierr);
  owners = rowmap->range;

  /* determine the number of messages to send and their lengths */
  ierr = PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);CHKERRQ(ierr);
  ierr = PetscMemzero(len_s,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
  ierr = PetscMemzero(len_si,size*sizeof(PetscMPIInt));CHKERRQ(ierr);

  c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
  coi   = c_oth->i; coj = c_oth->j;
  con   = ptap->C_oth->rmap->n;
  proc  = 0;
  for (i=0; i<con; i++) {
    while (prmap[i] >= owners[proc+1]) proc++;
    len_si[proc]++;                   /* num of rows in Co(=Pt*AP) to be sent to [proc] */
    len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
  }

  len          = 0; /* max length of buf_si[], see (4) */
  owners_co[0] = 0;
  nsend        = 0;
  for (proc=0; proc<size; proc++) {
    owners_co[proc+1] = owners_co[proc] + len_si[proc];
    if (len_s[proc]) {
      nsend++;
      len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
      len         += len_si[proc];
    }
  }

  /* determine the number and length of messages to receive for coi and coj */
  ierr = PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);CHKERRQ(ierr);
  ierr = PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);CHKERRQ(ierr);

  /* post the Irecv and Isend of coj */
  ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
  ierr = PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsend+1,&swaits);CHKERRQ(ierr);
  for (proc=0, k=0; proc<size; proc++) {
    if (!len_s[proc]) continue;
    i    = owners_co[proc];
    ierr = MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);CHKERRQ(ierr);
    k++;
  }

  /* (2-2) compute symbolic C_loc = Rd*AP_loc */
  /* ---------------------------------------- */
  ierr  = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Rd,ptap->AP_loc,fill,&ptap->C_loc);CHKERRQ(ierr);
  c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;

  /* receives of coj are complete */
  for (i=0; i<nrecv; i++) {
    ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
  }
  ierr = PetscFree(rwaits);CHKERRQ(ierr);
  if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}

  /* add received column indices into ta to update Crmax */
  for (k=0; k<nrecv; k++) { /* k-th received message */
    Jptr = buf_rj[k];
    for (j=0; j<len_r[k]; j++) {
      ierr = PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);CHKERRQ(ierr);
    }
  }
  ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr);
  ierr = PetscTableDestroy(&ta);CHKERRQ(ierr);
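  /*
     Illustrative example of the i-structure message built in (4) below:
     sending two rows of Co that are local rows 5 and 9 on the receiver, with
     4 and 2 nonzeros respectively, gives len_si = 2*(2+1) = 6 and

       buf_si = { 2,   5, 9,   0, 4, 6 }
                nrows  rows   running sums of nonzeros (i-structure)
  */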
  /* (4) send and recv coi */
  /*-----------------------*/
  ierr   = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
  ierr   = PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);CHKERRQ(ierr);
  ierr   = PetscMalloc1(len+1,&buf_s);CHKERRQ(ierr);
  buf_si = buf_s; /* points to the beginning of the k-th msg to be sent */
  for (proc=0,k=0; proc<size; proc++) {
    if (!len_s[proc]) continue;
    /* form the outgoing message for the i-structure:
         buf_si[0]:           nrows to be sent
         [1:nrows]:           row index (global)
         [nrows+1:2*nrows+1]: i-structure index
    */
    /*-------------------------------------------*/
    nrows       = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
    buf_si_i    = buf_si + nrows+1;
    buf_si[0]   = nrows;
    buf_si_i[0] = 0;
    nrows       = 0;
    for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
      nzi = coi[i+1] - coi[i];
      buf_si_i[nrows+1] = buf_si_i[nrows] + nzi;   /* i-structure */
      buf_si[nrows+1]   = prmap[i] - owners[proc]; /* local row index */
      nrows++;
    }
    ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);CHKERRQ(ierr);
    k++;
    buf_si += len_si[proc];
  }
  for (i=0; i<nrecv; i++) {
    ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
  }
  ierr = PetscFree(rwaits);CHKERRQ(ierr);
  if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}

  ierr = PetscFree4(len_s,len_si,sstatus,owners_co);CHKERRQ(ierr);
  ierr = PetscFree(len_ri);CHKERRQ(ierr);
  ierr = PetscFree(swaits);CHKERRQ(ierr);
  ierr = PetscFree(buf_s);CHKERRQ(ierr);

  /* (5) compute the local portion of Cmpi */
  /* ------------------------------------- */
  /* set initial free space to Crmax, sufficient to hold the nonzeros in each row of Cmpi */
  ierr          = PetscFreeSpaceGet(Crmax,&free_space);CHKERRQ(ierr);
  current_space = free_space;

  ierr = PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);CHKERRQ(ierr);
  for (k=0; k<nrecv; k++) {
    buf_ri_k[k] = buf_ri[k];                 /* beginning of the k-th received i-structure */
    nrows       = *buf_ri_k[k];
    nextrow[k]  = buf_ri_k[k] + 1;           /* next row number of the k-th received i-structure */
    nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of the k-th received i-structure */
  }

  ierr = MatPreallocateInitialize(comm,pn,pn,dnz,onz);CHKERRQ(ierr);
  ierr = PetscLLCondensedCreate_Scalable(Crmax,&lnk);CHKERRQ(ierr);
  for (i=0; i<pn; i++) {
    /* add C_loc into Cmpi */
    nzi  = c_loc->i[i+1] - c_loc->i[i];
    Jptr = c_loc->j + c_loc->i[i];
    ierr = PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);CHKERRQ(ierr);

    /* add received col data into lnk */
    for (k=0; k<nrecv; k++) { /* k-th received message */
      if (i == *nextrow[k]) { /* i-th row */
        nzi  = *(nextci[k]+1) - *nextci[k];
        Jptr = buf_rj[k] + *nextci[k];
        ierr = PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);CHKERRQ(ierr);
        nextrow[k]++; nextci[k]++;
      }
    }
    nzi = lnk[0];

    /* copy data into free space, then initialize lnk */
    ierr = PetscLLCondensedClean_Scalable(nzi,current_space->array,lnk);CHKERRQ(ierr);
    ierr = MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);CHKERRQ(ierr);
  }
  ierr = PetscFree3(buf_ri_k,nextrow,nextci);CHKERRQ(ierr);
  ierr = PetscLLCondensedDestroy_Scalable(lnk);CHKERRQ(ierr);
  ierr = PetscFreeSpaceDestroy(free_space);CHKERRQ(ierr);

  /* local sizes and preallocation */
  ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(Cmpi,PetscAbs(P->cmap->bs),PetscAbs(P->cmap->bs));CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);

  /* members in merge */
  ierr = PetscFree(id_r);CHKERRQ(ierr);
  ierr = PetscFree(len_r);CHKERRQ(ierr);
  ierr = PetscFree(buf_ri[0]);CHKERRQ(ierr);
  ierr = PetscFree(buf_ri);CHKERRQ(ierr);
  ierr = PetscFree(buf_rj[0]);CHKERRQ(ierr);
  ierr = PetscFree(buf_rj);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&rowmap);CHKERRQ(ierr);

  /* attach the supporting struct to Cmpi for reuse */
  c     = (Mat_MPIAIJ*)Cmpi->data;
  c->ap = ptap;
  ptap->duplicate = Cmpi->ops->duplicate;
  ptap->destroy   = Cmpi->ops->destroy;
  ptap->view      = Cmpi->ops->view;

  /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
  Cmpi->assembled        = PETSC_FALSE;
  Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable;
  Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
  Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
  Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
  *C = Cmpi;
  PetscFunctionReturn(0);
}
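/*
   The routines below implement the nonscalable variant (algType = 1): the
   symbolic phase keeps a PetscBT bit mask of length pN (the global number of
   columns of P) and the numeric phase a dense accumulator apa of the same
   length, so per-process workspace grows with the global problem size.  The
   scalable variant above instead bounds its workspace by Crmax, the maximum
   merged row length, independently of pN.
*/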
PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ(Mat A,Mat P,PetscReal fill,Mat *C)
{
  PetscErrorCode     ierr;
  Mat_APMPI          *ptap;
  Mat_MPIAIJ         *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c;
  MPI_Comm           comm;
  PetscMPIInt        size,rank;
  Mat                Cmpi;
  PetscFreeSpaceList free_space=NULL,current_space=NULL;
  PetscInt           am=A->rmap->n,pm=P->rmap->n,pN=P->cmap->N,pn=P->cmap->n;
  PetscInt           *lnk,i,k,pnz,row,nsend;
  PetscBT            lnkbt;
  PetscMPIInt        tagi,tagj,*len_si,*len_s,*len_ri,icompleted=0,nrecv;
  PetscInt           **buf_rj,**buf_ri,**buf_ri_k;
  PetscInt           len,proc,*dnz,*onz,*owners,nzi,nspacedouble;
  PetscInt           nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
  MPI_Request        *swaits,*rwaits;
  MPI_Status         *sstatus,rstatus;
  PetscLayout        rowmap;
  PetscInt           *owners_co,*coi,*coj; /* i and j arrays of (p->B)^T*A*P - used in the communication */
  PetscMPIInt        *len_r,*id_r;         /* arrays of length comm->size, store lengths/ids of send/recv messages */
  PetscInt           *api,*apj,*Jptr,apnz,*prmap=p->garray,con,j,ap_rmax=0,Crmax,*aj,*ai,*pi;
  Mat_SeqAIJ         *p_loc,*p_oth=NULL,*ad=(Mat_SeqAIJ*)(a->A)->data,*ao=NULL,*c_loc,*c_oth;
  PetscScalar        *apv;
  PetscTable         ta;
  MatType            mtype;
#if defined(PETSC_USE_INFO)
  PetscReal          apfill;
#endif

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  if (size > 1) ao = (Mat_SeqAIJ*)(a->B)->data;

  /* create the symbolic parallel matrix Cmpi */
  ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
  ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
  ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);

  /* do dense axpy in MatPtAPNumeric_MPIAIJ_MPIAIJ() */
  Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ;

  /* create the struct Mat_APMPI and attach it to C later */
  ierr = PetscNew(&ptap);CHKERRQ(ierr);
  ptap->reuse   = MAT_INITIAL_MATRIX;
  ptap->algType = 1;

  /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
  ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
  /* get P_loc by taking all local rows of P */
  ierr = MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&ptap->P_loc);CHKERRQ(ierr);

  /* (0) compute Rd = Pd^T, Ro = Po^T */
  /* --------------------------------- */
  ierr = MatTranspose(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);CHKERRQ(ierr);
  ierr = MatTranspose(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);CHKERRQ(ierr);

  /* (1) compute symbolic AP = A_loc*P = Ad*P_loc + Ao*P_oth (api,apj) */
  /* ----------------------------------------------------------------- */
  p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
  if (ptap->P_oth) p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;

  /* create and initialize a linked list */
  ierr = PetscTableCreate(pn,pN,&ta);CHKERRQ(ierr); /* for computing AP_loc and Cmpi */
  MatRowMergeMax_SeqAIJ(p_loc,ptap->P_loc->rmap->N,ta);
  MatRowMergeMax_SeqAIJ(p_oth,ptap->P_oth->rmap->N,ta);
  ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr); /* Crmax = nnz(sum of Prows) */

  ierr = PetscLLCondensedCreate(Crmax,pN,&lnk,&lnkbt);CHKERRQ(ierr);

  /* initial FreeSpace size is fill*(nnz(A) + nnz(P)) */
  if (ao) {
    ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],PetscIntSumTruncate(ao->i[am],p_loc->i[pm]))),&free_space);CHKERRQ(ierr);
  } else {
    ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],p_loc->i[pm])),&free_space);CHKERRQ(ierr);
  }
  current_space = free_space;
  nspacedouble  = 0;

  ierr = PetscMalloc1(am+1,&api);CHKERRQ(ierr);
  api[0] = 0;
  for (i=0; i<am; i++) {
    /* diagonal portion: Ad[i,:]*P */
    ai  = ad->i; pi = p_loc->i;
    nzi = ai[i+1] - ai[i];
    aj  = ad->j + ai[i];
    for (j=0; j<nzi; j++) {
      row  = aj[j];
      pnz  = pi[row+1] - pi[row];
      Jptr = p_loc->j + pi[row];
      /* add non-zero cols of P into the sorted linked list lnk */
      ierr = PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);CHKERRQ(ierr);
    }
    /* off-diagonal portion: Ao[i,:]*P */
    if (ao) {
      ai  = ao->i; pi = p_oth->i;
      nzi = ai[i+1] - ai[i];
      aj  = ao->j + ai[i];
      for (j=0; j<nzi; j++) {
        row  = aj[j];
        pnz  = pi[row+1] - pi[row];
        Jptr = p_oth->j + pi[row];
        ierr = PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);CHKERRQ(ierr);
      }
    }
    apnz     = lnk[0];
    api[i+1] = api[i] + apnz;
    if (ap_rmax < apnz) ap_rmax = apnz;

    /* if free space is not available, double the total space in the list */
    if (current_space->local_remaining<apnz) {
      ierr = PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);CHKERRQ(ierr);
      nspacedouble++;
    }

    /* copy data into free space, then initialize lnk */
    ierr = PetscLLCondensedClean(pN,apnz,current_space->array,lnk,lnkbt);CHKERRQ(ierr);

    current_space->array           += apnz;
    current_space->local_used      += apnz;
    current_space->local_remaining -= apnz;
  }
  /* allocate space for apj and apv, initialize apj, and
     destroy the list of free space and other temporary array(s) */
  ierr = PetscMalloc2(api[am],&apj,api[am],&apv);CHKERRQ(ierr);
  ierr = PetscFreeSpaceContiguous(&free_space,apj);CHKERRQ(ierr);
  ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);

  /* create AP_loc for reuse */
  ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,pN,api,apj,apv,&ptap->AP_loc);CHKERRQ(ierr);

#if defined(PETSC_USE_INFO)
  if (ao) {
    apfill = (PetscReal)api[am]/(ad->i[am]+ao->i[am]+p_loc->i[pm]+1);
  } else {
    apfill = (PetscReal)api[am]/(ad->i[am]+p_loc->i[pm]+1);
  }
  ptap->AP_loc->info.mallocs           = nspacedouble;
  ptap->AP_loc->info.fill_ratio_given  = fill;
  ptap->AP_loc->info.fill_ratio_needed = apfill;

  if (api[am]) {
    ierr = PetscInfo3(ptap->AP_loc,"Nonscalable algorithm, AP_loc reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)apfill);CHKERRQ(ierr);
    ierr = PetscInfo1(ptap->AP_loc,"Use MatPtAP(A,B,MatReuse,%g,&C) for best AP_loc performance.\n",(double)apfill);CHKERRQ(ierr);
  } else {
    ierr = PetscInfo(ptap->AP_loc,"Nonscalable algorithm, AP_loc is empty.\n");CHKERRQ(ierr);
  }
#endif

  /* (2-1) compute symbolic Co = Ro*AP_loc */
  /* ------------------------------------- */
  ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Ro,ptap->AP_loc,fill,&ptap->C_oth);CHKERRQ(ierr);

  /* (3) send coj of C_oth to other processors */
  /* ------------------------------------------ */
  /* determine row ownership */
  ierr = PetscLayoutCreate(comm,&rowmap);CHKERRQ(ierr);
  rowmap->n  = pn;
  rowmap->bs = 1;
  ierr   = PetscLayoutSetUp(rowmap);CHKERRQ(ierr);
  owners = rowmap->range;

  /* determine the number of messages to send and their lengths */
  ierr = PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);CHKERRQ(ierr);
  ierr = PetscMemzero(len_s,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
  ierr = PetscMemzero(len_si,size*sizeof(PetscMPIInt));CHKERRQ(ierr);

  c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
  coi   = c_oth->i; coj = c_oth->j;
  con   = ptap->C_oth->rmap->n;
  proc  = 0;
  for (i=0; i<con; i++) {
    while (prmap[i] >= owners[proc+1]) proc++;
    len_si[proc]++;                   /* num of rows in Co(=Pt*AP) to be sent to [proc] */
    len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
  }

  len          = 0; /* max length of buf_si[], see (4) */
  owners_co[0] = 0;
  nsend        = 0;
  for (proc=0; proc<size; proc++) {
    owners_co[proc+1] = owners_co[proc] + len_si[proc];
    if (len_s[proc]) {
      nsend++;
      len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
      len         += len_si[proc];
    }
  }

  /* determine the number and length of messages to receive for coi and coj */
  ierr = PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);CHKERRQ(ierr);
  ierr = PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);CHKERRQ(ierr);

  /* post the Irecv and Isend of coj */
  ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
  ierr = PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsend+1,&swaits);CHKERRQ(ierr);
  for (proc=0, k=0; proc<size; proc++) {
    if (!len_s[proc]) continue;
    i    = owners_co[proc];
    ierr = MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);CHKERRQ(ierr);
    k++;
  }

  /* (2-2) compute symbolic C_loc = Rd*AP_loc */
  /* ---------------------------------------- */
  ierr  = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Rd,ptap->AP_loc,fill,&ptap->C_loc);CHKERRQ(ierr);
  c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;

  /* receives of coj are complete */
  for (i=0; i<nrecv; i++) {
    ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
  }
  ierr = PetscFree(rwaits);CHKERRQ(ierr);
  if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}

  /* add received column indices into ta to update Crmax */
  for (k=0; k<nrecv; k++) { /* k-th received message */
    Jptr = buf_rj[k];
    for (j=0; j<len_r[k]; j++) {
      ierr = PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);CHKERRQ(ierr);
    }
  }
  ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr);
  ierr = PetscTableDestroy(&ta);CHKERRQ(ierr);

  /* (4) send and recv coi */
  /*-----------------------*/
  ierr   = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
  ierr   = PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);CHKERRQ(ierr);
  ierr   = PetscMalloc1(len+1,&buf_s);CHKERRQ(ierr);
  buf_si = buf_s; /* points to the beginning of the k-th msg to be sent */
  for (proc=0,k=0; proc<size; proc++) {
    if (!len_s[proc]) continue;
    /* form the outgoing message for the i-structure:
         buf_si[0]:           nrows to be sent
         [1:nrows]:           row index (global)
         [nrows+1:2*nrows+1]: i-structure index
    */
    /*-------------------------------------------*/
    nrows       = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
    buf_si_i    = buf_si + nrows+1;
    buf_si[0]   = nrows;
    buf_si_i[0] = 0;
    nrows       = 0;
    for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
      nzi = coi[i+1] - coi[i];
      buf_si_i[nrows+1] = buf_si_i[nrows] + nzi;   /* i-structure */
      buf_si[nrows+1]   = prmap[i] - owners[proc]; /* local row index */
      nrows++;
    }
    ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);CHKERRQ(ierr);
    k++;
    buf_si += len_si[proc];
  }
  for (i=0; i<nrecv; i++) {
    ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
  }
  ierr = PetscFree(rwaits);CHKERRQ(ierr);
  if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}

  ierr = PetscFree4(len_s,len_si,sstatus,owners_co);CHKERRQ(ierr);
  ierr = PetscFree(len_ri);CHKERRQ(ierr);
  ierr = PetscFree(swaits);CHKERRQ(ierr);
  ierr = PetscFree(buf_s);CHKERRQ(ierr);

  /* (5) compute the local portion of Cmpi */
  /* ------------------------------------- */
  /* set initial free space to Crmax, sufficient to hold the nonzeros in each row of Cmpi */
  ierr          = PetscFreeSpaceGet(Crmax,&free_space);CHKERRQ(ierr);
  current_space = free_space;

  ierr = PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);CHKERRQ(ierr);
  for (k=0; k<nrecv; k++) {
    buf_ri_k[k] = buf_ri[k];                 /* beginning of the k-th received i-structure */
    nrows       = *buf_ri_k[k];
    nextrow[k]  = buf_ri_k[k] + 1;           /* next row number of the k-th received i-structure */
    nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of the k-th received i-structure */
  }

  ierr = MatPreallocateInitialize(comm,pn,pn,dnz,onz);CHKERRQ(ierr);
  ierr = PetscLLCondensedCreate(Crmax,pN,&lnk,&lnkbt);CHKERRQ(ierr);
  for (i=0; i<pn; i++) {
    /* add C_loc into Cmpi */
    nzi  = c_loc->i[i+1] - c_loc->i[i];
    Jptr = c_loc->j + c_loc->i[i];
    ierr = PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);CHKERRQ(ierr);

    /* add received col data into lnk */
    for (k=0; k<nrecv; k++) { /* k-th received message */
      if (i == *nextrow[k]) { /* i-th row */
        nzi  = *(nextci[k]+1) - *nextci[k];
        Jptr = buf_rj[k] + *nextci[k];
        ierr = PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);CHKERRQ(ierr);
        nextrow[k]++; nextci[k]++;
      }
    }
    nzi = lnk[0];

    /* copy data into free space, then initialize lnk */
    ierr = PetscLLCondensedClean(pN,nzi,current_space->array,lnk,lnkbt);CHKERRQ(ierr);
    ierr = MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);CHKERRQ(ierr);
  }
  ierr = PetscFree3(buf_ri_k,nextrow,nextci);CHKERRQ(ierr);
  ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
  ierr = PetscFreeSpaceDestroy(free_space);CHKERRQ(ierr);

  /* local sizes and preallocation */
  ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(Cmpi,PetscAbs(P->cmap->bs),PetscAbs(P->cmap->bs));CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);

  /* members in merge */
  ierr = PetscFree(id_r);CHKERRQ(ierr);
  ierr = PetscFree(len_r);CHKERRQ(ierr);
  ierr = PetscFree(buf_ri[0]);CHKERRQ(ierr);
  ierr = PetscFree(buf_ri);CHKERRQ(ierr);
  ierr = PetscFree(buf_rj[0]);CHKERRQ(ierr);
  ierr = PetscFree(buf_rj);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&rowmap);CHKERRQ(ierr);

  /* attach the supporting struct to Cmpi for reuse */
  c     = (Mat_MPIAIJ*)Cmpi->data;
  c->ap = ptap;
  ptap->duplicate = Cmpi->ops->duplicate;
  ptap->destroy   = Cmpi->ops->destroy;
  ptap->view      = Cmpi->ops->view;
  ierr = PetscCalloc1(pN,&ptap->apa);CHKERRQ(ierr);

  /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
  Cmpi->assembled    = PETSC_FALSE;
  Cmpi->ops->destroy = MatDestroy_MPIAIJ_PtAP;
  Cmpi->ops->view    = MatView_MPIAIJ_PtAP;
  Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
  *C = Cmpi;
  PetscFunctionReturn(0);
}
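/*
   The numeric routine below uses the dense-accumulator pattern: for each local
   row i of A, AProw_nonscalable() (a companion macro of AProw_scalable())
   accumulates A[i,:]*P into the dense work array apa of length pN; the loop
   that follows then gathers the apnz entries selected by the symbolic pattern
   apj back out of apa and resets them to zero for the next row:

     for (j=0; j<apnz; j++) { col = apj[j+api[i]]; ap->a[j+api[i]] = apa[col]; apa[col] = 0.0; }
*/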
PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ(Mat A,Mat P,Mat C)
{
  PetscErrorCode    ierr;
  Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
  Mat_SeqAIJ        *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
  Mat_SeqAIJ        *ap,*p_loc,*p_oth=NULL,*c_seq;
  Mat_APMPI         *ptap = c->ap;
  Mat               AP_loc,C_loc,C_oth;
  PetscInt          i,rstart,rend,cm,ncols,row;
  PetscInt          *api,*apj,am = A->rmap->n,j,col,apnz;
  PetscScalar       *apa;
  const PetscInt    *cols;
  const PetscScalar *vals;
  PetscBool         freestruct;

  PetscFunctionBegin;
  if (!ptap) {
    MPI_Comm comm;
    ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
    SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
  }

  ierr = MatZeroEntries(C);CHKERRQ(ierr);
  /* 1) get Rd = Pd^T, Ro = Po^T */
  if (ptap->reuse == MAT_REUSE_MATRIX) {
    ierr = MatTranspose(p->A,MAT_REUSE_MATRIX,&ptap->Rd);CHKERRQ(ierr);
    ierr = MatTranspose(p->B,MAT_REUSE_MATRIX,&ptap->Ro);CHKERRQ(ierr);
  }

  /* 2) get AP_loc */
  AP_loc = ptap->AP_loc;
  ap     = (Mat_SeqAIJ*)AP_loc->data;

  /* 2-1) get P_oth = ptap->P_oth and P_loc = ptap->P_loc */
  /*-----------------------------------------------------*/
  if (ptap->reuse == MAT_REUSE_MATRIX) {
    /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
    ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
    ierr = MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
  }

  /* 2-2) compute numeric A_loc*P - the dominating part */
  /* -------------------------------------------------- */
  /* get data from the symbolic products */
  p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
  if (ptap->P_oth) {
    p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;
  }
  apa = ptap->apa;
  api = ap->i;
  apj = ap->j;
  for (i=0; i<am; i++) {
    /* AP[i,:] = A[i,:]*P = Ad[i,:]*P_loc + Ao[i,:]*P_oth */
    AProw_nonscalable(i,ad,ao,p_loc,p_oth,apa);
    apnz = api[i+1] - api[i];
    for (j=0; j<apnz; j++) {
      col = apj[j+api[i]];
      ap->a[j+ap->i[i]] = apa[col];
      apa[col] = 0.0;
    }
    ierr = PetscLogFlops(2.0*apnz);CHKERRQ(ierr);
  }

  /* 3) C_loc = Rd*AP_loc, C_oth = Ro*AP_loc */
  ierr = ((ptap->C_loc)->ops->matmultnumeric)(ptap->Rd,AP_loc,ptap->C_loc);CHKERRQ(ierr);
  ierr = ((ptap->C_oth)->ops->matmultnumeric)(ptap->Ro,AP_loc,ptap->C_oth);CHKERRQ(ierr);
  C_loc = ptap->C_loc;
  C_oth = ptap->C_oth;

  /* add C_loc and C_oth to C */
  ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);

  /* C_loc -> C */
  cm    = C_loc->rmap->N;
  c_seq = (Mat_SeqAIJ*)C_loc->data;
  cols  = c_seq->j;
  vals  = c_seq->a;

  /* The (fast) MatSetValues_MPIAIJ_CopyFromCSRFormat function can only be used when C->was_assembled is PETSC_FALSE and
     there are no off-processor parts.  If was_assembled is true, the statement
     aj[rowstart_diag+dnz_row] = mat_j[col] - cstart; in MatSetValues_MPIAIJ_CopyFromCSRFormat no longer holds, and the
     more general (and slower) MatSetValues_MPIAIJ() must be used, which looks the column index up in a table. */
  if (C->assembled) {
    C->was_assembled = PETSC_TRUE;
    C->assembled     = PETSC_FALSE;
  }
  if (C->was_assembled) {
    for (i=0; i<cm; i++) {
      ncols = c_seq->i[i+1] - c_seq->i[i];
      row   = rstart + i;
      ierr  = MatSetValues_MPIAIJ(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
      cols += ncols; vals += ncols;
    }
  } else {
    ierr = MatSetValues_MPIAIJ_CopyFromCSRFormat(C,c_seq->j,c_seq->i,c_seq->a);CHKERRQ(ierr);
  }

  /* C_oth -> C, off-processor part */
  cm    = C_oth->rmap->N;
  c_seq = (Mat_SeqAIJ*)C_oth->data;
  cols  = c_seq->j;
  vals  = c_seq->a;
  for (i=0; i<cm; i++) {
    ncols = c_seq->i[i+1] - c_seq->i[i];
    row   = p->garray[i];
    ierr  = MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
    cols += ncols; vals += ncols;
  }

  ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  ptap->reuse = MAT_REUSE_MATRIX;

  ierr = PetscObjectOptionsBegin((PetscObject)C);CHKERRQ(ierr);
  PetscOptionsObject->alreadyprinted = PETSC_FALSE; /* a hack to ensure the option shows in '-help' */
  /* the supporting struct ptap consumes almost the same amount of memory as C=PtAP; release it if C will not be updated by A and P */
  freestruct = PETSC_FALSE;
  ierr = PetscOptionsBool("-mat_freeintermediatedatastructures","Free intermediate data structures","MatFreeIntermediateDataStructures",freestruct,&freestruct,NULL);CHKERRQ(ierr);
  if (freestruct) {
    ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}