xref: /petsc/src/mat/impls/aij/mpi/mpiptap.c (revision 7d0a89b7cc5caadc25a93aaf4fe8ef10539ecf55)
1 
2 /*
3   Defines projective product routines where A is an MPIAIJ matrix
4           C = P^T * A * P
5 */
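/*
  Usage sketch (illustrative only; the output name Acoarse and the fill estimate
  2.0 below are assumptions, not taken from this file):

    Mat Acoarse;
    ierr = MatPtAP(A,P,MAT_INITIAL_MATRIX,2.0,&Acoarse);CHKERRQ(ierr);   first call: symbolic + numeric
    ierr = MatPtAP(A,P,MAT_REUSE_MATRIX,2.0,&Acoarse);CHKERRQ(ierr);     later calls: reuse the symbolic data

  The parallel algorithm is chosen at runtime with
    -matptap_via <scalable|nonscalable|hypre>
  (hypre is available only when PETSc is configured with hypre); see MatPtAP_MPIAIJ_MPIAIJ() below.
*/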
6 
7 #include <../src/mat/impls/aij/seq/aij.h>   /*I "petscmat.h" I*/
8 #include <../src/mat/utils/freespace.h>
9 #include <../src/mat/impls/aij/mpi/mpiaij.h>
10 #include <petscbt.h>
11 #include <petsctime.h>
12 
13 /* #define PTAP_PROFILE */
14 
15 PetscErrorCode MatView_MPIAIJ_PtAP(Mat A,PetscViewer viewer)
16 {
17   PetscErrorCode    ierr;
18   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data;
19   Mat_APMPI         *ptap=a->ap;
20   PetscBool         iascii;
21   PetscViewerFormat format;
22 
23   PetscFunctionBegin;
24   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
25   if (iascii) {
26     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
27     if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
28       if (ptap->algType == 0) {
29         ierr = PetscViewerASCIIPrintf(viewer,"using scalable MatPtAP() implementation\n");CHKERRQ(ierr);
30       } else if (ptap->algType == 1) {
31         ierr = PetscViewerASCIIPrintf(viewer,"using nonscalable MatPtAP() implementation\n");CHKERRQ(ierr);
32       }
33     }
34   }
35   ierr = (ptap->view)(A,viewer);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
39 PetscErrorCode MatFreeIntermediateDataStructures_MPIAIJ_AP(Mat A)
40 {
41   PetscErrorCode      ierr;
42   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data;
43   Mat_APMPI           *ptap=a->ap;
44   Mat_Merge_SeqsToMPI *merge;
45 
46   PetscFunctionBegin;
47   if (!ptap) PetscFunctionReturn(0);
48 
49   ierr = PetscFree2(ptap->startsj_s,ptap->startsj_r);CHKERRQ(ierr);
50   ierr = PetscFree(ptap->bufa);CHKERRQ(ierr);
51   ierr = MatDestroy(&ptap->P_loc);CHKERRQ(ierr);
52   ierr = MatDestroy(&ptap->P_oth);CHKERRQ(ierr);
53   ierr = MatDestroy(&ptap->A_loc);CHKERRQ(ierr); /* used by MatTransposeMatMult() */
54   ierr = MatDestroy(&ptap->Rd);CHKERRQ(ierr);
55   ierr = MatDestroy(&ptap->Ro);CHKERRQ(ierr);
56   if (ptap->AP_loc) { /* used by alg_rap */
57     Mat_SeqAIJ *ap = (Mat_SeqAIJ*)(ptap->AP_loc)->data;
58     ierr = PetscFree(ap->i);CHKERRQ(ierr);
59     ierr = PetscFree2(ap->j,ap->a);CHKERRQ(ierr);
60     ierr = MatDestroy(&ptap->AP_loc);CHKERRQ(ierr);
61   } else { /* used by alg_ptap */
62     ierr = PetscFree(ptap->api);CHKERRQ(ierr);
63     ierr = PetscFree(ptap->apj);CHKERRQ(ierr);
64   }
65   ierr = MatDestroy(&ptap->C_loc);CHKERRQ(ierr);
66   ierr = MatDestroy(&ptap->C_oth);CHKERRQ(ierr);
67   if (ptap->apa) {ierr = PetscFree(ptap->apa);CHKERRQ(ierr);}
68 
69   ierr = MatDestroy(&ptap->Pt);CHKERRQ(ierr);
70 
71   merge=ptap->merge;
72   if (merge) { /* used by alg_ptap */
73     ierr = PetscFree(merge->id_r);CHKERRQ(ierr);
74     ierr = PetscFree(merge->len_s);CHKERRQ(ierr);
75     ierr = PetscFree(merge->len_r);CHKERRQ(ierr);
76     ierr = PetscFree(merge->bi);CHKERRQ(ierr);
77     ierr = PetscFree(merge->bj);CHKERRQ(ierr);
78     ierr = PetscFree(merge->buf_ri[0]);CHKERRQ(ierr);
79     ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr);
80     ierr = PetscFree(merge->buf_rj[0]);CHKERRQ(ierr);
81     ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr);
82     ierr = PetscFree(merge->coi);CHKERRQ(ierr);
83     ierr = PetscFree(merge->coj);CHKERRQ(ierr);
84     ierr = PetscFree(merge->owners_co);CHKERRQ(ierr);
85     ierr = PetscLayoutDestroy(&merge->rowmap);CHKERRQ(ierr);
86     ierr = PetscFree(ptap->merge);CHKERRQ(ierr);
87   }
88 
89   A->ops->destroy = ptap->destroy;
90   ierr = PetscFree(a->ap);CHKERRQ(ierr);
91   PetscFunctionReturn(0);
92 }
93 
94 PetscErrorCode MatDestroy_MPIAIJ_PtAP(Mat A)
95 {
96   PetscErrorCode ierr;
97 
98   PetscFunctionBegin;
99   ierr = (*A->ops->freeintermediatedatastructures)(A);CHKERRQ(ierr);
100   ierr = (*A->ops->destroy)(A);CHKERRQ(ierr);
101   PetscFunctionReturn(0);
102 }
103 
104 PETSC_INTERN PetscErrorCode MatPtAP_MPIAIJ_MPIAIJ(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
105 {
106   PetscErrorCode ierr;
107   PetscBool      flg;
108   MPI_Comm       comm;
109 #if !defined(PETSC_HAVE_HYPRE)
110   const char          *algTypes[2] = {"scalable","nonscalable"};
111   PetscInt            nalg=2;
112 #else
113   const char          *algTypes[3] = {"scalable","nonscalable","hypre"};
114   PetscInt            nalg=3;
115 #endif
116   PetscInt            pN=P->cmap->N,alg=1; /* set default algorithm */
117 
118   PetscFunctionBegin;
119   /* check if matrix local sizes are compatible */
120   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
121   if (A->rmap->rstart != P->rmap->rstart || A->rmap->rend != P->rmap->rend) SETERRQ4(comm,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, Arow (%D, %D) != Prow (%D,%D)",A->rmap->rstart,A->rmap->rend,P->rmap->rstart,P->rmap->rend);
122   if (A->cmap->rstart != P->rmap->rstart || A->cmap->rend != P->rmap->rend) SETERRQ4(comm,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, Acol (%D, %D) != Prow (%D,%D)",A->cmap->rstart,A->cmap->rend,P->rmap->rstart,P->rmap->rend);
123 
124   if (scall == MAT_INITIAL_MATRIX) {
125     /* pick an algorithm */
126     ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");CHKERRQ(ierr);
127     ierr = PetscOptionsEList("-matptap_via","Algorithmic approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);CHKERRQ(ierr);
128     ierr = PetscOptionsEnd();CHKERRQ(ierr);
129 
130     if (!flg && pN > 100000) { /* may switch to scalable algorithm as default */
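      /* Heuristic (a rough guide): the nonscalable algorithm uses a dense accumulator of
         length pN = global number of columns of P, so if pN exceeds fill*(local nnz of A
         and P) on any process that memory cost likely dominates and the scalable
         algorithm is used instead. */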
131       MatInfo     Ainfo,Pinfo;
132       PetscInt    nz_local;
133       PetscBool   alg_scalable_loc=PETSC_FALSE,alg_scalable;
134 
135       ierr = MatGetInfo(A,MAT_LOCAL,&Ainfo);CHKERRQ(ierr);
136       ierr = MatGetInfo(P,MAT_LOCAL,&Pinfo);CHKERRQ(ierr);
137       nz_local = (PetscInt)(Ainfo.nz_allocated + Pinfo.nz_allocated);
138 
139       if (pN > fill*nz_local) alg_scalable_loc = PETSC_TRUE;
140       ierr = MPIU_Allreduce(&alg_scalable_loc,&alg_scalable,1,MPIU_BOOL,MPI_LOR,comm);CHKERRQ(ierr);
141 
142       if (alg_scalable) {
143         alg = 0; /* the scalable algorithm can be about 50% slower than the nonscalable algorithm */
144       }
145     }
146 
147     switch (alg) {
148     case 1:
149       /* do R=P^T locally, then C=R*A*P -- nonscalable */
150       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
151       ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ(A,P,fill,C);CHKERRQ(ierr);
152       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
153       break;
154 #if defined(PETSC_HAVE_HYPRE)
155     case 2:
156       /* Use boomerAMGBuildCoarseOperator */
157       ierr = MatPtAPSymbolic_AIJ_AIJ_wHYPRE(A,P,fill,C);CHKERRQ(ierr);
158       PetscFunctionReturn(0);
159       break;
160 #endif
161     default:
162       /* do R=P^T locally, then C=R*A*P -- scalable */
163       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
164       ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(A,P,fill,C);CHKERRQ(ierr);
165       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
166       break;
167     }
168 
169     {
170       Mat_MPIAIJ *c  = (Mat_MPIAIJ*)(*C)->data;
171       Mat_APMPI  *ap = c->ap;
172       ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)(*C)),((PetscObject)(*C))->prefix,"MatFreeIntermediateDataStructures","Mat");CHKERRQ(ierr);
173       ap->freestruct = PETSC_FALSE;
174       ierr = PetscOptionsBool("-mat_freeintermediatedatastructures","Free intermediate data structures", "MatFreeIntermediateDataStructures",ap->freestruct,&ap->freestruct, NULL);CHKERRQ(ierr);
175       ierr = PetscOptionsEnd();CHKERRQ(ierr);
176     }
177   }
178 
179   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
180   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
181   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
182   PetscFunctionReturn(0);
183 }
184 
185 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable(Mat A,Mat P,Mat C)
186 {
187   PetscErrorCode    ierr;
188   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
189   Mat_SeqAIJ        *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
190   Mat_SeqAIJ        *ap,*p_loc,*p_oth,*c_seq;
191   Mat_APMPI         *ptap = c->ap;
192   Mat               AP_loc,C_loc,C_oth;
193   PetscInt          i,rstart,rend,cm,ncols,row,*api,*apj,am = A->rmap->n,apnz;
194   PetscScalar       *apa;
195   const PetscInt    *cols;
196   const PetscScalar *vals;
197 
198   PetscFunctionBegin;
199   if (!ptap) {
200     MPI_Comm comm;
201     ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
202     SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
203   }
204 
205   ierr = MatZeroEntries(C);CHKERRQ(ierr);
206 
207   /* 1) get R = Pd^T,Ro = Po^T */
208   if (ptap->reuse == MAT_REUSE_MATRIX) {
209     ierr = MatTranspose(p->A,MAT_REUSE_MATRIX,&ptap->Rd);CHKERRQ(ierr);
210     ierr = MatTranspose(p->B,MAT_REUSE_MATRIX,&ptap->Ro);CHKERRQ(ierr);
211   }
212 
213   /* 2) get AP_loc */
214   AP_loc = ptap->AP_loc;
215   ap = (Mat_SeqAIJ*)AP_loc->data;
216 
217   /* 2-1) get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
218   /*-----------------------------------------------------*/
219   if (ptap->reuse == MAT_REUSE_MATRIX) {
220     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
221     ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
222     ierr = MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
223   }
224 
225   /* 2-2) compute numeric A_loc*P - dominating part */
226   /* ---------------------------------------------- */
227   /* get data from symbolic products */
228   p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
229   if (ptap->P_oth) p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;
230   else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ptap->P_oth is NULL. Cannot proceed!");
231 
232   api   = ap->i;
233   apj   = ap->j;
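  /* For each local row i of A: zero the value slots of AP_loc for that row, then accumulate
     A[i,:]*P into them following the sparsity pattern (api,apj) fixed in the symbolic phase;
     AProw_scalable() is a helper defined in the AIJ headers. */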
234   for (i=0; i<am; i++) {
235     /* AP[i,:] = A[i,:]*P = Ad*P_loc + Ao*P_oth */
236     apnz = api[i+1] - api[i];
237     apa = ap->a + api[i];
238     ierr = PetscMemzero(apa,sizeof(PetscScalar)*apnz);CHKERRQ(ierr);
239     AProw_scalable(i,ad,ao,p_loc,p_oth,api,apj,apa);
240     ierr = PetscLogFlops(2.0*apnz);CHKERRQ(ierr);
241   }
242 
243   /* 3) C_loc = Rd*AP_loc, C_oth = Ro*AP_loc */
244   ierr = MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(ptap->Rd,AP_loc,ptap->C_loc);CHKERRQ(ierr);
245   ierr = MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(ptap->Ro,AP_loc,ptap->C_oth);CHKERRQ(ierr);
246 
247   C_loc = ptap->C_loc;
248   C_oth = ptap->C_oth;
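  /* C_loc = Rd*AP_loc carries contributions to the locally owned rows of C, while
     C_oth = Ro*AP_loc carries contributions to rows owned by other processes, whose
     global row indices are given by p->garray */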
249 
250   /* add C_loc and C_oth into C */
251   ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);
252 
253   /* C_loc -> C */
254   cm    = C_loc->rmap->N;
255   c_seq = (Mat_SeqAIJ*)C_loc->data;
256   cols = c_seq->j;
257   vals = c_seq->a;
258 
259   /* The (fast) MatSetValues_MPIAIJ_CopyFromCSRFormat function can only be used when C->was_assembled is PETSC_FALSE and */
260   /* when there are no off-processor parts.  */
261   /* If was_assembled is true, then the statement aj[rowstart_diag+dnz_row] = mat_j[col] - cstart; in MatSetValues_MPIAIJ_CopyFromCSRFormat */
262   /* is no longer valid. In that case the more general MatSetValues_MPIAIJ() has to be used, where the column index is looked up in */
263   /* a table and additional bookkeeping is performed. */
264   if (C->assembled) {
265     C->was_assembled = PETSC_TRUE;
266     C->assembled     = PETSC_FALSE;
267   }
268   if (C->was_assembled) {
269     for (i=0; i<cm; i++) {
270       ncols = c_seq->i[i+1] - c_seq->i[i];
271       row = rstart + i;
272       ierr = MatSetValues_MPIAIJ(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
273       cols += ncols; vals += ncols;
274     }
275   } else {
276     ierr = MatSetValues_MPIAIJ_CopyFromCSRFormat(C,c_seq->j,c_seq->i,c_seq->a);CHKERRQ(ierr);
277   }
278 
279   /* Co -> C, off-processor part */
280   cm = C_oth->rmap->N;
281   c_seq = (Mat_SeqAIJ*)C_oth->data;
282   cols = c_seq->j;
283   vals = c_seq->a;
284   for (i=0; i<cm; i++) {
285     ncols = c_seq->i[i+1] - c_seq->i[i];
286     row = p->garray[i];
287     ierr = MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
288     cols += ncols; vals += ncols;
289   }
290   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
291   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
292 
293   ptap->reuse = MAT_REUSE_MATRIX;
294 
295   /* the supporting struct ptap consumes almost the same amount of memory as C=PtAP; release it if C will not be updated with new A and P */
296   if (ptap->freestruct) {
297     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
298   }
299   PetscFunctionReturn(0);
300 }
301 
302 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(Mat A,Mat P,PetscReal fill,Mat *C)
303 {
304   PetscErrorCode      ierr;
305   Mat_APMPI           *ptap;
306   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c;
307   MPI_Comm            comm;
308   PetscMPIInt         size,rank;
309   Mat                 Cmpi,P_loc,P_oth;
310   PetscFreeSpaceList  free_space=NULL,current_space=NULL;
311   PetscInt            am=A->rmap->n,pm=P->rmap->n,pN=P->cmap->N,pn=P->cmap->n;
312   PetscInt            *lnk,i,k,pnz,row,nsend;
313   PetscMPIInt         tagi,tagj,*len_si,*len_s,*len_ri,icompleted=0,nrecv;
314   PetscInt            **buf_rj,**buf_ri,**buf_ri_k;
315   PetscInt            len,proc,*dnz,*onz,*owners,nzi,nspacedouble;
316   PetscInt            nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
317   MPI_Request         *swaits,*rwaits;
318   MPI_Status          *sstatus,rstatus;
319   PetscLayout         rowmap;
320   PetscInt            *owners_co,*coi,*coj;    /* i and j arrays of (p->B)^T*A*P - used in the communication */
321   PetscMPIInt         *len_r,*id_r;    /* lengths and source ranks of the messages to be received */
322   PetscInt            *api,*apj,*Jptr,apnz,*prmap=p->garray,con,j,Crmax,*aj,*ai,*pi;
323   Mat_SeqAIJ          *p_loc,*p_oth=NULL,*ad=(Mat_SeqAIJ*)(a->A)->data,*ao=NULL,*c_loc,*c_oth;
324   PetscScalar         *apv;
325   PetscTable          ta;
326   MatType             mtype;
327 #if defined(PETSC_USE_INFO)
328   PetscReal           apfill;
329 #endif
330 
331   PetscFunctionBegin;
332   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
333   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
334   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
335 
336   if (size > 1) ao = (Mat_SeqAIJ*)(a->B)->data;
337 
338   /* create symbolic parallel matrix Cmpi */
339   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
340   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
341   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
342 
343   /* create struct Mat_APMPI; it will be attached to C later */
344   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
345   ptap->reuse = MAT_INITIAL_MATRIX;
346   ptap->algType = 0;
347 
348   /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
349   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&P_oth);CHKERRQ(ierr);
350   /* get P_loc by taking all local rows of P */
351   ierr = MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&P_loc);CHKERRQ(ierr);
352 
353   ptap->P_loc = P_loc;
354   ptap->P_oth = P_oth;
355 
356   /* (0) compute Rd = Pd^T, Ro = Po^T  */
357   /* --------------------------------- */
358   ierr = MatTranspose(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);CHKERRQ(ierr);
359   ierr = MatTranspose(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);CHKERRQ(ierr);
360 
361   /* (1) compute symbolic AP = A_loc*P = Ad*P_loc + Ao*P_oth (api,apj) */
362   /* ----------------------------------------------------------------- */
363   p_loc  = (Mat_SeqAIJ*)P_loc->data;
364   if (P_oth) p_oth = (Mat_SeqAIJ*)P_oth->data;
365 
366   /* create and initialize a linked list */
367   ierr = PetscTableCreate(pn,pN,&ta);CHKERRQ(ierr); /* for computing AP_loc and Cmpi */
368   MatRowMergeMax_SeqAIJ(p_loc,P_loc->rmap->N,ta);
369   MatRowMergeMax_SeqAIJ(p_oth,P_oth->rmap->N,ta);
370   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr); /* Crmax = nnz(sum of Prows) */
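  /* Crmax is an upper bound on the number of distinct column indices in any row of AP_loc
     (and, once the received indices are added below, of Cmpi); it sizes the condensed linked list */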
371 
372   ierr = PetscLLCondensedCreate_Scalable(Crmax,&lnk);CHKERRQ(ierr);
373 
374   /* Initial FreeSpace size is fill*(nnz(A) + nnz(P)) */
375   if (ao) {
376     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],PetscIntSumTruncate(ao->i[am],p_loc->i[pm]))),&free_space);CHKERRQ(ierr);
377   } else {
378     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],p_loc->i[pm])),&free_space);CHKERRQ(ierr);
379   }
380   current_space = free_space;
381   nspacedouble  = 0;
382 
383   ierr   = PetscMalloc1(am+1,&api);CHKERRQ(ierr);
384   api[0] = 0;
385   for (i=0; i<am; i++) {
386     /* diagonal portion: Ad[i,:]*P */
387     ai = ad->i; pi = p_loc->i;
388     nzi = ai[i+1] - ai[i];
389     aj  = ad->j + ai[i];
390     for (j=0; j<nzi; j++) {
391       row  = aj[j];
392       pnz  = pi[row+1] - pi[row];
393       Jptr = p_loc->j + pi[row];
394       /* add non-zero cols of P into the sorted linked list lnk */
395       ierr = PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);CHKERRQ(ierr);
396     }
397     /* off-diagonal portion: Ao[i,:]*P */
398     if (ao) {
399       ai = ao->i; pi = p_oth->i;
400       nzi = ai[i+1] - ai[i];
401       aj  = ao->j + ai[i];
402       for (j=0; j<nzi; j++) {
403         row  = aj[j];
404         pnz  = pi[row+1] - pi[row];
405         Jptr = p_oth->j + pi[row];
406         ierr = PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);CHKERRQ(ierr);
407       }
408     }
409     apnz     = lnk[0];
410     api[i+1] = api[i] + apnz;
411 
412     /* if free space is not available, double the total space in the list */
413     if (current_space->local_remaining<apnz) {
414       ierr = PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);CHKERRQ(ierr);
415       nspacedouble++;
416     }
417 
418     /* Copy data into free space, then initialize lnk */
419     ierr = PetscLLCondensedClean_Scalable(apnz,current_space->array,lnk);CHKERRQ(ierr);
420 
421     current_space->array           += apnz;
422     current_space->local_used      += apnz;
423     current_space->local_remaining -= apnz;
424   }
425   /* Allocate space for apj and apv, initialize apj, and */
426   /* destroy list of free space and other temporary array(s) */
427   ierr = PetscMalloc2(api[am],&apj,api[am],&apv);CHKERRQ(ierr);
428   ierr = PetscFreeSpaceContiguous(&free_space,apj);CHKERRQ(ierr);
429   ierr = PetscLLCondensedDestroy_Scalable(lnk);CHKERRQ(ierr);
430 
431   /* Create AP_loc for reuse */
432   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,pN,api,apj,apv,&ptap->AP_loc);CHKERRQ(ierr);
433 
434 #if defined(PETSC_USE_INFO)
435   if (ao) {
436     apfill = (PetscReal)api[am]/(ad->i[am]+ao->i[am]+p_loc->i[pm]+1);
437   } else {
438     apfill = (PetscReal)api[am]/(ad->i[am]+p_loc->i[pm]+1);
439   }
440   ptap->AP_loc->info.mallocs           = nspacedouble;
441   ptap->AP_loc->info.fill_ratio_given  = fill;
442   ptap->AP_loc->info.fill_ratio_needed = apfill;
443 
444   if (api[am]) {
445     ierr = PetscInfo3(ptap->AP_loc,"Scalable algorithm, AP_loc reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)apfill);CHKERRQ(ierr);
446     ierr = PetscInfo1(ptap->AP_loc,"Use MatPtAP(A,P,MatReuse,%g,&C) for best AP_loc performance.\n",(double)apfill);CHKERRQ(ierr);
447   } else {
448     ierr = PetscInfo(ptap->AP_loc,"Scalable algorithm, AP_loc is empty \n");CHKERRQ(ierr);
449   }
450 #endif
451 
452   /* (2-1) compute symbolic Co = Ro*AP_loc  */
453   /* ------------------------------------ */
454   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Ro,ptap->AP_loc,fill,&ptap->C_oth);CHKERRQ(ierr);
455 
456   /* (3) send coj of C_oth to other processors  */
457   /* ------------------------------------------ */
458   /* determine row ownership */
459   ierr = PetscLayoutCreate(comm,&rowmap);CHKERRQ(ierr);
460   rowmap->n  = pn;
461   rowmap->bs = 1;
462   ierr   = PetscLayoutSetUp(rowmap);CHKERRQ(ierr);
463   owners = rowmap->range;
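  /* rowmap partitions the rows of C (= columns of P) across processes; owners[] is used below to
     locate, for each row of C_oth with global index prmap[i], the process that owns that row of C */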
464 
465   /* determine the number of messages to send, their lengths */
466   ierr = PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);CHKERRQ(ierr);
467   ierr = PetscMemzero(len_s,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
468   ierr = PetscMemzero(len_si,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
469 
470   c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
471   coi   = c_oth->i; coj = c_oth->j;
472   con   = ptap->C_oth->rmap->n;
473   proc  = 0;
474   for (i=0; i<con; i++) {
475     while (prmap[i] >= owners[proc+1]) proc++;
476     len_si[proc]++;               /* num of rows in Co(=Pt*AP) to be sent to [proc] */
477     len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
478   }
479 
480   len          = 0; /* max length of buf_si[], see (4) */
481   owners_co[0] = 0;
482   nsend        = 0;
483   for (proc=0; proc<size; proc++) {
484     owners_co[proc+1] = owners_co[proc] + len_si[proc];
485     if (len_s[proc]) {
486       nsend++;
487       len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
488       len         += len_si[proc];
489     }
490   }
491 
492   /* determine the number and length of messages to receive for coi and coj  */
493   ierr = PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);CHKERRQ(ierr);
494   ierr = PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);CHKERRQ(ierr);
495 
496   /* post the Irecv and Isend of coj */
497   ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
498   ierr = PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);CHKERRQ(ierr);
499   ierr = PetscMalloc1(nsend+1,&swaits);CHKERRQ(ierr);
500   for (proc=0, k=0; proc<size; proc++) {
501     if (!len_s[proc]) continue;
502     i    = owners_co[proc];
503     ierr = MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);CHKERRQ(ierr);
504     k++;
505   }
506 
507   /* (2-2) compute symbolic C_loc = Rd*AP_loc */
508   /* ---------------------------------------- */
509   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Rd,ptap->AP_loc,fill,&ptap->C_loc);CHKERRQ(ierr);
510   c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;
511 
512   /* receives coj are complete */
513   for (i=0; i<nrecv; i++) {
514     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
515   }
516   ierr = PetscFree(rwaits);CHKERRQ(ierr);
517   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
518 
519   /* add received column indices into ta to update Crmax */
520   for (k=0; k<nrecv; k++) {/* k-th received message */
521     Jptr = buf_rj[k];
522     for (j=0; j<len_r[k]; j++) {
523       ierr = PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);CHKERRQ(ierr);
524     }
525   }
526   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr);
527   ierr = PetscTableDestroy(&ta);CHKERRQ(ierr);
528 
529   /* (4) send and recv coi */
530   /*-----------------------*/
531   ierr   = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
532   ierr   = PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);CHKERRQ(ierr);
533   ierr   = PetscMalloc1(len+1,&buf_s);CHKERRQ(ierr);
534   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
535   for (proc=0,k=0; proc<size; proc++) {
536     if (!len_s[proc]) continue;
537     /* form outgoing message for i-structure:
538          buf_si[0]:                 nrows to be sent
539                [1:nrows]:           row index (global)
540                [nrows+1:2*nrows+1]: i-structure index
541     */
542     /*-------------------------------------------*/
543     nrows       = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
544     buf_si_i    = buf_si + nrows+1;
545     buf_si[0]   = nrows;
546     buf_si_i[0] = 0;
547     nrows       = 0;
548     for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
549       nzi = coi[i+1] - coi[i];
550       buf_si_i[nrows+1] = buf_si_i[nrows] + nzi;  /* i-structure */
551       buf_si[nrows+1]   = prmap[i] -owners[proc]; /* local row index */
552       nrows++;
553     }
554     ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);CHKERRQ(ierr);
555     k++;
556     buf_si += len_si[proc];
557   }
558   for (i=0; i<nrecv; i++) {
559     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
560   }
561   ierr = PetscFree(rwaits);CHKERRQ(ierr);
562   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
563 
564   ierr = PetscFree4(len_s,len_si,sstatus,owners_co);CHKERRQ(ierr);
565   ierr = PetscFree(len_ri);CHKERRQ(ierr);
566   ierr = PetscFree(swaits);CHKERRQ(ierr);
567   ierr = PetscFree(buf_s);CHKERRQ(ierr);
568 
569   /* (5) compute the local portion of Cmpi      */
570   /* ------------------------------------------ */
571   /* set initial free space to be Crmax, sufficient for holding the nonzeros in each row of Cmpi */
572   ierr          = PetscFreeSpaceGet(Crmax,&free_space);CHKERRQ(ierr);
573   current_space = free_space;
574 
575   ierr = PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);CHKERRQ(ierr);
576   for (k=0; k<nrecv; k++) {
577     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th received i-structure */
578     nrows       = *buf_ri_k[k];
579     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th received i-structure */
580     nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of k-th received i-structure */
581   }
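  /* merge, row by row, the pattern of C_loc with the received C_oth column indices to count
     the diagonal/off-diagonal nonzeros (dnz/onz) needed to preallocate Cmpi */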
582 
583   ierr = MatPreallocateInitialize(comm,pn,pn,dnz,onz);CHKERRQ(ierr);
584   ierr = PetscLLCondensedCreate_Scalable(Crmax,&lnk);CHKERRQ(ierr);
585   for (i=0; i<pn; i++) {
586     /* add C_loc into Cmpi */
587     nzi  = c_loc->i[i+1] - c_loc->i[i];
588     Jptr = c_loc->j + c_loc->i[i];
589     ierr = PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);CHKERRQ(ierr);
590 
591     /* add received col data into lnk */
592     for (k=0; k<nrecv; k++) { /* k-th received message */
593       if (i == *nextrow[k]) { /* i-th row */
594         nzi  = *(nextci[k]+1) - *nextci[k];
595         Jptr = buf_rj[k] + *nextci[k];
596         ierr = PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);CHKERRQ(ierr);
597         nextrow[k]++; nextci[k]++;
598       }
599     }
600     nzi = lnk[0];
601 
602     /* copy data into free space, then initialize lnk */
603     ierr = PetscLLCondensedClean_Scalable(nzi,current_space->array,lnk);CHKERRQ(ierr);
604     ierr = MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);CHKERRQ(ierr);
605   }
606   ierr = PetscFree3(buf_ri_k,nextrow,nextci);CHKERRQ(ierr);
607   ierr = PetscLLCondensedDestroy_Scalable(lnk);CHKERRQ(ierr);
608   ierr = PetscFreeSpaceDestroy(free_space);CHKERRQ(ierr);
609 
610   /* local sizes and preallocation */
611   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
612   ierr = MatSetBlockSizes(Cmpi,PetscAbs(P->cmap->bs),PetscAbs(P->cmap->bs));CHKERRQ(ierr);
613   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
614   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
615 
616   /* members in merge */
617   ierr = PetscFree(id_r);CHKERRQ(ierr);
618   ierr = PetscFree(len_r);CHKERRQ(ierr);
619   ierr = PetscFree(buf_ri[0]);CHKERRQ(ierr);
620   ierr = PetscFree(buf_ri);CHKERRQ(ierr);
621   ierr = PetscFree(buf_rj[0]);CHKERRQ(ierr);
622   ierr = PetscFree(buf_rj);CHKERRQ(ierr);
623   ierr = PetscLayoutDestroy(&rowmap);CHKERRQ(ierr);
624 
625   /* attach the supporting struct to Cmpi for reuse */
626   c = (Mat_MPIAIJ*)Cmpi->data;
627   c->ap           = ptap;
628   ptap->duplicate = Cmpi->ops->duplicate;
629   ptap->destroy   = Cmpi->ops->destroy;
630   ptap->view      = Cmpi->ops->view;
631 
632   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
633   Cmpi->assembled        = PETSC_FALSE;
634   Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable;
635   Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
636   Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
637   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
638   *C                     = Cmpi;
639   PetscFunctionReturn(0);
640 }
641 
642 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ(Mat A,Mat P,PetscReal fill,Mat *C)
643 {
644   PetscErrorCode      ierr;
645   Mat_APMPI           *ptap;
646   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c;
647   MPI_Comm            comm;
648   PetscMPIInt         size,rank;
649   Mat                 Cmpi;
650   PetscFreeSpaceList  free_space=NULL,current_space=NULL;
651   PetscInt            am=A->rmap->n,pm=P->rmap->n,pN=P->cmap->N,pn=P->cmap->n;
652   PetscInt            *lnk,i,k,pnz,row,nsend;
653   PetscBT             lnkbt;
654   PetscMPIInt         tagi,tagj,*len_si,*len_s,*len_ri,icompleted=0,nrecv;
655   PetscInt            **buf_rj,**buf_ri,**buf_ri_k;
656   PetscInt            len,proc,*dnz,*onz,*owners,nzi,nspacedouble;
657   PetscInt            nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
658   MPI_Request         *swaits,*rwaits;
659   MPI_Status          *sstatus,rstatus;
660   PetscLayout         rowmap;
661   PetscInt            *owners_co,*coi,*coj;    /* i and j arrays of (p->B)^T*A*P - used in the communication */
662   PetscMPIInt         *len_r,*id_r;    /* lengths and source ranks of the messages to be received */
663   PetscInt            *api,*apj,*Jptr,apnz,*prmap=p->garray,con,j,ap_rmax=0,Crmax,*aj,*ai,*pi;
664   Mat_SeqAIJ          *p_loc,*p_oth=NULL,*ad=(Mat_SeqAIJ*)(a->A)->data,*ao=NULL,*c_loc,*c_oth;
665   PetscScalar         *apv;
666   PetscTable          ta;
667   MatType             mtype;
668 #if defined(PETSC_USE_INFO)
669   PetscReal           apfill;
670 #endif
671 
672   PetscFunctionBegin;
673   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
674   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
675   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
676 
677   if (size > 1) ao = (Mat_SeqAIJ*)(a->B)->data;
678 
679   /* create symbolic parallel matrix Cmpi */
680   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
681   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
682   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
683 
684   /* Do dense axpy in MatPtAPNumeric_MPIAIJ_MPIAIJ() */
685   Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ;
686 
687   /* create struct Mat_APMPI; it will be attached to C later */
688   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
689   ptap->reuse = MAT_INITIAL_MATRIX;
690   ptap->algType = 1;
691 
692   /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
693   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
694   /* get P_loc by taking all local rows of P */
695   ierr = MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
696 
697   /* (0) compute Rd = Pd^T, Ro = Po^T  */
698   /* --------------------------------- */
699   ierr = MatTranspose(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);CHKERRQ(ierr);
700   ierr = MatTranspose(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);CHKERRQ(ierr);
701 
702   /* (1) compute symbolic AP = A_loc*P = Ad*P_loc + Ao*P_oth (api,apj) */
703   /* ----------------------------------------------------------------- */
704   p_loc  = (Mat_SeqAIJ*)(ptap->P_loc)->data;
705   if (ptap->P_oth) p_oth  = (Mat_SeqAIJ*)(ptap->P_oth)->data;
706 
707   /* create and initialize a linked list */
708   ierr = PetscTableCreate(pn,pN,&ta);CHKERRQ(ierr); /* for computing AP_loc and Cmpi */
709   MatRowMergeMax_SeqAIJ(p_loc,ptap->P_loc->rmap->N,ta);
710   MatRowMergeMax_SeqAIJ(p_oth,ptap->P_oth->rmap->N,ta);
711   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr); /* Crmax = nnz(sum of Prows) */
712   /* printf("[%d] est %d, Crmax %d; pN %d\n",rank,5*(p_loc->rmax+p_oth->rmax + (PetscInt)(1.e-2*pN)),Crmax,pN); */
713 
714   ierr = PetscLLCondensedCreate(Crmax,pN,&lnk,&lnkbt);CHKERRQ(ierr);
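  /* Unlike the scalable version, this condensed linked list is backed by a PetscBT bit array of
     length pN, and a dense accumulator apa of length pN is allocated at the end of this routine;
     this per-process O(pN) storage is what makes the algorithm nonscalable. */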
715 
716   /* Initial FreeSpace size is fill*(nnz(A) + nnz(P)) */
717   if (ao) {
718     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],PetscIntSumTruncate(ao->i[am],p_loc->i[pm]))),&free_space);CHKERRQ(ierr);
719   } else {
720     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],p_loc->i[pm])),&free_space);CHKERRQ(ierr);
721   }
722   current_space = free_space;
723   nspacedouble  = 0;
724 
725   ierr   = PetscMalloc1(am+1,&api);CHKERRQ(ierr);
726   api[0] = 0;
727   for (i=0; i<am; i++) {
728     /* diagonal portion: Ad[i,:]*P */
729     ai = ad->i; pi = p_loc->i;
730     nzi = ai[i+1] - ai[i];
731     aj  = ad->j + ai[i];
732     for (j=0; j<nzi; j++) {
733       row  = aj[j];
734       pnz  = pi[row+1] - pi[row];
735       Jptr = p_loc->j + pi[row];
736       /* add non-zero cols of P into the sorted linked list lnk */
737       ierr = PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);CHKERRQ(ierr);
738     }
739     /* off-diagonal portion: Ao[i,:]*P */
740     if (ao) {
741       ai = ao->i; pi = p_oth->i;
742       nzi = ai[i+1] - ai[i];
743       aj  = ao->j + ai[i];
744       for (j=0; j<nzi; j++) {
745         row  = aj[j];
746         pnz  = pi[row+1] - pi[row];
747         Jptr = p_oth->j + pi[row];
748         ierr = PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);CHKERRQ(ierr);
749       }
750     }
751     apnz     = lnk[0];
752     api[i+1] = api[i] + apnz;
753     if (ap_rmax < apnz) ap_rmax = apnz;
754 
755     /* if free space is not available, double the total space in the list */
756     if (current_space->local_remaining<apnz) {
757       ierr = PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);CHKERRQ(ierr);
758       nspacedouble++;
759     }
760 
761     /* Copy data into free space, then initialize lnk */
762     ierr = PetscLLCondensedClean(pN,apnz,current_space->array,lnk,lnkbt);CHKERRQ(ierr);
763 
764     current_space->array           += apnz;
765     current_space->local_used      += apnz;
766     current_space->local_remaining -= apnz;
767   }
768   /* Allocate space for apj and apv, initialize apj, and */
769   /* destroy list of free space and other temporary array(s) */
770   ierr   = PetscMalloc2(api[am],&apj,api[am],&apv);CHKERRQ(ierr);
771   ierr   = PetscFreeSpaceContiguous(&free_space,apj);CHKERRQ(ierr);
772   ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
773 
774   /* Create AP_loc for reuse */
775   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,pN,api,apj,apv,&ptap->AP_loc);CHKERRQ(ierr);
776 
777 #if defined(PETSC_USE_INFO)
778   if (ao) {
779     apfill = (PetscReal)api[am]/(ad->i[am]+ao->i[am]+p_loc->i[pm]+1);
780   } else {
781     apfill = (PetscReal)api[am]/(ad->i[am]+p_loc->i[pm]+1);
782   }
783   ptap->AP_loc->info.mallocs           = nspacedouble;
784   ptap->AP_loc->info.fill_ratio_given  = fill;
785   ptap->AP_loc->info.fill_ratio_needed = apfill;
786 
787   if (api[am]) {
788     ierr = PetscInfo3(ptap->AP_loc,"Nonscalable algorithm, AP_loc reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)apfill);CHKERRQ(ierr);
789     ierr = PetscInfo1(ptap->AP_loc,"Use MatPtAP(A,P,MatReuse,%g,&C) for best AP_loc performance.\n",(double)apfill);CHKERRQ(ierr);
790   } else {
791     ierr = PetscInfo(ptap->AP_loc,"Nonscalable algorithm, AP_loc is empty \n");CHKERRQ(ierr);
792   }
793 #endif
794 
795   /* (2-1) compute symbolic Co = Ro*AP_loc  */
796   /* ------------------------------------ */
797   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Ro,ptap->AP_loc,fill,&ptap->C_oth);CHKERRQ(ierr);
798 
799   /* (3) send coj of C_oth to other processors  */
800   /* ------------------------------------------ */
801   /* determine row ownership */
802   ierr = PetscLayoutCreate(comm,&rowmap);CHKERRQ(ierr);
803   rowmap->n  = pn;
804   rowmap->bs = 1;
805   ierr   = PetscLayoutSetUp(rowmap);CHKERRQ(ierr);
806   owners = rowmap->range;
807 
808   /* determine the number of messages to send, their lengths */
809   ierr = PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);CHKERRQ(ierr);
810   ierr = PetscMemzero(len_s,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
811   ierr = PetscMemzero(len_si,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
812 
813   c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
814   coi   = c_oth->i; coj = c_oth->j;
815   con   = ptap->C_oth->rmap->n;
816   proc  = 0;
817   for (i=0; i<con; i++) {
818     while (prmap[i] >= owners[proc+1]) proc++;
819     len_si[proc]++;               /* num of rows in Co(=Pt*AP) to be sent to [proc] */
820     len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
821   }
822 
823   len          = 0; /* max length of buf_si[], see (4) */
824   owners_co[0] = 0;
825   nsend        = 0;
826   for (proc=0; proc<size; proc++) {
827     owners_co[proc+1] = owners_co[proc] + len_si[proc];
828     if (len_s[proc]) {
829       nsend++;
830       len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
831       len         += len_si[proc];
832     }
833   }
834 
835   /* determine the number and length of messages to receive for coi and coj  */
836   ierr = PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);CHKERRQ(ierr);
837   ierr = PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);CHKERRQ(ierr);
838 
839   /* post the Irecv and Isend of coj */
840   ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
841   ierr = PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);CHKERRQ(ierr);
842   ierr = PetscMalloc1(nsend+1,&swaits);CHKERRQ(ierr);
843   for (proc=0, k=0; proc<size; proc++) {
844     if (!len_s[proc]) continue;
845     i    = owners_co[proc];
846     ierr = MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);CHKERRQ(ierr);
847     k++;
848   }
849 
850   /* (2-2) compute symbolic C_loc = Rd*AP_loc */
851   /* ---------------------------------------- */
852   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Rd,ptap->AP_loc,fill,&ptap->C_loc);CHKERRQ(ierr);
853   c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;
854 
855   /* receives coj are complete */
856   for (i=0; i<nrecv; i++) {
857     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
858   }
859   ierr = PetscFree(rwaits);CHKERRQ(ierr);
860   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
861 
862   /* add received column indices into ta to update Crmax */
863   for (k=0; k<nrecv; k++) {/* k-th received message */
864     Jptr = buf_rj[k];
865     for (j=0; j<len_r[k]; j++) {
866       ierr = PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);CHKERRQ(ierr);
867     }
868   }
869   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr);
870   ierr = PetscTableDestroy(&ta);CHKERRQ(ierr);
871 
872   /* (4) send and recv coi */
873   /*-----------------------*/
874   ierr   = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
875   ierr   = PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);CHKERRQ(ierr);
876   ierr   = PetscMalloc1(len+1,&buf_s);CHKERRQ(ierr);
877   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
878   for (proc=0,k=0; proc<size; proc++) {
879     if (!len_s[proc]) continue;
880     /* form outgoing message for i-structure:
881          buf_si[0]:                 nrows to be sent
882                [1:nrows]:           row index (global)
883                [nrows+1:2*nrows+1]: i-structure index
884     */
885     /*-------------------------------------------*/
886     nrows       = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
887     buf_si_i    = buf_si + nrows+1;
888     buf_si[0]   = nrows;
889     buf_si_i[0] = 0;
890     nrows       = 0;
891     for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
892       nzi = coi[i+1] - coi[i];
893       buf_si_i[nrows+1] = buf_si_i[nrows] + nzi;  /* i-structure */
894       buf_si[nrows+1]   = prmap[i] -owners[proc]; /* local row index */
895       nrows++;
896     }
897     ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);CHKERRQ(ierr);
898     k++;
899     buf_si += len_si[proc];
900   }
901   for (i=0; i<nrecv; i++) {
902     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
903   }
904   ierr = PetscFree(rwaits);CHKERRQ(ierr);
905   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
906 
907   ierr = PetscFree4(len_s,len_si,sstatus,owners_co);CHKERRQ(ierr);
908   ierr = PetscFree(len_ri);CHKERRQ(ierr);
909   ierr = PetscFree(swaits);CHKERRQ(ierr);
910   ierr = PetscFree(buf_s);CHKERRQ(ierr);
911 
912   /* (5) compute the local portion of Cmpi      */
913   /* ------------------------------------------ */
914   /* set initial free space to be Crmax, sufficient for holding the nonzeros in each row of Cmpi */
915   ierr          = PetscFreeSpaceGet(Crmax,&free_space);CHKERRQ(ierr);
916   current_space = free_space;
917 
918   ierr = PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);CHKERRQ(ierr);
919   for (k=0; k<nrecv; k++) {
920     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th received i-structure */
921     nrows       = *buf_ri_k[k];
922     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th received i-structure */
923     nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of k-th received i-structure */
924   }
925 
926   ierr = MatPreallocateInitialize(comm,pn,pn,dnz,onz);CHKERRQ(ierr);
927   ierr = PetscLLCondensedCreate(Crmax,pN,&lnk,&lnkbt);CHKERRQ(ierr);
928   for (i=0; i<pn; i++) {
929     /* add C_loc into Cmpi */
930     nzi  = c_loc->i[i+1] - c_loc->i[i];
931     Jptr = c_loc->j + c_loc->i[i];
932     ierr = PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);CHKERRQ(ierr);
933 
934     /* add received col data into lnk */
935     for (k=0; k<nrecv; k++) { /* k-th received message */
936       if (i == *nextrow[k]) { /* i-th row */
937         nzi  = *(nextci[k]+1) - *nextci[k];
938         Jptr = buf_rj[k] + *nextci[k];
939         ierr = PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);CHKERRQ(ierr);
940         nextrow[k]++; nextci[k]++;
941       }
942     }
943     nzi = lnk[0];
944 
945     /* copy data into free space, then initialize lnk */
946     ierr = PetscLLCondensedClean(pN,nzi,current_space->array,lnk,lnkbt);CHKERRQ(ierr);
947     ierr = MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);CHKERRQ(ierr);
948   }
949   ierr = PetscFree3(buf_ri_k,nextrow,nextci);CHKERRQ(ierr);
950   ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
951   ierr = PetscFreeSpaceDestroy(free_space);CHKERRQ(ierr);
952 
953   /* local sizes and preallocation */
954   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
955   ierr = MatSetBlockSizes(Cmpi,PetscAbs(P->cmap->bs),PetscAbs(P->cmap->bs));CHKERRQ(ierr);
956   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
957   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
958 
959   /* members in merge */
960   ierr = PetscFree(id_r);CHKERRQ(ierr);
961   ierr = PetscFree(len_r);CHKERRQ(ierr);
962   ierr = PetscFree(buf_ri[0]);CHKERRQ(ierr);
963   ierr = PetscFree(buf_ri);CHKERRQ(ierr);
964   ierr = PetscFree(buf_rj[0]);CHKERRQ(ierr);
965   ierr = PetscFree(buf_rj);CHKERRQ(ierr);
966   ierr = PetscLayoutDestroy(&rowmap);CHKERRQ(ierr);
967 
968   /* attach the supporting struct to Cmpi for reuse */
969   c = (Mat_MPIAIJ*)Cmpi->data;
970   c->ap           = ptap;
971   ptap->duplicate = Cmpi->ops->duplicate;
972   ptap->destroy   = Cmpi->ops->destroy;
973   ptap->view      = Cmpi->ops->view;
974   ierr = PetscCalloc1(pN,&ptap->apa);CHKERRQ(ierr);
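  /* apa is the dense work array of length pN used by AProw_nonscalable() in the numeric phase */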
975 
976   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
977   Cmpi->assembled        = PETSC_FALSE;
978   Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
979   Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
980   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
981   *C                     = Cmpi;
982   PetscFunctionReturn(0);
983 }
984 
985 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ(Mat A,Mat P,Mat C)
986 {
987   PetscErrorCode    ierr;
988   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
989   Mat_SeqAIJ        *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
990   Mat_SeqAIJ        *ap,*p_loc,*p_oth=NULL,*c_seq;
991   Mat_APMPI         *ptap = c->ap;
992   Mat               AP_loc,C_loc,C_oth;
993   PetscInt          i,rstart,rend,cm,ncols,row;
994   PetscInt          *api,*apj,am = A->rmap->n,j,col,apnz;
995   PetscScalar       *apa;
996   const PetscInt    *cols;
997   const PetscScalar *vals;
998 
999   PetscFunctionBegin;
1000   if (!ptap) {
1001     MPI_Comm comm;
1002     ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
1003     SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
1004   }
1005 
1006   ierr = MatZeroEntries(C);CHKERRQ(ierr);
1007   /* 1) get R = Pd^T,Ro = Po^T */
1008   if (ptap->reuse == MAT_REUSE_MATRIX) {
1009     ierr = MatTranspose(p->A,MAT_REUSE_MATRIX,&ptap->Rd);CHKERRQ(ierr);
1010     ierr = MatTranspose(p->B,MAT_REUSE_MATRIX,&ptap->Ro);CHKERRQ(ierr);
1011   }
1012 
1013   /* 2) get AP_loc */
1014   AP_loc = ptap->AP_loc;
1015   ap = (Mat_SeqAIJ*)AP_loc->data;
1016 
1017   /* 2-1) get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
1018   /*-----------------------------------------------------*/
1019   if (ptap->reuse == MAT_REUSE_MATRIX) {
1020     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
1021     ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
1022     ierr = MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
1023   }
1024 
1025   /* 2-2) compute numeric A_loc*P - dominating part */
1026   /* ---------------------------------------------- */
1027   /* get data from symbolic products */
1028   p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
1029   if (ptap->P_oth) {
1030     p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;
1031   }
1032   apa   = ptap->apa;
1033   api   = ap->i;
1034   apj   = ap->j;
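  /* For each local row i: AProw_nonscalable() scatters A[i,:]*P into the dense work array apa;
     the entries are then gathered into the compressed storage of AP_loc following the symbolic
     pattern (api,apj), and the touched entries of apa are reset to zero for the next row. */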
1035   for (i=0; i<am; i++) {
1036     /* AP[i,:] = A[i,:]*P = Ad*P_loc + Ao*P_oth */
1037     AProw_nonscalable(i,ad,ao,p_loc,p_oth,apa);
1038     apnz = api[i+1] - api[i];
1039     for (j=0; j<apnz; j++) {
1040       col = apj[j+api[i]];
1041       ap->a[j+ap->i[i]] = apa[col];
1042       apa[col] = 0.0;
1043     }
1044     ierr = PetscLogFlops(2.0*apnz);CHKERRQ(ierr);
1045   }
1046 
1047   /* 3) C_loc = Rd*AP_loc, C_oth = Ro*AP_loc */
1048   ierr = ((ptap->C_loc)->ops->matmultnumeric)(ptap->Rd,AP_loc,ptap->C_loc);CHKERRQ(ierr);
1049   ierr = ((ptap->C_oth)->ops->matmultnumeric)(ptap->Ro,AP_loc,ptap->C_oth);CHKERRQ(ierr);
1050   C_loc = ptap->C_loc;
1051   C_oth = ptap->C_oth;
1052 
1053   /* add C_loc and C_oth into C */
1054   ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);
1055 
1056   /* C_loc -> C */
1057   cm    = C_loc->rmap->N;
1058   c_seq = (Mat_SeqAIJ*)C_loc->data;
1059   cols = c_seq->j;
1060   vals = c_seq->a;
1061 
1062 
1063   /* The (fast) MatSetValues_MPIAIJ_CopyFromCSRFormat function can only be used when C->was_assembled is PETSC_FALSE and */
1064   /* when there are no off-processor parts.  */
1065   /* is no longer valid. In that case the more general MatSetValues_MPIAIJ() has to be used, where the column index is looked up in */
1066   /* a table and additional bookkeeping is performed. */
1067   /* a table, and other, more complex stuff has to be done. */
1068   if (C->assembled) {
1069     C->was_assembled = PETSC_TRUE;
1070     C->assembled     = PETSC_FALSE;
1071   }
1072   if (C->was_assembled) {
1073     for (i=0; i<cm; i++) {
1074       ncols = c_seq->i[i+1] - c_seq->i[i];
1075       row = rstart + i;
1076       ierr = MatSetValues_MPIAIJ(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
1077       cols += ncols; vals += ncols;
1078     }
1079   } else {
1080     ierr = MatSetValues_MPIAIJ_CopyFromCSRFormat(C,c_seq->j,c_seq->i,c_seq->a);CHKERRQ(ierr);
1081   }
1082 
1083   /* Co -> C, off-processor part */
1084   cm = C_oth->rmap->N;
1085   c_seq = (Mat_SeqAIJ*)C_oth->data;
1086   cols = c_seq->j;
1087   vals = c_seq->a;
1088   for (i=0; i<cm; i++) {
1089     ncols = c_seq->i[i+1] - c_seq->i[i];
1090     row = p->garray[i];
1091     ierr = MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
1092     cols += ncols; vals += ncols;
1093   }
1094 
1095   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1096   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1097 
1098   ptap->reuse = MAT_REUSE_MATRIX;
1099 
1100   /* the supporting struct ptap consumes almost the same amount of memory as C=PtAP; release it if C will not be updated with new A and P */
1101   if (ptap->freestruct) {
1102     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
1103   }
1104   PetscFunctionReturn(0);
1105 }
1106