xref: /petsc/src/mat/impls/aij/mpi/mpiptap.c (revision acc4240f1b8ac3f700a8a55bed7df0482f60e3eb)
1 
2 /*
3   Defines projective product routines where A is an MPIAIJ matrix
4           C = P^T * A * P
5 */
6 
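/*
  Illustrative usage sketch (not part of this file): assuming A and P are assembled
  MPIAIJ matrices with compatible layouts,

    Mat C;
    ierr = MatPtAP(A,P,MAT_INITIAL_MATRIX,2.0,&C);CHKERRQ(ierr);   // first call: symbolic + numeric
    ierr = MatPtAP(A,P,MAT_REUSE_MATRIX,2.0,&C);CHKERRQ(ierr);     // later calls: numeric only, reuse symbolic data

  The algorithm implemented below is selected at runtime with
    -matptap_via <scalable,nonscalable,allatonce,allatonce_merged>
  and the intermediate data structures may be released after the product with
    -mat_freeintermediatedatastructures
*/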
7 #include <../src/mat/impls/aij/seq/aij.h>   /*I "petscmat.h" I*/
8 #include <../src/mat/utils/freespace.h>
9 #include <../src/mat/impls/aij/mpi/mpiaij.h>
10 #include <petscbt.h>
11 #include <petsctime.h>
12 #include <petsc/private/hashmapiv.h>
13 #include <petsc/private/hashseti.h>
14 #include <petscsf.h>
15 
16 /* #define PTAP_PROFILE */
17 
18 PetscErrorCode MatView_MPIAIJ_PtAP(Mat A,PetscViewer viewer)
19 {
20   PetscErrorCode    ierr;
21   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data;
22   Mat_APMPI         *ptap=a->ap;
23   PetscBool         iascii;
24   PetscViewerFormat format;
25 
26   PetscFunctionBegin;
27   if (!ptap) {
28     /* hack: MatDuplicate() copies oldmat->ops->view to newmat, which is a base mat class with a NULL ptap! */
29     A->ops->view = MatView_MPIAIJ;
30     ierr = (A->ops->view)(A,viewer);CHKERRQ(ierr);
31     PetscFunctionReturn(0);
32   }
33 
34   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
35   if (iascii) {
36     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
37     if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
38       if (ptap->algType == 0) {
39         ierr = PetscViewerASCIIPrintf(viewer,"using scalable MatPtAP() implementation\n");CHKERRQ(ierr);
40       } else if (ptap->algType == 1) {
41         ierr = PetscViewerASCIIPrintf(viewer,"using nonscalable MatPtAP() implementation\n");CHKERRQ(ierr);
42       } else if (ptap->algType == 2) {
43         ierr = PetscViewerASCIIPrintf(viewer,"using allatonce MatPtAP() implementation\n");CHKERRQ(ierr);
44       } else if (ptap->algType == 3) {
45         ierr = PetscViewerASCIIPrintf(viewer,"using merged allatonce MatPtAP() implementation\n");CHKERRQ(ierr);
46       }
47     }
48   }
49   ierr = (ptap->view)(A,viewer);CHKERRQ(ierr);
50   PetscFunctionReturn(0);
51 }
52 
53 PetscErrorCode MatFreeIntermediateDataStructures_MPIAIJ_AP(Mat A)
54 {
55   PetscErrorCode      ierr;
56   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data;
57   Mat_APMPI           *ptap=a->ap;
58   Mat_Merge_SeqsToMPI *merge;
59 
60   PetscFunctionBegin;
61   if (!ptap) PetscFunctionReturn(0);
62 
63   ierr = PetscFree2(ptap->startsj_s,ptap->startsj_r);CHKERRQ(ierr);
64   ierr = PetscFree(ptap->bufa);CHKERRQ(ierr);
65   ierr = MatDestroy(&ptap->P_loc);CHKERRQ(ierr);
66   ierr = MatDestroy(&ptap->P_oth);CHKERRQ(ierr);
67   ierr = MatDestroy(&ptap->A_loc);CHKERRQ(ierr); /* used by MatTransposeMatMult() */
68   ierr = MatDestroy(&ptap->Rd);CHKERRQ(ierr);
69   ierr = MatDestroy(&ptap->Ro);CHKERRQ(ierr);
70   if (ptap->AP_loc) { /* used by alg_rap */
71     Mat_SeqAIJ *ap = (Mat_SeqAIJ*)(ptap->AP_loc)->data;
72     ierr = PetscFree(ap->i);CHKERRQ(ierr);
73     ierr = PetscFree2(ap->j,ap->a);CHKERRQ(ierr);
74     ierr = MatDestroy(&ptap->AP_loc);CHKERRQ(ierr);
75   } else { /* used by alg_ptap */
76     ierr = PetscFree(ptap->api);CHKERRQ(ierr);
77     ierr = PetscFree(ptap->apj);CHKERRQ(ierr);
78   }
79   ierr = MatDestroy(&ptap->C_loc);CHKERRQ(ierr);
80   ierr = MatDestroy(&ptap->C_oth);CHKERRQ(ierr);
81   if (ptap->apa) {ierr = PetscFree(ptap->apa);CHKERRQ(ierr);}
82 
83   ierr = MatDestroy(&ptap->Pt);CHKERRQ(ierr);
84 
85   merge=ptap->merge;
86   if (merge) { /* used by alg_ptap */
87     ierr = PetscFree(merge->id_r);CHKERRQ(ierr);
88     ierr = PetscFree(merge->len_s);CHKERRQ(ierr);
89     ierr = PetscFree(merge->len_r);CHKERRQ(ierr);
90     ierr = PetscFree(merge->bi);CHKERRQ(ierr);
91     ierr = PetscFree(merge->bj);CHKERRQ(ierr);
92     ierr = PetscFree(merge->buf_ri[0]);CHKERRQ(ierr);
93     ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr);
94     ierr = PetscFree(merge->buf_rj[0]);CHKERRQ(ierr);
95     ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr);
96     ierr = PetscFree(merge->coi);CHKERRQ(ierr);
97     ierr = PetscFree(merge->coj);CHKERRQ(ierr);
98     ierr = PetscFree(merge->owners_co);CHKERRQ(ierr);
99     ierr = PetscLayoutDestroy(&merge->rowmap);CHKERRQ(ierr);
100     ierr = PetscFree(ptap->merge);CHKERRQ(ierr);
101   }
102   ierr = ISLocalToGlobalMappingDestroy(&ptap->ltog);CHKERRQ(ierr);
103 
104   ierr = PetscSFDestroy(&ptap->sf);CHKERRQ(ierr);
105   ierr = PetscFree(ptap->c_othi);CHKERRQ(ierr);
106   ierr = PetscFree(ptap->c_rmti);CHKERRQ(ierr);
107   PetscFunctionReturn(0);
108 }
109 
110 PetscErrorCode MatDestroy_MPIAIJ_PtAP(Mat A)
111 {
112   PetscErrorCode ierr;
113   Mat_MPIAIJ     *a=(Mat_MPIAIJ*)A->data;
114   Mat_APMPI      *ptap=a->ap;
115 
116   PetscFunctionBegin;
117   ierr = (*A->ops->freeintermediatedatastructures)(A);CHKERRQ(ierr);
118   ierr = (*ptap->destroy)(A);CHKERRQ(ierr); /* MatDestroy_MPIAIJ(A) */
119   ierr = PetscFree(ptap);CHKERRQ(ierr);
120   PetscFunctionReturn(0);
121 }
122 
123 PETSC_INTERN PetscErrorCode MatPtAP_MPIAIJ_MPIAIJ(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
124 {
125   PetscErrorCode ierr;
126   PetscBool      flg;
127   MPI_Comm       comm;
128 #if !defined(PETSC_HAVE_HYPRE)
129   const char          *algTypes[4] = {"scalable","nonscalable","allatonce","allatonce_merged"};
130   PetscInt            nalg=4;
131 #else
132   const char          *algTypes[5] = {"scalable","nonscalable","allatonce","allatonce_merged","hypre"};
133   PetscInt            nalg=5;
134 #endif
135   PetscInt            pN=P->cmap->N,alg=1; /* set default algorithm */
136 
137   PetscFunctionBegin;
138   /* check if matrix local sizes are compatible */
139   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
140   if (A->rmap->rstart != P->rmap->rstart || A->rmap->rend != P->rmap->rend) SETERRQ4(comm,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, Arow (%D, %D) != Prow (%D,%D)",A->rmap->rstart,A->rmap->rend,P->rmap->rstart,P->rmap->rend);
141   if (A->cmap->rstart != P->rmap->rstart || A->cmap->rend != P->rmap->rend) SETERRQ4(comm,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, Acol (%D, %D) != Prow (%D,%D)",A->cmap->rstart,A->cmap->rend,P->rmap->rstart,P->rmap->rend);
142 
143   if (scall == MAT_INITIAL_MATRIX) {
144     /* pick an algorithm */
145     ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");CHKERRQ(ierr);
146     ierr = PetscOptionsEList("-matptap_via","Algorithmic approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);CHKERRQ(ierr);
147     ierr = PetscOptionsEnd();CHKERRQ(ierr);
148 
149     if (!flg && pN > 100000) { /* may switch to the scalable algorithm as default */
150       MatInfo     Ainfo,Pinfo;
151       PetscInt    nz_local;
152       PetscBool   alg_scalable_loc=PETSC_FALSE,alg_scalable;
153 
154       ierr = MatGetInfo(A,MAT_LOCAL,&Ainfo);CHKERRQ(ierr);
155       ierr = MatGetInfo(P,MAT_LOCAL,&Pinfo);CHKERRQ(ierr);
156       nz_local = (PetscInt)(Ainfo.nz_allocated + Pinfo.nz_allocated);
157 
158       if (pN > fill*nz_local) alg_scalable_loc = PETSC_TRUE;
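      /* illustrative example: with fill=2.0 and nz_local=40000 the threshold is 80000, so pN=120000 makes this rank vote for the scalable algorithm */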
159       ierr = MPIU_Allreduce(&alg_scalable_loc,&alg_scalable,1,MPIU_BOOL,MPI_LOR,comm);CHKERRQ(ierr);
160 
161       if (alg_scalable) {
162         alg = 0; /* the scalable algorithm may be ~50% slower than the nonscalable one */
163       }
164     }
165 
166     switch (alg) {
167     case 1:
168       /* do R=P^T locally, then C=R*A*P -- nonscalable */
169       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
170       ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ(A,P,fill,C);CHKERRQ(ierr);
171       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
172       break;
173     case 2:
174       /* compute C=P^T*A*P allatonce */
175       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
176       ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(A,P,fill,C);CHKERRQ(ierr);
177       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
178       break;
179     case 3:
180       /* compute C=P^T*A*P allatonce */
181       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
182       ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(A,P,fill,C);CHKERRQ(ierr);
183       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
184       break;
185 #if defined(PETSC_HAVE_HYPRE)
186     case 4:
187       /* Use boomerAMGBuildCoarseOperator */
188       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
189       ierr = MatPtAPSymbolic_AIJ_AIJ_wHYPRE(A,P,fill,C);CHKERRQ(ierr);
190       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
191       break;
192 #endif
193     default:
194       /* do R=P^T locally, then C=R*A*P */
195       ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
196       ierr = MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(A,P,fill,C);CHKERRQ(ierr);
197       ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
198       break;
199     }
200 
201     if (alg == 0 || alg == 1 || alg == 2 || alg == 3) {
202       Mat_MPIAIJ *c  = (Mat_MPIAIJ*)(*C)->data;
203       Mat_APMPI  *ap = c->ap;
204       ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)(*C)),((PetscObject)(*C))->prefix,"MatFreeIntermediateDataStructures","Mat");CHKERRQ(ierr);
205       ap->freestruct = PETSC_FALSE;
206       ierr = PetscOptionsBool("-mat_freeintermediatedatastructures","Free intermediate data structures", "MatFreeIntermediateDataStructures",ap->freestruct,&ap->freestruct, NULL);CHKERRQ(ierr);
207       ierr = PetscOptionsEnd();CHKERRQ(ierr);
208     }
209   }
210 
211   ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
212   ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
213   ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
214   PetscFunctionReturn(0);
215 }
216 
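/*
  Numeric phase of the scalable algorithm: recompute AP_loc = A_loc*P row by row, form
  C_loc = Rd*AP_loc and C_oth = Ro*AP_loc with the scalable sequential kernels, and add
  both pieces into C (the rows of C_oth belong to other processes and are delivered by
  MatSetValues()/MatAssembly).
*/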
217 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable(Mat A,Mat P,Mat C)
218 {
219   PetscErrorCode    ierr;
220   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
221   Mat_SeqAIJ        *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
222   Mat_SeqAIJ        *ap,*p_loc,*p_oth,*c_seq;
223   Mat_APMPI         *ptap = c->ap;
224   Mat               AP_loc,C_loc,C_oth;
225   PetscInt          i,rstart,rend,cm,ncols,row,*api,*apj,am = A->rmap->n,apnz,nout;
226   PetscScalar       *apa;
227   const PetscInt    *cols;
228   const PetscScalar *vals;
229 
230   PetscFunctionBegin;
231   if (!ptap) {
232     MPI_Comm comm;
233     ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
234     SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
235   }
236 
237   ierr = MatZeroEntries(C);CHKERRQ(ierr);
238 
239   /* 1) get R = Pd^T,Ro = Po^T */
240   if (ptap->reuse == MAT_REUSE_MATRIX) {
241     ierr = MatTranspose(p->A,MAT_REUSE_MATRIX,&ptap->Rd);CHKERRQ(ierr);
242     ierr = MatTranspose(p->B,MAT_REUSE_MATRIX,&ptap->Ro);CHKERRQ(ierr);
243   }
244 
245   /* 2) get AP_loc */
246   AP_loc = ptap->AP_loc;
247   ap = (Mat_SeqAIJ*)AP_loc->data;
248 
249   /* 2-1) get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
250   /*-----------------------------------------------------*/
251   if (ptap->reuse == MAT_REUSE_MATRIX) {
252     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
253     ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
254     ierr = MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
255   }
256 
257   /* 2-2) compute numeric A_loc*P - dominating part */
258   /* ---------------------------------------------- */
259   /* get data from symbolic products */
260   p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
261   if (ptap->P_oth) p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;
262   else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ptap->P_oth is NULL. Cannot proceed!");
263 
264   api   = ap->i;
265   apj   = ap->j;
266   ierr = ISLocalToGlobalMappingApply(ptap->ltog,api[AP_loc->rmap->n],apj,apj);CHKERRQ(ierr);
267   for (i=0; i<am; i++) {
268     /* AP[i,:] = A[i,:]*P = Ad[i,:]*P_loc + Ao[i,:]*P_oth */
269     apnz = api[i+1] - api[i];
270     apa = ap->a + api[i];
271     ierr = PetscMemzero(apa,sizeof(PetscScalar)*apnz);CHKERRQ(ierr);
272     AProw_scalable(i,ad,ao,p_loc,p_oth,api,apj,apa);
273     ierr = PetscLogFlops(2.0*apnz);CHKERRQ(ierr);
274   }
275   ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,api[AP_loc->rmap->n],apj,&nout,apj);CHKERRQ(ierr);
276   if (api[AP_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",api[AP_loc->rmap->n],nout);
277 
278   /* 3) C_loc = Rd*AP_loc, C_oth = Ro*AP_loc */
279   /* Always use scalable version since we are in the MPI scalable version */
280   ierr = MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(ptap->Rd,AP_loc,ptap->C_loc);CHKERRQ(ierr);
281   ierr = MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(ptap->Ro,AP_loc,ptap->C_oth);CHKERRQ(ierr);
282 
283   C_loc = ptap->C_loc;
284   C_oth = ptap->C_oth;
285 
286   /* add C_loc and C_oth to C */
287   ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);
288 
289   /* C_loc -> C */
290   cm    = C_loc->rmap->N;
291   c_seq = (Mat_SeqAIJ*)C_loc->data;
292   cols = c_seq->j;
293   vals = c_seq->a;
294   ierr = ISLocalToGlobalMappingApply(ptap->ltog,c_seq->i[C_loc->rmap->n],c_seq->j,c_seq->j);CHKERRQ(ierr);
295 
296   /* The (fast) MatSetValues_MPIAIJ_CopyFromCSRFormat function can only be used when C->was_assembled is PETSC_FALSE and */
297   /* when there are no off-processor parts. */
298   /* If was_assembled is true, the statement aj[rowstart_diag+dnz_row] = mat_j[col] - cstart; in MatSetValues_MPIAIJ_CopyFromCSRFormat */
299   /* is no longer valid. In that case the more general MatSetValues_MPIAIJ() has to be used, where the column index is looked up in */
300   /* a table and additional bookkeeping is required. */
301   if (C->assembled) {
302     C->was_assembled = PETSC_TRUE;
303     C->assembled     = PETSC_FALSE;
304   }
305   if (C->was_assembled) {
306     for (i=0; i<cm; i++) {
307       ncols = c_seq->i[i+1] - c_seq->i[i];
308       row = rstart + i;
309       ierr = MatSetValues_MPIAIJ(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
310       cols += ncols; vals += ncols;
311     }
312   } else {
313     ierr = MatSetValues_MPIAIJ_CopyFromCSRFormat(C,c_seq->j,c_seq->i,c_seq->a);CHKERRQ(ierr);
314   }
315   ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_seq->i[C_loc->rmap->n],c_seq->j,&nout,c_seq->j);CHKERRQ(ierr);
316   if (c_seq->i[C_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_seq->i[C_loc->rmap->n],nout);
317 
318   /* Co -> C, off-processor part */
319   cm = C_oth->rmap->N;
320   c_seq = (Mat_SeqAIJ*)C_oth->data;
321   cols = c_seq->j;
322   vals = c_seq->a;
323   ierr = ISLocalToGlobalMappingApply(ptap->ltog,c_seq->i[C_oth->rmap->n],c_seq->j,c_seq->j);CHKERRQ(ierr);
324   for (i=0; i<cm; i++) {
325     ncols = c_seq->i[i+1] - c_seq->i[i];
326     row = p->garray[i];
327     ierr = MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
328     cols += ncols; vals += ncols;
329   }
330   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
331   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
332 
333   ptap->reuse = MAT_REUSE_MATRIX;
334 
335   ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_seq->i[C_oth->rmap->n],c_seq->j,&nout,c_seq->j);CHKERRQ(ierr);
336   if (c_seq->i[C_oth->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_seq->i[C_oth->rmap->n],nout);
337 
338   /* supporting struct ptap consumes almost the same amount of memory as C=PtAP; release it if C will not be updated again from A and P */
339   if (ptap->freestruct) {
340     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
341   }
342   PetscFunctionReturn(0);
343 }
344 
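/*
  Symbolic phase of the scalable algorithm: AP_loc = A_loc*P is built with a condensed linked
  list and compacted to local column indices (ptap->ltog), the off-processor rows of C
  (Ro*AP_loc) are exchanged with plain MPI sends/receives, and Cmpi is preallocated from the
  merged local and received row structures.
*/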
345 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(Mat A,Mat P,PetscReal fill,Mat *C)
346 {
347   PetscErrorCode      ierr;
348   Mat_APMPI           *ptap;
349   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c;
350   MPI_Comm            comm;
351   PetscMPIInt         size,rank;
352   Mat                 Cmpi,P_loc,P_oth;
353   PetscFreeSpaceList  free_space=NULL,current_space=NULL;
354   PetscInt            am=A->rmap->n,pm=P->rmap->n,pN=P->cmap->N,pn=P->cmap->n;
355   PetscInt            *lnk,i,k,pnz,row,nsend;
356   PetscMPIInt         tagi,tagj,*len_si,*len_s,*len_ri,icompleted=0,nrecv;
357   PetscInt            **buf_rj,**buf_ri,**buf_ri_k;
358   PetscInt            len,proc,*dnz,*onz,*owners,nzi,nspacedouble;
359   PetscInt            nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
360   MPI_Request         *swaits,*rwaits;
361   MPI_Status          *sstatus,rstatus;
362   PetscLayout         rowmap;
363   PetscInt            *owners_co,*coi,*coj;    /* i and j array of (p->B)^T*A*P - used in the communication */
364   PetscMPIInt         *len_r,*id_r;    /* array of length of comm->size, store send/recv matrix values */
365   PetscInt            *api,*apj,*Jptr,apnz,*prmap=p->garray,con,j,Crmax,*aj,*ai,*pi,nout;
366   Mat_SeqAIJ          *p_loc,*p_oth=NULL,*ad=(Mat_SeqAIJ*)(a->A)->data,*ao=NULL,*c_loc,*c_oth;
367   PetscScalar         *apv;
368   PetscTable          ta;
369   MatType             mtype;
370   const char          *prefix;
371 #if defined(PETSC_USE_INFO)
372   PetscReal           apfill;
373 #endif
374 
375   PetscFunctionBegin;
376   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
377   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
378   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
379 
380   if (size > 1) ao = (Mat_SeqAIJ*)(a->B)->data;
381 
382   /* create symbolic parallel matrix Cmpi */
383   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
384   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
385   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
386 
387   /* create struct Mat_APMPI and attach it to C later */
388   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
389   ptap->reuse = MAT_INITIAL_MATRIX;
390   ptap->algType = 0;
391 
392   /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
393   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&P_oth);CHKERRQ(ierr);
394   /* get P_loc by taking all local rows of P */
395   ierr = MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&P_loc);CHKERRQ(ierr);
396 
397   ptap->P_loc = P_loc;
398   ptap->P_oth = P_oth;
399 
400   /* (0) compute Rd = Pd^T, Ro = Po^T  */
401   /* --------------------------------- */
402   ierr = MatTranspose(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);CHKERRQ(ierr);
403   ierr = MatTranspose(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);CHKERRQ(ierr);
404 
405   /* (1) compute symbolic AP = A_loc*P = Ad*P_loc + Ao*P_oth (api,apj) */
406   /* ----------------------------------------------------------------- */
407   p_loc  = (Mat_SeqAIJ*)P_loc->data;
408   if (P_oth) p_oth = (Mat_SeqAIJ*)P_oth->data;
409 
410   /* create and initialize a linked list */
411   ierr = PetscTableCreate(pn,pN,&ta);CHKERRQ(ierr); /* for computing AP_loc and Cmpi */
412   MatRowMergeMax_SeqAIJ(p_loc,P_loc->rmap->N,ta);
413   MatRowMergeMax_SeqAIJ(p_oth,P_oth->rmap->N,ta);
414   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr); /* Crmax = nnz(sum of Prows) */
415 
416   ierr = PetscLLCondensedCreate_Scalable(Crmax,&lnk);CHKERRQ(ierr);
417 
418   /* Initial FreeSpace size is fill*(nnz(A) + nnz(P)) */
419   if (ao) {
420     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],PetscIntSumTruncate(ao->i[am],p_loc->i[pm]))),&free_space);CHKERRQ(ierr);
421   } else {
422     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],p_loc->i[pm])),&free_space);CHKERRQ(ierr);
423   }
424   current_space = free_space;
425   nspacedouble  = 0;
426 
427   ierr   = PetscMalloc1(am+1,&api);CHKERRQ(ierr);
428   api[0] = 0;
429   for (i=0; i<am; i++) {
430     /* diagonal portion: Ad[i,:]*P */
431     ai = ad->i; pi = p_loc->i;
432     nzi = ai[i+1] - ai[i];
433     aj  = ad->j + ai[i];
434     for (j=0; j<nzi; j++) {
435       row  = aj[j];
436       pnz  = pi[row+1] - pi[row];
437       Jptr = p_loc->j + pi[row];
438       /* add non-zero cols of P into the sorted linked list lnk */
439       ierr = PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);CHKERRQ(ierr);
440     }
441     /* off-diagonal portion: Ao[i,:]*P */
442     if (ao) {
443       ai = ao->i; pi = p_oth->i;
444       nzi = ai[i+1] - ai[i];
445       aj  = ao->j + ai[i];
446       for (j=0; j<nzi; j++) {
447         row  = aj[j];
448         pnz  = pi[row+1] - pi[row];
449         Jptr = p_oth->j + pi[row];
450         ierr = PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);CHKERRQ(ierr);
451       }
452     }
453     apnz     = lnk[0];
454     api[i+1] = api[i] + apnz;
455 
456     /* if free space is not available, double the total space in the list */
457     if (current_space->local_remaining<apnz) {
458       ierr = PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);CHKERRQ(ierr);
459       nspacedouble++;
460     }
461 
462     /* Copy data into free space, then initialize lnk */
463     ierr = PetscLLCondensedClean_Scalable(apnz,current_space->array,lnk);CHKERRQ(ierr);
464 
465     current_space->array           += apnz;
466     current_space->local_used      += apnz;
467     current_space->local_remaining -= apnz;
468   }
469   /* Allocate space for apj and apv, initialize apj, and */
470   /* destroy list of free space and other temporary array(s) */
471   ierr = PetscCalloc2(api[am],&apj,api[am],&apv);CHKERRQ(ierr);
472   ierr = PetscFreeSpaceContiguous(&free_space,apj);CHKERRQ(ierr);
473   ierr = PetscLLCondensedDestroy_Scalable(lnk);CHKERRQ(ierr);
474 
475   /* Create AP_loc for reuse */
476   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,pN,api,apj,apv,&ptap->AP_loc);CHKERRQ(ierr);
477   ierr = MatSeqAIJCompactOutExtraColumns_SeqAIJ(ptap->AP_loc, &ptap->ltog);CHKERRQ(ierr);
478 
479 #if defined(PETSC_USE_INFO)
480   if (ao) {
481     apfill = (PetscReal)api[am]/(ad->i[am]+ao->i[am]+p_loc->i[pm]+1);
482   } else {
483     apfill = (PetscReal)api[am]/(ad->i[am]+p_loc->i[pm]+1);
484   }
485   ptap->AP_loc->info.mallocs           = nspacedouble;
486   ptap->AP_loc->info.fill_ratio_given  = fill;
487   ptap->AP_loc->info.fill_ratio_needed = apfill;
488 
489   if (api[am]) {
490     ierr = PetscInfo3(ptap->AP_loc,"Scalable algorithm, AP_loc reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)apfill);CHKERRQ(ierr);
491     ierr = PetscInfo1(ptap->AP_loc,"Use MatPtAP(A,P,MatReuse,%g,&C) for best AP_loc performance.\n",(double)apfill);CHKERRQ(ierr);
492   } else {
493     ierr = PetscInfo(ptap->AP_loc,"Scalable algorithm, AP_loc is empty \n");CHKERRQ(ierr);
494   }
495 #endif
496 
497   /* (2-1) compute symbolic Co = Ro*AP_loc  */
498   /* ------------------------------------ */
499   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
500   ierr = MatSetOptionsPrefix(ptap->Ro,prefix);CHKERRQ(ierr);
501   ierr = MatAppendOptionsPrefix(ptap->Ro,"inner_offdiag_");CHKERRQ(ierr);
502   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Ro,ptap->AP_loc,fill,&ptap->C_oth);CHKERRQ(ierr);
503 
504   /* (3) send coj of C_oth to other processors  */
505   /* ------------------------------------------ */
506   /* determine row ownership */
507   ierr = PetscLayoutCreate(comm,&rowmap);CHKERRQ(ierr);
508   rowmap->n  = pn;
509   rowmap->bs = 1;
510   ierr   = PetscLayoutSetUp(rowmap);CHKERRQ(ierr);
511   owners = rowmap->range;
512 
513   /* determine the number of messages to send, their lengths */
514   ierr = PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);CHKERRQ(ierr);
515   ierr = PetscMemzero(len_s,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
516   ierr = PetscMemzero(len_si,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
517 
518   c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
519   coi   = c_oth->i; coj = c_oth->j;
520   con   = ptap->C_oth->rmap->n;
521   proc  = 0;
522   ierr = ISLocalToGlobalMappingApply(ptap->ltog,coi[con],coj,coj);CHKERRQ(ierr);
523   for (i=0; i<con; i++) {
524     while (prmap[i] >= owners[proc+1]) proc++;
525     len_si[proc]++;               /* num of rows in Co(=Pt*AP) to be sent to [proc] */
526     len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
527   }
528 
529   len          = 0; /* max length of buf_si[], see (4) */
530   owners_co[0] = 0;
531   nsend        = 0;
532   for (proc=0; proc<size; proc++) {
533     owners_co[proc+1] = owners_co[proc] + len_si[proc];
534     if (len_s[proc]) {
535       nsend++;
536       len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
537       len         += len_si[proc];
538     }
539   }
540 
541   /* determine the number and length of messages to receive for coi and coj  */
542   ierr = PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);CHKERRQ(ierr);
543   ierr = PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);CHKERRQ(ierr);
544 
545   /* post the Irecv and Isend of coj */
546   ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
547   ierr = PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);CHKERRQ(ierr);
548   ierr = PetscMalloc1(nsend+1,&swaits);CHKERRQ(ierr);
549   for (proc=0, k=0; proc<size; proc++) {
550     if (!len_s[proc]) continue;
551     i    = owners_co[proc];
552     ierr = MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);CHKERRQ(ierr);
553     k++;
554   }
555 
556   /* (2-2) compute symbolic C_loc = Rd*AP_loc */
557   /* ---------------------------------------- */
558   ierr = MatSetOptionsPrefix(ptap->Rd,prefix);CHKERRQ(ierr);
559   ierr = MatAppendOptionsPrefix(ptap->Rd,"inner_diag_");CHKERRQ(ierr);
560   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Rd,ptap->AP_loc,fill,&ptap->C_loc);CHKERRQ(ierr);
561   c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;
562   ierr = ISLocalToGlobalMappingApply(ptap->ltog,c_loc->i[ptap->C_loc->rmap->n],c_loc->j,c_loc->j);CHKERRQ(ierr);
563 
564   /* receives coj are complete */
565   for (i=0; i<nrecv; i++) {
566     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
567   }
568   ierr = PetscFree(rwaits);CHKERRQ(ierr);
569   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
570 
571   /* add received column indices into ta to update Crmax */
572   for (k=0; k<nrecv; k++) {/* k-th received message */
573     Jptr = buf_rj[k];
574     for (j=0; j<len_r[k]; j++) {
575       ierr = PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);CHKERRQ(ierr);
576     }
577   }
578   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr);
579   ierr = PetscTableDestroy(&ta);CHKERRQ(ierr);
580 
581   /* (4) send and recv coi */
582   /*-----------------------*/
583   ierr   = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
584   ierr   = PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);CHKERRQ(ierr);
585   ierr   = PetscMalloc1(len+1,&buf_s);CHKERRQ(ierr);
586   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
587   for (proc=0,k=0; proc<size; proc++) {
588     if (!len_s[proc]) continue;
589     /* form outgoing message for i-structure:
590          buf_si[0]:                 nrows to be sent
591                [1:nrows]:           row index (global)
592                [nrows+1:2*nrows+1]: i-structure index
593     */
594     /*-------------------------------------------*/
595     nrows       = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
596     buf_si_i    = buf_si + nrows+1;
597     buf_si[0]   = nrows;
598     buf_si_i[0] = 0;
599     nrows       = 0;
600     for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
601       nzi = coi[i+1] - coi[i];
602       buf_si_i[nrows+1] = buf_si_i[nrows] + nzi;  /* i-structure */
603       buf_si[nrows+1]   = prmap[i] -owners[proc]; /* local row index */
604       nrows++;
605     }
606     ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);CHKERRQ(ierr);
607     k++;
608     buf_si += len_si[proc];
609   }
610   for (i=0; i<nrecv; i++) {
611     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
612   }
613   ierr = PetscFree(rwaits);CHKERRQ(ierr);
614   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
615 
616   ierr = PetscFree4(len_s,len_si,sstatus,owners_co);CHKERRQ(ierr);
617   ierr = PetscFree(len_ri);CHKERRQ(ierr);
618   ierr = PetscFree(swaits);CHKERRQ(ierr);
619   ierr = PetscFree(buf_s);CHKERRQ(ierr);
620 
621   /* (5) compute the local portion of Cmpi      */
622   /* ------------------------------------------ */
623   /* set initial free space to be Crmax, sufficient for holding the nonzeros in each row of Cmpi */
624   ierr          = PetscFreeSpaceGet(Crmax,&free_space);CHKERRQ(ierr);
625   current_space = free_space;
626 
627   ierr = PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);CHKERRQ(ierr);
628   for (k=0; k<nrecv; k++) {
629     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th received i-structure */
630     nrows       = *buf_ri_k[k];
631     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th received i-structure */
632     nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of k-th received i-structure */
633   }
634 
635   ierr = MatPreallocateInitialize(comm,pn,pn,dnz,onz);CHKERRQ(ierr);
636   ierr = PetscLLCondensedCreate_Scalable(Crmax,&lnk);CHKERRQ(ierr);
637   for (i=0; i<pn; i++) {
638     /* add C_loc into Cmpi */
639     nzi  = c_loc->i[i+1] - c_loc->i[i];
640     Jptr = c_loc->j + c_loc->i[i];
641     ierr = PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);CHKERRQ(ierr);
642 
643     /* add received col data into lnk */
644     for (k=0; k<nrecv; k++) { /* k-th received message */
645       if (i == *nextrow[k]) { /* i-th row */
646         nzi  = *(nextci[k]+1) - *nextci[k];
647         Jptr = buf_rj[k] + *nextci[k];
648         ierr = PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);CHKERRQ(ierr);
649         nextrow[k]++; nextci[k]++;
650       }
651     }
652     nzi = lnk[0];
653 
654     /* copy data into free space, then initialize lnk */
655     ierr = PetscLLCondensedClean_Scalable(nzi,current_space->array,lnk);CHKERRQ(ierr);
656     ierr = MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);CHKERRQ(ierr);
657   }
658   ierr = PetscFree3(buf_ri_k,nextrow,nextci);CHKERRQ(ierr);
659   ierr = PetscLLCondensedDestroy_Scalable(lnk);CHKERRQ(ierr);
660   ierr = PetscFreeSpaceDestroy(free_space);CHKERRQ(ierr);
661 
662   /* local sizes and preallocation */
663   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
664   ierr = MatSetBlockSizes(Cmpi,PetscAbs(P->cmap->bs),PetscAbs(P->cmap->bs));CHKERRQ(ierr);
665   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
666   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
667 
668   /* members in merge */
669   ierr = PetscFree(id_r);CHKERRQ(ierr);
670   ierr = PetscFree(len_r);CHKERRQ(ierr);
671   ierr = PetscFree(buf_ri[0]);CHKERRQ(ierr);
672   ierr = PetscFree(buf_ri);CHKERRQ(ierr);
673   ierr = PetscFree(buf_rj[0]);CHKERRQ(ierr);
674   ierr = PetscFree(buf_rj);CHKERRQ(ierr);
675   ierr = PetscLayoutDestroy(&rowmap);CHKERRQ(ierr);
676 
677   /* attach the supporting struct to Cmpi for reuse */
678   c = (Mat_MPIAIJ*)Cmpi->data;
679   c->ap           = ptap;
680   ptap->duplicate = Cmpi->ops->duplicate;
681   ptap->destroy   = Cmpi->ops->destroy;
682   ptap->view      = Cmpi->ops->view;
683 
684   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
685   Cmpi->assembled        = PETSC_FALSE;
686   Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable;
687   Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
688   Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
689   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
690   *C                     = Cmpi;
691 
692   nout = 0;
693   ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_oth->i[ptap->C_oth->rmap->n],c_oth->j,&nout,c_oth->j);CHKERRQ(ierr);
694   if (c_oth->i[ptap->C_oth->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_oth->i[ptap->C_oth->rmap->n],nout);
695   ierr = ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_loc->i[ptap->C_loc->rmap->n],c_loc->j,&nout,c_loc->j);CHKERRQ(ierr);
696   if (c_loc->i[ptap->C_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_loc->i[ptap->C_loc->rmap->n],nout);
697 
698   PetscFunctionReturn(0);
699 }
700 
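/*
  Compute the sparsity pattern of row i of AP = A*P: global column indices that fall inside
  P's local column ownership range are added to the hash set dht (diagonal part of the
  eventual product), all others to oht (off-diagonal part).
*/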
701 PETSC_STATIC_INLINE PetscErrorCode MatPtAPSymbolicComputeOneRowOfAP_private(Mat A,Mat P,Mat P_oth,PetscInt i,PetscHSetI dht,PetscHSetI oht)
702 {
703   Mat_MPIAIJ           *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
704   Mat_SeqAIJ           *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data,*p_oth=(Mat_SeqAIJ*)P_oth->data,*pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
705   PetscInt             *ai,nzi,j,*aj,row,col,*pi,*pj,pnz,nzpi,*p_othcols,k;
706   PetscInt             pcstart,pcend,column;
707   PetscErrorCode       ierr;
708 
709   PetscFunctionBegin;
710 
711   pcstart = P->cmap->rstart;
712   pcend   = P->cmap->rend;
713   /* diagonal portion: Ad[i,:]*P */
714   ai = ad->i;
715   nzi = ai[i+1] - ai[i];
716   aj  = ad->j + ai[i];
717   for (j=0; j<nzi; j++) {
718     row  = aj[j];
719     nzpi = pd->i[row+1] - pd->i[row];
720     pj  = pd->j + pd->i[row];
721     for (k=0; k<nzpi; k++) {
722       ierr = PetscHSetIAdd(dht,pj[k]+pcstart);CHKERRQ(ierr);
723     }
724   }
725   for (j=0; j<nzi; j++) {
726     row  = aj[j];
727     nzpi = po->i[row+1] - po->i[row];
728     pj  = po->j + po->i[row];
729     for (k=0; k<nzpi; k++) {
730       ierr = PetscHSetIAdd(oht,p->garray[pj[k]]);CHKERRQ(ierr);
731     }
732   }
733 
734   /* off diagonal part: Ao[i, :]*P_oth */
735   if (ao) {
736     ai = ao->i;
737     pi = p_oth->i;
738     nzi = ai[i+1] - ai[i];
739     aj  = ao->j + ai[i];
740     for (j=0; j<nzi; j++) {
741       row  = aj[j];
742       pnz  = pi[row+1] - pi[row];
743       p_othcols = p_oth->j + pi[row];
744       for (col=0; col<pnz; col++) {
745         column = p_othcols[col];
746         if (column>=pcstart && column<pcend) {
747           ierr = PetscHSetIAdd(dht,column);CHKERRQ(ierr);
748         } else {
749           ierr = PetscHSetIAdd(oht,column);CHKERRQ(ierr);
750         }
751       }
752     }
753   } /* end if (ao) */
754   PetscFunctionReturn(0);
755 }
756 
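/*
  Numeric counterpart of the routine above: accumulate the entries of row i of AP = A*P into
  the hash map hmap, keyed by global column index; duplicate contributions are summed.
*/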
757 PETSC_STATIC_INLINE PetscErrorCode MatPtAPNumericComputeOneRowOfAP_private(Mat A,Mat P,Mat P_oth,PetscInt i,PetscHMapIV hmap)
758 {
759   Mat_MPIAIJ           *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
760   Mat_SeqAIJ           *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data,*p_oth=(Mat_SeqAIJ*)P_oth->data,*pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
761   PetscInt             *ai,nzi,j,*aj,row,col,*pi,pnz,*p_othcols,pcstart,*pj,k,nzpi;
762   PetscScalar          ra,*aa,*pa;
763   PetscErrorCode       ierr;
764 
765   PetscFunctionBegin;
766   pcstart = P->cmap->rstart;
767   /* diagonal portion: Ad[i,:]*P */
768   ai  = ad->i;
769   nzi = ai[i+1] - ai[i];
770   aj  = ad->j + ai[i];
771   aa  = ad->a + ai[i];
772 
773   for (j=0; j<nzi; j++) {
774     ra   = aa[j];
775     row  = aj[j];
776     nzpi = pd->i[row+1] - pd->i[row];
777     pj = pd->j + pd->i[row];
778     pa = pd->a + pd->i[row];
779     for (k=0; k<nzpi; k++) {
780       ierr = PetscHMapIVAdd(hmap,pj[k]+pcstart,ra*pa[k]);CHKERRQ(ierr);
781     }
782   }
783   for (j=0; j<nzi; j++) {
784     ra   = aa[j];
785     row  = aj[j];
786     nzpi = po->i[row+1] - po->i[row];
787     pj = po->j + po->i[row];
788     pa = po->a + po->i[row];
789     for (k=0; k<nzpi; k++) {
790       ierr = PetscHMapIVAdd(hmap,p->garray[pj[k]],ra*pa[k]);CHKERRQ(ierr);
791     }
792   }
793 
794 
795   /* off diagonal part: Ao[i, :]*P_oth */
796   if (ao) {
797     ai = ao->i;
798     pi = p_oth->i;
799     nzi = ai[i+1] - ai[i];
800     aj  = ao->j + ai[i];
801     aa  = ao->a + ai[i];
802     for (j=0; j<nzi; j++) {
803       row  = aj[j];
804       ra   = aa[j];
805       pnz  = pi[row+1] - pi[row];
806       p_othcols = p_oth->j + pi[row];
807       pa   = p_oth->a + pi[row];
808       for (col=0; col<pnz; col++) {
809         ierr = PetscHMapIVAdd(hmap,p_othcols[col],ra*pa[col]);CHKERRQ(ierr);
810       }
811     }
812   } /* end if (ao) */
813   PetscFunctionReturn(0);
814 }
815 
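/*
  Numeric phase of the all-at-once algorithm ("overlapping" variant): rows of AP are rebuilt
  with a hash map; the contributions of Po^T*AP destined for other ranks are staged in
  c_rmtj/c_rmta and routed through ptap->sf, overlapping that communication with the local
  Pd^T*AP contributions added via MatSetValues().
*/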
816 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce(Mat A,Mat P,Mat C)
817 {
818   PetscErrorCode    ierr;
819   Mat_MPIAIJ        *p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
820   Mat_SeqAIJ        *cd,*co,*po,*pd;
821   Mat_APMPI         *ptap = c->ap;
822   PetscHMapIV       hmap;
823   PetscInt          i,j,jj,kk,nzi,*c_rmtj,voff,*c_othj,pn,pon,pcstart,pcend,ccstart,ccend,row,am,*poj,*pdj,*apindices,cmaxr,*c_rmtc,*c_rmtjj,*dcc,*occ,loc;
824   PetscScalar       *c_rmta,*c_otha,*poa,*pda,*apvalues,*apvaluestmp,*c_rmtaa;
825   MPI_Comm          comm;
826 
827   PetscFunctionBegin;
828   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
829   if (!ptap) SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
830 
831   ierr = MatZeroEntries(C);CHKERRQ(ierr);
832 
833   /* Get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
834   /*-----------------------------------------------------*/
835   if (ptap->reuse == MAT_REUSE_MATRIX) {
836     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
837     ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
838   }
839 
840   po = (Mat_SeqAIJ*) p->B->data;
841   pd = (Mat_SeqAIJ*) p->A->data;
842 
843   ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
844 
845   ierr = PetscCalloc2(ptap->c_rmti[pon],&c_rmtj,ptap->c_rmti[pon],&c_rmta);CHKERRQ(ierr);
846   ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
847   cmaxr = 0;
848   for (i=0; i<pon; i++) {
849     cmaxr = PetscMax(cmaxr,ptap->c_rmti[i+1]-ptap->c_rmti[i]);
850   }
851   ierr = PetscCalloc4(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pon,&c_rmtc);CHKERRQ(ierr);
852   ierr = PetscHMapIVCreate(&hmap);CHKERRQ(ierr);
853   ierr = PetscHMapIVResize(hmap,cmaxr);CHKERRQ(ierr);
854   for (i=0; i<am && pon; i++) {
855     ierr = PetscHMapIVClear(hmap);CHKERRQ(ierr);
856     nzi = po->i[i+1] - po->i[i];
857     if (!nzi) continue;
858     ierr = MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,i,hmap);CHKERRQ(ierr);
859     voff = 0;
860     ierr = PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);CHKERRQ(ierr);
861     if (!voff) continue;
862     /*ierr = PetscMemzero(c_rmtc,sizeof(PetscInt)*pon);CHKERRQ(ierr);*/
863 
864     /* Form C(ii, :) */
865     poj = po->j + po->i[i];
866     poa = po->a + po->i[i];
867     for (j=0; j<nzi; j++) {
868       c_rmtjj = c_rmtj + ptap->c_rmti[poj[j]];
869       c_rmtaa = c_rmta + ptap->c_rmti[poj[j]];
870       for (jj=0; jj<voff; jj++) {
871         apvaluestmp[jj] = apvalues[jj]*poa[j];
872         /*If the row is empty */
873         if (!c_rmtc[poj[j]]) {
874           c_rmtjj[jj] = apindices[jj];
875           c_rmtaa[jj] = apvaluestmp[jj];
876           c_rmtc[poj[j]]++;
877         } else {
878           ierr = PetscFindInt(apindices[jj],c_rmtc[poj[j]],c_rmtjj,&loc);CHKERRQ(ierr);
879           if (loc>=0){ /* hit */
880             c_rmtaa[loc] += apvaluestmp[jj];
881           } else { /* new element */
882             loc = -(loc+1);
883             /* Move data backward */
884             for (kk=c_rmtc[poj[j]]; kk>loc; kk--) {
885               c_rmtjj[kk] = c_rmtjj[kk-1];
886               c_rmtaa[kk] = c_rmtaa[kk-1];
887             }/* End kk */
888             c_rmtjj[loc] = apindices[jj];
889             c_rmtaa[loc] = apvaluestmp[jj];
890             c_rmtc[poj[j]]++;
891           }
892         }
893       } /* End jj */
894     } /* End j */
895   } /* End i */
896 
897   ierr = PetscFree4(apindices,apvalues,apvaluestmp,c_rmtc);CHKERRQ(ierr);
898 
899   ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
900   ierr = PetscCalloc2(ptap->c_othi[pn],&c_othj,ptap->c_othi[pn],&c_otha);CHKERRQ(ierr);
901 
902   ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
903   ierr = PetscSFReduceBegin(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
904   ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
905   cd = (Mat_SeqAIJ*)(c->A)->data;
906   co = (Mat_SeqAIJ*)(c->B)->data;
907 
908   cmaxr = 0;
909   for (i=0; i<pn; i++) {
910     cmaxr = PetscMax(cmaxr,(cd->i[i+1]-cd->i[i])+(co->i[i+1]-co->i[i]));
911   }
912   ierr = PetscCalloc5(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pn,&dcc,pn,&occ);CHKERRQ(ierr);
913   ierr = PetscHMapIVCreate(&hmap);CHKERRQ(ierr);
914   ierr = PetscHMapIVResize(hmap,cmaxr);CHKERRQ(ierr);
915   for (i=0; i<am && pn; i++) {
916     ierr = PetscHMapIVClear(hmap);CHKERRQ(ierr);
917     nzi = pd->i[i+1] - pd->i[i];
918     if (!nzi) continue;
919     ierr = MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,i,hmap);CHKERRQ(ierr);
920     voff = 0;
921     ierr = PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);CHKERRQ(ierr);
922     if (!voff) continue;
923     /* Form C(ii, :) */
924     pdj = pd->j + pd->i[i];
925     pda = pd->a + pd->i[i];
926     for (j=0; j<nzi; j++) {
927       row = pcstart + pdj[j];
928       for (jj=0; jj<voff; jj++) {
929         apvaluestmp[jj] = apvalues[jj]*pda[j];
930       }
931       ierr = MatSetValues(C,1,&row,voff,apindices,apvaluestmp,ADD_VALUES);CHKERRQ(ierr);
932     }
933   }
934 
935   ierr = MatGetOwnershipRangeColumn(C,&ccstart,&ccend);CHKERRQ(ierr);
936   ierr = PetscFree5(apindices,apvalues,apvaluestmp,dcc,occ);CHKERRQ(ierr);
937   ierr = PetscHMapIVDestroy(&hmap);CHKERRQ(ierr);
938   ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPI_SUM);CHKERRQ(ierr);
939   ierr = PetscSFReduceEnd(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
940   ierr = PetscFree2(c_rmtj,c_rmta);CHKERRQ(ierr);
941 
942   /* Add contributions from remote */
943   for (i = 0; i < pn; i++) {
944     row = i + pcstart;
945     ierr = MatSetValues(C,1,&row,ptap->c_othi[i+1]-ptap->c_othi[i],c_othj+ptap->c_othi[i],c_otha+ptap->c_othi[i],ADD_VALUES);CHKERRQ(ierr);
946   }
947   ierr = PetscFree2(c_othj,c_otha);CHKERRQ(ierr);
948 
949   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
950   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
951 
952   ptap->reuse = MAT_REUSE_MATRIX;
953 
954   /* supporting struct ptap consumes almost same amount of memory as C=PtAP, release it if C will not be updated by A and P */
955   if (ptap->freestruct) {
956     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
957   }
958   PetscFunctionReturn(0);
959 }
960 
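/*
  "Merged" variant of the all-at-once numeric phase: each row of AP is computed only once and
  used for both the remote (Po^T*AP) and local (Pd^T*AP) contributions inside the same loop;
  the PetscSF communication is not overlapped with local work here.
*/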
961 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged(Mat A,Mat P,Mat C)
962 {
963   PetscErrorCode    ierr;
964   Mat_MPIAIJ        *p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
965   Mat_SeqAIJ        *cd,*co,*po,*pd;
966   Mat_APMPI         *ptap = c->ap;
967   PetscHMapIV       hmap;
968   PetscInt          i,j,jj,kk,nzi,dnzi,*c_rmtj,voff,*c_othj,pn,pon,pcstart,pcend,row,am,*poj,*pdj,*apindices,cmaxr,*c_rmtc,*c_rmtjj,loc;
969   PetscScalar       *c_rmta,*c_otha,*poa,*pda,*apvalues,*apvaluestmp,*c_rmtaa;
970   MPI_Comm          comm;
971 
972   PetscFunctionBegin;
973   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
974   if (!ptap) SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
975 
976   ierr = MatZeroEntries(C);CHKERRQ(ierr);
977 
978   /* Get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
979   /*-----------------------------------------------------*/
980   if (ptap->reuse == MAT_REUSE_MATRIX) {
981     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
982     ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
983   }
984 
985   po = (Mat_SeqAIJ*) p->B->data;
986   pd = (Mat_SeqAIJ*) p->A->data;
987 
988   ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
989   ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
990 
991   ierr = PetscCalloc2(ptap->c_rmti[pon],&c_rmtj,ptap->c_rmti[pon],&c_rmta);CHKERRQ(ierr);
992   ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
993   ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
994   cmaxr = 0;
995   for (i=0; i<pon; i++) {
996     cmaxr = PetscMax(cmaxr,ptap->c_rmti[i+1]-ptap->c_rmti[i]);
997   }
998   cd = (Mat_SeqAIJ*)(c->A)->data;
999   co = (Mat_SeqAIJ*)(c->B)->data;
1000   for (i=0; i<pn; i++) {
1001     cmaxr = PetscMax(cmaxr,(cd->i[i+1]-cd->i[i])+(co->i[i+1]-co->i[i]));
1002   }
1003   ierr = PetscCalloc4(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pon,&c_rmtc);CHKERRQ(ierr);
1004   ierr = PetscHMapIVCreate(&hmap);CHKERRQ(ierr);
1005   ierr = PetscHMapIVResize(hmap,cmaxr);CHKERRQ(ierr);
1006   for (i=0; i<am && (pon || pn); i++) {
1007     ierr = PetscHMapIVClear(hmap);CHKERRQ(ierr);
1008     nzi  = po->i[i+1] - po->i[i];
1009     dnzi = pd->i[i+1] - pd->i[i];
1010     if (!nzi && !dnzi) continue;
1011     ierr = MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,i,hmap);CHKERRQ(ierr);
1012     voff = 0;
1013     ierr = PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);CHKERRQ(ierr);
1014     if (!voff) continue;
1015 
1016     /* Form remote C(ii, :) */
1017     poj = po->j + po->i[i];
1018     poa = po->a + po->i[i];
1019     for (j=0; j<nzi; j++) {
1020       c_rmtjj = c_rmtj + ptap->c_rmti[poj[j]];
1021       c_rmtaa = c_rmta + ptap->c_rmti[poj[j]];
1022       for (jj=0; jj<voff; jj++) {
1023         apvaluestmp[jj] = apvalues[jj]*poa[j];
1024         /*If the row is empty */
1025         if (!c_rmtc[poj[j]]) {
1026           c_rmtjj[jj] = apindices[jj];
1027           c_rmtaa[jj] = apvaluestmp[jj];
1028           c_rmtc[poj[j]]++;
1029         } else {
1030           ierr = PetscFindInt(apindices[jj],c_rmtc[poj[j]],c_rmtjj,&loc);CHKERRQ(ierr);
1031           if (loc>=0){ /* hit */
1032             c_rmtaa[loc] += apvaluestmp[jj];
1033           } else { /* new element */
1034             loc = -(loc+1);
1035             /* Move data backward */
1036             for (kk=c_rmtc[poj[j]]; kk>loc; kk--) {
1037               c_rmtjj[kk] = c_rmtjj[kk-1];
1038               c_rmtaa[kk] = c_rmtaa[kk-1];
1039             }/* End kk */
1040             c_rmtjj[loc] = apindices[jj];
1041             c_rmtaa[loc] = apvaluestmp[jj];
1042             c_rmtc[poj[j]]++;
1043           }
1044         }
1045       } /* End jj */
1046     } /* End j */
1047 
1048     /* Form local C(ii, :) */
1049     pdj = pd->j + pd->i[i];
1050     pda = pd->a + pd->i[i];
1051     for (j=0; j<dnzi; j++) {
1052       row = pcstart + pdj[j];
1053       for (jj=0; jj<voff; jj++) {
1054         apvaluestmp[jj] = apvalues[jj]*pda[j];
1055       }/* End kk */
1056       ierr = MatSetValues(C,1,&row,voff,apindices,apvaluestmp,ADD_VALUES);CHKERRQ(ierr);
1057     }/* End j */
1058   } /* End i */
1059 
1060   ierr = PetscFree4(apindices,apvalues,apvaluestmp,c_rmtc);CHKERRQ(ierr);
1061   ierr = PetscHMapIVDestroy(&hmap);CHKERRQ(ierr);
1062   ierr = PetscCalloc2(ptap->c_othi[pn],&c_othj,ptap->c_othi[pn],&c_otha);CHKERRQ(ierr);
1063 
1064   ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1065   ierr = PetscSFReduceBegin(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
1066   ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPI_SUM);CHKERRQ(ierr);
1067   ierr = PetscSFReduceEnd(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);CHKERRQ(ierr);
1068   ierr = PetscFree2(c_rmtj,c_rmta);CHKERRQ(ierr);
1069 
1070   /* Add contributions from remote */
1071   for (i = 0; i < pn; i++) {
1072     row = i + pcstart;
1073     ierr = MatSetValues(C,1,&row,ptap->c_othi[i+1]-ptap->c_othi[i],c_othj+ptap->c_othi[i],c_otha+ptap->c_othi[i],ADD_VALUES);CHKERRQ(ierr);
1074   }
1075   ierr = PetscFree2(c_othj,c_otha);CHKERRQ(ierr);
1076 
1077   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1078   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1079 
1080   ptap->reuse = MAT_REUSE_MATRIX;
1081 
1082   /* supporting struct ptap consumes almost the same amount of memory as C=PtAP; release it if C will not be updated again from A and P */
1083   if (ptap->freestruct) {
1084     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
1085   }
1086   PetscFunctionReturn(0);
1087 }
1088 
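/*
  Symbolic phase shared by both all-at-once variants: hash sets estimate the sparsity of
  P^T*A*P, a PetscSF is built to route the rows of Po^T*AP to their owning ranks, and the
  preallocation of Cmpi is derived from the merged local and received column sets. The
  numeric variant (overlapping or merged) is selected with -matptap_allatonce_via.
*/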
1089 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(Mat A,Mat P,PetscReal fill,Mat *C)
1090 {
1091   Mat_APMPI           *ptap;
1092   Mat_MPIAIJ          *p=(Mat_MPIAIJ*)P->data,*c;
1093   MPI_Comm            comm;
1094   Mat                 Cmpi;
1095   Mat_SeqAIJ          *pd,*po;
1096   MatType             mtype;
1097   PetscSF             sf;
1098   PetscSFNode         *iremote;
1099   PetscInt            rootspacesize,*rootspace,*rootspaceoffsets,nleaves;
1100   const PetscInt      *rootdegrees;
1101   PetscHSetI          ht,oht,*hta,*hto;
1102   PetscInt            pn,pon,*c_rmtc,i,j,nzi,htsize,htosize,*c_rmtj,off,*c_othj,rcvncols,sendncols,*c_rmtoffsets;
1103   PetscInt            owner,lidx,*rdj,col,pcstart,pcend,*dnz,*onz,am,arstart,arend,*poj,*pdj;
1104   PetscInt            nalg=2,alg=0;
1105   PetscBool           flg;
1106   const char          *algTypes[2] = {"overlapping","merged"};
1107   PetscErrorCode      ierr;
1108 
1109   PetscFunctionBegin;
1110   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1111 
1112   /* Create symbolic parallel matrix Cmpi */
1113   ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
1114   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
1115   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
1116   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
1117   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1118 
1119   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
1120   ptap->reuse = MAT_INITIAL_MATRIX;
1121   ptap->algType = 2;
1122 
1123   /* Get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1124   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
1125 
1126   po = (Mat_SeqAIJ*)p->B->data;
1127   pd = (Mat_SeqAIJ*)p->A->data;
1128 
1129   /* This equals the number of offdiag columns in P */
1130   ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
1131   /* offsets */
1132   ierr = PetscMalloc1(pon+1,&ptap->c_rmti);CHKERRQ(ierr);
1133   /* The number of columns we will send to remote ranks */
1134   ierr = PetscMalloc1(pon,&c_rmtc);CHKERRQ(ierr);
1135   ierr = PetscMalloc1(pon,&hta);CHKERRQ(ierr);
1136   for (i=0; i<pon; i++) {
1137     ierr = PetscHSetICreate(&hta[i]);CHKERRQ(ierr);
1138   }
1139   ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
1140   ierr = MatGetOwnershipRange(A,&arstart,&arend);CHKERRQ(ierr);
1141   /* Create hash table to merge all columns for C(i, :) */
1142   ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
1143 
1144   ptap->c_rmti[0] = 0;
1145   /* 2) Pass 1: calculate the size of C_rmt (a matrix that needs to be sent to other processors) */
1146   for (i=0; i<am && pon; i++) {
1147     /* Form one row of AP */
1148     ierr = PetscHSetIClear(ht);CHKERRQ(ierr);
1149     /* If the off diag is empty, we should not do any calculation */
1150     nzi = po->i[i+1] - po->i[i];
1151     if (!nzi) continue;
1152 
1153     ierr = MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,i,ht,ht);CHKERRQ(ierr);
1154     ierr = PetscHSetIGetSize(ht,&htsize);CHKERRQ(ierr);
1155     /* If AP is empty, just continue */
1156     if (!htsize) continue;
1157     /* Form C(ii, :) */
1158     poj = po->j + po->i[i];
1159     for (j=0; j<nzi; j++) {
1160       ierr = PetscHSetIAppend(hta[poj[j]],ht);CHKERRQ(ierr);
1161     }
1162   }
1163 
1164   for (i=0; i<pon; i++) {
1165     ierr = PetscHSetIGetSize(hta[i],&htsize);CHKERRQ(ierr);
1166     ptap->c_rmti[i+1] = ptap->c_rmti[i] + htsize;
1167     c_rmtc[i] = htsize;
1168   }
1169 
1170   ierr = PetscMalloc1(ptap->c_rmti[pon],&c_rmtj);CHKERRQ(ierr);
1171 
1172   for (i=0; i<pon; i++) {
1173     off = 0;
1174     ierr = PetscHSetIGetElems(hta[i],&off,c_rmtj+ptap->c_rmti[i]);CHKERRQ(ierr);
1175     ierr = PetscHSetIDestroy(&hta[i]);CHKERRQ(ierr);
1176   }
1177   ierr = PetscFree(hta);CHKERRQ(ierr);
1178 
1179   ierr = PetscCalloc1(pon,&iremote);CHKERRQ(ierr);
1180   for (i=0; i<pon; i++) {
1181     owner = 0; lidx = 0;
1182     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[i],&owner,&lidx);CHKERRQ(ierr);
1183     iremote[i].index = lidx;
1184     iremote[i].rank  = owner;
1185   }
1186 
1187   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
1188   ierr = PetscSFSetGraph(sf,pn,pon,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1189   /* Reorder ranks properly so that the data handled by gather and scatter have the same order */
1190   ierr = PetscSFSetRankOrder(sf,PETSC_TRUE);CHKERRQ(ierr);
1191   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
1192   ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
1193   /* How many neighbors have contributions to my rows? */
1194   ierr = PetscSFComputeDegreeBegin(sf,&rootdegrees);CHKERRQ(ierr);
1195   ierr = PetscSFComputeDegreeEnd(sf,&rootdegrees);CHKERRQ(ierr);
1196   rootspacesize = 0;
1197   for (i = 0; i < pn; i++) {
1198     rootspacesize += rootdegrees[i];
1199   }
1200   ierr = PetscCalloc1(rootspacesize,&rootspace);CHKERRQ(ierr);
1201   ierr = PetscCalloc1(rootspacesize+1,&rootspaceoffsets);CHKERRQ(ierr);
1202   /* Get information from leaves:
1203    * the number of columns other ranks contribute to my rows
1204    */
1205   ierr = PetscSFGatherBegin(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1206   ierr = PetscSFGatherEnd(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1207   ierr = PetscFree(c_rmtc);CHKERRQ(ierr);
1208   ierr = PetscCalloc1(pn+1,&ptap->c_othi);CHKERRQ(ierr);
1209   /* Count the number of columns received for each row */
1210   ptap->c_othi[0] = 0;
1211   rootspacesize = 0;
1212   rootspaceoffsets[0] = 0;
1213   for (i = 0; i < pn; i++) {
1214     rcvncols = 0;
1215     for (j = 0; j<rootdegrees[i]; j++) {
1216       rcvncols += rootspace[rootspacesize];
1217       rootspaceoffsets[rootspacesize+1] = rootspaceoffsets[rootspacesize] + rootspace[rootspacesize];
1218       rootspacesize++;
1219     }
1220     ptap->c_othi[i+1] = ptap->c_othi[i] + rcvncols;
1221   }
1222   ierr = PetscFree(rootspace);CHKERRQ(ierr);
1223 
1224   ierr = PetscCalloc1(pon,&c_rmtoffsets);CHKERRQ(ierr);
1225   ierr = PetscSFScatterBegin(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1226   ierr = PetscSFScatterEnd(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1227   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1228   ierr = PetscFree(rootspaceoffsets);CHKERRQ(ierr);
1229 
1230   ierr = PetscCalloc1(ptap->c_rmti[pon],&iremote);CHKERRQ(ierr);
1231   nleaves = 0;
1232   for (i = 0; i<pon; i++) {
1233     owner = 0; lidx = 0;
1234     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[i],&owner,&lidx);CHKERRQ(ierr);
1235     sendncols = ptap->c_rmti[i+1] - ptap->c_rmti[i];
1236     for (j=0; j<sendncols; j++) {
1237       iremote[nleaves].rank = owner;
1238       iremote[nleaves++].index = c_rmtoffsets[i] + j;
1239     }
1240   }
1241   ierr = PetscFree(c_rmtoffsets);CHKERRQ(ierr);
1242   ierr = PetscCalloc1(ptap->c_othi[pn],&c_othj);CHKERRQ(ierr);
1243 
1244   ierr = PetscSFCreate(comm,&ptap->sf);CHKERRQ(ierr);
1245   ierr = PetscSFSetGraph(ptap->sf,ptap->c_othi[pn],nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1246   ierr = PetscSFSetFromOptions(ptap->sf);CHKERRQ(ierr);
1247   /* One to one map */
1248   ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1249 
1250   ierr = PetscCalloc2(pn,&dnz,pn,&onz);CHKERRQ(ierr);
1251   ierr = PetscHSetICreate(&oht);CHKERRQ(ierr);
1252   ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
1253   ierr = PetscMalloc2(pn,&hta,pn,&hto);CHKERRQ(ierr);
1254   for (i=0; i<pn; i++) {
1255     ierr = PetscHSetICreate(&hta[i]);CHKERRQ(ierr);
1256     ierr = PetscHSetICreate(&hto[i]);CHKERRQ(ierr);
1257   }
1258   /* Work on local part */
1259   /* 4) Pass 1: Estimate memory for C_loc */
1260   for (i=0; i<am && pn; i++) {
1261     ierr = PetscHSetIClear(ht);CHKERRQ(ierr);
1262     ierr = PetscHSetIClear(oht);CHKERRQ(ierr);
1263     nzi = pd->i[i+1] - pd->i[i];
1264     if (!nzi) continue;
1265 
1266     ierr = MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,i,ht,oht);CHKERRQ(ierr);
1267     ierr = PetscHSetIGetSize(ht,&htsize);CHKERRQ(ierr);
1268     ierr = PetscHSetIGetSize(oht,&htosize);CHKERRQ(ierr);
1269     if (!(htsize+htosize)) continue;
1270     /* Form C(ii, :) */
1271     pdj = pd->j + pd->i[i];
1272     for (j=0; j<nzi; j++) {
1273       ierr = PetscHSetIAppend(hta[pdj[j]],ht);CHKERRQ(ierr);
1274       ierr = PetscHSetIAppend(hto[pdj[j]],oht);CHKERRQ(ierr);
1275     }
1276   }
1277 
1278   ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
1279   ierr = PetscHSetIDestroy(&oht);CHKERRQ(ierr);
1280 
1281   /* Get remote data */
1282   ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1283   ierr = PetscFree(c_rmtj);CHKERRQ(ierr);
1284 
1285   for (i = 0; i < pn; i++) {
1286     nzi = ptap->c_othi[i+1] - ptap->c_othi[i];
1287     rdj = c_othj + ptap->c_othi[i];
1288     for (j = 0; j < nzi; j++) {
1289       col =  rdj[j];
1290       /* diag part */
1291       if (col>=pcstart && col<pcend) {
1292         ierr = PetscHSetIAdd(hta[i],col);CHKERRQ(ierr);
1293       } else { /* off diag */
1294         ierr = PetscHSetIAdd(hto[i],col);CHKERRQ(ierr);
1295       }
1296     }
1297     ierr = PetscHSetIGetSize(hta[i],&htsize);CHKERRQ(ierr);
1298     dnz[i] = htsize;
1299     ierr = PetscHSetIDestroy(&hta[i]);CHKERRQ(ierr);
1300     ierr = PetscHSetIGetSize(hto[i],&htsize);CHKERRQ(ierr);
1301     onz[i] = htsize;
1302     ierr = PetscHSetIDestroy(&hto[i]);CHKERRQ(ierr);
1303   }
1304 
1305   ierr = PetscFree2(hta,hto);CHKERRQ(ierr);
1306   ierr = PetscFree(c_othj);CHKERRQ(ierr);
1307 
1308   /* local sizes and preallocation */
1309   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1310   ierr = MatSetBlockSizes(Cmpi,PetscAbs(P->cmap->bs),PetscAbs(P->cmap->bs));CHKERRQ(ierr);
1311   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
1312   ierr = PetscFree2(dnz,onz);CHKERRQ(ierr);
1313 
1314   /* attach the supporting struct to Cmpi for reuse */
1315   c = (Mat_MPIAIJ*)Cmpi->data;
1316   c->ap           = ptap;
1317   ptap->duplicate = Cmpi->ops->duplicate;
1318   ptap->destroy   = Cmpi->ops->destroy;
1319   ptap->view      = Cmpi->ops->view;
1320 
1321   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1322   Cmpi->assembled        = PETSC_FALSE;
1323   /* pick an algorithm */
1324   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");CHKERRQ(ierr);
1325   alg = 0;
1326   ierr = PetscOptionsEList("-matptap_allatonce_via","PtAP allatonce numeric approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);CHKERRQ(ierr);
1327   ierr = PetscOptionsEnd();CHKERRQ(ierr);
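  /* The numeric kernel can be switched at run time, e.g. -matptap_allatonce_via <type>,
     where <type> is one of the algTypes entries declared earlier in this routine */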
1328   switch (alg) {
1329     case 0:
1330       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce;
1331       break;
1332     case 1:
1333       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged;
1334       break;
1335     default:
1336       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unsupported allatonce numerical algorithm");
1337   }
1338   Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
1339   Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
1340   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
1341   *C                     = Cmpi;
1342   PetscFunctionReturn(0);
1343 }
1344 
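/*
   A minimal user-level sketch (not part of this file) of the call sequence that reaches the symbolic
   and numeric routines here; A and P are assumed to be assembled MPIAIJ matrices with compatible
   layouts, and the fill value 2.0 is only illustrative:

     Mat C;
     ierr = MatPtAP(A,P,MAT_INITIAL_MATRIX,2.0,&C);CHKERRQ(ierr);   symbolic plus first numeric product
     ierr = MatPtAP(A,P,MAT_REUSE_MATRIX,2.0,&C);CHKERRQ(ierr);     numeric only, reusing the attached Mat_APMPI
     ierr = MatDestroy(&C);CHKERRQ(ierr);
*/
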
1345 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(Mat A,Mat P,PetscReal fill,Mat *C)
1346 {
1347   Mat_APMPI           *ptap;
1348   Mat_MPIAIJ          *p=(Mat_MPIAIJ*)P->data,*c;
1349   MPI_Comm            comm;
1350   Mat                 Cmpi;
1351   Mat_SeqAIJ          *pd,*po;
1352   MatType             mtype;
1353   PetscSF             sf;
1354   PetscSFNode         *iremote;
1355   PetscInt            rootspacesize,*rootspace,*rootspaceoffsets,nleaves;
1356   const PetscInt      *rootdegrees;
1357   PetscHSetI          ht,oht,*hta,*hto,*htd;
1358   PetscInt            pn,pon,*c_rmtc,i,j,nzi,dnzi,htsize,htosize,*c_rmtj,off,*c_othj,rcvncols,sendncols,*c_rmtoffsets;
1359   PetscInt            owner,lidx,*rdj,col,pcstart,pcend,*dnz,*onz,am,arstart,arend,*poj,*pdj;
1360   PetscInt            nalg=2,alg=0;
1361   PetscBool           flg;
1362   const char          *algTypes[2] = {"merged","overlapping"};
1363   PetscErrorCode      ierr;
1364 
1365   PetscFunctionBegin;
1366   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1367 
1368   /* Create symbolic parallel matrix Cmpi */
1369   ierr = MatGetLocalSize(P,NULL,&pn);CHKERRQ(ierr);
1370   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
1371   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
1372   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
1373   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1374 
1375   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
1376   ptap->reuse = MAT_INITIAL_MATRIX;
1377   ptap->algType = 3;
1378 
1379   /* 0) Get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1380   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
1381 
1382   po = (Mat_SeqAIJ*)p->B->data;
1383   pd = (Mat_SeqAIJ*)p->A->data;
1384 
1385   /* This equals the number of off-diagonal columns of the local part of P */
1386   ierr = MatGetLocalSize(p->B,NULL,&pon);CHKERRQ(ierr);
1387   /* Row offsets for the rows of C to be sent to other processes (C_rmt) */
1388   ierr = PetscMalloc1(pon+1,&ptap->c_rmti);CHKERRQ(ierr);
1389   /* The number of columns we will send to remote ranks */
1390   ierr = PetscMalloc1(pon,&c_rmtc);CHKERRQ(ierr);
1391   ierr = PetscMalloc1(pon,&hta);CHKERRQ(ierr);
1392   for (i=0; i<pon; i++) {
1393     ierr = PetscHSetICreate(&hta[i]);CHKERRQ(ierr);
1394   }
1395   ierr = MatGetLocalSize(A,&am,NULL);CHKERRQ(ierr);
1396   ierr = MatGetOwnershipRange(A,&arstart,&arend);CHKERRQ(ierr);
1397   /* Create hash table to merge all columns for C(i, :) */
1398   ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
1399   ierr = PetscHSetICreate(&oht);CHKERRQ(ierr);
1400   ierr = PetscMalloc2(pn,&htd,pn,&hto);CHKERRQ(ierr);
1401   for (i=0; i<pn; i++) {
1402     ierr = PetscHSetICreate(&htd[i]);CHKERRQ(ierr);
1403     ierr = PetscHSetICreate(&hto[i]);CHKERRQ(ierr);
1404   }
1405   ptap->c_rmti[0] = 0;
1406   /* 2) Pass 1: calculate the size of C_rmt (the matrix that needs to be sent to other processes) */
1407   for (i=0; i<am && (pon || pn); i++) {
1408     /* Form one row of AP */
1409     ierr = PetscHSetIClear(ht);CHKERRQ(ierr);
1410     ierr = PetscHSetIClear(oht);CHKERRQ(ierr);
1411     /* If both the diagonal and off-diagonal parts of this row are empty, skip the calculation */
1412     nzi = po->i[i+1] - po->i[i];
1413     dnzi = pd->i[i+1] - pd->i[i];
1414     if (!nzi && !dnzi) continue;
1415 
1416     ierr = MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,i,ht,oht);CHKERRQ(ierr);
1417     ierr = PetscHSetIGetSize(ht,&htsize);CHKERRQ(ierr);
1418     ierr = PetscHSetIGetSize(oht,&htosize);CHKERRQ(ierr);
1419     /* If AP is empty, just continue */
1420     if (!(htsize+htosize)) continue;
1421 
1422     /* Form remote C(ii, :) */
1423     poj = po->j + po->i[i];
1424     for (j=0; j<nzi; j++) {
1425       ierr = PetscHSetIAppend(hta[poj[j]],ht);CHKERRQ(ierr);
1426       ierr = PetscHSetIAppend(hta[poj[j]],oht);CHKERRQ(ierr);
1427     }
1428 
1429     /* Form local C(ii, :) */
1430     pdj = pd->j + pd->i[i];
1431     for (j=0; j<dnzi; j++) {
1432       ierr = PetscHSetIAppend(htd[pdj[j]],ht);CHKERRQ(ierr);
1433       ierr = PetscHSetIAppend(hto[pdj[j]],oht);CHKERRQ(ierr);
1434     }
1435   }
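  /* In this "merged" variant the single pass over the rows of A above fills both the remote row sets (hta)
     and the local diagonal/off-diagonal row sets (htd/hto), instead of doing separate local and remote passes */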
1436 
1437   ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
1438   ierr = PetscHSetIDestroy(&oht);CHKERRQ(ierr);
1439 
1440   for (i=0; i<pon; i++) {
1441     ierr = PetscHSetIGetSize(hta[i],&htsize);CHKERRQ(ierr);
1442     ptap->c_rmti[i+1] = ptap->c_rmti[i] + htsize;
1443     c_rmtc[i] = htsize;
1444   }
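  /* c_rmti[] now holds row offsets into c_rmtj and c_rmtc[] the per-row column counts
     for the rows of C that must be sent to their owning processes */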
1445 
1446   ierr = PetscMalloc1(ptap->c_rmti[pon],&c_rmtj);CHKERRQ(ierr);
1447 
1448   for (i=0; i<pon; i++) {
1449     off = 0;
1450     ierr = PetscHSetIGetElems(hta[i],&off,c_rmtj+ptap->c_rmti[i]);CHKERRQ(ierr);
1451     ierr = PetscHSetIDestroy(&hta[i]);CHKERRQ(ierr);
1452   }
1453   ierr = PetscFree(hta);CHKERRQ(ierr);
1454 
1455   ierr = PetscCalloc1(pon,&iremote);CHKERRQ(ierr);
1456   for (i=0; i<pon; i++) {
1457     owner = 0; lidx = 0;
1458     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[i],&owner,&lidx);CHKERRQ(ierr);
1459     iremote[i].index = lidx;
1460     iremote[i].rank  = owner;
1461   }
1462 
1463   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
1464   ierr = PetscSFSetGraph(sf,pn,pon,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1465   /* Reorder ranks properly so that the data handled by gather and scatter have the same order */
1466   ierr = PetscSFSetRankOrder(sf,PETSC_TRUE);CHKERRQ(ierr);
1467   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
1468   ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
1469   /* How many neighbors have contributions to my rows? */
1470   ierr = PetscSFComputeDegreeBegin(sf,&rootdegrees);CHKERRQ(ierr);
1471   ierr = PetscSFComputeDegreeEnd(sf,&rootdegrees);CHKERRQ(ierr);
1472   rootspacesize = 0;
1473   for (i = 0; i < pn; i++) {
1474     rootspacesize += rootdegrees[i];
1475   }
1476   ierr = PetscCalloc1(rootspacesize,&rootspace);CHKERRQ(ierr);
1477   ierr = PetscCalloc1(rootspacesize+1,&rootspaceoffsets);CHKERRQ(ierr);
1478   /* Get information from leaves:
1479    * the number of columns other processes contribute to my rows
1480    */
1481   ierr = PetscSFGatherBegin(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1482   ierr = PetscSFGatherEnd(sf,MPIU_INT,c_rmtc,rootspace);CHKERRQ(ierr);
1483   ierr = PetscFree(c_rmtc);CHKERRQ(ierr);
1484   ierr = PetscCalloc1(pn+1,&ptap->c_othi);CHKERRQ(ierr);
1485   /* Count how many columns are received for each row and build the row offsets c_othi */
1486   ptap->c_othi[0] = 0;
1487   rootspacesize = 0;
1488   rootspaceoffsets[0] = 0;
1489   for (i = 0; i < pn; i++) {
1490     rcvncols = 0;
1491     for (j = 0; j<rootdegrees[i]; j++) {
1492       rcvncols += rootspace[rootspacesize];
1493       rootspaceoffsets[rootspacesize+1] = rootspaceoffsets[rootspacesize] + rootspace[rootspacesize];
1494       rootspacesize++;
1495     }
1496     ptap->c_othi[i+1] = ptap->c_othi[i] + rcvncols;
1497   }
1498   ierr = PetscFree(rootspace);CHKERRQ(ierr);
1499 
1500   ierr = PetscCalloc1(pon,&c_rmtoffsets);CHKERRQ(ierr);
1501   ierr = PetscSFScatterBegin(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1502   ierr = PetscSFScatterEnd(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);CHKERRQ(ierr);
1503   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1504   ierr = PetscFree(rootspaceoffsets);CHKERRQ(ierr);
1505 
1506   ierr = PetscCalloc1(ptap->c_rmti[pon],&iremote);CHKERRQ(ierr);
1507   nleaves = 0;
1508   for (i = 0; i<pon; i++) {
1509     owner = 0; lidx = 0;
1510     ierr = PetscLayoutFindOwnerIndex(P->cmap,p->garray[i],&owner,&lidx);CHKERRQ(ierr);
1511     sendncols = ptap->c_rmti[i+1] - ptap->c_rmti[i];
1512     for (j=0; j<sendncols; j++) {
1513       iremote[nleaves].rank = owner;
1514       iremote[nleaves++].index = c_rmtoffsets[i] + j;
1515     }
1516   }
1517   ierr = PetscFree(c_rmtoffsets);CHKERRQ(ierr);
1518   ierr = PetscCalloc1(ptap->c_othi[pn],&c_othj);CHKERRQ(ierr);
1519 
1520   ierr = PetscSFCreate(comm,&ptap->sf);CHKERRQ(ierr);
1521   ierr = PetscSFSetGraph(ptap->sf,ptap->c_othi[pn],nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
1522   ierr = PetscSFSetFromOptions(ptap->sf);CHKERRQ(ierr);
1523   /* One-to-one map */
1524   ierr = PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
1525   /* Get remote data */
1526   ierr = PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);CHKERRQ(ierr);
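  /* Here the reduction is completed immediately: the local and remote column sets were already
     accumulated in the single merged pass over the rows of A above */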
1527   ierr = PetscFree(c_rmtj);CHKERRQ(ierr);
1528   ierr = PetscCalloc2(pn,&dnz,pn,&onz);CHKERRQ(ierr);
1529   ierr = MatGetOwnershipRangeColumn(P,&pcstart,&pcend);CHKERRQ(ierr);
1530 
1531   for (i = 0; i < pn; i++) {
1532     nzi = ptap->c_othi[i+1] - ptap->c_othi[i];
1533     rdj = c_othj + ptap->c_othi[i];
1534     for (j = 0; j < nzi; j++) {
1535       col =  rdj[j];
1536       /* diag part */
1537       if (col>=pcstart && col<pcend) {
1538         ierr = PetscHSetIAdd(htd[i],col);CHKERRQ(ierr);
1539       } else { /* off diag */
1540         ierr = PetscHSetIAdd(hto[i],col);CHKERRQ(ierr);
1541       }
1542     }
1543     ierr = PetscHSetIGetSize(htd[i],&htsize);CHKERRQ(ierr);
1544     dnz[i] = htsize;
1545     ierr = PetscHSetIDestroy(&htd[i]);CHKERRQ(ierr);
1546     ierr = PetscHSetIGetSize(hto[i],&htsize);CHKERRQ(ierr);
1547     onz[i] = htsize;
1548     ierr = PetscHSetIDestroy(&hto[i]);CHKERRQ(ierr);
1549   }
1550 
1551   ierr = PetscFree2(htd,hto);CHKERRQ(ierr);
1552   ierr = PetscFree(c_othj);CHKERRQ(ierr);
1553 
1554   /* local sizes and preallocation */
1555   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1556   ierr = MatSetBlockSizes(Cmpi,PetscAbs(P->cmap->bs),PetscAbs(P->cmap->bs));CHKERRQ(ierr);
1557   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
1558   ierr = PetscFree2(dnz,onz);CHKERRQ(ierr);
1559 
1560   /* attach the supporting struct to Cmpi for reuse */
1561   c = (Mat_MPIAIJ*)Cmpi->data;
1562   c->ap           = ptap;
1563   ptap->duplicate = Cmpi->ops->duplicate;
1564   ptap->destroy   = Cmpi->ops->destroy;
1565   ptap->view      = Cmpi->ops->view;
1566 
1567   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1568   Cmpi->assembled        = PETSC_FALSE;
1569   /* pick an algorithm */
1570   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");CHKERRQ(ierr);
1571   alg = 0;
1572   ierr = PetscOptionsEList("-matptap_allatonce_via","PtAP allatonce numeric approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);CHKERRQ(ierr);
1573   ierr = PetscOptionsEnd();CHKERRQ(ierr);
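  /* e.g. -matptap_allatonce_via overlapping selects the non-merged numeric kernel handled by case 1 below */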
1574   switch (alg) {
1575     case 0:
1576       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged;
1577       break;
1578     case 1:
1579       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce;
1580       break;
1581     default:
1582       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unsupported allatonce numerical algorithm");
1583   }
1584   Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
1585   Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
1586   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
1587   *C                     = Cmpi;
1588   PetscFunctionReturn(0);
1589 }
1590 
1591 PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ(Mat A,Mat P,PetscReal fill,Mat *C)
1592 {
1593   PetscErrorCode      ierr;
1594   Mat_APMPI           *ptap;
1595   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c;
1596   MPI_Comm            comm;
1597   PetscMPIInt         size,rank;
1598   Mat                 Cmpi;
1599   PetscFreeSpaceList  free_space=NULL,current_space=NULL;
1600   PetscInt            am=A->rmap->n,pm=P->rmap->n,pN=P->cmap->N,pn=P->cmap->n;
1601   PetscInt            *lnk,i,k,pnz,row,nsend;
1602   PetscBT             lnkbt;
1603   PetscMPIInt         tagi,tagj,*len_si,*len_s,*len_ri,icompleted=0,nrecv;
1604   PetscInt            **buf_rj,**buf_ri,**buf_ri_k;
1605   PetscInt            len,proc,*dnz,*onz,*owners,nzi,nspacedouble;
1606   PetscInt            nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
1607   MPI_Request         *swaits,*rwaits;
1608   MPI_Status          *sstatus,rstatus;
1609   PetscLayout         rowmap;
1610   PetscInt            *owners_co,*coi,*coj;    /* i and j array of (p->B)^T*A*P - used in the communication */
1611   PetscMPIInt         *len_r,*id_r;    /* array of length of comm->size, store send/recv matrix values */
1612   PetscInt            *api,*apj,*Jptr,apnz,*prmap=p->garray,con,j,ap_rmax=0,Crmax,*aj,*ai,*pi;
1613   Mat_SeqAIJ          *p_loc,*p_oth=NULL,*ad=(Mat_SeqAIJ*)(a->A)->data,*ao=NULL,*c_loc,*c_oth;
1614   PetscScalar         *apv;
1615   PetscTable          ta;
1616   MatType             mtype;
1617   const char          *prefix;
1618 #if defined(PETSC_USE_INFO)
1619   PetscReal           apfill;
1620 #endif
1621 
1622   PetscFunctionBegin;
1623   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1624   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1625   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1626 
1627   if (size > 1) ao = (Mat_SeqAIJ*)(a->B)->data;
1628 
1629   /* create symbolic parallel matrix Cmpi */
1630   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
1631   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
1632   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
1633 
1634   /* Do dense axpy in MatPtAPNumeric_MPIAIJ_MPIAIJ() */
1635   Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ;
1636 
1637   /* create struct Mat_APMPI and attach it to C later */
1638   ierr        = PetscNew(&ptap);CHKERRQ(ierr);
1639   ptap->reuse = MAT_INITIAL_MATRIX;
1640   ptap->algType = 1;
1641 
1642   /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1643   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
1644   /* get P_loc by taking all local rows of P */
1645   ierr = MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
1646 
1647   /* (0) compute Rd = Pd^T, Ro = Po^T  */
1648   /* --------------------------------- */
1649   ierr = MatTranspose(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);CHKERRQ(ierr);
1650   ierr = MatTranspose(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);CHKERRQ(ierr);
1651 
1652   /* (1) compute symbolic AP = A_loc*P = Ad*P_loc + Ao*P_oth (api,apj) */
1653   /* ----------------------------------------------------------------- */
1654   p_loc  = (Mat_SeqAIJ*)(ptap->P_loc)->data;
1655   if (ptap->P_oth) p_oth  = (Mat_SeqAIJ*)(ptap->P_oth)->data;
1656 
1657   /* create and initialize a linked list */
1658   ierr = PetscTableCreate(pn,pN,&ta);CHKERRQ(ierr); /* for computing AP_loc and Cmpi */
1659   MatRowMergeMax_SeqAIJ(p_loc,ptap->P_loc->rmap->N,ta);
1660   MatRowMergeMax_SeqAIJ(p_oth,ptap->P_oth->rmap->N,ta);
1661   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr); /* Crmax = nnz(sum of Prows) */
1662   /* printf("[%d] est %d, Crmax %d; pN %d\n",rank,5*(p_loc->rmax+p_oth->rmax + (PetscInt)(1.e-2*pN)),Crmax,pN); */
1663 
1664   ierr = PetscLLCondensedCreate(Crmax,pN,&lnk,&lnkbt);CHKERRQ(ierr);
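  /* The condensed linked list accumulates, one row of AP at a time, the sorted union of the column
     indices of the rows of P_loc and P_oth selected by that row of A; Crmax bounds its length */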
1665 
1666   /* Initial FreeSpace size is fill*(nnz(A) + nnz(P)) */
1667   if (ao) {
1668     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],PetscIntSumTruncate(ao->i[am],p_loc->i[pm]))),&free_space);CHKERRQ(ierr);
1669   } else {
1670     ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],p_loc->i[pm])),&free_space);CHKERRQ(ierr);
1671   }
1672   current_space = free_space;
1673   nspacedouble  = 0;
1674 
1675   ierr   = PetscMalloc1(am+1,&api);CHKERRQ(ierr);
1676   api[0] = 0;
1677   for (i=0; i<am; i++) {
1678     /* diagonal portion: Ad[i,:]*P */
1679     ai = ad->i; pi = p_loc->i;
1680     nzi = ai[i+1] - ai[i];
1681     aj  = ad->j + ai[i];
1682     for (j=0; j<nzi; j++) {
1683       row  = aj[j];
1684       pnz  = pi[row+1] - pi[row];
1685       Jptr = p_loc->j + pi[row];
1686       /* add non-zero cols of P into the sorted linked list lnk */
1687       ierr = PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1688     }
1689     /* off-diagonal portion: Ao[i,:]*P */
1690     if (ao) {
1691       ai = ao->i; pi = p_oth->i;
1692       nzi = ai[i+1] - ai[i];
1693       aj  = ao->j + ai[i];
1694       for (j=0; j<nzi; j++) {
1695         row  = aj[j];
1696         pnz  = pi[row+1] - pi[row];
1697         Jptr = p_oth->j + pi[row];
1698         ierr = PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1699       }
1700     }
1701     apnz     = lnk[0];
1702     api[i+1] = api[i] + apnz;
1703     if (ap_rmax < apnz) ap_rmax = apnz;
1704 
1705     /* if free space is not available, double the total space in the list */
1706     if (current_space->local_remaining<apnz) {
1707       ierr = PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);CHKERRQ(ierr);
1708       nspacedouble++;
1709     }
1710 
1711     /* Copy data into free space, then initialize lnk */
1712     ierr = PetscLLCondensedClean(pN,apnz,current_space->array,lnk,lnkbt);CHKERRQ(ierr);
1713 
1714     current_space->array           += apnz;
1715     current_space->local_used      += apnz;
1716     current_space->local_remaining -= apnz;
1717   }
1718   /* Allocate space for apj and apv, initialize apj, and */
1719   /* destroy list of free space and other temporary array(s) */
1720   ierr   = PetscMalloc2(api[am],&apj,api[am],&apv);CHKERRQ(ierr);
1721   ierr   = PetscFreeSpaceContiguous(&free_space,apj);CHKERRQ(ierr);
1722   ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
1723 
1724   /* Create AP_loc for reuse */
1725   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,pN,api,apj,apv,&ptap->AP_loc);CHKERRQ(ierr);
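  /* Note: MatCreateSeqAIJWithArrays() does not copy api/apj/apv; AP_loc references them directly,
     so they must stay valid for the lifetime of AP_loc and are not freed by a plain MatDestroy() */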
1726 
1727 #if defined(PETSC_USE_INFO)
1728   if (ao) {
1729     apfill = (PetscReal)api[am]/(ad->i[am]+ao->i[am]+p_loc->i[pm]+1);
1730   } else {
1731     apfill = (PetscReal)api[am]/(ad->i[am]+p_loc->i[pm]+1);
1732   }
1733   ptap->AP_loc->info.mallocs           = nspacedouble;
1734   ptap->AP_loc->info.fill_ratio_given  = fill;
1735   ptap->AP_loc->info.fill_ratio_needed = apfill;
1736 
1737   if (api[am]) {
1738     ierr = PetscInfo3(ptap->AP_loc,"Nonscalable algorithm, AP_loc reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)apfill);CHKERRQ(ierr);
1739     ierr = PetscInfo1(ptap->AP_loc,"Use MatPtAP(A,P,MatReuse,%g,&C) for best AP_loc performance.\n",(double)apfill);CHKERRQ(ierr);
1740   } else {
1741     ierr = PetscInfo(ptap->AP_loc,"Nonscalable algorithm, AP_loc is empty\n");CHKERRQ(ierr);
1742   }
1743 #endif
1744 
1745   /* (2-1) compute symbolic Co = Ro*AP_loc  */
1746   /* ------------------------------------ */
1747   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1748   ierr = MatSetOptionsPrefix(ptap->Ro,prefix);CHKERRQ(ierr);
1749   ierr = MatAppendOptionsPrefix(ptap->Ro,"inner_offdiag_");CHKERRQ(ierr);
1750   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Ro,ptap->AP_loc,fill,&ptap->C_oth);CHKERRQ(ierr);
1751 
1752   /* (3) send coj of C_oth to other processors  */
1753   /* ------------------------------------------ */
1754   /* determine row ownership */
1755   ierr = PetscLayoutCreate(comm,&rowmap);CHKERRQ(ierr);
1756   rowmap->n  = pn;
1757   rowmap->bs = 1;
1758   ierr   = PetscLayoutSetUp(rowmap);CHKERRQ(ierr);
1759   owners = rowmap->range;
1760 
1761   /* determine the number of messages to send, their lengths */
1762   ierr = PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);CHKERRQ(ierr);
1763   ierr = PetscMemzero(len_s,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
1764   ierr = PetscMemzero(len_si,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
1765 
1766   c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
1767   coi   = c_oth->i; coj = c_oth->j;
1768   con   = ptap->C_oth->rmap->n;
1769   proc  = 0;
1770   for (i=0; i<con; i++) {
1771     while (prmap[i] >= owners[proc+1]) proc++;
1772     len_si[proc]++;               /* num of rows in Co(=Pt*AP) to be sent to [proc] */
1773     len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
1774   }
1775 
1776   len          = 0; /* max length of buf_si[], see (4) */
1777   owners_co[0] = 0;
1778   nsend        = 0;
1779   for (proc=0; proc<size; proc++) {
1780     owners_co[proc+1] = owners_co[proc] + len_si[proc];
1781     if (len_s[proc]) {
1782       nsend++;
1783       len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
1784       len         += len_si[proc];
1785     }
1786   }
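  /* Each i-structure message sent to [proc] consists of nrows, then nrows row indices, then nrows+1
     offsets, i.e. 2*(nrows+1) integers, which is the length stored in len_si[proc] above */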
1787 
1788   /* determine the number and length of messages to receive for coi and coj  */
1789   ierr = PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);CHKERRQ(ierr);
1790   ierr = PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);CHKERRQ(ierr);
1791 
1792   /* post the Irecv and Isend of coj */
1793   ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
1794   ierr = PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);CHKERRQ(ierr);
1795   ierr = PetscMalloc1(nsend+1,&swaits);CHKERRQ(ierr);
1796   for (proc=0, k=0; proc<size; proc++) {
1797     if (!len_s[proc]) continue;
1798     i    = owners_co[proc];
1799     ierr = MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);CHKERRQ(ierr);
1800     k++;
1801   }
1802 
1803   /* (2-2) compute symbolic C_loc = Rd*AP_loc */
1804   /* ---------------------------------------- */
1805   ierr = MatSetOptionsPrefix(ptap->Rd,prefix);CHKERRQ(ierr);
1806   ierr = MatAppendOptionsPrefix(ptap->Rd,"inner_diag_");CHKERRQ(ierr);
1807   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Rd,ptap->AP_loc,fill,&ptap->C_loc);CHKERRQ(ierr);
1808   c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;
1809 
1810   /* receives coj are complete */
1811   for (i=0; i<nrecv; i++) {
1812     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
1813   }
1814   ierr = PetscFree(rwaits);CHKERRQ(ierr);
1815   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
1816 
1817   /* add received column indices into ta to update Crmax */
1818   for (k=0; k<nrecv; k++) {/* k-th received message */
1819     Jptr = buf_rj[k];
1820     for (j=0; j<len_r[k]; j++) {
1821       ierr = PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);CHKERRQ(ierr);
1822     }
1823   }
1824   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr);
1825   ierr = PetscTableDestroy(&ta);CHKERRQ(ierr);
1826 
1827   /* (4) send and recv coi */
1828   /*-----------------------*/
1829   ierr   = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
1830   ierr   = PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);CHKERRQ(ierr);
1831   ierr   = PetscMalloc1(len+1,&buf_s);CHKERRQ(ierr);
1832   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
1833   for (proc=0,k=0; proc<size; proc++) {
1834     if (!len_s[proc]) continue;
1835     /* form outgoing message for i-structure:
1836          buf_si[0]:                 nrows to be sent
1837                [1:nrows]:           row index (local to the receiving process)
1838                [nrows+1:2*nrows+1]: i-structure index
1839     */
1840     /*-------------------------------------------*/
1841     nrows       = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
1842     buf_si_i    = buf_si + nrows+1;
1843     buf_si[0]   = nrows;
1844     buf_si_i[0] = 0;
1845     nrows       = 0;
1846     for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
1847       nzi = coi[i+1] - coi[i];
1848       buf_si_i[nrows+1] = buf_si_i[nrows] + nzi;  /* i-structure */
1849       buf_si[nrows+1]   = prmap[i] -owners[proc]; /* local row index */
1850       nrows++;
1851     }
1852     ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);CHKERRQ(ierr);
1853     k++;
1854     buf_si += len_si[proc];
1855   }
1856   for (i=0; i<nrecv; i++) {
1857     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
1858   }
1859   ierr = PetscFree(rwaits);CHKERRQ(ierr);
1860   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
1861 
1862   ierr = PetscFree4(len_s,len_si,sstatus,owners_co);CHKERRQ(ierr);
1863   ierr = PetscFree(len_ri);CHKERRQ(ierr);
1864   ierr = PetscFree(swaits);CHKERRQ(ierr);
1865   ierr = PetscFree(buf_s);CHKERRQ(ierr);
1866 
1867   /* (5) compute the local portion of Cmpi      */
1868   /* ------------------------------------------ */
1869   /* set initial free space to be Crmax, sufficient for holding the nonzeros of each row of Cmpi */
1870   ierr          = PetscFreeSpaceGet(Crmax,&free_space);CHKERRQ(ierr);
1871   current_space = free_space;
1872 
1873   ierr = PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);CHKERRQ(ierr);
1874   for (k=0; k<nrecv; k++) {
1875     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th received i-structure */
1876     nrows       = *buf_ri_k[k];
1877     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th received i-structure */
1878     nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of the k-th received i-structure */
1879   }
1880 
1881   ierr = MatPreallocateInitialize(comm,pn,pn,dnz,onz);CHKERRQ(ierr);
1882   ierr = PetscLLCondensedCreate(Crmax,pN,&lnk,&lnkbt);CHKERRQ(ierr);
1883   for (i=0; i<pn; i++) {
1884     /* add C_loc into Cmpi */
1885     nzi  = c_loc->i[i+1] - c_loc->i[i];
1886     Jptr = c_loc->j + c_loc->i[i];
1887     ierr = PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1888 
1889     /* add received col data into lnk */
1890     for (k=0; k<nrecv; k++) { /* k-th received message */
1891       if (i == *nextrow[k]) { /* i-th row */
1892         nzi  = *(nextci[k]+1) - *nextci[k];
1893         Jptr = buf_rj[k] + *nextci[k];
1894         ierr = PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1895         nextrow[k]++; nextci[k]++;
1896       }
1897     }
1898     nzi = lnk[0];
1899 
1900     /* copy data into free space, then initialize lnk */
1901     ierr = PetscLLCondensedClean(pN,nzi,current_space->array,lnk,lnkbt);CHKERRQ(ierr);
1902     ierr = MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);CHKERRQ(ierr);
1903   }
1904   ierr = PetscFree3(buf_ri_k,nextrow,nextci);CHKERRQ(ierr);
1905   ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
1906   ierr = PetscFreeSpaceDestroy(free_space);CHKERRQ(ierr);
1907 
1908   /* local sizes and preallocation */
1909   ierr = MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1910   ierr = MatSetBlockSizes(Cmpi,PetscAbs(P->cmap->bs),PetscAbs(P->cmap->bs));CHKERRQ(ierr);
1911   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
1912   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
1913 
1914   /* members in merge */
1915   ierr = PetscFree(id_r);CHKERRQ(ierr);
1916   ierr = PetscFree(len_r);CHKERRQ(ierr);
1917   ierr = PetscFree(buf_ri[0]);CHKERRQ(ierr);
1918   ierr = PetscFree(buf_ri);CHKERRQ(ierr);
1919   ierr = PetscFree(buf_rj[0]);CHKERRQ(ierr);
1920   ierr = PetscFree(buf_rj);CHKERRQ(ierr);
1921   ierr = PetscLayoutDestroy(&rowmap);CHKERRQ(ierr);
1922 
1923   /* attach the supporting struct to Cmpi for reuse */
1924   c = (Mat_MPIAIJ*)Cmpi->data;
1925   c->ap           = ptap;
1926   ptap->duplicate = Cmpi->ops->duplicate;
1927   ptap->destroy   = Cmpi->ops->destroy;
1928   ptap->view      = Cmpi->ops->view;
1929   ierr = PetscCalloc1(pN,&ptap->apa);CHKERRQ(ierr);
1930 
1931   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1932   Cmpi->assembled        = PETSC_FALSE;
1933   Cmpi->ops->destroy     = MatDestroy_MPIAIJ_PtAP;
1934   Cmpi->ops->view        = MatView_MPIAIJ_PtAP;
1935   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
1936   *C                     = Cmpi;
1937   PetscFunctionReturn(0);
1938 }
1939 
1940 PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ(Mat A,Mat P,Mat C)
1941 {
1942   PetscErrorCode    ierr;
1943   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
1944   Mat_SeqAIJ        *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
1945   Mat_SeqAIJ        *ap,*p_loc,*p_oth=NULL,*c_seq;
1946   Mat_APMPI         *ptap = c->ap;
1947   Mat               AP_loc,C_loc,C_oth;
1948   PetscInt          i,rstart,rend,cm,ncols,row;
1949   PetscInt          *api,*apj,am = A->rmap->n,j,col,apnz;
1950   PetscScalar       *apa;
1951   const PetscInt    *cols;
1952   const PetscScalar *vals;
1953 
1954   PetscFunctionBegin;
1955   if (!ptap) {
1956     MPI_Comm comm;
1957     ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
1958     SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
1959   }
1960 
1961   ierr = MatZeroEntries(C);CHKERRQ(ierr);
1962   /* 1) get R = Pd^T,Ro = Po^T */
1963   if (ptap->reuse == MAT_REUSE_MATRIX) {
1964     ierr = MatTranspose(p->A,MAT_REUSE_MATRIX,&ptap->Rd);CHKERRQ(ierr);
1965     ierr = MatTranspose(p->B,MAT_REUSE_MATRIX,&ptap->Ro);CHKERRQ(ierr);
1966   }
1967 
1968   /* 2) get AP_loc */
1969   AP_loc = ptap->AP_loc;
1970   ap = (Mat_SeqAIJ*)AP_loc->data;
1971 
1972   /* 2-1) get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
1973   /*-----------------------------------------------------*/
1974   if (ptap->reuse == MAT_REUSE_MATRIX) {
1975     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
1976     ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
1977     ierr = MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
1978   }
1979 
1980   /* 2-2) compute numeric A_loc*P - dominating part */
1981   /* ---------------------------------------------- */
1982   /* get data from symbolic products */
1983   p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
1984   if (ptap->P_oth) {
1985     p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;
1986   }
1987   apa   = ptap->apa;
1988   api   = ap->i;
1989   apj   = ap->j;
1990   for (i=0; i<am; i++) {
1991     /* AP[i,:] = A[i,:]*P = Ad*P_loc + Ao*P_oth */
1992     AProw_nonscalable(i,ad,ao,p_loc,p_oth,apa);
1993     apnz = api[i+1] - api[i];
1994     for (j=0; j<apnz; j++) {
1995       col = apj[j+api[i]];
1996       ap->a[j+ap->i[i]] = apa[col];
1997       apa[col] = 0.0;
1998     }
1999     ierr = PetscLogFlops(2.0*apnz);CHKERRQ(ierr);
2000   }
2001 
2002   /* 3) C_loc = Rd*AP_loc, C_oth = Ro*AP_loc */
2003   ierr = ((ptap->C_loc)->ops->matmultnumeric)(ptap->Rd,AP_loc,ptap->C_loc);CHKERRQ(ierr);
2004   ierr = ((ptap->C_oth)->ops->matmultnumeric)(ptap->Ro,AP_loc,ptap->C_oth);CHKERRQ(ierr);
2005   C_loc = ptap->C_loc;
2006   C_oth = ptap->C_oth;
2007 
2008   /* add C_loc and C_oth to C */
2009   ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);
2010 
2011   /* C_loc -> C */
2012   cm    = C_loc->rmap->N;
2013   c_seq = (Mat_SeqAIJ*)C_loc->data;
2014   cols = c_seq->j;
2015   vals = c_seq->a;
2016 
2017 
2018   /* The (fast) MatSetValues_MPIAIJ_CopyFromCSRFormat() can only be used when C->was_assembled is PETSC_FALSE
2019      and there are no off-processor parts. If was_assembled is true, the statement
2020        aj[rowstart_diag+dnz_row] = mat_j[col] - cstart;
2021      in MatSetValues_MPIAIJ_CopyFromCSRFormat() no longer holds, and the more general MatSetValues_MPIAIJ()
2022      must be used, which looks up the column index in a table and does other, more expensive work. */
2023   if (C->assembled) {
2024     C->was_assembled = PETSC_TRUE;
2025     C->assembled     = PETSC_FALSE;
2026   }
2027   if (C->was_assembled) {
2028     for (i=0; i<cm; i++) {
2029       ncols = c_seq->i[i+1] - c_seq->i[i];
2030       row = rstart + i;
2031       ierr = MatSetValues_MPIAIJ(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
2032       cols += ncols; vals += ncols;
2033     }
2034   } else {
2035     ierr = MatSetValues_MPIAIJ_CopyFromCSRFormat(C,c_seq->j,c_seq->i,c_seq->a);CHKERRQ(ierr);
2036   }
2037 
2038   /* Co -> C, off-processor part */
2039   cm = C_oth->rmap->N;
2040   c_seq = (Mat_SeqAIJ*)C_oth->data;
2041   cols = c_seq->j;
2042   vals = c_seq->a;
2043   for (i=0; i<cm; i++) {
2044     ncols = c_seq->i[i+1] - c_seq->i[i];
2045     row = p->garray[i];
2046     ierr = MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
2047     cols += ncols; vals += ncols;
2048   }
2049 
2050   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2051   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2052 
2053   ptap->reuse = MAT_REUSE_MATRIX;
2054 
2055   /* the supporting struct ptap consumes almost the same amount of memory as C=PtAP; release it if C will not be updated again from A and P */
2056   if (ptap->freestruct) {
2057     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
2058   }
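  /* ptap->freestruct is typically set through the -mat_freeintermediatedatastructures option mentioned in the
     error message at the top of this routine; once the struct is freed, C can no longer be updated with
     MAT_REUSE_MATRIX */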
2059   PetscFunctionReturn(0);
2060 }
2061