xref: /petsc/src/mat/impls/baij/mpi/baijov.c (revision 30bc6a77c816dcf2db4d7202f4edbdbe9fcef653)
1 
2 /*
3    Routines to compute overlapping regions of a parallel MPI matrix
4    and to find submatrices that are shared across processors.
5 */
6 #include <../src/mat/impls/baij/mpi/mpibaij.h>
7 #include <petscbt.h>
8 
9 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Local(Mat,PetscInt,char**,PetscInt*,PetscInt**);
10 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Receive(Mat,PetscInt,PetscInt**,PetscInt**,PetscInt*);
11 extern PetscErrorCode MatGetRow_MPIBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**);
12 extern PetscErrorCode MatRestoreRow_MPIBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**);
13 
14 PetscErrorCode MatIncreaseOverlap_MPIBAIJ(Mat C,PetscInt imax,IS is[],PetscInt ov)
15 {
16   PetscErrorCode ierr;
17   PetscInt       i,N=C->cmap->N, bs=C->rmap->bs;
18   IS             *is_new;
19 
20   PetscFunctionBegin;
21   ierr = PetscMalloc1(imax,&is_new);CHKERRQ(ierr);
22   /* Convert the indices into block format */
23   ierr = ISCompressIndicesGeneral(N,C->rmap->n,bs,imax,is,is_new);CHKERRQ(ierr);
24   if (ov < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap specified\n");
25   for (i=0; i<ov; ++i) {
26     ierr = MatIncreaseOverlap_MPIBAIJ_Once(C,imax,is_new);CHKERRQ(ierr);
27   }
28   for (i=0; i<imax; i++) {ierr = ISDestroy(&is[i]);CHKERRQ(ierr);}
29   ierr = ISExpandIndicesGeneral(N,N,bs,imax,is_new,is);CHKERRQ(ierr);
30   for (i=0; i<imax; i++) {ierr = ISDestroy(&is_new[i]);CHKERRQ(ierr);}
31   ierr = PetscFree(is_new);CHKERRQ(ierr);
32   PetscFunctionReturn(0);
33 }
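/*
  Usage sketch (illustrative only; the matrix C and the index sets is[] are
  assumed to come from the calling application):

      PetscInt ov = 2;                              two levels of overlap
      ierr = MatIncreaseOverlap(C,imax,is,ov);CHKERRQ(ierr);

  For a MATMPIBAIJ matrix this call dispatches to MatIncreaseOverlap_MPIBAIJ()
  above, which compresses the point indices in is[] to block indices, grows them
  ov times with MatIncreaseOverlap_MPIBAIJ_Once(), and expands the result back
  to point indices.
*/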
34 
35 /*
36   Sample message format:
37   If a processor A wants processor B to process some elements corresponding
38   to index sets is[1], is[5]
39   mesg [0] = 2   (number of index sets in the message)
40   -----------
41   mesg [1] = 1              => index set is[1]
42   mesg [2] = size of is[1]
43   -----------
44   mesg [3] = 5              => index set is[5]
45   mesg [4] = size of is[5]
46   -----------
47   mesg [5]
48   ...  mesg [n]   data of is[1]
49   -----------
50   mesg [n+1]
51   ...  mesg [m]   data of is[5]
52   -----------
53 
54   Notes:
55   nrqs - number of requests sent (or to be sent out)
56   nrqr - number of requests received (which have to be, or have been, processed)
57 */
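/*
  Illustrative sketch (not part of the library): a receiver can walk one such
  message as follows; the buffer name mesg below is an assumption made only for
  this example.

      nis = mesg[0];                       number of index sets in the message
      val = mesg + 1 + 2*nis;              the data follows the header
      for (s=1; s<=nis; s++) {
        is_id = mesg[2*s-1];               which index set this chunk belongs to
        count = mesg[2*s];                 number of entries for that index set
        ... use val[0],...,val[count-1] for is[is_id] ...
        val  += count;
      }
*/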
58 PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Once(Mat C,PetscInt imax,IS is[])
59 {
60   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
61   const PetscInt **idx,*idx_i;
62   PetscInt       *n,*w3,*w4,**data,len;
63   PetscErrorCode ierr;
64   PetscMPIInt    size,rank,tag1,tag2,*w2,*w1,nrqr;
65   PetscInt       Mbs,i,j,k,**rbuf,row,proc=-1,nrqs,msz,**outdat,**ptr;
66   PetscInt       *ctr,*pa,*tmp,*isz,*isz1,**xdata,**rbuf2,*d_p;
67   PetscMPIInt    *onodes1,*olengths1,*onodes2,*olengths2;
68   PetscBT        *table;
69   MPI_Comm       comm;
70   MPI_Request    *s_waits1,*r_waits1,*s_waits2,*r_waits2;
71   MPI_Status     *s_status,*recv_status;
72   char           *t_p;
73 
74   PetscFunctionBegin;
75   ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
76   size = c->size;
77   rank = c->rank;
78   Mbs  = c->Mbs;
79 
80   ierr = PetscObjectGetNewTag((PetscObject)C,&tag1);CHKERRQ(ierr);
81   ierr = PetscObjectGetNewTag((PetscObject)C,&tag2);CHKERRQ(ierr);
82 
83   ierr = PetscMalloc2(imax+1,&idx,imax,&n);CHKERRQ(ierr);
84 
85   for (i=0; i<imax; i++) {
86     ierr = ISGetIndices(is[i],&idx[i]);CHKERRQ(ierr);
87     ierr = ISGetLocalSize(is[i],&n[i]);CHKERRQ(ierr);
88   }
89 
90   /* evaluate communication - to whom messages go, their lengths, and the buffer space
91      required. Based on this, buffers are allocated and data is copied into them */
92   ierr = PetscCalloc4(size,&w1,size,&w2,size,&w3,size,&w4);CHKERRQ(ierr);
93   for (i=0; i<imax; i++) {
94     ierr  = PetscMemzero(w4,size*sizeof(PetscInt));CHKERRQ(ierr); /* initialise work vector*/
95     idx_i = idx[i];
96     len   = n[i];
97     for (j=0; j<len; j++) {
98       row = idx_i[j];
99       if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index set cannot have negative entries");
100       ierr = PetscLayoutFindOwner(C->rmap,row*C->rmap->bs,&proc);CHKERRQ(ierr);
101       w4[proc]++;
102     }
103     for (j=0; j<size; j++) {
104       if (w4[j]) { w1[j] += w4[j]; w3[j]++;}
105     }
106   }
107 
108   nrqs     = 0;              /* no of outgoing messages */
109   msz      = 0;              /* total mesg length (for all procs) */
110   w1[rank] = 0;              /* no mesg sent to itself */
111   w3[rank] = 0;
112   for (i=0; i<size; i++) {
113     if (w1[i])  {w2[i] = 1; nrqs++;} /* there exists a message to proc i */
114   }
115   /* pa - list of processors to communicate with */
116   ierr = PetscMalloc1(nrqs+1,&pa);CHKERRQ(ierr);
117   for (i=0,j=0; i<size; i++) {
118     if (w1[i]) {pa[j] = i; j++;}
119   }
120 
121   /* Each message would have a header = 1 + 2*(no of IS) + data */
122   for (i=0; i<nrqs; i++) {
123     j      = pa[i];
124     w1[j] += w2[j] + 2*w3[j];
125     msz   += w1[j];
126   }
127 
128   /* Determine the number of messages to expect, their lengths, and the from-ids */
129   ierr = PetscGatherNumberOfMessages(comm,w2,w1,&nrqr);CHKERRQ(ierr);
130   ierr = PetscGatherMessageLengths(comm,nrqs,nrqr,w1,&onodes1,&olengths1);CHKERRQ(ierr);
131 
132   /* Now post the Irecvs corresponding to these messages */
133   ierr = PetscPostIrecvInt(comm,tag1,nrqr,onodes1,olengths1,&rbuf,&r_waits1);CHKERRQ(ierr);
134 
135   /* Allocate Memory for outgoing messages */
136   ierr = PetscMalloc4(size,&outdat,size,&ptr,msz,&tmp,size,&ctr);CHKERRQ(ierr);
137   ierr = PetscMemzero(outdat,size*sizeof(PetscInt*));CHKERRQ(ierr);
138   ierr = PetscMemzero(ptr,size*sizeof(PetscInt*));CHKERRQ(ierr);
139   {
140     PetscInt *iptr = tmp,ict  = 0;
141     for (i=0; i<nrqs; i++) {
142       j         = pa[i];
143       iptr     +=  ict;
144       outdat[j] = iptr;
145       ict       = w1[j];
146     }
147   }
148 
149   /* Form the outgoing messages */
150   /*plug in the headers*/
151   for (i=0; i<nrqs; i++) {
152     j            = pa[i];
153     outdat[j][0] = 0;
154     ierr         = PetscMemzero(outdat[j]+1,2*w3[j]*sizeof(PetscInt));CHKERRQ(ierr);
155     ptr[j]       = outdat[j] + 2*w3[j] + 1;
156   }
157 
158   /* Memory for doing local proc's work*/
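  /* For index set i: table[i] is a bitmap over the Mbs block rows marking which
     block rows are already in the overlap, data[i] lists those block rows, and
     isz[i] is the current number of entries in data[i] */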
159   {
160     ierr = PetscCalloc5(imax,&table, imax,&data, imax,&isz, Mbs*imax,&d_p, (Mbs/PETSC_BITS_PER_BYTE+1)*imax,&t_p);CHKERRQ(ierr);
161 
162     for (i=0; i<imax; i++) {
163       table[i] = t_p + (Mbs/PETSC_BITS_PER_BYTE+1)*i;
164       data[i]  = d_p + (Mbs)*i;
165     }
166   }
167 
168   /* Parse the IS and update local tables and the outgoing buf with the data*/
169   {
170     PetscInt n_i,*data_i,isz_i,*outdat_j,ctr_j;
171     PetscBT  table_i;
172 
173     for (i=0; i<imax; i++) {
174       ierr    = PetscMemzero(ctr,size*sizeof(PetscInt));CHKERRQ(ierr);
175       n_i     = n[i];
176       table_i = table[i];
177       idx_i   = idx[i];
178       data_i  = data[i];
179       isz_i   = isz[i];
180       for (j=0; j<n_i; j++) {   /* parse the indices of each IS */
181         row  = idx_i[j];
182         ierr = PetscLayoutFindOwner(C->rmap,row*C->rmap->bs,&proc);CHKERRQ(ierr);
183         if (proc != rank) { /* copy to the outgoing buffer */
184           ctr[proc]++;
185           *ptr[proc] = row;
186           ptr[proc]++;
187         } else { /* Update the local table */
188           if (!PetscBTLookupSet(table_i,row)) data_i[isz_i++] = row;
189         }
190       }
191       /* Update the headers for the current IS */
192       for (j=0; j<size; j++) { /* Can Optimise this loop by using pa[] */
193         if ((ctr_j = ctr[j])) {
194           outdat_j        = outdat[j];
195           k               = ++outdat_j[0];
196           outdat_j[2*k]   = ctr_j;
197           outdat_j[2*k-1] = i;
198         }
199       }
200       isz[i] = isz_i;
201     }
202   }
203 
204   /*  Now  post the sends */
205   ierr = PetscMalloc1(nrqs+1,&s_waits1);CHKERRQ(ierr);
206   for (i=0; i<nrqs; ++i) {
207     j    = pa[i];
208     ierr = MPI_Isend(outdat[j],w1[j],MPIU_INT,j,tag1,comm,s_waits1+i);CHKERRQ(ierr);
209   }
210 
211   /* No longer need the original indices*/
212   for (i=0; i<imax; ++i) {
213     ierr = ISRestoreIndices(is[i],idx+i);CHKERRQ(ierr);
214   }
215   ierr = PetscFree2(idx,n);CHKERRQ(ierr);
216 
217   for (i=0; i<imax; ++i) {
218     ierr = ISDestroy(&is[i]);CHKERRQ(ierr);
219   }
220 
221   /* Do Local work*/
222   ierr = MatIncreaseOverlap_MPIBAIJ_Local(C,imax,table,isz,data);CHKERRQ(ierr);
223 
224   /* Receive messages*/
225   ierr = PetscMalloc1(nrqr+1,&recv_status);CHKERRQ(ierr);
226   if (nrqr) {ierr = MPI_Waitall(nrqr,r_waits1,recv_status);CHKERRQ(ierr);}
227 
228   ierr = PetscMalloc1(nrqs+1,&s_status);CHKERRQ(ierr);
229   if (nrqs) {ierr = MPI_Waitall(nrqs,s_waits1,s_status);CHKERRQ(ierr);}
230 
231   /* Phase 1 sends are complete - deallocate buffers */
232   ierr = PetscFree4(outdat,ptr,tmp,ctr);CHKERRQ(ierr);
233   ierr = PetscFree4(w1,w2,w3,w4);CHKERRQ(ierr);
234 
235   ierr = PetscMalloc1(nrqr+1,&xdata);CHKERRQ(ierr);
236   ierr = PetscMalloc1(nrqr+1,&isz1);CHKERRQ(ierr);
237   ierr = MatIncreaseOverlap_MPIBAIJ_Receive(C,nrqr,rbuf,xdata,isz1);CHKERRQ(ierr);
238   ierr = PetscFree(rbuf[0]);CHKERRQ(ierr);
239   ierr = PetscFree(rbuf);CHKERRQ(ierr);
240 
241   /* Send the data back*/
242   /* Do a global reduction to know the buffer space req for incoming messages*/
243   {
244     PetscMPIInt *rw1;
245 
246     ierr = PetscCalloc1(size,&rw1);CHKERRQ(ierr);
247 
248     for (i=0; i<nrqr; ++i) {
249       proc = recv_status[i].MPI_SOURCE;
250       if (proc != onodes1[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MPI_SOURCE mismatch");
251       rw1[proc] = isz1[i];
252     }
253 
254     ierr = PetscFree(onodes1);CHKERRQ(ierr);
255     ierr = PetscFree(olengths1);CHKERRQ(ierr);
256 
257     /* Determine the number of messages to expect, their lengths, and the from-ids */
258     ierr = PetscGatherMessageLengths(comm,nrqr,nrqs,rw1,&onodes2,&olengths2);CHKERRQ(ierr);
259     ierr = PetscFree(rw1);CHKERRQ(ierr);
260   }
261   /* Now post the Irecvs corresponding to these messages */
262   ierr = PetscPostIrecvInt(comm,tag2,nrqs,onodes2,olengths2,&rbuf2,&r_waits2);CHKERRQ(ierr);
263 
264   /*  Now  post the sends */
265   ierr = PetscMalloc1(nrqr+1,&s_waits2);CHKERRQ(ierr);
266   for (i=0; i<nrqr; ++i) {
267     j    = recv_status[i].MPI_SOURCE;
268     ierr = MPI_Isend(xdata[i],isz1[i],MPIU_INT,j,tag2,comm,s_waits2+i);CHKERRQ(ierr);
269   }
270 
271   /* receive work done on other processors*/
272   {
273     PetscMPIInt idex;
274     PetscInt    is_no,ct1,max,*rbuf2_i,isz_i,*data_i,jmax;
275     PetscBT     table_i;
276     MPI_Status  *status2;
277 
278     ierr = PetscMalloc1(PetscMax(nrqr,nrqs)+1,&status2);CHKERRQ(ierr);
279     for (i=0; i<nrqs; ++i) {
280       ierr = MPI_Waitany(nrqs,r_waits2,&idex,status2+i);CHKERRQ(ierr);
281       /* Process the message*/
282       rbuf2_i = rbuf2[idex];
283       ct1     = 2*rbuf2_i[0]+1;
284       jmax    = rbuf2[idex][0];
285       for (j=1; j<=jmax; j++) {
286         max     = rbuf2_i[2*j];
287         is_no   = rbuf2_i[2*j-1];
288         isz_i   = isz[is_no];
289         data_i  = data[is_no];
290         table_i = table[is_no];
291         for (k=0; k<max; k++,ct1++) {
292           row = rbuf2_i[ct1];
293           if (!PetscBTLookupSet(table_i,row)) data_i[isz_i++] = row;
294         }
295         isz[is_no] = isz_i;
296       }
297     }
298     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits2,status2);CHKERRQ(ierr);}
299     ierr = PetscFree(status2);CHKERRQ(ierr);
300   }
301 
302   for (i=0; i<imax; ++i) {
303     ierr = ISCreateGeneral(PETSC_COMM_SELF,isz[i],data[i],PETSC_COPY_VALUES,is+i);CHKERRQ(ierr);
304   }
305 
306 
307   ierr = PetscFree(onodes2);CHKERRQ(ierr);
308   ierr = PetscFree(olengths2);CHKERRQ(ierr);
309 
310   ierr = PetscFree(pa);CHKERRQ(ierr);
311   ierr = PetscFree(rbuf2[0]);CHKERRQ(ierr);
312   ierr = PetscFree(rbuf2);CHKERRQ(ierr);
313   ierr = PetscFree(s_waits1);CHKERRQ(ierr);
314   ierr = PetscFree(r_waits1);CHKERRQ(ierr);
315   ierr = PetscFree(s_waits2);CHKERRQ(ierr);
316   ierr = PetscFree(r_waits2);CHKERRQ(ierr);
317   ierr = PetscFree5(table,data,isz,d_p,t_p);CHKERRQ(ierr);
318   ierr = PetscFree(s_status);CHKERRQ(ierr);
319   ierr = PetscFree(recv_status);CHKERRQ(ierr);
320   ierr = PetscFree(xdata[0]);CHKERRQ(ierr);
321   ierr = PetscFree(xdata);CHKERRQ(ierr);
322   ierr = PetscFree(isz1);CHKERRQ(ierr);
323   PetscFunctionReturn(0);
324 }
325 
326 /*
327    MatIncreaseOverlap_MPIBAIJ_Local - Called by MatIncreaseOverlap, to do
328        the work on the local processor.
329 
330      Inputs:
331       C      - MAT_MPIBAIJ;
332       imax   - total number of index sets processed at a time;
333       table  - an array of PetscBT bitmaps, each of size Mbs bits.
334 
335      Output:
336       isz    - array containing the count of the solution elements corresponding
337                to each index set;
338       data   - pointer to the solutions
339 */
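/*
   Illustrative example (the numbers are made up): suppose data[i] already
   contains block row 12, with rstart = 10 and cstart = 10.  If local row 2 of A
   holds block columns {0,3,7} and local row 2 of B holds block columns {2,5}
   with garray = {1,4,6,8,20,31}, this routine appends the global block columns
   {10,13,17} (A part, shifted by cstart) and {6,31} (B part, mapped through
   garray) to data[i], skipping any that are already marked in table[i].
*/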
340 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Local(Mat C,PetscInt imax,PetscBT *table,PetscInt *isz,PetscInt **data)
341 {
342   Mat_MPIBAIJ *c = (Mat_MPIBAIJ*)C->data;
343   Mat         A  = c->A,B = c->B;
344   Mat_SeqBAIJ *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)B->data;
345   PetscInt    start,end,val,max,rstart,cstart,*ai,*aj;
346   PetscInt    *bi,*bj,*garray,i,j,k,row,*data_i,isz_i;
347   PetscBT     table_i;
348 
349   PetscFunctionBegin;
350   rstart = c->rstartbs;
351   cstart = c->cstartbs;
352   ai     = a->i;
353   aj     = a->j;
354   bi     = b->i;
355   bj     = b->j;
356   garray = c->garray;
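  /* garray[] maps a local block column index of the off-diagonal part B to the
     corresponding global block column index */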
357 
358 
359   for (i=0; i<imax; i++) {
360     data_i  = data[i];
361     table_i = table[i];
362     isz_i   = isz[i];
363     for (j=0,max=isz[i]; j<max; j++) {
364       row   = data_i[j] - rstart;
365       start = ai[row];
366       end   = ai[row+1];
367       for (k=start; k<end; k++) { /* Amat */
368         val = aj[k] + cstart;
369         if (!PetscBTLookupSet(table_i,val)) data_i[isz_i++] = val;
370       }
371       start = bi[row];
372       end   = bi[row+1];
373       for (k=start; k<end; k++) { /* Bmat */
374         val = garray[bj[k]];
375         if (!PetscBTLookupSet(table_i,val)) data_i[isz_i++] = val;
376       }
377     }
378     isz[i] = isz_i;
379   }
380   PetscFunctionReturn(0);
381 }
382 /*
383       MatIncreaseOverlap_MPIBAIJ_Receive - Process the received messages,
384          and return the output
385 
386          Input:
387            C    - the matrix
388            nrqr - no of messages being processed.
389            rbuf - an array of pointers to the received requests
390 
391          Output:
392            xdata - array of messages to be sent back
393            isz1  - size of each message
394 
395   For better efficiency perhaps we should malloc each xdata[i] separately;
396 then if a re-malloc is required we need only copy the data for that one row
397 rather than for all previous rows, as it is now where a single large chunk of
398 memory is used.
399 
400 */
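/*
  The replies assembled in xdata[] reuse the request header layout: xdata[i][0]
  is the number of index sets, followed by (index set id, count) pairs and then
  the data; each count is the number of distinct block indices found for that
  index set (the requested block rows plus all of their block columns).
*/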
401 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Receive(Mat C,PetscInt nrqr,PetscInt **rbuf,PetscInt **xdata,PetscInt * isz1)
402 {
403   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
404   Mat            A  = c->A,B = c->B;
405   Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)B->data;
406   PetscErrorCode ierr;
407   PetscInt       rstart,cstart,*ai,*aj,*bi,*bj,*garray,i,j,k;
408   PetscInt       row,total_sz,ct,ct1,ct2,ct3,mem_estimate,oct2,l,start,end;
409   PetscInt       val,max1,max2,Mbs,no_malloc =0,*tmp,new_estimate,ctr;
410   PetscInt       *rbuf_i,kmax,rbuf_0;
411   PetscBT        xtable;
412 
413   PetscFunctionBegin;
414   Mbs    = c->Mbs;
415   rstart = c->rstartbs;
416   cstart = c->cstartbs;
417   ai     = a->i;
418   aj     = a->j;
419   bi     = b->i;
420   bj     = b->j;
421   garray = c->garray;
422 
423 
424   for (i=0,ct=0,total_sz=0; i<nrqr; ++i) {
425     rbuf_i =  rbuf[i];
426     rbuf_0 =  rbuf_i[0];
427     ct    += rbuf_0;
428     for (j=1; j<=rbuf_0; j++) total_sz += rbuf_i[2*j];
429   }
430 
431   if (c->Mbs) max1 = ct*(a->nz +b->nz)/c->Mbs;
432   else        max1 = 1;
433   mem_estimate = 3*((total_sz > max1 ? total_sz : max1)+1);
434   ierr         = PetscMalloc1(mem_estimate,&xdata[0]);CHKERRQ(ierr);
435   ++no_malloc;
436   ierr = PetscBTCreate(Mbs,&xtable);CHKERRQ(ierr);
437   ierr = PetscMemzero(isz1,nrqr*sizeof(PetscInt));CHKERRQ(ierr);
438 
439   ct3 = 0;
440   for (i=0; i<nrqr; i++) { /* for each mesg from proc i */
441     rbuf_i =  rbuf[i];
442     rbuf_0 =  rbuf_i[0];
443     ct1    =  2*rbuf_0+1;
444     ct2    =  ct1;
445     ct3   += ct1;
446     for (j=1; j<=rbuf_0; j++) { /* for each IS from proc i*/
447       ierr = PetscBTMemzero(Mbs,xtable);CHKERRQ(ierr);
448       oct2 = ct2;
449       kmax = rbuf_i[2*j];
450       for (k=0; k<kmax; k++,ct1++) {
451         row = rbuf_i[ct1];
452         if (!PetscBTLookupSet(xtable,row)) {
453           if (!(ct3 < mem_estimate)) {
454             new_estimate = (PetscInt)(1.5*mem_estimate)+1;
455             ierr         = PetscMalloc1(new_estimate,&tmp);CHKERRQ(ierr);
456             ierr         = PetscMemcpy(tmp,xdata[0],mem_estimate*sizeof(PetscInt));CHKERRQ(ierr);
457             ierr         = PetscFree(xdata[0]);CHKERRQ(ierr);
458             xdata[0]     = tmp;
459             mem_estimate = new_estimate; ++no_malloc;
460             for (ctr=1; ctr<=i; ctr++) xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];
461           }
462           xdata[i][ct2++] = row;
463           ct3++;
464         }
465       }
466       for (k=oct2,max2=ct2; k<max2; k++)  {
467         row   = xdata[i][k] - rstart;
468         start = ai[row];
469         end   = ai[row+1];
470         for (l=start; l<end; l++) {
471           val = aj[l] + cstart;
472           if (!PetscBTLookupSet(xtable,val)) {
473             if (!(ct3 < mem_estimate)) {
474               new_estimate = (PetscInt)(1.5*mem_estimate)+1;
475               ierr         = PetscMalloc1(new_estimate,&tmp);CHKERRQ(ierr);
476               ierr         = PetscMemcpy(tmp,xdata[0],mem_estimate*sizeof(PetscInt));CHKERRQ(ierr);
477               ierr         = PetscFree(xdata[0]);CHKERRQ(ierr);
478               xdata[0]     = tmp;
479               mem_estimate = new_estimate; ++no_malloc;
480               for (ctr=1; ctr<=i; ctr++) xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];
481             }
482             xdata[i][ct2++] = val;
483             ct3++;
484           }
485         }
486         start = bi[row];
487         end   = bi[row+1];
488         for (l=start; l<end; l++) {
489           val = garray[bj[l]];
490           if (!PetscBTLookupSet(xtable,val)) {
491             if (!(ct3 < mem_estimate)) {
492               new_estimate = (PetscInt)(1.5*mem_estimate)+1;
493               ierr         = PetscMalloc1(new_estimate,&tmp);CHKERRQ(ierr);
494               ierr         = PetscMemcpy(tmp,xdata[0],mem_estimate*sizeof(PetscInt));CHKERRQ(ierr);
495               ierr         = PetscFree(xdata[0]);CHKERRQ(ierr);
496               xdata[0]     = tmp;
497               mem_estimate = new_estimate; ++no_malloc;
498               for (ctr =1; ctr <=i; ctr++) xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];
499             }
500             xdata[i][ct2++] = val;
501             ct3++;
502           }
503         }
504       }
505       /* Update the header*/
506       xdata[i][2*j]   = ct2 - oct2; /* Undo the vector isz1 and use only a var*/
507       xdata[i][2*j-1] = rbuf_i[2*j-1];
508     }
509     xdata[i][0] = rbuf_0;
510     xdata[i+1]  = xdata[i] + ct2;
511     isz1[i]     = ct2; /* size of each message */
512   }
513   ierr = PetscBTDestroy(&xtable);CHKERRQ(ierr);
514   ierr = PetscInfo3(C,"Allocated %D PetscInts, used %D, number of mallocs = %D\n",mem_estimate,ct3,no_malloc);CHKERRQ(ierr);
515   PetscFunctionReturn(0);
516 }
517 
518 PetscErrorCode MatGetSubMatrices_MPIBAIJ(Mat C,PetscInt ismax,const IS isrow[],const IS iscol[],MatReuse scall,Mat *submat[])
519 {
520   IS             *isrow_block,*iscol_block;
521   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
522   PetscErrorCode ierr;
523   PetscInt       nmax,nstages_local,nstages,i,pos,max_no,N=C->cmap->N,bs=C->rmap->bs;
524   Mat_SeqBAIJ    *subc;
525   Mat_SubMat     *smat;
526 
527   PetscFunctionBegin;
528   /* The compression and expansion should be avoided. It doesn't flag
529      errors, and it might change the indices, hence it is buggy */
530   ierr = PetscMalloc2(ismax+1,&isrow_block,ismax+1,&iscol_block);CHKERRQ(ierr);
531   ierr = ISCompressIndicesGeneral(N,C->rmap->n,bs,ismax,isrow,isrow_block);CHKERRQ(ierr);
532   ierr = ISCompressIndicesGeneral(N,C->cmap->n,bs,ismax,iscol,iscol_block);CHKERRQ(ierr);
533 
534   /* Determine the number of stages through which submatrices are done */
535   nmax = 20*1000000 / (c->Nbs * sizeof(PetscInt));
536   if (!nmax) nmax = 1;
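  /* For example (numbers illustrative only), with Nbs = 500000 block columns and
     4-byte PetscInt this gives nmax = 20000000/2000000 = 10, i.e. at most 10
     submatrices are extracted per stage, roughly bounding the O(Nbs) per-index-set
     work arrays at about 20 MB per stage */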
537 
538   if (scall == MAT_INITIAL_MATRIX) {
539     nstages_local = ismax/nmax + ((ismax % nmax) ? 1 : 0);
540 
541     /* Make sure every processor loops through the nstages */
542     ierr = MPIU_Allreduce(&nstages_local,&nstages,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)C));CHKERRQ(ierr);
543 
544     ierr = PetscMalloc1(ismax+1,submat);CHKERRQ(ierr);
545   } else {
546     subc = (Mat_SeqBAIJ*)((*submat)[0]->data);
547     smat   = subc->submatis1;
548     if (!smat) {
549       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"MatGetSubMatrices(...,MAT_REUSE_MATRIX,...) requires submat");
550     }
551     nstages = smat->nstages;
552   }
553 
554   for (i=0,pos=0; i<nstages; i++) {
555     if (pos+nmax <= ismax) max_no = nmax;
556     else if (pos == ismax) max_no = 0;
557     else                   max_no = ismax-pos;
558 
559     ierr = MatGetSubMatrices_MPIBAIJ_local(C,max_no,isrow_block+pos,iscol_block+pos,scall,*submat+pos);CHKERRQ(ierr);
560     pos += max_no;
561   }
562 
563   if (scall == MAT_INITIAL_MATRIX && ismax) {
564     /* save nstages for reuse */
565     subc = (Mat_SeqBAIJ*)((*submat)[0]->data);
566     smat = subc->submatis1;
567     if (!smat) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"smat does not exist");
568     smat->nstages = nstages;
569   }
570 
571   for (i=0; i<ismax; i++) {
572     ierr = ISDestroy(&isrow_block[i]);CHKERRQ(ierr);
573     ierr = ISDestroy(&iscol_block[i]);CHKERRQ(ierr);
574   }
575   ierr = PetscFree2(isrow_block,iscol_block);CHKERRQ(ierr);
576   PetscFunctionReturn(0);
577 }
578 
579 #if defined(PETSC_USE_CTABLE)
580 PetscErrorCode PetscGetProc(const PetscInt row, const PetscMPIInt size, const PetscInt proc_gnode[], PetscMPIInt *rank)
581 {
582   PetscInt       nGlobalNd = proc_gnode[size];
583   PetscMPIInt    fproc;
584   PetscErrorCode ierr;
585 
586   PetscFunctionBegin;
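  /* First guess the owning process proportionally (row*size/nGlobalNd, rounded),
     then correct the guess by walking up or down the ownership ranges in
     proc_gnode[] until proc_gnode[fproc] <= row < proc_gnode[fproc+1] */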
587   ierr = PetscMPIIntCast((PetscInt)(((float)row * (float)size / (float)nGlobalNd + 0.5)),&fproc);CHKERRQ(ierr);
588   if (fproc > size) fproc = size;
589   while (row < proc_gnode[fproc] || row >= proc_gnode[fproc+1]) {
590     if (row < proc_gnode[fproc]) fproc--;
591     else                         fproc++;
592   }
593   *rank = fproc;
594   PetscFunctionReturn(0);
595 }
596 #endif
597 
598 /* -------------------------------------------------------------------------*/
599 /* This code is used for BAIJ and SBAIJ matrices (unfortunate dependency) */
600 
601 PetscErrorCode MatDestroy_MPIBAIJ_MatGetSubmatrices(Mat C)
602 {
603   PetscErrorCode ierr;
604   Mat_SeqBAIJ    *c = (Mat_SeqBAIJ*)C->data;
605   Mat_SubMat     *submatj = c->submatis1;
606   PetscInt       i;
607 
608   PetscFunctionBegin;
609   if (!submatj->id) { /* delete data that are linked only to submats[id=0] */
610     ierr = PetscFree4(submatj->sbuf1,submatj->ptr,submatj->tmp,submatj->ctr);CHKERRQ(ierr);
611 
612     for (i=0; i<submatj->nrqr; ++i) {
613       ierr = PetscFree(submatj->sbuf2[i]);CHKERRQ(ierr);
614     }
615     ierr = PetscFree3(submatj->sbuf2,submatj->req_size,submatj->req_source1);CHKERRQ(ierr);
616 
617     if (submatj->rbuf1) {
618       ierr = PetscFree(submatj->rbuf1[0]);CHKERRQ(ierr);
619       ierr = PetscFree(submatj->rbuf1);CHKERRQ(ierr);
620     }
621 
622     for (i=0; i<submatj->nrqs; ++i) {
623       ierr = PetscFree(submatj->rbuf3[i]);CHKERRQ(ierr);
624     }
625     ierr = PetscFree3(submatj->req_source2,submatj->rbuf2,submatj->rbuf3);CHKERRQ(ierr);
626     ierr = PetscFree(submatj->pa);CHKERRQ(ierr);
627   }
628 
629 #if defined(PETSC_USE_CTABLE)
630   ierr = PetscTableDestroy((PetscTable*)&submatj->rmap);CHKERRQ(ierr);
631   if (submatj->cmap_loc) {ierr = PetscFree(submatj->cmap_loc);CHKERRQ(ierr);}
632   ierr = PetscFree(submatj->rmap_loc);CHKERRQ(ierr);
633 #else
634   ierr = PetscFree(submatj->rmap);CHKERRQ(ierr);
635 #endif
636 
637   if (!submatj->allcolumns) {
638 #if defined(PETSC_USE_CTABLE)
639     ierr = PetscTableDestroy((PetscTable*)&submatj->cmap);CHKERRQ(ierr);
640 #else
641     ierr = PetscFree(submatj->cmap);CHKERRQ(ierr);
642 #endif
643   }
644   ierr = submatj->destroy(C);CHKERRQ(ierr);
645   ierr = PetscFree(submatj->row2proc);CHKERRQ(ierr);
646 
647   ierr = PetscFree(submatj);CHKERRQ(ierr);
648   PetscFunctionReturn(0);
649 }
650 
651 PetscErrorCode MatGetSubMatrices_MPIBAIJ_local(Mat C,PetscInt ismax,const IS isrow[],const IS iscol[],MatReuse scall,Mat *submats)
652 {
653   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
654   Mat            A  = c->A;
655   Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)c->B->data,*subc;
656   const PetscInt **icol,**irow;
657   PetscInt       *nrow,*ncol,start;
658   PetscErrorCode ierr;
659   PetscMPIInt    rank,size,tag0,tag2,tag3,tag4,*w1,*w2,*w3,*w4,nrqr;
660   PetscInt       **sbuf1,**sbuf2,i,j,k,l,ct1,ct2,**rbuf1,row,proc=-1;
661   PetscInt       nrqs=0,msz,**ptr=NULL,*req_size=NULL,*ctr=NULL,*pa,*tmp=NULL,tcol;
662   PetscInt       **rbuf3=NULL,*req_source1=NULL,*req_source2,**sbuf_aj,**rbuf2=NULL,max1,max2;
663   PetscInt       **lens,is_no,ncols,*cols,mat_i,*mat_j,tmp2,jmax;
664 #if defined(PETSC_USE_CTABLE)
665   PetscTable     *cmap,cmap_i=NULL,*rmap,rmap_i;
666 #else
667   PetscInt       **cmap,*cmap_i=NULL,**rmap,*rmap_i;
668 #endif
669   const PetscInt *irow_i;
670   PetscInt       ctr_j,*sbuf1_j,*sbuf_aj_i,*rbuf1_i,kmax,*lens_i;
671   MPI_Request    *s_waits1,*r_waits1,*s_waits2,*r_waits2,*r_waits3;
672   MPI_Request    *r_waits4,*s_waits3,*s_waits4;
673   MPI_Status     *r_status1,*r_status2,*s_status1,*s_status3,*s_status2;
674   MPI_Status     *r_status3,*r_status4,*s_status4;
675   MPI_Comm       comm;
676   PetscScalar    **rbuf4,*rbuf4_i=NULL,**sbuf_aa,*vals,*mat_a=NULL,*imat_a,*sbuf_aa_i;
677   PetscMPIInt    *onodes1,*olengths1,end;
678   PetscInt       **row2proc,*row2proc_i,*imat_ilen,*imat_j,*imat_i;
679   Mat_SubMat     **smats,*smat_i;
680   PetscBool      *issorted,colflag,iscsorted=PETSC_TRUE;
681   PetscInt       *sbuf1_i,*rbuf2_i,*rbuf3_i,ilen;
682   PetscInt       bs=C->rmap->bs,bs2=c->bs2,rstart = c->rstartbs;
683   PetscBool      ijonly=c->ijonly; /* private flag indicates only matrix data structures are requested */
684   PetscInt       nzA,nzB,*a_i=a->i,*b_i=b->i,*a_j = a->j,*b_j = b->j,ctmp,imark,*cworkA,*cworkB;
685   PetscScalar    *vworkA=NULL,*vworkB=NULL,*a_a = a->a,*b_a = b->a;
686   PetscInt       cstart = c->cstartbs,*bmap = c->garray;
687   PetscBool      *allrows,*allcolumns;
688 
689   PetscFunctionBegin;
690   ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
691   size = c->size;
692   rank = c->rank;
693 
694   ierr = PetscMalloc6(ismax,&smats,ismax,&row2proc,ismax,&cmap,ismax,&rmap,ismax,&allcolumns,ismax,&allrows);CHKERRQ(ierr);
695   ierr = PetscMalloc5(ismax,&irow,ismax,&icol,ismax,&nrow,ismax,&ncol,ismax,&issorted);CHKERRQ(ierr);
696 
697   for (i=0; i<ismax; i++) {
698     ierr = ISSorted(iscol[i],&issorted[i]);CHKERRQ(ierr);
699     if (!issorted[i]) iscsorted = issorted[i]; /* columns are not sorted! */
700     ierr = ISSorted(isrow[i],&issorted[i]);CHKERRQ(ierr);
701 
702     /* Check for special case: allcolumns */
703     ierr = ISIdentity(iscol[i],&colflag);CHKERRQ(ierr);
704     ierr = ISGetLocalSize(iscol[i],&ncol[i]);CHKERRQ(ierr);
705 
706     if (colflag && ncol[i] == c->Nbs) {
707       allcolumns[i] = PETSC_TRUE;
708       icol[i]       = NULL;
709     } else {
710       allcolumns[i] = PETSC_FALSE;
711       ierr = ISGetIndices(iscol[i],&icol[i]);CHKERRQ(ierr);
712     }
713 
714     /* Check for special case: allrows */
715     ierr = ISIdentity(isrow[i],&colflag);CHKERRQ(ierr);
716     ierr = ISGetLocalSize(isrow[i],&nrow[i]);CHKERRQ(ierr);
717     if (colflag && nrow[i] == c->Mbs) {
718       allrows[i] = PETSC_TRUE;
719       irow[i]    = NULL;
720     } else {
721       allrows[i] = PETSC_FALSE;
722       ierr = ISGetIndices(isrow[i],&irow[i]);CHKERRQ(ierr);
723     }
724   }
725 
726   if (scall == MAT_REUSE_MATRIX) {
727     /* Assumes new rows are same length as the old rows */
728     for (i=0; i<ismax; i++) {
729       subc = (Mat_SeqBAIJ*)(submats[i]->data);
730       if (subc->mbs != nrow[i] || subc->nbs != ncol[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix, wrong size");
731 
732       /* Initialize the matrix as if empty */
733       ierr = PetscMemzero(subc->ilen,subc->mbs*sizeof(PetscInt));CHKERRQ(ierr);
734 
735       /* The submatrix inherits the factor type of C */
736       submats[i]->factortype = C->factortype;
737 
738       smat_i   = subc->submatis1;
739       smats[i] = smat_i;
740 
741       nrqs        = smat_i->nrqs;
742       nrqr        = smat_i->nrqr;
743       rbuf1       = smat_i->rbuf1;
744       rbuf2       = smat_i->rbuf2;
745       rbuf3       = smat_i->rbuf3;
746       req_source2 = smat_i->req_source2;
747 
748       sbuf1     = smat_i->sbuf1;
749       sbuf2     = smat_i->sbuf2;
750       ptr       = smat_i->ptr;
751       tmp       = smat_i->tmp;
752       ctr       = smat_i->ctr;
753 
754       pa          = smat_i->pa;
755       req_size    = smat_i->req_size;
756       req_source1 = smat_i->req_source1;
757 
758       allcolumns[i] = smat_i->allcolumns;
759       allrows[i]    = smat_i->allrows;
760       row2proc[i]   = smat_i->row2proc;
761       rmap[i]       = smat_i->rmap;
762       cmap[i]       = smat_i->cmap;
763     }
764   } else { /* scall == MAT_INITIAL_MATRIX */
765     /* Get some new tags to keep the communication clean */
766     ierr = PetscObjectGetNewTag((PetscObject)C,&tag2);CHKERRQ(ierr);
767     ierr = PetscObjectGetNewTag((PetscObject)C,&tag3);CHKERRQ(ierr);
768 
769     /* evaluate communication - to whom messages go, their lengths, and the buffer space
770        required. Based on this, buffers are allocated and data is copied into them */
771     ierr = PetscCalloc4(size,&w1,size,&w2,size,&w3,size,&w4);CHKERRQ(ierr);   /* mesg size, initialize work vectors */
772 
773     for (i=0; i<ismax; i++) {
774       jmax   = nrow[i];
775       irow_i = irow[i];
776 
777       ierr   = PetscMalloc1(jmax,&row2proc_i);CHKERRQ(ierr);
778       row2proc[i] = row2proc_i;
779 
780       if (issorted[i]) proc = 0;
781       for (j=0; j<jmax; j++) {
782         if (!issorted[i]) proc = 0;
783         if (allrows[i]) row = j;
784         else row = irow_i[j];
785 
786         while (row >= c->rangebs[proc+1]) proc++;
787         w4[proc]++;
788         row2proc_i[j] = proc; /* map row index to proc */
789       }
790       for (j=0; j<size; j++) {
791         if (w4[j]) { w1[j] += w4[j];  w3[j]++; w4[j] = 0;}
792       }
793     }
794 
795     nrqs     = 0;              /* no of outgoing messages */
796     msz      = 0;              /* total mesg length (for all procs) */
797     w1[rank] = 0;              /* no mesg sent to self */
798     w3[rank] = 0;
799     for (i=0; i<size; i++) {
800       if (w1[i])  { w2[i] = 1; nrqs++;} /* there exists a message to proc i */
801     }
802     ierr = PetscMalloc1(nrqs+1,&pa);CHKERRQ(ierr); /*(proc -array)*/
803     for (i=0,j=0; i<size; i++) {
804       if (w1[i]) { pa[j] = i; j++; }
805     }
806 
807     /* Each message would have a header = 1 + 2*(no of IS) + data */
808     for (i=0; i<nrqs; i++) {
809       j      = pa[i];
810       w1[j] += w2[j] + 2* w3[j];
811       msz   += w1[j];
812     }
813     ierr = PetscInfo2(0,"Number of outgoing messages %D Total message length %D\n",nrqs,msz);CHKERRQ(ierr);
814 
815     /* Determine the number of messages to expect, their lengths, and the from-ids */
816     ierr = PetscGatherNumberOfMessages(comm,w2,w1,&nrqr);CHKERRQ(ierr);
817     ierr = PetscGatherMessageLengths(comm,nrqs,nrqr,w1,&onodes1,&olengths1);CHKERRQ(ierr);
818 
819     /* Now post the Irecvs corresponding to these messages */
820     tag0 = ((PetscObject)C)->tag;
821     ierr = PetscPostIrecvInt(comm,tag0,nrqr,onodes1,olengths1,&rbuf1,&r_waits1);CHKERRQ(ierr);
822 
823     ierr = PetscFree(onodes1);CHKERRQ(ierr);
824     ierr = PetscFree(olengths1);CHKERRQ(ierr);
825 
826     /* Allocate Memory for outgoing messages */
827     ierr = PetscMalloc4(size,&sbuf1,size,&ptr,2*msz,&tmp,size,&ctr);CHKERRQ(ierr);
828     ierr = PetscMemzero(sbuf1,size*sizeof(PetscInt*));CHKERRQ(ierr);
829     ierr = PetscMemzero(ptr,size*sizeof(PetscInt*));CHKERRQ(ierr);
830 
831     {
832       PetscInt *iptr = tmp;
833       k    = 0;
834       for (i=0; i<nrqs; i++) {
835         j        = pa[i];
836         iptr    += k;
837         sbuf1[j] = iptr;
838         k        = w1[j];
839       }
840     }
841 
842     /* Form the outgoing messages. Initialize the header space */
843     for (i=0; i<nrqs; i++) {
844       j           = pa[i];
845       sbuf1[j][0] = 0;
846       ierr        = PetscMemzero(sbuf1[j]+1,2*w3[j]*sizeof(PetscInt));CHKERRQ(ierr);
847       ptr[j]      = sbuf1[j] + 2*w3[j] + 1;
848     }
849 
850     /* Parse the isrow and copy data into outbuf */
851     for (i=0; i<ismax; i++) {
852       row2proc_i = row2proc[i];
853       ierr   = PetscMemzero(ctr,size*sizeof(PetscInt));CHKERRQ(ierr);
854       irow_i = irow[i];
855       jmax   = nrow[i];
856       for (j=0; j<jmax; j++) {  /* parse the indices of each IS */
857         proc = row2proc_i[j];
858         if (allrows[i]) row = j;
859         else row = irow_i[j];
860 
861         if (proc != rank) { /* copy to the outgoing buf*/
862           ctr[proc]++;
863           *ptr[proc] = row;
864           ptr[proc]++;
865         }
866       }
867       /* Update the headers for the current IS */
868       for (j=0; j<size; j++) { /* Can Optimise this loop too */
869         if ((ctr_j = ctr[j])) {
870           sbuf1_j        = sbuf1[j];
871           k              = ++sbuf1_j[0];
872           sbuf1_j[2*k]   = ctr_j;
873           sbuf1_j[2*k-1] = i;
874         }
875       }
876     }
877 
878     /*  Now  post the sends */
879     ierr = PetscMalloc1(nrqs+1,&s_waits1);CHKERRQ(ierr);
880     for (i=0; i<nrqs; ++i) {
881       j    = pa[i];
882       ierr = MPI_Isend(sbuf1[j],w1[j],MPIU_INT,j,tag0,comm,s_waits1+i);CHKERRQ(ierr);
883     }
884 
885     /* Post Receives to capture the buffer size */
886     ierr = PetscMalloc1(nrqs+1,&r_waits2);CHKERRQ(ierr);
887     ierr = PetscMalloc3(nrqs+1,&req_source2,nrqs+1,&rbuf2,nrqs+1,&rbuf3);CHKERRQ(ierr);
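    /* rbuf2[] needs no separate allocation: the upper half of tmp (allocated
       with length 2*msz above) is used to receive the size replies */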
888     rbuf2[0] = tmp + msz;
889     for (i=1; i<nrqs; ++i) {
890       rbuf2[i] = rbuf2[i-1]+w1[pa[i-1]];
891     }
892     for (i=0; i<nrqs; ++i) {
893       j    = pa[i];
894       ierr = MPI_Irecv(rbuf2[i],w1[j],MPIU_INT,j,tag2,comm,r_waits2+i);CHKERRQ(ierr);
895     }
896 
897     /* Send to other procs the buf size they should allocate */
898     /* Receive messages*/
899     ierr = PetscMalloc1(nrqr+1,&s_waits2);CHKERRQ(ierr);
900     ierr = PetscMalloc1(nrqr+1,&r_status1);CHKERRQ(ierr);
901     ierr = PetscMalloc3(nrqr,&sbuf2,nrqr,&req_size,nrqr,&req_source1);CHKERRQ(ierr);
902     {
903       PetscInt   *sAi = a->i,*sBi = b->i,id;
904       PetscInt   *sbuf2_i;
905 
906       ierr = MPI_Waitall(nrqr,r_waits1,r_status1);CHKERRQ(ierr);
907       for (i=0; i<nrqr; ++i) {
908         req_size[i] = 0;
909         rbuf1_i        = rbuf1[i];
910         start          = 2*rbuf1_i[0] + 1;
911         ierr           = MPI_Get_count(r_status1+i,MPIU_INT,&end);CHKERRQ(ierr);
912         ierr           = PetscMalloc1(end+1,&sbuf2[i]);CHKERRQ(ierr);
913         sbuf2_i        = sbuf2[i];
914         for (j=start; j<end; j++) {
915           id              = rbuf1_i[j] - rstart;
916           ncols           = sAi[id+1] - sAi[id] + sBi[id+1] - sBi[id];
917           sbuf2_i[j]      = ncols;
918           req_size[i] += ncols;
919         }
920         req_source1[i] = r_status1[i].MPI_SOURCE;
921         /* form the header */
922         sbuf2_i[0] = req_size[i];
923         for (j=1; j<start; j++) sbuf2_i[j] = rbuf1_i[j];
924 
925         ierr = MPI_Isend(sbuf2_i,end,MPIU_INT,req_source1[i],tag2,comm,s_waits2+i);CHKERRQ(ierr);
926       }
927     }
928     ierr = PetscFree(r_status1);CHKERRQ(ierr);
929     ierr = PetscFree(r_waits1);CHKERRQ(ierr);
930     ierr = PetscFree4(w1,w2,w3,w4);CHKERRQ(ierr);
931 
932     /* Receive messages*/
933     ierr = PetscMalloc1(nrqs+1,&r_waits3);CHKERRQ(ierr);
934     ierr = PetscMalloc1(nrqs+1,&r_status2);CHKERRQ(ierr);
935 
936     ierr = MPI_Waitall(nrqs,r_waits2,r_status2);CHKERRQ(ierr);
937     for (i=0; i<nrqs; ++i) {
938       ierr = PetscMalloc1(rbuf2[i][0]+1,&rbuf3[i]);CHKERRQ(ierr);
939       req_source2[i] = r_status2[i].MPI_SOURCE;
940       ierr = MPI_Irecv(rbuf3[i],rbuf2[i][0],MPIU_INT,req_source2[i],tag3,comm,r_waits3+i);CHKERRQ(ierr);
941     }
942     ierr = PetscFree(r_status2);CHKERRQ(ierr);
943     ierr = PetscFree(r_waits2);CHKERRQ(ierr);
944 
945     /* Wait on sends1 and sends2 */
946     ierr = PetscMalloc1(nrqs+1,&s_status1);CHKERRQ(ierr);
947     ierr = PetscMalloc1(nrqr+1,&s_status2);CHKERRQ(ierr);
948 
949     if (nrqs) {ierr = MPI_Waitall(nrqs,s_waits1,s_status1);CHKERRQ(ierr);}
950     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits2,s_status2);CHKERRQ(ierr);}
951     ierr = PetscFree(s_status1);CHKERRQ(ierr);
952     ierr = PetscFree(s_status2);CHKERRQ(ierr);
953     ierr = PetscFree(s_waits1);CHKERRQ(ierr);
954     ierr = PetscFree(s_waits2);CHKERRQ(ierr);
955 
956     /* Now allocate sending buffers for a->j, and send them off */
957     ierr = PetscMalloc1(nrqr+1,&sbuf_aj);CHKERRQ(ierr);
958     for (i=0,j=0; i<nrqr; i++) j += req_size[i];
959     ierr = PetscMalloc1(j+1,&sbuf_aj[0]);CHKERRQ(ierr);
960     for (i=1; i<nrqr; i++) sbuf_aj[i] = sbuf_aj[i-1] + req_size[i-1];
961 
962     ierr = PetscMalloc1(nrqr+1,&s_waits3);CHKERRQ(ierr);
963     {
964 
965       for (i=0; i<nrqr; i++) {
966         rbuf1_i   = rbuf1[i];
967         sbuf_aj_i = sbuf_aj[i];
968         ct1       = 2*rbuf1_i[0] + 1;
969         ct2       = 0;
970         for (j=1,max1=rbuf1_i[0]; j<=max1; j++) {
971           kmax = rbuf1[i][2*j];
972           for (k=0; k<kmax; k++,ct1++) {
973             row    = rbuf1_i[ct1] - rstart;
974             nzA    = a_i[row+1] - a_i[row]; nzB = b_i[row+1] - b_i[row];
975             ncols  = nzA + nzB;
976             cworkA = a_j + a_i[row]; cworkB = b_j + b_i[row];
977 
978             /* load the column indices for this row into cols */
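            /* the columns are packed in three runs - off-diagonal block columns
               below the diagonal block (bmap[] < cstart), then the diagonal block
               columns shifted by cstart, then the remaining off-diagonal columns -
               so the packed row ends up in ascending global block-column order */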
979             cols = sbuf_aj_i + ct2;
980             for (l=0; l<nzB; l++) {
981               if ((ctmp = bmap[cworkB[l]]) < cstart) cols[l] = ctmp;
982               else break;
983             }
984             imark = l;
985             for (l=0; l<nzA; l++) {cols[imark+l] = cstart + cworkA[l];}
986             for (l=imark; l<nzB; l++) cols[nzA+l] = bmap[cworkB[l]];
987             ct2 += ncols;
988           }
989         }
990         ierr = MPI_Isend(sbuf_aj_i,req_size[i],MPIU_INT,req_source1[i],tag3,comm,s_waits3+i);CHKERRQ(ierr);
991       }
992     }
993     ierr = PetscMalloc2(nrqs+1,&r_status3,nrqr+1,&s_status3);CHKERRQ(ierr);
994 
995     /* create col map: global col of C -> local col of submatrices */
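    /* In the PETSC_USE_CTABLE branch keys and values are shifted by one because a
       PetscTable cannot store 0; in the plain-array branch a value of 0 means the
       column is not in the submatrix.  Either way the stored value is the local
       column + 1, which is why tcol - 1 is used wherever the map is applied below */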
996     const PetscInt *icol_i;
997 #if defined(PETSC_USE_CTABLE)
998     for (i=0; i<ismax; i++) {
999       if (!allcolumns[i]) {
1000         ierr = PetscTableCreate(ncol[i]+1,c->Nbs+1,&cmap[i]);CHKERRQ(ierr);
1001 
1002         jmax   = ncol[i];
1003         icol_i = icol[i];
1004         cmap_i = cmap[i];
1005         for (j=0; j<jmax; j++) {
1006           ierr = PetscTableAdd(cmap[i],icol_i[j]+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
1007         }
1008       } else cmap[i] = NULL;
1009     }
1010 #else
1011     for (i=0; i<ismax; i++) {
1012       if (!allcolumns[i]) {
1013         ierr   = PetscCalloc1(c->Nbs,&cmap[i]);CHKERRQ(ierr);
1014         jmax   = ncol[i];
1015         icol_i = icol[i];
1016         cmap_i = cmap[i];
1017         for (j=0; j<jmax; j++) cmap_i[icol_i[j]] = j+1;
1018       } else cmap[i] = NULL;
1019     }
1020 #endif
1021 
1022     /* Create lens which is required for MatCreate... */
1023     for (i=0,j=0; i<ismax; i++) j += nrow[i];
1024     ierr = PetscMalloc1(ismax,&lens);CHKERRQ(ierr);
1025 
1026     if (ismax) {
1027       ierr = PetscCalloc1(j,&lens[0]);CHKERRQ(ierr);
1028     }
1029     for (i=1; i<ismax; i++) lens[i] = lens[i-1] + nrow[i-1];
1030 
1031     /* Update lens from local data */
1032     for (i=0; i<ismax; i++) {
1033       row2proc_i = row2proc[i];
1034       jmax = nrow[i];
1035       if (!allcolumns[i]) cmap_i = cmap[i];
1036       irow_i = irow[i];
1037       lens_i = lens[i];
1038       for (j=0; j<jmax; j++) {
1039         if (allrows[i]) row = j;
1040         else row = irow_i[j]; /* global blocked row of C */
1041 
1042         proc = row2proc_i[j];
1043         if (proc == rank) {
1044           /* Get indices from matA and then from matB */
1045 #if defined(PETSC_USE_CTABLE)
1046           PetscInt   tt;
1047 #endif
1048           row    = row - rstart;
1049           nzA    = a_i[row+1] - a_i[row];
1050           nzB    = b_i[row+1] - b_i[row];
1051           cworkA =  a_j + a_i[row];
1052           cworkB = b_j + b_i[row];
1053 
1054           if (!allcolumns[i]) {
1055 #if defined(PETSC_USE_CTABLE)
1056             for (k=0; k<nzA; k++) {
1057               ierr = PetscTableFind(cmap_i,cstart+cworkA[k]+1,&tt);CHKERRQ(ierr);
1058               if (tt) lens_i[j]++;
1059             }
1060             for (k=0; k<nzB; k++) {
1061               ierr = PetscTableFind(cmap_i,bmap[cworkB[k]]+1,&tt);CHKERRQ(ierr);
1062               if (tt) lens_i[j]++;
1063             }
1064 
1065 #else
1066             for (k=0; k<nzA; k++) {
1067               if (cmap_i[cstart + cworkA[k]]) lens_i[j]++;
1068             }
1069             for (k=0; k<nzB; k++) {
1070               if (cmap_i[bmap[cworkB[k]]]) lens_i[j]++;
1071             }
1072 #endif
1073           } else { /* allcolumns */
1074             lens_i[j] = nzA + nzB;
1075           }
1076         }
1077       }
1078     }
1079 
1080     /* Create row map: global row of C -> local row of submatrices */
1081     for (i=0; i<ismax; i++) {
1082       if (!allrows[i]) {
1083 #if defined(PETSC_USE_CTABLE)
1084         ierr   = PetscTableCreate(nrow[i]+1,c->Mbs+1,&rmap[i]);CHKERRQ(ierr);
1085         irow_i = irow[i];
1086         jmax   = nrow[i];
1087         for (j=0; j<jmax; j++) {
1088           if (allrows[i]) {
1089             ierr = PetscTableAdd(rmap[i],j+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
1090           } else {
1091             ierr = PetscTableAdd(rmap[i],irow_i[j]+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
1092           }
1093         }
1094 #else
1095         ierr   = PetscCalloc1(c->Mbs,&rmap[i]);CHKERRQ(ierr);
1096         rmap_i = rmap[i];
1097         irow_i = irow[i];
1098         jmax   = nrow[i];
1099         for (j=0; j<jmax; j++) {
1100           if (allrows[i]) rmap_i[j] = j;
1101           else rmap_i[irow_i[j]] = j;
1102         }
1103 #endif
1104       } else rmap[i] = NULL;
1105     }
1106 
1107     /* Update lens from offproc data */
1108     {
1109       PetscInt *rbuf2_i,*rbuf3_i,*sbuf1_i;
1110 
1111       ierr    = MPI_Waitall(nrqs,r_waits3,r_status3);CHKERRQ(ierr);
1112       for (tmp2=0; tmp2<nrqs; tmp2++) {
1113         sbuf1_i = sbuf1[pa[tmp2]];
1114         jmax    = sbuf1_i[0];
1115         ct1     = 2*jmax+1;
1116         ct2     = 0;
1117         rbuf2_i = rbuf2[tmp2];
1118         rbuf3_i = rbuf3[tmp2];
1119         for (j=1; j<=jmax; j++) {
1120           is_no  = sbuf1_i[2*j-1];
1121           max1   = sbuf1_i[2*j];
1122           lens_i = lens[is_no];
1123           if (!allcolumns[is_no]) cmap_i = cmap[is_no];
1124           rmap_i = rmap[is_no];
1125           for (k=0; k<max1; k++,ct1++) {
1126             if (allrows[is_no]) {
1127               row = sbuf1_i[ct1];
1128             } else {
1129 #if defined(PETSC_USE_CTABLE)
1130               ierr = PetscTableFind(rmap_i,sbuf1_i[ct1]+1,&row);CHKERRQ(ierr);
1131               row--;
1132               if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
1133 #else
1134               row = rmap_i[sbuf1_i[ct1]]; /* the val in the new matrix to be */
1135 #endif
1136             }
1137             max2 = rbuf2_i[ct1];
1138             for (l=0; l<max2; l++,ct2++) {
1139               if (!allcolumns[is_no]) {
1140 #if defined(PETSC_USE_CTABLE)
1141                 ierr = PetscTableFind(cmap_i,rbuf3_i[ct2]+1,&tcol);CHKERRQ(ierr);
1142 #else
1143                 tcol = cmap_i[rbuf3_i[ct2]];
1144 #endif
1145                 if (tcol) lens_i[row]++;
1146               } else { /* allcolumns */
1147                 lens_i[row]++; /* lens_i[row] += max2 ? */
1148               }
1149             }
1150           }
1151         }
1152       }
1153     }
1154     ierr = PetscFree(r_waits3);CHKERRQ(ierr);
1155     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits3,s_status3);CHKERRQ(ierr);}
1156     ierr = PetscFree2(r_status3,s_status3);CHKERRQ(ierr);
1157     ierr = PetscFree(s_waits3);CHKERRQ(ierr);
1158 
1159     /* Create the submatrices */
1160     for (i=0; i<ismax; i++) {
1161       PetscInt bs_tmp;
1162       if (ijonly) bs_tmp = 1;
1163       else        bs_tmp = bs;
1164 
1165       ierr = MatCreate(PETSC_COMM_SELF,submats+i);CHKERRQ(ierr);
1166       ierr = MatSetSizes(submats[i],nrow[i]*bs_tmp,ncol[i]*bs_tmp,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1167 
1168       ierr = MatSetType(submats[i],((PetscObject)A)->type_name);CHKERRQ(ierr);
1169       ierr = MatSeqBAIJSetPreallocation(submats[i],bs_tmp,0,lens[i]);CHKERRQ(ierr);
1170       ierr = MatSeqSBAIJSetPreallocation(submats[i],bs_tmp,0,lens[i]);CHKERRQ(ierr); /* this subroutine is used by SBAIJ routines */
1171 
1172       /* create struct Mat_SubMat and attach it to submat */
1173       ierr = PetscNew(&smat_i);CHKERRQ(ierr);
1174       subc = (Mat_SeqBAIJ*)submats[i]->data;
1175       subc->submatis1 = smat_i;
1176       smats[i]        = smat_i;
1177 
1178       smat_i->destroy          = submats[i]->ops->destroy;
1179       submats[i]->ops->destroy = MatDestroy_MPIBAIJ_MatGetSubmatrices;
1180       submats[i]->factortype   = C->factortype;
1181 
1182       smat_i->id          = i;
1183       smat_i->nrqs        = nrqs;
1184       smat_i->nrqr        = nrqr;
1185       smat_i->rbuf1       = rbuf1;
1186       smat_i->rbuf2       = rbuf2;
1187       smat_i->rbuf3       = rbuf3;
1188       smat_i->sbuf2       = sbuf2;
1189       smat_i->req_source2 = req_source2;
1190 
1191       smat_i->sbuf1       = sbuf1;
1192       smat_i->ptr         = ptr;
1193       smat_i->tmp         = tmp;
1194       smat_i->ctr         = ctr;
1195 
1196       smat_i->pa           = pa;
1197       smat_i->req_size     = req_size;
1198       smat_i->req_source1  = req_source1;
1199 
1200       smat_i->allcolumns  = allcolumns[i];
1201       smat_i->allrows     = allrows[i];
1202       smat_i->singleis    = PETSC_FALSE;
1203       smat_i->row2proc    = row2proc[i];
1204       smat_i->rmap        = rmap[i];
1205       smat_i->cmap        = cmap[i];
1206     }
1207 
1208     if (ismax) {ierr = PetscFree(lens[0]);CHKERRQ(ierr);}
1209     ierr = PetscFree(lens);CHKERRQ(ierr);
1210     ierr = PetscFree(sbuf_aj[0]);CHKERRQ(ierr);
1211     ierr = PetscFree(sbuf_aj);CHKERRQ(ierr);
1212 
1213   } /* end of scall == MAT_INITIAL_MATRIX */
1214 
1215   /* Post recv matrix values */
1216   if (!ijonly) {
1217     ierr = PetscObjectGetNewTag((PetscObject)C,&tag4);CHKERRQ(ierr);
1218     ierr = PetscMalloc1(nrqs+1,&rbuf4);CHKERRQ(ierr);
1219     ierr = PetscMalloc1(nrqs+1,&r_waits4);CHKERRQ(ierr);
1220     ierr = PetscMalloc1(nrqs+1,&r_status4);CHKERRQ(ierr);
1221     ierr = PetscMalloc1(nrqr+1,&s_status4);CHKERRQ(ierr);
1222     for (i=0; i<nrqs; ++i) {
1223       ierr = PetscMalloc1(rbuf2[i][0]*bs2,&rbuf4[i]);CHKERRQ(ierr);
1224       ierr = MPI_Irecv(rbuf4[i],rbuf2[i][0]*bs2,MPIU_SCALAR,req_source2[i],tag4,comm,r_waits4+i);CHKERRQ(ierr);
1225     }
1226 
1227     /* Allocate sending buffers for a->a, and send them off */
1228     ierr = PetscMalloc1(nrqr+1,&sbuf_aa);CHKERRQ(ierr);
1229     for (i=0,j=0; i<nrqr; i++) j += req_size[i];
1230 
1231     ierr = PetscMalloc1((j+1)*bs2,&sbuf_aa[0]);CHKERRQ(ierr);
1232     for (i=1; i<nrqr; i++) sbuf_aa[i] = sbuf_aa[i-1] + req_size[i-1]*bs2;
1233 
1234     ierr = PetscMalloc1(nrqr+1,&s_waits4);CHKERRQ(ierr);
1235 
1236     for (i=0; i<nrqr; i++) {
1237       rbuf1_i   = rbuf1[i];
1238       sbuf_aa_i = sbuf_aa[i];
1239       ct1       = 2*rbuf1_i[0]+1;
1240       ct2       = 0;
1241       for (j=1,max1=rbuf1_i[0]; j<=max1; j++) {
1242         kmax = rbuf1_i[2*j];
1243         for (k=0; k<kmax; k++,ct1++) {
1244           row    = rbuf1_i[ct1] - rstart;
1245           nzA    = a_i[row+1] - a_i[row];
1246           nzB    = b_i[row+1] - b_i[row];
1247           ncols  = nzA + nzB;
1248           cworkB = b_j + b_i[row];
1249           vworkA = a_a + a_i[row]*bs2;
1250           vworkB = b_a + b_i[row]*bs2;
1251 
1252           /* load the column values for this row into vals*/
1253           vals = sbuf_aa_i+ct2*bs2;
1254           for (l=0; l<nzB; l++) {
1255             if ((bmap[cworkB[l]]) < cstart) {
1256               ierr = PetscMemcpy(vals+l*bs2,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1257             } else break;
1258           }
1259           imark = l;
1260           for (l=0; l<nzA; l++) {
1261             ierr = PetscMemcpy(vals+(imark+l)*bs2,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1262           }
1263           for (l=imark; l<nzB; l++) {
1264             ierr = PetscMemcpy(vals+(nzA+l)*bs2,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1265           }
1266 
1267           ct2 += ncols;
1268         }
1269       }
1270       ierr = MPI_Isend(sbuf_aa_i,req_size[i]*bs2,MPIU_SCALAR,req_source1[i],tag4,comm,s_waits4+i);CHKERRQ(ierr);
1271     }
1272   }
1273 
1274   if (!ismax) {
1275     ierr = PetscFree(rbuf1[0]);CHKERRQ(ierr);
1276     ierr = PetscFree(rbuf1);CHKERRQ(ierr);
1277   }
1278 
1279   /* Assemble the matrices */
1280   /* First assemble the local rows */
1281   for (i=0; i<ismax; i++) {
1282     row2proc_i = row2proc[i];
1283     subc      = (Mat_SeqBAIJ*)submats[i]->data;
1284     imat_ilen = subc->ilen;
1285     imat_j    = subc->j;
1286     imat_i    = subc->i;
1287     imat_a    = subc->a;
1288 
1289     if (!allcolumns[i]) cmap_i = cmap[i];
1290     rmap_i = rmap[i];
1291     irow_i = irow[i];
1292     jmax   = nrow[i];
1293     for (j=0; j<jmax; j++) {
1294       if (allrows[i]) row = j;
1295       else row  = irow_i[j];
1296       proc = row2proc_i[j];
1297 
1298       if (proc == rank) {
1299 
1300         row    = row - rstart;
1301         nzA    = a_i[row+1] - a_i[row];
1302         nzB    = b_i[row+1] - b_i[row];
1303         cworkA = a_j + a_i[row];
1304         cworkB = b_j + b_i[row];
1305         if (!ijonly) {
1306           vworkA = a_a + a_i[row]*bs2;
1307           vworkB = b_a + b_i[row]*bs2;
1308         }
1309 
1310         if (allrows[i]) {
1311           row = row+rstart;
1312         } else {
1313 #if defined(PETSC_USE_CTABLE)
1314           ierr = PetscTableFind(rmap_i,row+rstart+1,&row);CHKERRQ(ierr);
1315           row--;
1316 
1317           if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
1318 #else
1319           row = rmap_i[row + rstart];
1320 #endif
1321         }
1322         mat_i = imat_i[row];
1323         if (!ijonly) mat_a = imat_a + mat_i*bs2;
1324         mat_j    = imat_j + mat_i;
1325         ilen = imat_ilen[row];
1326 
1327         /* load the column indices for this row into cols*/
1328         if (!allcolumns[i]) {
1329           for (l=0; l<nzB; l++) {
1330             if ((ctmp = bmap[cworkB[l]]) < cstart) {
1331 #if defined(PETSC_USE_CTABLE)
1332               ierr = PetscTableFind(cmap_i,ctmp+1,&tcol);CHKERRQ(ierr);
1333               if (tcol) {
1334 #else
1335               if ((tcol = cmap_i[ctmp])) {
1336 #endif
1337                 *mat_j++ = tcol - 1;
1338                 ierr     = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1339                 mat_a   += bs2;
1340                 ilen++;
1341               }
1342             } else break;
1343           }
1344           imark = l;
1345           for (l=0; l<nzA; l++) {
1346 #if defined(PETSC_USE_CTABLE)
1347             ierr = PetscTableFind(cmap_i,cstart+cworkA[l]+1,&tcol);CHKERRQ(ierr);
1348             if (tcol) {
1349 #else
1350             if ((tcol = cmap_i[cstart + cworkA[l]])) {
1351 #endif
1352               *mat_j++ = tcol - 1;
1353               if (!ijonly) {
1354                 ierr   = PetscMemcpy(mat_a,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1355                 mat_a += bs2;
1356               }
1357               ilen++;
1358             }
1359           }
1360           for (l=imark; l<nzB; l++) {
1361 #if defined(PETSC_USE_CTABLE)
1362             ierr = PetscTableFind(cmap_i,bmap[cworkB[l]]+1,&tcol);CHKERRQ(ierr);
1363             if (tcol) {
1364 #else
1365             if ((tcol = cmap_i[bmap[cworkB[l]]])) {
1366 #endif
1367               *mat_j++ = tcol - 1;
1368               if (!ijonly) {
1369                 ierr   = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1370                 mat_a += bs2;
1371               }
1372               ilen++;
1373             }
1374           }
1375         } else { /* allcolumns */
1376           for (l=0; l<nzB; l++) {
1377             if ((ctmp = bmap[cworkB[l]]) < cstart) {
1378               *mat_j++ = ctmp;
1379               ierr     = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1380               mat_a   += bs2;
1381               ilen++;
1382             } else break;
1383           }
1384           imark = l;
1385           for (l=0; l<nzA; l++) {
1386             *mat_j++ = cstart+cworkA[l];
1387             if (!ijonly) {
1388               ierr   = PetscMemcpy(mat_a,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1389               mat_a += bs2;
1390             }
1391             ilen++;
1392           }
1393           for (l=imark; l<nzB; l++) {
1394             *mat_j++ = bmap[cworkB[l]];
1395             if (!ijonly) {
1396               ierr   = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1397               mat_a += bs2;
1398             }
1399             ilen++;
1400           }
1401         }
1402         imat_ilen[row] = ilen;
1403       }
1404     }
1405   }
1406 
1407   /* Now assemble the off proc rows */
1408   if (!ijonly) {
1409     ierr = MPI_Waitall(nrqs,r_waits4,r_status4);CHKERRQ(ierr);
1410   }
1411   for (tmp2=0; tmp2<nrqs; tmp2++) {
1412     sbuf1_i = sbuf1[pa[tmp2]];
1413     jmax    = sbuf1_i[0];
1414     ct1     = 2*jmax + 1;
1415     ct2     = 0;
1416     rbuf2_i = rbuf2[tmp2];
1417     rbuf3_i = rbuf3[tmp2];
1418     if (!ijonly) rbuf4_i = rbuf4[tmp2];
1419     for (j=1; j<=jmax; j++) {
1420       is_no     = sbuf1_i[2*j-1];
1421       rmap_i    = rmap[is_no];
1422       if (!allcolumns[is_no]) cmap_i = cmap[is_no];
1423       subc      = (Mat_SeqBAIJ*)submats[is_no]->data;
1424       imat_ilen = subc->ilen;
1425       imat_j    = subc->j;
1426       imat_i    = subc->i;
1427       if (!ijonly) imat_a    = subc->a;
1428       max1      = sbuf1_i[2*j];
1429       for (k=0; k<max1; k++,ct1++) { /* for each received block row */
1430         row = sbuf1_i[ct1];
1431 
1432         if (allrows[is_no]) {
1433           row = sbuf1_i[ct1];
1434         } else {
1435 #if defined(PETSC_USE_CTABLE)
1436           ierr = PetscTableFind(rmap_i,row+1,&row);CHKERRQ(ierr);
1437           row--;
1438           if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
1439 #else
1440           row = rmap_i[row];
1441 #endif
1442         }
1443         ilen  = imat_ilen[row];
1444         mat_i = imat_i[row];
1445         if (!ijonly) mat_a = imat_a + mat_i*bs2;
1446         mat_j = imat_j + mat_i;
1447         max2  = rbuf2_i[ct1];
1448         if (!allcolumns[is_no]) {
1449           for (l=0; l<max2; l++,ct2++) {
1450 #if defined(PETSC_USE_CTABLE)
1451             ierr = PetscTableFind(cmap_i,rbuf3_i[ct2]+1,&tcol);CHKERRQ(ierr);
1452 #else
1453             tcol = cmap_i[rbuf3_i[ct2]];
1454 #endif
1455             if (tcol) {
1456               *mat_j++ = tcol - 1;
1457               if (!ijonly) {
1458                 ierr   = PetscMemcpy(mat_a,rbuf4_i+ct2*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1459                 mat_a += bs2;
1460               }
1461               ilen++;
1462             }
1463           }
1464         } else { /* allcolumns */
1465           for (l=0; l<max2; l++,ct2++) {
1466             *mat_j++ = rbuf3_i[ct2]; /* same global column index of C */
1467             if (!ijonly) {
1468               ierr   = PetscMemcpy(mat_a,rbuf4_i+ct2*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1469               mat_a += bs2;
1470             }
1471             ilen++;
1472           }
1473         }
1474         imat_ilen[row] = ilen;
1475       }
1476     }
1477   }
1478 
1479   if (!iscsorted) { /* sort column indices of the rows */
1480     MatScalar *work;
1481 
1482     ierr = PetscMalloc1(bs2,&work);CHKERRQ(ierr);
1483     for (i=0; i<ismax; i++) {
1484       subc      = (Mat_SeqBAIJ*)submats[i]->data;
1485       imat_ilen = subc->ilen;
1486       imat_j    = subc->j;
1487       imat_i    = subc->i;
1488       if (!ijonly) imat_a = subc->a;
1489       if (allcolumns[i]) continue;
1490 
1491       jmax = nrow[i];
1492       for (j=0; j<jmax; j++) {
1493         mat_i = imat_i[j];
1494         mat_j = imat_j + mat_i;
1495         ilen  = imat_ilen[j];
1496         if (ijonly) {
1497           ierr = PetscSortInt(ilen,mat_j);CHKERRQ(ierr);
1498         } else {
1499           mat_a = imat_a + mat_i*bs2;
1500           ierr = PetscSortIntWithDataArray(ilen,mat_j,mat_a,bs2*sizeof(MatScalar),work);CHKERRQ(ierr);
1501         }
1502       }
1503     }
1504     ierr = PetscFree(work);CHKERRQ(ierr);
1505   }
1506 
1507   if (!ijonly) {
1508     ierr = PetscFree(r_status4);CHKERRQ(ierr);
1509     ierr = PetscFree(r_waits4);CHKERRQ(ierr);
1510     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits4,s_status4);CHKERRQ(ierr);}
1511     ierr = PetscFree(s_waits4);CHKERRQ(ierr);
1512     ierr = PetscFree(s_status4);CHKERRQ(ierr);
1513   }
1514 
1515   /* Restore the indices */
1516   for (i=0; i<ismax; i++) {
1517     if (!allrows[i]) {
1518       ierr = ISRestoreIndices(isrow[i],irow+i);CHKERRQ(ierr);
1519     }
1520     if (!allcolumns[i]) {
1521       ierr = ISRestoreIndices(iscol[i],icol+i);CHKERRQ(ierr);
1522     }
1523   }
1524 
1525   for (i=0; i<ismax; i++) {
1526     ierr = MatAssemblyBegin(submats[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1527     ierr = MatAssemblyEnd(submats[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1528   }
1529 
1530   /* Destroy allocated memory */
1531   if (!ismax) {
1532     ierr = PetscFree(pa);CHKERRQ(ierr);
1533     ierr = PetscFree4(sbuf1,ptr,tmp,ctr);CHKERRQ(ierr);
1534     for (i=0; i<nrqr; ++i) {
1535       ierr = PetscFree(sbuf2[i]);CHKERRQ(ierr);
1536     }
1537     for (i=0; i<nrqs; ++i) {
1538       ierr = PetscFree(rbuf3[i]);CHKERRQ(ierr);
1539     }
1540 
1541     ierr = PetscFree3(sbuf2,req_size,req_source1);CHKERRQ(ierr);
1542     ierr = PetscFree3(req_source2,rbuf2,rbuf3);CHKERRQ(ierr);
1543   }
1544   ierr = PetscFree5(irow,icol,nrow,ncol,issorted);CHKERRQ(ierr);
1545   ierr = PetscFree6(smats,row2proc,cmap,rmap,allcolumns,allrows);CHKERRQ(ierr);
1546 
1547   if (!ijonly) {
1548     ierr = PetscFree(sbuf_aa[0]);CHKERRQ(ierr);
1549     ierr = PetscFree(sbuf_aa);CHKERRQ(ierr);
1550 
1551     for (i=0; i<nrqs; ++i) {
1552       ierr = PetscFree(rbuf4[i]);CHKERRQ(ierr);
1553     }
1554     ierr = PetscFree(rbuf4);CHKERRQ(ierr);
1555   }
1556   c->ijonly = PETSC_FALSE; /* set back to the default */
1557   PetscFunctionReturn(0);
1558 }
1559