/* xref: /petsc/src/mat/impls/baij/mpi/baijov.c (revision cbd2c53bc8d4e32f1aab714f173fd0b4c821fa72) */
1 
2 /*
3    Routines to compute overlapping regions of a parallel MPI matrix
4    and to find the submatrices that are shared across processors.
5 */
6 #include <../src/mat/impls/baij/mpi/mpibaij.h>
7 #include <petscbt.h>
8 
9 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Local(Mat,PetscInt,char**,PetscInt*,PetscInt**);
10 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Receive(Mat,PetscInt,PetscInt**,PetscInt**,PetscInt*);
11 extern PetscErrorCode MatGetRow_MPIBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**);
12 extern PetscErrorCode MatRestoreRow_MPIBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**);
13 
14 PetscErrorCode MatIncreaseOverlap_MPIBAIJ(Mat C,PetscInt imax,IS is[],PetscInt ov)
15 {
16   PetscErrorCode ierr;
17   PetscInt       i,N=C->cmap->N, bs=C->rmap->bs;
18   IS             *is_new;
19 
20   PetscFunctionBegin;
21   ierr = PetscMalloc1(imax,&is_new);CHKERRQ(ierr);
22   /* Convert the indices into block format */
23   ierr = ISCompressIndicesGeneral(N,C->rmap->n,bs,imax,is,is_new);CHKERRQ(ierr);
24   if (ov < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap specified");
25   for (i=0; i<ov; ++i) {
26     ierr = MatIncreaseOverlap_MPIBAIJ_Once(C,imax,is_new);CHKERRQ(ierr);
27   }
28   for (i=0; i<imax; i++) {ierr = ISDestroy(&is[i]);CHKERRQ(ierr);}
29   ierr = ISExpandIndicesGeneral(N,N,bs,imax,is_new,is);CHKERRQ(ierr);
30   for (i=0; i<imax; i++) {ierr = ISDestroy(&is_new[i]);CHKERRQ(ierr);}
31   ierr = PetscFree(is_new);CHKERRQ(ierr);
32   PetscFunctionReturn(0);
33 }
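/*
   A minimal usage sketch (illustrative only; the declarations and index-set setup below are
   assumptions, not taken from this file): user code reaches the routine above through the
   public MatIncreaseOverlap() interface, typically before extracting overlapping subdomain
   matrices:

      Mat A;          -- an assembled MATMPIBAIJ matrix
      IS  is[1];      -- index set(s) given in point (unblocked) indices
      Mat *submats;
      ierr = MatIncreaseOverlap(A,1,is,2);CHKERRQ(ierr);   -- grow each is[] by 2 levels of overlap
      ierr = MatCreateSubMatrices(A,1,is,is,MAT_INITIAL_MATRIX,&submats);CHKERRQ(ierr);
*/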
34 
35 /*
36   Sample message format:
37   If a processor A wants processor B to process some elements corresponding
38   to index sets is[1], is[5]
39   mesg [0] = 2   (no of index sets in the mesg)
40   -----------
41   mesg [1] = 1 => is[1]
42   mesg [2] = sizeof(is[1]);
43   -----------
44   mesg [3] = 5  => is[5]
45   mesg [4] = sizeof(is[5]);
46   -----------
47   mesg [5]
48   mesg [n]  data(is[1])
49   -----------
50   mesg[n+1]
51   mesg[m]  data(is[5])
52   -----------
53 
54   Notes:
55   nrqs - number of requests sent (or to be sent out)
56   nrqr - number of requests received (which have been, or still have to be, processed)
57 */
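/*
  A concrete instance of the format above (hypothetical numbers, for illustration only): if
  3 entries {10,20,30} of is[1] and 2 entries {7,9} of is[5] are owned by processor B, the
  message sent to B is

     { 2,  1, 3,  5, 2,  10, 20, 30,  7, 9 }

  i.e. a header of length 1 + 2*(no of index sets) -- the count followed by one
  (index-set number, number of entries) pair per index set -- and then the entry data for
  each index set in order.
*/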
58 PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Once(Mat C,PetscInt imax,IS is[])
59 {
60   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
61   const PetscInt **idx,*idx_i;
62   PetscInt       *n,*w3,*w4,**data,len;
63   PetscErrorCode ierr;
64   PetscMPIInt    size,rank,tag1,tag2,*w2,*w1,nrqr;
65   PetscInt       Mbs,i,j,k,**rbuf,row,nrqs,msz,**outdat,**ptr;
66   PetscInt       *ctr,*pa,*tmp,*isz,*isz1,**xdata,**rbuf2,*d_p;
67   PetscMPIInt    *onodes1,*olengths1,*onodes2,*olengths2,proc=-1;
68   PetscBT        *table;
69   MPI_Comm       comm,*iscomms;
70   MPI_Request    *s_waits1,*r_waits1,*s_waits2,*r_waits2;
71   MPI_Status     *s_status,*recv_status;
72   char           *t_p;
73 
74   PetscFunctionBegin;
75   ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
76   size = c->size;
77   rank = c->rank;
78   Mbs  = c->Mbs;
79 
80   ierr = PetscObjectGetNewTag((PetscObject)C,&tag1);CHKERRQ(ierr);
81   ierr = PetscObjectGetNewTag((PetscObject)C,&tag2);CHKERRQ(ierr);
82 
83   ierr = PetscMalloc2(imax+1,(PetscInt***)&idx,imax,&n);CHKERRQ(ierr);
84 
85   for (i=0; i<imax; i++) {
86     ierr = ISGetIndices(is[i],&idx[i]);CHKERRQ(ierr);
87     ierr = ISGetLocalSize(is[i],&n[i]);CHKERRQ(ierr);
88   }
89 
90   /* evaluate communication - mesg to who, length of mesg, and buffer space
91      required. Based on this, buffers are allocated, and data copied into them*/
92   ierr = PetscCalloc4(size,&w1,size,&w2,size,&w3,size,&w4);CHKERRQ(ierr);
93   for (i=0; i<imax; i++) {
94     ierr  = PetscArrayzero(w4,size);CHKERRQ(ierr); /* initialise work vector*/
95     idx_i = idx[i];
96     len   = n[i];
97     for (j=0; j<len; j++) {
98       row = idx_i[j];
99       if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index set cannot have negative entries");
100       ierr = PetscLayoutFindOwner(C->rmap,row*C->rmap->bs,&proc);CHKERRQ(ierr);
101       w4[proc]++;
102     }
103     for (j=0; j<size; j++) {
104       if (w4[j]) { w1[j] += w4[j]; w3[j]++;}
105     }
106   }
107 
108   nrqs     = 0;              /* no of outgoing messages */
109   msz      = 0;              /* total mesg length (for all procs) */
110   w1[rank] = 0;              /* no mesg sent to itself */
111   w3[rank] = 0;
112   for (i=0; i<size; i++) {
113     if (w1[i])  {w2[i] = 1; nrqs++;} /* there exists a message to proc i */
114   }
115   /* pa - is list of processors to communicate with */
116   ierr = PetscMalloc1(nrqs+1,&pa);CHKERRQ(ierr);
117   for (i=0,j=0; i<size; i++) {
118     if (w1[i]) {pa[j] = i; j++;}
119   }
120 
121   /* Each message would have a header = 1 + 2*(no of IS) + data */
122   for (i=0; i<nrqs; i++) {
123     j      = pa[i];
124     w1[j] += w2[j] + 2*w3[j];
125     msz   += w1[j];
126   }
127 
128   /* Determine the number of messages to expect, their lengths, and the from-ids */
129   ierr = PetscGatherNumberOfMessages(comm,w2,w1,&nrqr);CHKERRQ(ierr);
130   ierr = PetscGatherMessageLengths(comm,nrqs,nrqr,w1,&onodes1,&olengths1);CHKERRQ(ierr);
131 
132   /* Now post the Irecvs corresponding to these messages */
133   ierr = PetscPostIrecvInt(comm,tag1,nrqr,onodes1,olengths1,&rbuf,&r_waits1);CHKERRQ(ierr);
134 
135   /* Allocate Memory for outgoing messages */
136   ierr = PetscMalloc4(size,&outdat,size,&ptr,msz,&tmp,size,&ctr);CHKERRQ(ierr);
137   ierr = PetscArrayzero(outdat,size);CHKERRQ(ierr);
138   ierr = PetscArrayzero(ptr,size);CHKERRQ(ierr);
139   {
140     PetscInt *iptr = tmp,ict  = 0;
141     for (i=0; i<nrqs; i++) {
142       j         = pa[i];
143       iptr     +=  ict;
144       outdat[j] = iptr;
145       ict       = w1[j];
146     }
147   }
148 
149   /* Form the outgoing messages */
150   /*plug in the headers*/
151   for (i=0; i<nrqs; i++) {
152     j            = pa[i];
153     outdat[j][0] = 0;
154     ierr         = PetscArrayzero(outdat[j]+1,2*w3[j]);CHKERRQ(ierr);
155     ptr[j]       = outdat[j] + 2*w3[j] + 1;
156   }
157 
158   /* Memory for doing local proc's work*/
159   {
160     ierr = PetscCalloc5(imax,&table, imax,&data, imax,&isz, Mbs*imax,&d_p, (Mbs/PETSC_BITS_PER_BYTE+1)*imax,&t_p);CHKERRQ(ierr);
161 
162     for (i=0; i<imax; i++) {
163       table[i] = t_p + (Mbs/PETSC_BITS_PER_BYTE+1)*i;
164       data[i]  = d_p + (Mbs)*i;
165     }
166   }
167 
168   /* Parse the IS and update local tables and the outgoing buf with the data*/
169   {
170     PetscInt n_i,*data_i,isz_i,*outdat_j,ctr_j;
171     PetscBT  table_i;
172 
173     for (i=0; i<imax; i++) {
174       ierr    = PetscArrayzero(ctr,size);CHKERRQ(ierr);
175       n_i     = n[i];
176       table_i = table[i];
177       idx_i   = idx[i];
178       data_i  = data[i];
179       isz_i   = isz[i];
180       for (j=0; j<n_i; j++) {   /* parse the indices of each IS */
181         row  = idx_i[j];
182         ierr = PetscLayoutFindOwner(C->rmap,row*C->rmap->bs,&proc);CHKERRQ(ierr);
183         if (proc != rank) { /* copy to the outgoing buffer */
184           ctr[proc]++;
185           *ptr[proc] = row;
186           ptr[proc]++;
187         } else { /* Update the local table */
188           if (!PetscBTLookupSet(table_i,row)) data_i[isz_i++] = row;
189         }
190       }
191       /* Update the headers for the current IS */
192       for (j=0; j<size; j++) { /* Can Optimise this loop by using pa[] */
193         if ((ctr_j = ctr[j])) {
194           outdat_j        = outdat[j];
195           k               = ++outdat_j[0];
196           outdat_j[2*k]   = ctr_j;
197           outdat_j[2*k-1] = i;
198         }
199       }
200       isz[i] = isz_i;
201     }
202   }
203 
204   /*  Now  post the sends */
205   ierr = PetscMalloc1(nrqs+1,&s_waits1);CHKERRQ(ierr);
206   for (i=0; i<nrqs; ++i) {
207     j    = pa[i];
208     ierr = MPI_Isend(outdat[j],w1[j],MPIU_INT,j,tag1,comm,s_waits1+i);CHKERRMPI(ierr);
209   }
210 
211   /* No longer need the original indices*/
212   for (i=0; i<imax; ++i) {
213     ierr = ISRestoreIndices(is[i],idx+i);CHKERRQ(ierr);
214   }
215   ierr = PetscFree2(*(PetscInt***)&idx,n);CHKERRQ(ierr);
216 
217   ierr = PetscMalloc1(imax,&iscomms);CHKERRQ(ierr);
218   for (i=0; i<imax; ++i) {
219     ierr = PetscCommDuplicate(PetscObjectComm((PetscObject)is[i]),&iscomms[i],NULL);CHKERRQ(ierr);
220     ierr = ISDestroy(&is[i]);CHKERRQ(ierr);
221   }
222 
223   /* Do Local work*/
224   ierr = MatIncreaseOverlap_MPIBAIJ_Local(C,imax,table,isz,data);CHKERRQ(ierr);
225 
226   /* Receive messages*/
227   ierr = PetscMalloc1(nrqr+1,&recv_status);CHKERRQ(ierr);
228   if (nrqr) {ierr = MPI_Waitall(nrqr,r_waits1,recv_status);CHKERRMPI(ierr);}
229 
230   ierr = PetscMalloc1(nrqs+1,&s_status);CHKERRQ(ierr);
231   if (nrqs) {ierr = MPI_Waitall(nrqs,s_waits1,s_status);CHKERRMPI(ierr);}
232 
233   /* Phase 1 sends are complete - deallocate buffers */
234   ierr = PetscFree4(outdat,ptr,tmp,ctr);CHKERRQ(ierr);
235   ierr = PetscFree4(w1,w2,w3,w4);CHKERRQ(ierr);
236 
237   ierr = PetscMalloc1(nrqr+1,&xdata);CHKERRQ(ierr);
238   ierr = PetscMalloc1(nrqr+1,&isz1);CHKERRQ(ierr);
239   ierr = MatIncreaseOverlap_MPIBAIJ_Receive(C,nrqr,rbuf,xdata,isz1);CHKERRQ(ierr);
240   ierr = PetscFree(rbuf[0]);CHKERRQ(ierr);
241   ierr = PetscFree(rbuf);CHKERRQ(ierr);
242 
243   /* Send the data back*/
244   /* Do a global reduction to know the buffer space req for incoming messages*/
245   {
246     PetscMPIInt *rw1;
247 
248     ierr = PetscCalloc1(size,&rw1);CHKERRQ(ierr);
249 
250     for (i=0; i<nrqr; ++i) {
251       proc = recv_status[i].MPI_SOURCE;
252       if (proc != onodes1[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MPI_SOURCE mismatch");
253       rw1[proc] = isz1[i];
254     }
255 
256     ierr = PetscFree(onodes1);CHKERRQ(ierr);
257     ierr = PetscFree(olengths1);CHKERRQ(ierr);
258 
259     /* Determine the number of messages to expect, their lengths, and the from-ids */
260     ierr = PetscGatherMessageLengths(comm,nrqr,nrqs,rw1,&onodes2,&olengths2);CHKERRQ(ierr);
261     ierr = PetscFree(rw1);CHKERRQ(ierr);
262   }
263   /* Now post the Irecvs corresponding to these messages */
264   ierr = PetscPostIrecvInt(comm,tag2,nrqs,onodes2,olengths2,&rbuf2,&r_waits2);CHKERRQ(ierr);
265 
266   /*  Now  post the sends */
267   ierr = PetscMalloc1(nrqr+1,&s_waits2);CHKERRQ(ierr);
268   for (i=0; i<nrqr; ++i) {
269     j    = recv_status[i].MPI_SOURCE;
270     ierr = MPI_Isend(xdata[i],isz1[i],MPIU_INT,j,tag2,comm,s_waits2+i);CHKERRMPI(ierr);
271   }
272 
273   /* receive work done on other processors*/
274   {
275     PetscMPIInt idex;
276     PetscInt    is_no,ct1,max,*rbuf2_i,isz_i,*data_i,jmax;
277     PetscBT     table_i;
278     MPI_Status  *status2;
279 
280     ierr = PetscMalloc1(PetscMax(nrqr,nrqs)+1,&status2);CHKERRQ(ierr);
281     for (i=0; i<nrqs; ++i) {
282       ierr = MPI_Waitany(nrqs,r_waits2,&idex,status2+i);CHKERRMPI(ierr);
283       /* Process the message*/
284       rbuf2_i = rbuf2[idex];
285       ct1     = 2*rbuf2_i[0]+1;
286       jmax    = rbuf2[idex][0];
287       for (j=1; j<=jmax; j++) {
288         max     = rbuf2_i[2*j];
289         is_no   = rbuf2_i[2*j-1];
290         isz_i   = isz[is_no];
291         data_i  = data[is_no];
292         table_i = table[is_no];
293         for (k=0; k<max; k++,ct1++) {
294           row = rbuf2_i[ct1];
295           if (!PetscBTLookupSet(table_i,row)) data_i[isz_i++] = row;
296         }
297         isz[is_no] = isz_i;
298       }
299     }
300     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits2,status2);CHKERRMPI(ierr);}
301     ierr = PetscFree(status2);CHKERRQ(ierr);
302   }
303 
304   for (i=0; i<imax; ++i) {
305     ierr = ISCreateGeneral(iscomms[i],isz[i],data[i],PETSC_COPY_VALUES,is+i);CHKERRQ(ierr);
306     ierr = PetscCommDestroy(&iscomms[i]);CHKERRQ(ierr);
307   }
308 
309   ierr = PetscFree(iscomms);CHKERRQ(ierr);
310   ierr = PetscFree(onodes2);CHKERRQ(ierr);
311   ierr = PetscFree(olengths2);CHKERRQ(ierr);
312 
313   ierr = PetscFree(pa);CHKERRQ(ierr);
314   ierr = PetscFree(rbuf2[0]);CHKERRQ(ierr);
315   ierr = PetscFree(rbuf2);CHKERRQ(ierr);
316   ierr = PetscFree(s_waits1);CHKERRQ(ierr);
317   ierr = PetscFree(r_waits1);CHKERRQ(ierr);
318   ierr = PetscFree(s_waits2);CHKERRQ(ierr);
319   ierr = PetscFree(r_waits2);CHKERRQ(ierr);
320   ierr = PetscFree5(table,data,isz,d_p,t_p);CHKERRQ(ierr);
321   ierr = PetscFree(s_status);CHKERRQ(ierr);
322   ierr = PetscFree(recv_status);CHKERRQ(ierr);
323   ierr = PetscFree(xdata[0]);CHKERRQ(ierr);
324   ierr = PetscFree(xdata);CHKERRQ(ierr);
325   ierr = PetscFree(isz1);CHKERRQ(ierr);
326   PetscFunctionReturn(0);
327 }
328 
329 /*
330    MatIncreaseOverlap_MPIBAIJ_Local - Called by MatIncreaseOverlap_MPIBAIJ_Once, to do
331        the work on the local processor.
332 
333      Inputs:
334       C      - the MATMPIBAIJ matrix;
335       imax   - total number of index sets processed at a time;
336       table  - an array of imax bit tables (PetscBT), each Mbs bits long.
337 
338      Output:
339       isz    - array containing the count of the solution elements corresponding
340                to each index set;
341       data   - pointer to the solutions
342 */
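/*
  Small illustration (hypothetical data, all indices in block terms): suppose isz[0] = 1 and
  data[0] = {3} on entry, block row 3 of the diagonal part A has global block columns {3,4}
  (after adding cstart), and the off-diagonal part B maps through garray to global block
  column {17}.  Then on exit isz[0] = 3 and data[0] = {3,4,17}; table[0] guarantees each
  column is inserted only once.
*/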
343 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Local(Mat C,PetscInt imax,PetscBT *table,PetscInt *isz,PetscInt **data)
344 {
345   Mat_MPIBAIJ *c = (Mat_MPIBAIJ*)C->data;
346   Mat         A  = c->A,B = c->B;
347   Mat_SeqBAIJ *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)B->data;
348   PetscInt    start,end,val,max,rstart,cstart,*ai,*aj;
349   PetscInt    *bi,*bj,*garray,i,j,k,row,*data_i,isz_i;
350   PetscBT     table_i;
351 
352   PetscFunctionBegin;
353   rstart = c->rstartbs;
354   cstart = c->cstartbs;
355   ai     = a->i;
356   aj     = a->j;
357   bi     = b->i;
358   bj     = b->j;
359   garray = c->garray;
360 
361   for (i=0; i<imax; i++) {
362     data_i  = data[i];
363     table_i = table[i];
364     isz_i   = isz[i];
365     for (j=0,max=isz[i]; j<max; j++) {
366       row   = data_i[j] - rstart;
367       start = ai[row];
368       end   = ai[row+1];
369       for (k=start; k<end; k++) { /* Amat */
370         val = aj[k] + cstart;
371         if (!PetscBTLookupSet(table_i,val)) data_i[isz_i++] = val;
372       }
373       start = bi[row];
374       end   = bi[row+1];
375       for (k=start; k<end; k++) { /* Bmat */
376         val = garray[bj[k]];
377         if (!PetscBTLookupSet(table_i,val)) data_i[isz_i++] = val;
378       }
379     }
380     isz[i] = isz_i;
381   }
382   PetscFunctionReturn(0);
383 }
384 /*
385       MatIncreaseOverlap_MPIBAIJ_Receive - Process the received messages,
386          and return the output
387 
388          Input:
389            C    - the matrix
390            nrqr - no of messages being processed.
391            rbuf - an array of pointers to the received requests
392 
393          Output:
394            xdata - array of messages to be sent back
395            isz1  - size of each message
396 
397   For better efficiency perhaps we should malloc each xdata[i] separately;
398 then, if a reallocation is required, we would only need to copy the data for that one
399 message rather than for all previous messages, as happens now because a single large
400 chunk of memory is used.
401 
402 */
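/*
  Sizing sketch for the shared send buffer (hypothetical numbers): the initial guess max1 is
  the number of requested block rows times the average number of nonzero blocks per local
  block row; e.g. ct = 100, a->nz + b->nz = 5000 and Mbs = 1000 give max1 = 100*5000/1000 = 500,
  and mem_estimate = 3*(PetscMax(total_sz,max1)+1).  All xdata[i] point into this single
  allocation; whenever it overflows it is regrown by roughly 1.5x and the xdata[1..i] pointers
  are recomputed from the per-message sizes isz1[].
*/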
403 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Receive(Mat C,PetscInt nrqr,PetscInt **rbuf,PetscInt **xdata,PetscInt * isz1)
404 {
405   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
406   Mat            A  = c->A,B = c->B;
407   Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)B->data;
408   PetscErrorCode ierr;
409   PetscInt       rstart,cstart,*ai,*aj,*bi,*bj,*garray,i,j,k;
410   PetscInt       row,total_sz,ct,ct1,ct2,ct3,mem_estimate,oct2,l,start,end;
411   PetscInt       val,max1,max2,Mbs,no_malloc =0,*tmp,new_estimate,ctr;
412   PetscInt       *rbuf_i,kmax,rbuf_0;
413   PetscBT        xtable;
414 
415   PetscFunctionBegin;
416   Mbs    = c->Mbs;
417   rstart = c->rstartbs;
418   cstart = c->cstartbs;
419   ai     = a->i;
420   aj     = a->j;
421   bi     = b->i;
422   bj     = b->j;
423   garray = c->garray;
424 
425   for (i=0,ct=0,total_sz=0; i<nrqr; ++i) {
426     rbuf_i =  rbuf[i];
427     rbuf_0 =  rbuf_i[0];
428     ct    += rbuf_0;
429     for (j=1; j<=rbuf_0; j++) total_sz += rbuf_i[2*j];
430   }
431 
432   if (c->Mbs) max1 = ct*(a->nz +b->nz)/c->Mbs;
433   else        max1 = 1;
434   mem_estimate = 3*((total_sz > max1 ? total_sz : max1)+1);
435   ierr         = PetscMalloc1(mem_estimate,&xdata[0]);CHKERRQ(ierr);
436   ++no_malloc;
437   ierr = PetscBTCreate(Mbs,&xtable);CHKERRQ(ierr);
438   ierr = PetscArrayzero(isz1,nrqr);CHKERRQ(ierr);
439 
440   ct3 = 0;
441   for (i=0; i<nrqr; i++) { /* for each mesg from proc i */
442     rbuf_i =  rbuf[i];
443     rbuf_0 =  rbuf_i[0];
444     ct1    =  2*rbuf_0+1;
445     ct2    =  ct1;
446     ct3   += ct1;
447     for (j=1; j<=rbuf_0; j++) { /* for each IS from proc i*/
448       ierr = PetscBTMemzero(Mbs,xtable);CHKERRQ(ierr);
449       oct2 = ct2;
450       kmax = rbuf_i[2*j];
451       for (k=0; k<kmax; k++,ct1++) {
452         row = rbuf_i[ct1];
453         if (!PetscBTLookupSet(xtable,row)) {
454           if (!(ct3 < mem_estimate)) {
455             new_estimate = (PetscInt)(1.5*mem_estimate)+1;
456             ierr         = PetscMalloc1(new_estimate,&tmp);CHKERRQ(ierr);
457             ierr         = PetscArraycpy(tmp,xdata[0],mem_estimate);CHKERRQ(ierr);
458             ierr         = PetscFree(xdata[0]);CHKERRQ(ierr);
459             xdata[0]     = tmp;
460             mem_estimate = new_estimate; ++no_malloc;
461             for (ctr=1; ctr<=i; ctr++) xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];
462           }
463           xdata[i][ct2++] = row;
464           ct3++;
465         }
466       }
467       for (k=oct2,max2=ct2; k<max2; k++)  {
468         row   = xdata[i][k] - rstart;
469         start = ai[row];
470         end   = ai[row+1];
471         for (l=start; l<end; l++) {
472           val = aj[l] + cstart;
473           if (!PetscBTLookupSet(xtable,val)) {
474             if (!(ct3 < mem_estimate)) {
475               new_estimate = (PetscInt)(1.5*mem_estimate)+1;
476               ierr         = PetscMalloc1(new_estimate,&tmp);CHKERRQ(ierr);
477               ierr         = PetscArraycpy(tmp,xdata[0],mem_estimate);CHKERRQ(ierr);
478               ierr         = PetscFree(xdata[0]);CHKERRQ(ierr);
479               xdata[0]     = tmp;
480               mem_estimate = new_estimate; ++no_malloc;
481               for (ctr=1; ctr<=i; ctr++) xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];
482             }
483             xdata[i][ct2++] = val;
484             ct3++;
485           }
486         }
487         start = bi[row];
488         end   = bi[row+1];
489         for (l=start; l<end; l++) {
490           val = garray[bj[l]];
491           if (!PetscBTLookupSet(xtable,val)) {
492             if (!(ct3 < mem_estimate)) {
493               new_estimate = (PetscInt)(1.5*mem_estimate)+1;
494               ierr         = PetscMalloc1(new_estimate,&tmp);CHKERRQ(ierr);
495               ierr         = PetscArraycpy(tmp,xdata[0],mem_estimate);CHKERRQ(ierr);
496               ierr         = PetscFree(xdata[0]);CHKERRQ(ierr);
497               xdata[0]     = tmp;
498               mem_estimate = new_estimate; ++no_malloc;
499               for (ctr =1; ctr <=i; ctr++) xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];
500             }
501             xdata[i][ct2++] = val;
502             ct3++;
503           }
504         }
505       }
506       /* Update the header*/
507       xdata[i][2*j]   = ct2 - oct2; /* Undo the vector isz1 and use only a var*/
508       xdata[i][2*j-1] = rbuf_i[2*j-1];
509     }
510     xdata[i][0] = rbuf_0;
511     xdata[i+1]  = xdata[i] + ct2;
512     isz1[i]     = ct2; /* size of each message */
513   }
514   ierr = PetscBTDestroy(&xtable);CHKERRQ(ierr);
515   ierr = PetscInfo3(C,"Allocated %D PetscInt entries, required %D, number of mallocs = %D\n",mem_estimate,ct3,no_malloc);CHKERRQ(ierr);
516   PetscFunctionReturn(0);
517 }
518 
519 PetscErrorCode MatCreateSubMatrices_MPIBAIJ(Mat C,PetscInt ismax,const IS isrow[],const IS iscol[],MatReuse scall,Mat *submat[])
520 {
521   IS             *isrow_block,*iscol_block;
522   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
523   PetscErrorCode ierr;
524   PetscInt       nmax,nstages_local,nstages,i,pos,max_no,N=C->cmap->N,bs=C->rmap->bs;
525   Mat_SeqBAIJ    *subc;
526   Mat_SubSppt    *smat;
527 
528   PetscFunctionBegin;
529   /* The compression and expansion should be avoided: it does not flag
530      errors and it may change the indices, hence it is potentially buggy */
531   ierr = PetscMalloc2(ismax+1,&isrow_block,ismax+1,&iscol_block);CHKERRQ(ierr);
532   ierr = ISCompressIndicesGeneral(N,C->rmap->n,bs,ismax,isrow,isrow_block);CHKERRQ(ierr);
533   ierr = ISCompressIndicesGeneral(N,C->cmap->n,bs,ismax,iscol,iscol_block);CHKERRQ(ierr);
534 
535   /* Determine the number of stages in which the submatrices are extracted */
536   if (!C->cmap->N) nmax=20*1000000/sizeof(PetscInt);
537   else nmax = 20*1000000 / (c->Nbs * sizeof(PetscInt));
538   if (!nmax) nmax = 1;
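  /* Illustrative sizing (hypothetical numbers): the 20*1000000 figure budgets roughly 20 MB of
     index workspace per stage, so with c->Nbs = 100000 block columns and an 8-byte PetscInt,
     nmax = 20000000/(100000*8) = 25, i.e. at most 25 of the requested submatrices are extracted
     in each stage. */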
539 
540   if (scall == MAT_INITIAL_MATRIX) {
541     nstages_local = ismax/nmax + ((ismax % nmax) ? 1 : 0); /* local nstages */
542 
543     /* Make sure every processor loops through the nstages */
544     ierr = MPIU_Allreduce(&nstages_local,&nstages,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)C));CHKERRMPI(ierr);
545 
546     /* Allocate memory to hold all the submatrices and dummy submatrices */
547     ierr = PetscCalloc1(ismax+nstages,submat);CHKERRQ(ierr);
548   } else { /* MAT_REUSE_MATRIX */
549     if (ismax) {
550       subc = (Mat_SeqBAIJ*)((*submat)[0]->data);
551       smat   = subc->submatis1;
552     } else { /* (*submat)[0] is a dummy matrix */
553       smat = (Mat_SubSppt*)(*submat)[0]->data;
554     }
555     if (!smat) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"MatCreateSubMatrices(...,MAT_REUSE_MATRIX,...) requires submat");
556     nstages = smat->nstages;
557   }
558 
559   for (i=0,pos=0; i<nstages; i++) {
560     if (pos+nmax <= ismax) max_no = nmax;
561     else if (pos == ismax) max_no = 0;
562     else                   max_no = ismax-pos;
563 
564     ierr = MatCreateSubMatrices_MPIBAIJ_local(C,max_no,isrow_block+pos,iscol_block+pos,scall,*submat+pos);CHKERRQ(ierr);
565     if (!max_no && scall == MAT_INITIAL_MATRIX) { /* submat[pos] is a dummy matrix */
566       smat = (Mat_SubSppt*)(*submat)[pos]->data;
567       smat->nstages = nstages;
568     }
569     pos += max_no;
570   }
571 
572   if (scall == MAT_INITIAL_MATRIX && ismax) {
573     /* save nstages for reuse */
574     subc = (Mat_SeqBAIJ*)((*submat)[0]->data);
575     smat = subc->submatis1;
576     smat->nstages = nstages;
577   }
578 
579   for (i=0; i<ismax; i++) {
580     ierr = ISDestroy(&isrow_block[i]);CHKERRQ(ierr);
581     ierr = ISDestroy(&iscol_block[i]);CHKERRQ(ierr);
582   }
583   ierr = PetscFree2(isrow_block,iscol_block);CHKERRQ(ierr);
584   PetscFunctionReturn(0);
585 }
586 
587 #if defined(PETSC_USE_CTABLE)
588 PetscErrorCode PetscGetProc(const PetscInt row, const PetscMPIInt size, const PetscInt proc_gnode[], PetscMPIInt *rank)
589 {
590   PetscInt       nGlobalNd = proc_gnode[size];
591   PetscMPIInt    fproc;
592   PetscErrorCode ierr;
593 
594   PetscFunctionBegin;
595   ierr = PetscMPIIntCast((PetscInt)(((float)row * (float)size / (float)nGlobalNd + 0.5)),&fproc);CHKERRQ(ierr);
596   if (fproc > size) fproc = size;
597   while (row < proc_gnode[fproc] || row >= proc_gnode[fproc+1]) {
598     if (row < proc_gnode[fproc]) fproc--;
599     else                         fproc++;
600   }
601   *rank = fproc;
602   PetscFunctionReturn(0);
603 }
604 #endif
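/*
  Worked example for PetscGetProc() (hypothetical ownership layout): with size = 4 and
  proc_gnode[] = {0,4,8,12,16}, looking up row = 9 first guesses
  fproc = (PetscInt)(9*4/16.0 + 0.5) = 2; since proc_gnode[2] = 8 <= 9 < proc_gnode[3] = 12,
  the guess already brackets the row and *rank = 2 without entering the correction loop.
*/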
605 
606 /* -------------------------------------------------------------------------*/
607 /* This code is used for BAIJ and SBAIJ matrices (unfortunate dependency) */
608 PetscErrorCode MatCreateSubMatrices_MPIBAIJ_local(Mat C,PetscInt ismax,const IS isrow[],const IS iscol[],MatReuse scall,Mat *submats)
609 {
610   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
611   Mat            A  = c->A;
612   Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)c->B->data,*subc;
613   const PetscInt **icol,**irow;
614   PetscInt       *nrow,*ncol,start;
615   PetscErrorCode ierr;
616   PetscMPIInt    rank,size,tag0,tag2,tag3,tag4,*w1,*w2,*w3,*w4,nrqr;
617   PetscInt       **sbuf1,**sbuf2,*sbuf2_i,i,j,k,l,ct1,ct2,**rbuf1,row,proc=-1;
618   PetscInt       nrqs=0,msz,**ptr=NULL,*req_size=NULL,*ctr=NULL,*pa,*tmp=NULL,tcol;
619   PetscInt       **rbuf3=NULL,*req_source1=NULL,*req_source2,**sbuf_aj,**rbuf2=NULL,max1,max2;
620   PetscInt       **lens,is_no,ncols,*cols,mat_i,*mat_j,tmp2,jmax;
621 #if defined(PETSC_USE_CTABLE)
622   PetscTable     *cmap,cmap_i=NULL,*rmap,rmap_i;
623 #else
624   PetscInt       **cmap,*cmap_i=NULL,**rmap,*rmap_i;
625 #endif
626   const PetscInt *irow_i,*icol_i;
627   PetscInt       ctr_j,*sbuf1_j,*sbuf_aj_i,*rbuf1_i,kmax,*lens_i;
628   MPI_Request    *s_waits1,*r_waits1,*s_waits2,*r_waits2,*r_waits3;
629   MPI_Request    *r_waits4,*s_waits3,*s_waits4;
630   MPI_Status     *r_status1,*r_status2,*s_status1,*s_status3,*s_status2;
631   MPI_Status     *r_status3,*r_status4,*s_status4;
632   MPI_Comm       comm;
633   PetscScalar    **rbuf4,*rbuf4_i=NULL,**sbuf_aa,*vals,*mat_a=NULL,*imat_a=NULL,*sbuf_aa_i;
634   PetscMPIInt    *onodes1,*olengths1,end;
635   PetscInt       **row2proc,*row2proc_i,*imat_ilen,*imat_j,*imat_i;
636   Mat_SubSppt    *smat_i;
637   PetscBool      *issorted,colflag,iscsorted=PETSC_TRUE;
638   PetscInt       *sbuf1_i,*rbuf2_i,*rbuf3_i,ilen;
639   PetscInt       bs=C->rmap->bs,bs2=c->bs2,rstart = c->rstartbs;
640   PetscBool      ijonly=c->ijonly; /* private flag indicates only matrix data structures are requested */
641   PetscInt       nzA,nzB,*a_i=a->i,*b_i=b->i,*a_j = a->j,*b_j = b->j,ctmp,imark,*cworkA,*cworkB;
642   PetscScalar    *vworkA=NULL,*vworkB=NULL,*a_a = a->a,*b_a = b->a;
643   PetscInt       cstart = c->cstartbs,*bmap = c->garray;
644   PetscBool      *allrows,*allcolumns;
645 
646   PetscFunctionBegin;
647   ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
648   size = c->size;
649   rank = c->rank;
650 
651   ierr = PetscMalloc5(ismax,&row2proc,ismax,&cmap,ismax,&rmap,ismax+1,&allcolumns,ismax,&allrows);CHKERRQ(ierr);
652   ierr = PetscMalloc5(ismax,(PetscInt***)&irow,ismax,(PetscInt***)&icol,ismax,&nrow,ismax,&ncol,ismax,&issorted);CHKERRQ(ierr);
653 
654   for (i=0; i<ismax; i++) {
655     ierr = ISSorted(iscol[i],&issorted[i]);CHKERRQ(ierr);
656     if (!issorted[i]) iscsorted = issorted[i]; /* columns are not sorted! */
657     ierr = ISSorted(isrow[i],&issorted[i]);CHKERRQ(ierr);
658 
659     /* Check for special case: allcolumns */
660     ierr = ISIdentity(iscol[i],&colflag);CHKERRQ(ierr);
661     ierr = ISGetLocalSize(iscol[i],&ncol[i]);CHKERRQ(ierr);
662 
663     if (colflag && ncol[i] == c->Nbs) {
664       allcolumns[i] = PETSC_TRUE;
665       icol[i]       = NULL;
666     } else {
667       allcolumns[i] = PETSC_FALSE;
668       ierr = ISGetIndices(iscol[i],&icol[i]);CHKERRQ(ierr);
669     }
670 
671     /* Check for special case: allrows */
672     ierr = ISIdentity(isrow[i],&colflag);CHKERRQ(ierr);
673     ierr = ISGetLocalSize(isrow[i],&nrow[i]);CHKERRQ(ierr);
674     if (colflag && nrow[i] == c->Mbs) {
675       allrows[i] = PETSC_TRUE;
676       irow[i]    = NULL;
677     } else {
678       allrows[i] = PETSC_FALSE;
679       ierr = ISGetIndices(isrow[i],&irow[i]);CHKERRQ(ierr);
680     }
681   }
682 
683   if (scall == MAT_REUSE_MATRIX) {
684     /* Assumes new rows are same length as the old rows */
685     for (i=0; i<ismax; i++) {
686       subc = (Mat_SeqBAIJ*)(submats[i]->data);
687       if (subc->mbs != nrow[i] || subc->nbs != ncol[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix, wrong size");
688 
689       /* Initial matrix as if empty */
690       /* Initialize the matrix as if empty */
691 
692       /* Inherit the factor type of the original matrix */
693       submats[i]->factortype = C->factortype;
694 
695       smat_i   = subc->submatis1;
696 
697       nrqs        = smat_i->nrqs;
698       nrqr        = smat_i->nrqr;
699       rbuf1       = smat_i->rbuf1;
700       rbuf2       = smat_i->rbuf2;
701       rbuf3       = smat_i->rbuf3;
702       req_source2 = smat_i->req_source2;
703 
704       sbuf1     = smat_i->sbuf1;
705       sbuf2     = smat_i->sbuf2;
706       ptr       = smat_i->ptr;
707       tmp       = smat_i->tmp;
708       ctr       = smat_i->ctr;
709 
710       pa          = smat_i->pa;
711       req_size    = smat_i->req_size;
712       req_source1 = smat_i->req_source1;
713 
714       allcolumns[i] = smat_i->allcolumns;
715       allrows[i]    = smat_i->allrows;
716       row2proc[i]   = smat_i->row2proc;
717       rmap[i]       = smat_i->rmap;
718       cmap[i]       = smat_i->cmap;
719     }
720 
721     if (!ismax){ /* Get dummy submatrices and retrieve struct submatis1 */
722       if (!submats[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"submats are null, cannot reuse");
723       smat_i = (Mat_SubSppt*)submats[0]->data;
724 
725       nrqs        = smat_i->nrqs;
726       nrqr        = smat_i->nrqr;
727       rbuf1       = smat_i->rbuf1;
728       rbuf2       = smat_i->rbuf2;
729       rbuf3       = smat_i->rbuf3;
730       req_source2 = smat_i->req_source2;
731 
732       sbuf1       = smat_i->sbuf1;
733       sbuf2       = smat_i->sbuf2;
734       ptr         = smat_i->ptr;
735       tmp         = smat_i->tmp;
736       ctr         = smat_i->ctr;
737 
738       pa          = smat_i->pa;
739       req_size    = smat_i->req_size;
740       req_source1 = smat_i->req_source1;
741 
742       allcolumns[0] = PETSC_FALSE;
743     }
744   } else { /* scall == MAT_INITIAL_MATRIX */
745     /* Get some new tags to keep the communication clean */
746     ierr = PetscObjectGetNewTag((PetscObject)C,&tag2);CHKERRQ(ierr);
747     ierr = PetscObjectGetNewTag((PetscObject)C,&tag3);CHKERRQ(ierr);
748 
749     /* evaluate communication - mesg to who, length of mesg, and buffer space
750      required. Based on this, buffers are allocated, and data copied into them*/
751     ierr = PetscCalloc4(size,&w1,size,&w2,size,&w3,size,&w4);CHKERRQ(ierr);   /* mesg size, initialize work vectors */
752 
753     for (i=0; i<ismax; i++) {
754       jmax   = nrow[i];
755       irow_i = irow[i];
756 
757       ierr   = PetscMalloc1(jmax,&row2proc_i);CHKERRQ(ierr);
758       row2proc[i] = row2proc_i;
759 
760       if (issorted[i]) proc = 0;
761       for (j=0; j<jmax; j++) {
762         if (!issorted[i]) proc = 0;
763         if (allrows[i]) row = j;
764         else row = irow_i[j];
765 
766         while (row >= c->rangebs[proc+1]) proc++;
767         w4[proc]++;
768         row2proc_i[j] = proc; /* map row index to proc */
769       }
770       for (j=0; j<size; j++) {
771         if (w4[j]) { w1[j] += w4[j];  w3[j]++; w4[j] = 0;}
772       }
773     }
774 
775     nrqs     = 0;              /* no of outgoing messages */
776     msz      = 0;              /* total mesg length (for all procs) */
777     w1[rank] = 0;              /* no mesg sent to self */
778     w3[rank] = 0;
779     for (i=0; i<size; i++) {
780       if (w1[i])  { w2[i] = 1; nrqs++;} /* there exists a message to proc i */
781     }
782     ierr = PetscMalloc1(nrqs+1,&pa);CHKERRQ(ierr); /*(proc -array)*/
783     for (i=0,j=0; i<size; i++) {
784       if (w1[i]) { pa[j] = i; j++; }
785     }
786 
787     /* Each message would have a header = 1 + 2*(no of IS) + data */
788     for (i=0; i<nrqs; i++) {
789       j      = pa[i];
790       w1[j] += w2[j] + 2* w3[j];
791       msz   += w1[j];
792     }
793     ierr = PetscInfo2(0,"Number of outgoing messages %D, total message length %D\n",nrqs,msz);CHKERRQ(ierr);
794 
795     /* Determine the number of messages to expect, their lengths, and the from-ids */
796     ierr = PetscGatherNumberOfMessages(comm,w2,w1,&nrqr);CHKERRQ(ierr);
797     ierr = PetscGatherMessageLengths(comm,nrqs,nrqr,w1,&onodes1,&olengths1);CHKERRQ(ierr);
798 
799     /* Now post the Irecvs corresponding to these messages */
800     tag0 = ((PetscObject)C)->tag;
801     ierr = PetscPostIrecvInt(comm,tag0,nrqr,onodes1,olengths1,&rbuf1,&r_waits1);CHKERRQ(ierr);
802 
803     ierr = PetscFree(onodes1);CHKERRQ(ierr);
804     ierr = PetscFree(olengths1);CHKERRQ(ierr);
805 
806     /* Allocate Memory for outgoing messages */
807     ierr = PetscMalloc4(size,&sbuf1,size,&ptr,2*msz,&tmp,size,&ctr);CHKERRQ(ierr);
808     ierr = PetscArrayzero(sbuf1,size);CHKERRQ(ierr);
809     ierr = PetscArrayzero(ptr,size);CHKERRQ(ierr);
810 
811     {
812       PetscInt *iptr = tmp;
813       k    = 0;
814       for (i=0; i<nrqs; i++) {
815         j        = pa[i];
816         iptr    += k;
817         sbuf1[j] = iptr;
818         k        = w1[j];
819       }
820     }
821 
822     /* Form the outgoing messages. Initialize the header space */
823     for (i=0; i<nrqs; i++) {
824       j           = pa[i];
825       sbuf1[j][0] = 0;
826       ierr        = PetscArrayzero(sbuf1[j]+1,2*w3[j]);CHKERRQ(ierr);
827       ptr[j]      = sbuf1[j] + 2*w3[j] + 1;
828     }
829 
830     /* Parse the isrow and copy data into outbuf */
831     for (i=0; i<ismax; i++) {
832       row2proc_i = row2proc[i];
833       ierr   = PetscArrayzero(ctr,size);CHKERRQ(ierr);
834       irow_i = irow[i];
835       jmax   = nrow[i];
836       for (j=0; j<jmax; j++) {  /* parse the indices of each IS */
837         proc = row2proc_i[j];
838         if (allrows[i]) row = j;
839         else row = irow_i[j];
840 
841         if (proc != rank) { /* copy to the outgoing buf*/
842           ctr[proc]++;
843           *ptr[proc] = row;
844           ptr[proc]++;
845         }
846       }
847       /* Update the headers for the current IS */
848       for (j=0; j<size; j++) { /* Can Optimise this loop too */
849         if ((ctr_j = ctr[j])) {
850           sbuf1_j        = sbuf1[j];
851           k              = ++sbuf1_j[0];
852           sbuf1_j[2*k]   = ctr_j;
853           sbuf1_j[2*k-1] = i;
854         }
855       }
856     }
857 
858     /*  Now  post the sends */
859     ierr = PetscMalloc1(nrqs+1,&s_waits1);CHKERRQ(ierr);
860     for (i=0; i<nrqs; ++i) {
861       j    = pa[i];
862       ierr = MPI_Isend(sbuf1[j],w1[j],MPIU_INT,j,tag0,comm,s_waits1+i);CHKERRMPI(ierr);
863     }
864 
865     /* Post Receives to capture the buffer size */
866     ierr = PetscMalloc1(nrqs+1,&r_waits2);CHKERRQ(ierr);
867     ierr = PetscMalloc3(nrqs+1,&req_source2,nrqs+1,&rbuf2,nrqs+1,&rbuf3);CHKERRQ(ierr);
868     rbuf2[0] = tmp + msz;
869     for (i=1; i<nrqs; ++i) {
870       rbuf2[i] = rbuf2[i-1]+w1[pa[i-1]];
871     }
872     for (i=0; i<nrqs; ++i) {
873       j    = pa[i];
874       ierr = MPI_Irecv(rbuf2[i],w1[j],MPIU_INT,j,tag2,comm,r_waits2+i);CHKERRMPI(ierr);
875     }
876 
877     /* Send to other procs the buf size they should allocate */
878     /* Receive messages*/
879     ierr = PetscMalloc1(nrqr+1,&s_waits2);CHKERRQ(ierr);
880     ierr = PetscMalloc1(nrqr+1,&r_status1);CHKERRQ(ierr);
881     ierr = PetscMalloc3(nrqr,&sbuf2,nrqr,&req_size,nrqr,&req_source1);CHKERRQ(ierr);
882 
883     ierr = MPI_Waitall(nrqr,r_waits1,r_status1);CHKERRMPI(ierr);
884     for (i=0; i<nrqr; ++i) {
885       req_size[i] = 0;
886       rbuf1_i        = rbuf1[i];
887       start          = 2*rbuf1_i[0] + 1;
888       ierr           = MPI_Get_count(r_status1+i,MPIU_INT,&end);CHKERRMPI(ierr);
889       ierr           = PetscMalloc1(end+1,&sbuf2[i]);CHKERRQ(ierr);
890       sbuf2_i        = sbuf2[i];
891       for (j=start; j<end; j++) {
892         row             = rbuf1_i[j] - rstart;
893         ncols           = a_i[row+1] - a_i[row] + b_i[row+1] - b_i[row];
894         sbuf2_i[j]      = ncols;
895         req_size[i] += ncols;
896       }
897       req_source1[i] = r_status1[i].MPI_SOURCE;
898       /* form the header */
899       sbuf2_i[0] = req_size[i];
900       for (j=1; j<start; j++) sbuf2_i[j] = rbuf1_i[j];
901 
902       ierr = MPI_Isend(sbuf2_i,end,MPIU_INT,req_source1[i],tag2,comm,s_waits2+i);CHKERRMPI(ierr);
903     }
904 
905     ierr = PetscFree(r_status1);CHKERRQ(ierr);
906     ierr = PetscFree(r_waits1);CHKERRQ(ierr);
907     ierr = PetscFree4(w1,w2,w3,w4);CHKERRQ(ierr);
908 
909     /* Receive messages*/
910     ierr = PetscMalloc1(nrqs+1,&r_waits3);CHKERRQ(ierr);
911     ierr = PetscMalloc1(nrqs+1,&r_status2);CHKERRQ(ierr);
912 
913     ierr = MPI_Waitall(nrqs,r_waits2,r_status2);CHKERRMPI(ierr);
914     for (i=0; i<nrqs; ++i) {
915       ierr = PetscMalloc1(rbuf2[i][0]+1,&rbuf3[i]);CHKERRQ(ierr);
916       req_source2[i] = r_status2[i].MPI_SOURCE;
917       ierr = MPI_Irecv(rbuf3[i],rbuf2[i][0],MPIU_INT,req_source2[i],tag3,comm,r_waits3+i);CHKERRMPI(ierr);
918     }
919     ierr = PetscFree(r_status2);CHKERRQ(ierr);
920     ierr = PetscFree(r_waits2);CHKERRQ(ierr);
921 
922     /* Wait on sends1 and sends2 */
923     ierr = PetscMalloc1(nrqs+1,&s_status1);CHKERRQ(ierr);
924     ierr = PetscMalloc1(nrqr+1,&s_status2);CHKERRQ(ierr);
925 
926     if (nrqs) {ierr = MPI_Waitall(nrqs,s_waits1,s_status1);CHKERRMPI(ierr);}
927     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits2,s_status2);CHKERRMPI(ierr);}
928     ierr = PetscFree(s_status1);CHKERRQ(ierr);
929     ierr = PetscFree(s_status2);CHKERRQ(ierr);
930     ierr = PetscFree(s_waits1);CHKERRQ(ierr);
931     ierr = PetscFree(s_waits2);CHKERRQ(ierr);
932 
933     /* Now allocate sending buffers for a->j, and send them off */
934     ierr = PetscMalloc1(nrqr+1,&sbuf_aj);CHKERRQ(ierr);
935     for (i=0,j=0; i<nrqr; i++) j += req_size[i];
936     ierr = PetscMalloc1(j+1,&sbuf_aj[0]);CHKERRQ(ierr);
937     for (i=1; i<nrqr; i++) sbuf_aj[i] = sbuf_aj[i-1] + req_size[i-1];
938 
939     ierr = PetscMalloc1(nrqr+1,&s_waits3);CHKERRQ(ierr);
940     {
941 
942       for (i=0; i<nrqr; i++) {
943         rbuf1_i   = rbuf1[i];
944         sbuf_aj_i = sbuf_aj[i];
945         ct1       = 2*rbuf1_i[0] + 1;
946         ct2       = 0;
947         for (j=1,max1=rbuf1_i[0]; j<=max1; j++) {
948           kmax = rbuf1[i][2*j];
949           for (k=0; k<kmax; k++,ct1++) {
950             row    = rbuf1_i[ct1] - rstart;
951             nzA    = a_i[row+1] - a_i[row]; nzB = b_i[row+1] - b_i[row];
952             ncols  = nzA + nzB;
953             cworkA = a_j + a_i[row]; cworkB = b_j + b_i[row];
954 
955             /* load the column indices for this row into cols */
956             cols = sbuf_aj_i + ct2;
957             for (l=0; l<nzB; l++) {
958               if ((ctmp = bmap[cworkB[l]]) < cstart) cols[l] = ctmp;
959               else break;
960             }
961             imark = l;
962             for (l=0; l<nzA; l++) {cols[imark+l] = cstart + cworkA[l];}
963             for (l=imark; l<nzB; l++) cols[nzA+l] = bmap[cworkB[l]];
964             ct2 += ncols;
965           }
966         }
967         ierr = MPI_Isend(sbuf_aj_i,req_size[i],MPIU_INT,req_source1[i],tag3,comm,s_waits3+i);CHKERRMPI(ierr);
968       }
969     }
970     ierr = PetscMalloc2(nrqs+1,&r_status3,nrqr+1,&s_status3);CHKERRQ(ierr);
971 
972     /* create col map: global col of C -> local col of submatrices */
973 #if defined(PETSC_USE_CTABLE)
974     for (i=0; i<ismax; i++) {
975       if (!allcolumns[i]) {
976         ierr = PetscTableCreate(ncol[i]+1,c->Nbs+1,&cmap[i]);CHKERRQ(ierr);
977 
978         jmax   = ncol[i];
979         icol_i = icol[i];
980         cmap_i = cmap[i];
981         for (j=0; j<jmax; j++) {
982           ierr = PetscTableAdd(cmap[i],icol_i[j]+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
983         }
984       } else cmap[i] = NULL;
985     }
986 #else
987     for (i=0; i<ismax; i++) {
988       if (!allcolumns[i]) {
989         ierr   = PetscCalloc1(c->Nbs,&cmap[i]);CHKERRQ(ierr);
990         jmax   = ncol[i];
991         icol_i = icol[i];
992         cmap_i = cmap[i];
993         for (j=0; j<jmax; j++) cmap_i[icol_i[j]] = j+1;
994       } else cmap[i] = NULL;
995     }
996 #endif
997 
998     /* Create lens which is required for MatCreate... */
999     for (i=0,j=0; i<ismax; i++) j += nrow[i];
1000     ierr = PetscMalloc1(ismax,&lens);CHKERRQ(ierr);
1001 
1002     if (ismax) {
1003       ierr = PetscCalloc1(j,&lens[0]);CHKERRQ(ierr);
1004     }
1005     for (i=1; i<ismax; i++) lens[i] = lens[i-1] + nrow[i-1];
1006 
1007     /* Update lens from local data */
1008     for (i=0; i<ismax; i++) {
1009       row2proc_i = row2proc[i];
1010       jmax = nrow[i];
1011       if (!allcolumns[i]) cmap_i = cmap[i];
1012       irow_i = irow[i];
1013       lens_i = lens[i];
1014       for (j=0; j<jmax; j++) {
1015         if (allrows[i]) row = j;
1016         else row = irow_i[j]; /* global blocked row of C */
1017 
1018         proc = row2proc_i[j];
1019         if (proc == rank) {
1020           /* Get indices from matA and then from matB */
1021 #if defined(PETSC_USE_CTABLE)
1022           PetscInt   tt;
1023 #endif
1024           row    = row - rstart;
1025           nzA    = a_i[row+1] - a_i[row];
1026           nzB    = b_i[row+1] - b_i[row];
1027           cworkA =  a_j + a_i[row];
1028           cworkB = b_j + b_i[row];
1029 
1030           if (!allcolumns[i]) {
1031 #if defined(PETSC_USE_CTABLE)
1032             for (k=0; k<nzA; k++) {
1033               ierr = PetscTableFind(cmap_i,cstart+cworkA[k]+1,&tt);CHKERRQ(ierr);
1034               if (tt) lens_i[j]++;
1035             }
1036             for (k=0; k<nzB; k++) {
1037               ierr = PetscTableFind(cmap_i,bmap[cworkB[k]]+1,&tt);CHKERRQ(ierr);
1038               if (tt) lens_i[j]++;
1039             }
1040 
1041 #else
1042             for (k=0; k<nzA; k++) {
1043               if (cmap_i[cstart + cworkA[k]]) lens_i[j]++;
1044             }
1045             for (k=0; k<nzB; k++) {
1046               if (cmap_i[bmap[cworkB[k]]]) lens_i[j]++;
1047             }
1048 #endif
1049           } else { /* allcolumns */
1050             lens_i[j] = nzA + nzB;
1051           }
1052         }
1053       }
1054     }
1055 
1056     /* Create row map: global row of C -> local row of submatrices */
1057     for (i=0; i<ismax; i++) {
1058       if (!allrows[i]) {
1059 #if defined(PETSC_USE_CTABLE)
1060         ierr   = PetscTableCreate(nrow[i]+1,c->Mbs+1,&rmap[i]);CHKERRQ(ierr);
1061         irow_i = irow[i];
1062         jmax   = nrow[i];
1063         for (j=0; j<jmax; j++) {
1064           if (allrows[i]) {
1065             ierr = PetscTableAdd(rmap[i],j+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
1066           } else {
1067             ierr = PetscTableAdd(rmap[i],irow_i[j]+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
1068           }
1069         }
1070 #else
1071         ierr   = PetscCalloc1(c->Mbs,&rmap[i]);CHKERRQ(ierr);
1072         rmap_i = rmap[i];
1073         irow_i = irow[i];
1074         jmax   = nrow[i];
1075         for (j=0; j<jmax; j++) {
1076           if (allrows[i]) rmap_i[j] = j;
1077           else rmap_i[irow_i[j]] = j;
1078         }
1079 #endif
1080       } else rmap[i] = NULL;
1081     }
1082 
1083     /* Update lens from offproc data */
1084     {
1085       PetscInt *rbuf2_i,*rbuf3_i,*sbuf1_i;
1086 
1087       ierr    = MPI_Waitall(nrqs,r_waits3,r_status3);CHKERRMPI(ierr);
1088       for (tmp2=0; tmp2<nrqs; tmp2++) {
1089         sbuf1_i = sbuf1[pa[tmp2]];
1090         jmax    = sbuf1_i[0];
1091         ct1     = 2*jmax+1;
1092         ct2     = 0;
1093         rbuf2_i = rbuf2[tmp2];
1094         rbuf3_i = rbuf3[tmp2];
1095         for (j=1; j<=jmax; j++) {
1096           is_no  = sbuf1_i[2*j-1];
1097           max1   = sbuf1_i[2*j];
1098           lens_i = lens[is_no];
1099           if (!allcolumns[is_no]) cmap_i = cmap[is_no];
1100           rmap_i = rmap[is_no];
1101           for (k=0; k<max1; k++,ct1++) {
1102             if (allrows[is_no]) {
1103               row = sbuf1_i[ct1];
1104             } else {
1105 #if defined(PETSC_USE_CTABLE)
1106               ierr = PetscTableFind(rmap_i,sbuf1_i[ct1]+1,&row);CHKERRQ(ierr);
1107               row--;
1108               if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
1109 #else
1110               row = rmap_i[sbuf1_i[ct1]]; /* the val in the new matrix to be */
1111 #endif
1112             }
1113             max2 = rbuf2_i[ct1];
1114             for (l=0; l<max2; l++,ct2++) {
1115               if (!allcolumns[is_no]) {
1116 #if defined(PETSC_USE_CTABLE)
1117                 ierr = PetscTableFind(cmap_i,rbuf3_i[ct2]+1,&tcol);CHKERRQ(ierr);
1118 #else
1119                 tcol = cmap_i[rbuf3_i[ct2]];
1120 #endif
1121                 if (tcol) lens_i[row]++;
1122               } else { /* allcolumns */
1123                 lens_i[row]++; /* lens_i[row] += max2 ? */
1124               }
1125             }
1126           }
1127         }
1128       }
1129     }
1130     ierr = PetscFree(r_waits3);CHKERRQ(ierr);
1131     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits3,s_status3);CHKERRMPI(ierr);}
1132     ierr = PetscFree2(r_status3,s_status3);CHKERRQ(ierr);
1133     ierr = PetscFree(s_waits3);CHKERRQ(ierr);
1134 
1135     /* Create the submatrices */
1136     for (i=0; i<ismax; i++) {
1137       PetscInt bs_tmp;
1138       if (ijonly) bs_tmp = 1;
1139       else        bs_tmp = bs;
1140 
1141       ierr = MatCreate(PETSC_COMM_SELF,submats+i);CHKERRQ(ierr);
1142       ierr = MatSetSizes(submats[i],nrow[i]*bs_tmp,ncol[i]*bs_tmp,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1143 
1144       ierr = MatSetType(submats[i],((PetscObject)A)->type_name);CHKERRQ(ierr);
1145       ierr = MatSeqBAIJSetPreallocation(submats[i],bs_tmp,0,lens[i]);CHKERRQ(ierr);
1146       ierr = MatSeqSBAIJSetPreallocation(submats[i],bs_tmp,0,lens[i]);CHKERRQ(ierr); /* this subroutine is used by SBAIJ routines */
1147 
1148       /* create struct Mat_SubSppt and attach it to submat */
1149       ierr = PetscNew(&smat_i);CHKERRQ(ierr);
1150       subc = (Mat_SeqBAIJ*)submats[i]->data;
1151       subc->submatis1 = smat_i;
1152 
1153       smat_i->destroy          = submats[i]->ops->destroy;
1154       submats[i]->ops->destroy = MatDestroySubMatrix_SeqBAIJ;
1155       submats[i]->factortype   = C->factortype;
1156 
1157       smat_i->id          = i;
1158       smat_i->nrqs        = nrqs;
1159       smat_i->nrqr        = nrqr;
1160       smat_i->rbuf1       = rbuf1;
1161       smat_i->rbuf2       = rbuf2;
1162       smat_i->rbuf3       = rbuf3;
1163       smat_i->sbuf2       = sbuf2;
1164       smat_i->req_source2 = req_source2;
1165 
1166       smat_i->sbuf1       = sbuf1;
1167       smat_i->ptr         = ptr;
1168       smat_i->tmp         = tmp;
1169       smat_i->ctr         = ctr;
1170 
1171       smat_i->pa           = pa;
1172       smat_i->req_size     = req_size;
1173       smat_i->req_source1  = req_source1;
1174 
1175       smat_i->allcolumns  = allcolumns[i];
1176       smat_i->allrows     = allrows[i];
1177       smat_i->singleis    = PETSC_FALSE;
1178       smat_i->row2proc    = row2proc[i];
1179       smat_i->rmap        = rmap[i];
1180       smat_i->cmap        = cmap[i];
1181     }
1182 
1183     if (!ismax) { /* Create dummy submats[0] for reuse struct subc */
1184       ierr = MatCreate(PETSC_COMM_SELF,&submats[0]);CHKERRQ(ierr);
1185       ierr = MatSetSizes(submats[0],0,0,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1186       ierr = MatSetType(submats[0],MATDUMMY);CHKERRQ(ierr);
1187 
1188       /* create struct Mat_SubSppt and attach it to submat */
1189       ierr = PetscNewLog(submats[0],&smat_i);CHKERRQ(ierr);
1190       submats[0]->data = (void*)smat_i;
1191 
1192       smat_i->destroy          = submats[0]->ops->destroy;
1193       submats[0]->ops->destroy = MatDestroySubMatrix_Dummy;
1194       submats[0]->factortype   = C->factortype;
1195 
1196       smat_i->id          = 0;
1197       smat_i->nrqs        = nrqs;
1198       smat_i->nrqr        = nrqr;
1199       smat_i->rbuf1       = rbuf1;
1200       smat_i->rbuf2       = rbuf2;
1201       smat_i->rbuf3       = rbuf3;
1202       smat_i->sbuf2       = sbuf2;
1203       smat_i->req_source2 = req_source2;
1204 
1205       smat_i->sbuf1       = sbuf1;
1206       smat_i->ptr         = ptr;
1207       smat_i->tmp         = tmp;
1208       smat_i->ctr         = ctr;
1209 
1210       smat_i->pa           = pa;
1211       smat_i->req_size     = req_size;
1212       smat_i->req_source1  = req_source1;
1213 
1214       smat_i->allcolumns  = PETSC_FALSE;
1215       smat_i->singleis    = PETSC_FALSE;
1216       smat_i->row2proc    = NULL;
1217       smat_i->rmap        = NULL;
1218       smat_i->cmap        = NULL;
1219     }
1220 
1221     if (ismax) {ierr = PetscFree(lens[0]);CHKERRQ(ierr);}
1222     ierr = PetscFree(lens);CHKERRQ(ierr);
1223     ierr = PetscFree(sbuf_aj[0]);CHKERRQ(ierr);
1224     ierr = PetscFree(sbuf_aj);CHKERRQ(ierr);
1225 
1226   } /* endof scall == MAT_INITIAL_MATRIX */
1227 
1228   /* Post recv matrix values */
1229   if (!ijonly) {
1230     ierr = PetscObjectGetNewTag((PetscObject)C,&tag4);CHKERRQ(ierr);
1231     ierr = PetscMalloc1(nrqs+1,&rbuf4);CHKERRQ(ierr);
1232     ierr = PetscMalloc1(nrqs+1,&r_waits4);CHKERRQ(ierr);
1233     ierr = PetscMalloc1(nrqs+1,&r_status4);CHKERRQ(ierr);
1234     ierr = PetscMalloc1(nrqr+1,&s_status4);CHKERRQ(ierr);
1235     for (i=0; i<nrqs; ++i) {
1236       ierr = PetscMalloc1(rbuf2[i][0]*bs2,&rbuf4[i]);CHKERRQ(ierr);
1237       ierr = MPI_Irecv(rbuf4[i],rbuf2[i][0]*bs2,MPIU_SCALAR,req_source2[i],tag4,comm,r_waits4+i);CHKERRMPI(ierr);
1238     }
1239 
1240     /* Allocate sending buffers for a->a, and send them off */
1241     ierr = PetscMalloc1(nrqr+1,&sbuf_aa);CHKERRQ(ierr);
1242     for (i=0,j=0; i<nrqr; i++) j += req_size[i];
1243 
1244     ierr = PetscMalloc1((j+1)*bs2,&sbuf_aa[0]);CHKERRQ(ierr);
1245     for (i=1; i<nrqr; i++) sbuf_aa[i] = sbuf_aa[i-1] + req_size[i-1]*bs2;
1246 
1247     ierr = PetscMalloc1(nrqr+1,&s_waits4);CHKERRQ(ierr);
1248 
1249     for (i=0; i<nrqr; i++) {
1250       rbuf1_i   = rbuf1[i];
1251       sbuf_aa_i = sbuf_aa[i];
1252       ct1       = 2*rbuf1_i[0]+1;
1253       ct2       = 0;
1254       for (j=1,max1=rbuf1_i[0]; j<=max1; j++) {
1255         kmax = rbuf1_i[2*j];
1256         for (k=0; k<kmax; k++,ct1++) {
1257           row    = rbuf1_i[ct1] - rstart;
1258           nzA    = a_i[row+1] - a_i[row];
1259           nzB    = b_i[row+1] - b_i[row];
1260           ncols  = nzA + nzB;
1261           cworkB = b_j + b_i[row];
1262           vworkA = a_a + a_i[row]*bs2;
1263           vworkB = b_a + b_i[row]*bs2;
1264 
1265           /* load the column values for this row into vals*/
1266           vals = sbuf_aa_i+ct2*bs2;
1267           for (l=0; l<nzB; l++) {
1268             if ((bmap[cworkB[l]]) < cstart) {
1269               ierr = PetscArraycpy(vals+l*bs2,vworkB+l*bs2,bs2);CHKERRQ(ierr);
1270             } else break;
1271           }
1272           imark = l;
1273           for (l=0; l<nzA; l++) {
1274             ierr = PetscArraycpy(vals+(imark+l)*bs2,vworkA+l*bs2,bs2);CHKERRQ(ierr);
1275           }
1276           for (l=imark; l<nzB; l++) {
1277             ierr = PetscArraycpy(vals+(nzA+l)*bs2,vworkB+l*bs2,bs2);CHKERRQ(ierr);
1278           }
1279 
1280           ct2 += ncols;
1281         }
1282       }
1283       ierr = MPI_Isend(sbuf_aa_i,req_size[i]*bs2,MPIU_SCALAR,req_source1[i],tag4,comm,s_waits4+i);CHKERRMPI(ierr);
1284     }
1285   }
1286 
1287   /* Assemble the matrices */
1288   /* First assemble the local rows */
1289   for (i=0; i<ismax; i++) {
1290     row2proc_i = row2proc[i];
1291     subc      = (Mat_SeqBAIJ*)submats[i]->data;
1292     imat_ilen = subc->ilen;
1293     imat_j    = subc->j;
1294     imat_i    = subc->i;
1295     imat_a    = subc->a;
1296 
1297     if (!allcolumns[i]) cmap_i = cmap[i];
1298     rmap_i = rmap[i];
1299     irow_i = irow[i];
1300     jmax   = nrow[i];
1301     for (j=0; j<jmax; j++) {
1302       if (allrows[i]) row = j;
1303       else row  = irow_i[j];
1304       proc = row2proc_i[j];
1305 
1306       if (proc == rank) {
1307 
1308         row    = row - rstart;
1309         nzA    = a_i[row+1] - a_i[row];
1310         nzB    = b_i[row+1] - b_i[row];
1311         cworkA = a_j + a_i[row];
1312         cworkB = b_j + b_i[row];
1313         if (!ijonly) {
1314           vworkA = a_a + a_i[row]*bs2;
1315           vworkB = b_a + b_i[row]*bs2;
1316         }
1317 
1318         if (allrows[i]) {
1319           row = row+rstart;
1320         } else {
1321 #if defined(PETSC_USE_CTABLE)
1322           ierr = PetscTableFind(rmap_i,row+rstart+1,&row);CHKERRQ(ierr);
1323           row--;
1324 
1325           if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
1326 #else
1327           row = rmap_i[row + rstart];
1328 #endif
1329         }
1330         mat_i = imat_i[row];
1331         if (!ijonly) mat_a = imat_a + mat_i*bs2;
1332         mat_j    = imat_j + mat_i;
1333         ilen = imat_ilen[row];
1334 
1335         /* load the column indices for this row into cols*/
1336         if (!allcolumns[i]) {
1337           for (l=0; l<nzB; l++) {
1338             if ((ctmp = bmap[cworkB[l]]) < cstart) {
1339 #if defined(PETSC_USE_CTABLE)
1340               ierr = PetscTableFind(cmap_i,ctmp+1,&tcol);CHKERRQ(ierr);
1341               if (tcol) {
1342 #else
1343               if ((tcol = cmap_i[ctmp])) {
1344 #endif
1345                 *mat_j++ = tcol - 1;
1346                 ierr     = PetscArraycpy(mat_a,vworkB+l*bs2,bs2);CHKERRQ(ierr);
1347                 mat_a   += bs2;
1348                 ilen++;
1349               }
1350             } else break;
1351           }
1352           imark = l;
1353           for (l=0; l<nzA; l++) {
1354 #if defined(PETSC_USE_CTABLE)
1355             ierr = PetscTableFind(cmap_i,cstart+cworkA[l]+1,&tcol);CHKERRQ(ierr);
1356             if (tcol) {
1357 #else
1358             if ((tcol = cmap_i[cstart + cworkA[l]])) {
1359 #endif
1360               *mat_j++ = tcol - 1;
1361               if (!ijonly) {
1362                 ierr   = PetscArraycpy(mat_a,vworkA+l*bs2,bs2);CHKERRQ(ierr);
1363                 mat_a += bs2;
1364               }
1365               ilen++;
1366             }
1367           }
1368           for (l=imark; l<nzB; l++) {
1369 #if defined(PETSC_USE_CTABLE)
1370             ierr = PetscTableFind(cmap_i,bmap[cworkB[l]]+1,&tcol);CHKERRQ(ierr);
1371             if (tcol) {
1372 #else
1373             if ((tcol = cmap_i[bmap[cworkB[l]]])) {
1374 #endif
1375               *mat_j++ = tcol - 1;
1376               if (!ijonly) {
1377                 ierr   = PetscArraycpy(mat_a,vworkB+l*bs2,bs2);CHKERRQ(ierr);
1378                 mat_a += bs2;
1379               }
1380               ilen++;
1381             }
1382           }
1383         } else { /* allcolumns */
1384           for (l=0; l<nzB; l++) {
1385             if ((ctmp = bmap[cworkB[l]]) < cstart) {
1386               *mat_j++ = ctmp;
1387               ierr     = PetscArraycpy(mat_a,vworkB+l*bs2,bs2);CHKERRQ(ierr);
1388               mat_a   += bs2;
1389               ilen++;
1390             } else break;
1391           }
1392           imark = l;
1393           for (l=0; l<nzA; l++) {
1394             *mat_j++ = cstart+cworkA[l];
1395             if (!ijonly) {
1396               ierr   = PetscArraycpy(mat_a,vworkA+l*bs2,bs2);CHKERRQ(ierr);
1397               mat_a += bs2;
1398             }
1399             ilen++;
1400           }
1401           for (l=imark; l<nzB; l++) {
1402             *mat_j++ = bmap[cworkB[l]];
1403             if (!ijonly) {
1404               ierr   = PetscArraycpy(mat_a,vworkB+l*bs2,bs2);CHKERRQ(ierr);
1405               mat_a += bs2;
1406             }
1407             ilen++;
1408           }
1409         }
1410         imat_ilen[row] = ilen;
1411       }
1412     }
1413   }
1414 
1415   /* Now assemble the off proc rows */
1416   if (!ijonly) {
1417     ierr = MPI_Waitall(nrqs,r_waits4,r_status4);CHKERRMPI(ierr);
1418   }
1419   for (tmp2=0; tmp2<nrqs; tmp2++) {
1420     sbuf1_i = sbuf1[pa[tmp2]];
1421     jmax    = sbuf1_i[0];
1422     ct1     = 2*jmax + 1;
1423     ct2     = 0;
1424     rbuf2_i = rbuf2[tmp2];
1425     rbuf3_i = rbuf3[tmp2];
1426     if (!ijonly) rbuf4_i = rbuf4[tmp2];
1427     for (j=1; j<=jmax; j++) {
1428       is_no     = sbuf1_i[2*j-1];
1429       rmap_i    = rmap[is_no];
1430       if (!allcolumns[is_no]) cmap_i = cmap[is_no];
1431       subc      = (Mat_SeqBAIJ*)submats[is_no]->data;
1432       imat_ilen = subc->ilen;
1433       imat_j    = subc->j;
1434       imat_i    = subc->i;
1435       if (!ijonly) imat_a    = subc->a;
1436       max1      = sbuf1_i[2*j];
1437       for (k=0; k<max1; k++,ct1++) { /* for each received block row */
1438         row = sbuf1_i[ct1];
1439 
1440         if (allrows[is_no]) {
1441           row = sbuf1_i[ct1];
1442         } else {
1443 #if defined(PETSC_USE_CTABLE)
1444           ierr = PetscTableFind(rmap_i,row+1,&row);CHKERRQ(ierr);
1445           row--;
1446           if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
1447 #else
1448           row = rmap_i[row];
1449 #endif
1450         }
1451         ilen  = imat_ilen[row];
1452         mat_i = imat_i[row];
1453         if (!ijonly) mat_a = imat_a + mat_i*bs2;
1454         mat_j = imat_j + mat_i;
1455         max2  = rbuf2_i[ct1];
1456         if (!allcolumns[is_no]) {
1457           for (l=0; l<max2; l++,ct2++) {
1458 #if defined(PETSC_USE_CTABLE)
1459             ierr = PetscTableFind(cmap_i,rbuf3_i[ct2]+1,&tcol);CHKERRQ(ierr);
1460 #else
1461             tcol = cmap_i[rbuf3_i[ct2]];
1462 #endif
1463             if (tcol) {
1464               *mat_j++ = tcol - 1;
1465               if (!ijonly) {
1466                 ierr   = PetscArraycpy(mat_a,rbuf4_i+ct2*bs2,bs2);CHKERRQ(ierr);
1467                 mat_a += bs2;
1468               }
1469               ilen++;
1470             }
1471           }
1472         } else { /* allcolumns */
1473           for (l=0; l<max2; l++,ct2++) {
1474             *mat_j++ = rbuf3_i[ct2]; /* same global column index of C */
1475             if (!ijonly) {
1476               ierr   = PetscArraycpy(mat_a,rbuf4_i+ct2*bs2,bs2);CHKERRQ(ierr);
1477               mat_a += bs2;
1478             }
1479             ilen++;
1480           }
1481         }
1482         imat_ilen[row] = ilen;
1483       }
1484     }
1485   }
1486 
1487   if (!iscsorted) { /* sort column indices of the rows */
1488     MatScalar *work;
1489 
1490     ierr = PetscMalloc1(bs2,&work);CHKERRQ(ierr);
1491     for (i=0; i<ismax; i++) {
1492       subc      = (Mat_SeqBAIJ*)submats[i]->data;
1493       imat_ilen = subc->ilen;
1494       imat_j    = subc->j;
1495       imat_i    = subc->i;
1496       if (!ijonly) imat_a = subc->a;
1497       if (allcolumns[i]) continue;
1498 
1499       jmax = nrow[i];
1500       for (j=0; j<jmax; j++) {
1501         mat_i = imat_i[j];
1502         mat_j = imat_j + mat_i;
1503         ilen  = imat_ilen[j];
1504         if (ijonly) {
1505           ierr = PetscSortInt(ilen,mat_j);CHKERRQ(ierr);
1506         } else {
1507           mat_a = imat_a + mat_i*bs2;
1508           ierr = PetscSortIntWithDataArray(ilen,mat_j,mat_a,bs2*sizeof(MatScalar),work);CHKERRQ(ierr);
1509         }
1510       }
1511     }
1512     ierr = PetscFree(work);CHKERRQ(ierr);
1513   }
1514 
1515   if (!ijonly) {
1516     ierr = PetscFree(r_status4);CHKERRQ(ierr);
1517     ierr = PetscFree(r_waits4);CHKERRQ(ierr);
1518     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits4,s_status4);CHKERRMPI(ierr);}
1519     ierr = PetscFree(s_waits4);CHKERRQ(ierr);
1520     ierr = PetscFree(s_status4);CHKERRQ(ierr);
1521   }
1522 
1523   /* Restore the indices */
1524   for (i=0; i<ismax; i++) {
1525     if (!allrows[i]) {
1526       ierr = ISRestoreIndices(isrow[i],irow+i);CHKERRQ(ierr);
1527     }
1528     if (!allcolumns[i]) {
1529       ierr = ISRestoreIndices(iscol[i],icol+i);CHKERRQ(ierr);
1530     }
1531   }
1532 
1533   for (i=0; i<ismax; i++) {
1534     ierr = MatAssemblyBegin(submats[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1535     ierr = MatAssemblyEnd(submats[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1536   }
1537 
1538   ierr = PetscFree5(*(PetscInt***)&irow,*(PetscInt***)&icol,nrow,ncol,issorted);CHKERRQ(ierr);
1539   ierr = PetscFree5(row2proc,cmap,rmap,allcolumns,allrows);CHKERRQ(ierr);
1540 
1541   if (!ijonly) {
1542     ierr = PetscFree(sbuf_aa[0]);CHKERRQ(ierr);
1543     ierr = PetscFree(sbuf_aa);CHKERRQ(ierr);
1544 
1545     for (i=0; i<nrqs; ++i) {
1546       ierr = PetscFree(rbuf4[i]);CHKERRQ(ierr);
1547     }
1548     ierr = PetscFree(rbuf4);CHKERRQ(ierr);
1549   }
1550   c->ijonly = PETSC_FALSE; /* set back to the default */
1551   PetscFunctionReturn(0);
1552 }
1553