xref: /petsc/src/mat/impls/baij/mpi/baijov.c (revision c9ffca76cc97be42e0a38bc316b736a500837fe0)
1 
2 /*
3    Routines to compute overlapping regions of a parallel MPI matrix
4   and to find submatrices that were shared across processors.
5 */
6 #include <../src/mat/impls/baij/mpi/mpibaij.h>
7 #include <petscbt.h>
8 
9 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Local(Mat,PetscInt,char**,PetscInt*,PetscInt**);
10 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Receive(Mat,PetscInt,PetscInt**,PetscInt**,PetscInt*);
11 extern PetscErrorCode MatGetRow_MPIBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**);
12 extern PetscErrorCode MatRestoreRow_MPIBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**);
13 
14 PetscErrorCode MatIncreaseOverlap_MPIBAIJ(Mat C,PetscInt imax,IS is[],PetscInt ov)
15 {
16   PetscErrorCode ierr;
17   PetscInt       i,N=C->cmap->N, bs=C->rmap->bs;
18   IS             *is_new;
19 
20   PetscFunctionBegin;
21   ierr = PetscMalloc1(imax,&is_new);CHKERRQ(ierr);
22   /* Convert the indices into block format */
23   ierr = ISCompressIndicesGeneral(N,C->rmap->n,bs,imax,is,is_new);CHKERRQ(ierr);
24   if (ov < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap specified\n");
25   for (i=0; i<ov; ++i) {
26     ierr = MatIncreaseOverlap_MPIBAIJ_Once(C,imax,is_new);CHKERRQ(ierr);
27   }
28   for (i=0; i<imax; i++) {ierr = ISDestroy(&is[i]);CHKERRQ(ierr);}
29   ierr = ISExpandIndicesGeneral(N,N,bs,imax,is_new,is);CHKERRQ(ierr);
30   for (i=0; i<imax; i++) {ierr = ISDestroy(&is_new[i]);CHKERRQ(ierr);}
31   ierr = PetscFree(is_new);CHKERRQ(ierr);
32   PetscFunctionReturn(0);
33 }
34 
35 /*
36   Sample message format:
37   If a processor A wants processor B to process some elements corresponding
38   to index sets is[1], is[5]
39   mesg [0] = 2   (no of index sets in the mesg)
40   -----------
41   mesg [1] = 1 => is[1]
42   mesg [2] = sizeof(is[1]);
43   -----------
44   mesg [5] = 5  => is[5]
45   mesg [6] = sizeof(is[5]);
46   -----------
47   mesg [7]
48   mesg [n]  data(is[1])
49   -----------
50   mesg[n+1]
51   mesg[m]  data(is[5])
52   -----------
53 
54   Notes:
55   nrqs - no of requests sent (or to be sent out)
  nrqr - no of requests received (which have to be or which have been processed)
57 */
/*
   MatIncreaseOverlap_MPIBAIJ_Once - Grow the overlap of the (block) index
   sets is[] by exactly one level, exchanging the rows owned by other
   processes over MPI.  The input index sets are destroyed and replaced by
   the enlarged ones.  See the message-format comment above for the layout
   of the exchanged buffers.
*/
PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Once(Mat C,PetscInt imax,IS is[])
{
  Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
  const PetscInt **idx,*idx_i;
  PetscInt       *n,*w3,*w4,**data,len;
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,*w2,*w1,nrqr;
  PetscInt       Mbs,i,j,k,**rbuf,row,proc=-1,nrqs,msz,**outdat,**ptr;
  PetscInt       *ctr,*pa,*tmp,*isz,*isz1,**xdata,**rbuf2,*d_p;
  PetscMPIInt    *onodes1,*olengths1,*onodes2,*olengths2;
  PetscBT        *table;
  MPI_Comm       comm;
  MPI_Request    *s_waits1,*r_waits1,*s_waits2,*r_waits2;
  MPI_Status     *s_status,*recv_status;
  char           *t_p;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
  size = c->size;
  rank = c->rank;
  Mbs  = c->Mbs;

  /* Two fresh tags: tag1 for the request phase, tag2 for the reply phase */
  ierr = PetscObjectGetNewTag((PetscObject)C,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)C,&tag2);CHKERRQ(ierr);

  ierr = PetscMalloc2(imax+1,&idx,imax,&n);CHKERRQ(ierr);

  for (i=0; i<imax; i++) {
    ierr = ISGetIndices(is[i],&idx[i]);CHKERRQ(ierr);
    ierr = ISGetLocalSize(is[i],&n[i]);CHKERRQ(ierr);
  }

  /* evaluate communication - mesg to who,length of mesg, and buffer space
     required. Based on this, buffers are allocated, and data copied into them.
     w1[p] = total number of rows sent to proc p, w3[p] = number of index sets
     contributing rows for proc p, w4 is a per-IS scratch counter. */
  ierr = PetscCalloc4(size,&w1,size,&w2,size,&w3,size,&w4);CHKERRQ(ierr);
  for (i=0; i<imax; i++) {
    ierr  = PetscMemzero(w4,size*sizeof(PetscInt));CHKERRQ(ierr); /* initialise work vector*/
    idx_i = idx[i];
    len   = n[i];
    for (j=0; j<len; j++) {
      row = idx_i[j];
      if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index set cannot have negative entries");
      /* indices are in block numbering; scale by bs to look up the owner in the point layout */
      ierr = PetscLayoutFindOwner(C->rmap,row*C->rmap->bs,&proc);CHKERRQ(ierr);
      w4[proc]++;
    }
    for (j=0; j<size; j++) {
      if (w4[j]) { w1[j] += w4[j]; w3[j]++;}
    }
  }

  nrqs     = 0;              /* no of outgoing messages */
  msz      = 0;              /* total mesg length (for all procs) */
  w1[rank] = 0;              /* no mesg sent to itself */
  w3[rank] = 0;
  for (i=0; i<size; i++) {
    if (w1[i])  {w2[i] = 1; nrqs++;} /* there exists a message to proc i */
  }
  /* pa - is list of processors to communicate with */
  ierr = PetscMalloc1(nrqs+1,&pa);CHKERRQ(ierr);
  for (i=0,j=0; i<size; i++) {
    if (w1[i]) {pa[j] = i; j++;}
  }

  /* Each message would have a header = 1 + 2*(no of IS) + data */
  for (i=0; i<nrqs; i++) {
    j      = pa[i];
    w1[j] += w2[j] + 2*w3[j];
    msz   += w1[j];
  }

  /* Determine the number of messages to expect, their lengths, from from-ids */
  ierr = PetscGatherNumberOfMessages(comm,w2,w1,&nrqr);CHKERRQ(ierr);
  ierr = PetscGatherMessageLengths(comm,nrqs,nrqr,w1,&onodes1,&olengths1);CHKERRQ(ierr);

  /* Now post the Irecvs corresponding to these messages */
  ierr = PetscPostIrecvInt(comm,tag1,nrqr,onodes1,olengths1,&rbuf,&r_waits1);CHKERRQ(ierr);

  /* Allocate Memory for outgoing messages: tmp is one contiguous slab of
     length msz; outdat[p] points at proc p's slice of it */
  ierr = PetscMalloc4(size,&outdat,size,&ptr,msz,&tmp,size,&ctr);CHKERRQ(ierr);
  ierr = PetscMemzero(outdat,size*sizeof(PetscInt*));CHKERRQ(ierr);
  ierr = PetscMemzero(ptr,size*sizeof(PetscInt*));CHKERRQ(ierr);
  {
    PetscInt *iptr = tmp,ict  = 0;
    for (i=0; i<nrqs; i++) {
      j         = pa[i];
      iptr     +=  ict;
      outdat[j] = iptr;
      ict       = w1[j];
    }
  }

  /* Form the outgoing messages */
  /* plug in the headers: [0] = IS count, then 2*w3[j] header slots, data follows */
  for (i=0; i<nrqs; i++) {
    j            = pa[i];
    outdat[j][0] = 0;
    ierr         = PetscMemzero(outdat[j]+1,2*w3[j]*sizeof(PetscInt));CHKERRQ(ierr);
    ptr[j]       = outdat[j] + 2*w3[j] + 1;
  }

  /* Memory for doing local proc's work: d_p holds imax result arrays of Mbs
     entries each; t_p holds imax bit tables of Mbs bits each */
  {
    ierr = PetscCalloc5(imax,&table, imax,&data, imax,&isz, Mbs*imax,&d_p, (Mbs/PETSC_BITS_PER_BYTE+1)*imax,&t_p);CHKERRQ(ierr);

    for (i=0; i<imax; i++) {
      table[i] = t_p + (Mbs/PETSC_BITS_PER_BYTE+1)*i;
      data[i]  = d_p + (Mbs)*i;
    }
  }

  /* Parse the IS and update local tables and the outgoing buf with the data*/
  {
    PetscInt n_i,*data_i,isz_i,*outdat_j,ctr_j;
    PetscBT  table_i;

    for (i=0; i<imax; i++) {
      ierr    = PetscMemzero(ctr,size*sizeof(PetscInt));CHKERRQ(ierr);
      n_i     = n[i];
      table_i = table[i];
      idx_i   = idx[i];
      data_i  = data[i];
      isz_i   = isz[i];
      for (j=0; j<n_i; j++) {   /* parse the indices of each IS */
        row  = idx_i[j];
        ierr = PetscLayoutFindOwner(C->rmap,row*C->rmap->bs,&proc);CHKERRQ(ierr);
        if (proc != rank) { /* copy to the outgoing buffer */
          ctr[proc]++;
          *ptr[proc] = row;
          ptr[proc]++;
        } else { /* Update the local table */
          if (!PetscBTLookupSet(table_i,row)) data_i[isz_i++] = row;
        }
      }
      /* Update the headers for the current IS */
      for (j=0; j<size; j++) { /* Can Optimise this loop by using pa[] */
        if ((ctr_j = ctr[j])) {
          outdat_j        = outdat[j];
          k               = ++outdat_j[0];
          outdat_j[2*k]   = ctr_j;
          outdat_j[2*k-1] = i;
        }
      }
      isz[i] = isz_i;
    }
  }

  /*  Now  post the sends */
  ierr = PetscMalloc1(nrqs+1,&s_waits1);CHKERRQ(ierr);
  for (i=0; i<nrqs; ++i) {
    j    = pa[i];
    ierr = MPI_Isend(outdat[j],w1[j],MPIU_INT,j,tag1,comm,s_waits1+i);CHKERRQ(ierr);
  }

  /* No longer need the original indices*/
  for (i=0; i<imax; ++i) {
    ierr = ISRestoreIndices(is[i],idx+i);CHKERRQ(ierr);
  }
  ierr = PetscFree2(idx,n);CHKERRQ(ierr);

  for (i=0; i<imax; ++i) {
    ierr = ISDestroy(&is[i]);CHKERRQ(ierr);
  }

  /* Do Local work (overlap communication with local expansion) */
  ierr = MatIncreaseOverlap_MPIBAIJ_Local(C,imax,table,isz,data);CHKERRQ(ierr);

  /* Receive messages*/
  ierr = PetscMalloc1(nrqr+1,&recv_status);CHKERRQ(ierr);
  if (nrqr) {ierr = MPI_Waitall(nrqr,r_waits1,recv_status);CHKERRQ(ierr);}

  ierr = PetscMalloc1(nrqs+1,&s_status);CHKERRQ(ierr);
  if (nrqs) {ierr = MPI_Waitall(nrqs,s_waits1,s_status);CHKERRQ(ierr);}

  /* Phase 1 sends are complete - deallocate buffers */
  ierr = PetscFree4(outdat,ptr,tmp,ctr);CHKERRQ(ierr);
  ierr = PetscFree4(w1,w2,w3,w4);CHKERRQ(ierr);

  /* Process the received requests; xdata[i]/isz1[i] are the reply buffers */
  ierr = PetscMalloc1(nrqr+1,&xdata);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrqr+1,&isz1);CHKERRQ(ierr);
  ierr = MatIncreaseOverlap_MPIBAIJ_Receive(C,nrqr,rbuf,xdata,isz1);CHKERRQ(ierr);
  ierr = PetscFree(rbuf[0]);CHKERRQ(ierr);
  ierr = PetscFree(rbuf);CHKERRQ(ierr);

  /* Send the data back*/
  /* Do a global reduction to know the buffer space req for incoming messages*/
  {
    PetscMPIInt *rw1;

    ierr = PetscCalloc1(size,&rw1);CHKERRQ(ierr);

    for (i=0; i<nrqr; ++i) {
      proc = recv_status[i].MPI_SOURCE;
      if (proc != onodes1[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MPI_SOURCE mismatch");
      rw1[proc] = isz1[i];
    }

    ierr = PetscFree(onodes1);CHKERRQ(ierr);
    ierr = PetscFree(olengths1);CHKERRQ(ierr);

    /* Determine the number of messages to expect, their lengths, from from-ids */
    ierr = PetscGatherMessageLengths(comm,nrqr,nrqs,rw1,&onodes2,&olengths2);CHKERRQ(ierr);
    ierr = PetscFree(rw1);CHKERRQ(ierr);
  }
  /* Now post the Irecvs corresponding to these messages */
  ierr = PetscPostIrecvInt(comm,tag2,nrqs,onodes2,olengths2,&rbuf2,&r_waits2);CHKERRQ(ierr);

  /*  Now  post the sends */
  ierr = PetscMalloc1(nrqr+1,&s_waits2);CHKERRQ(ierr);
  for (i=0; i<nrqr; ++i) {
    j    = recv_status[i].MPI_SOURCE;
    ierr = MPI_Isend(xdata[i],isz1[i],MPIU_INT,j,tag2,comm,s_waits2+i);CHKERRQ(ierr);
  }

  /* receive work done on other processors: merge replies into the local
     tables/data, skipping duplicates via the per-IS bit tables */
  {
    PetscMPIInt idex;
    PetscInt    is_no,ct1,max,*rbuf2_i,isz_i,*data_i,jmax;
    PetscBT     table_i;
    MPI_Status  *status2;

    ierr = PetscMalloc1(PetscMax(nrqr,nrqs)+1,&status2);CHKERRQ(ierr);
    for (i=0; i<nrqs; ++i) {
      ierr = MPI_Waitany(nrqs,r_waits2,&idex,status2+i);CHKERRQ(ierr);
      /* Process the message*/
      rbuf2_i = rbuf2[idex];
      ct1     = 2*rbuf2_i[0]+1; /* data starts just past the header */
      jmax    = rbuf2[idex][0];
      for (j=1; j<=jmax; j++) {
        max     = rbuf2_i[2*j];
        is_no   = rbuf2_i[2*j-1];
        isz_i   = isz[is_no];
        data_i  = data[is_no];
        table_i = table[is_no];
        for (k=0; k<max; k++,ct1++) {
          row = rbuf2_i[ct1];
          if (!PetscBTLookupSet(table_i,row)) data_i[isz_i++] = row;
        }
        isz[is_no] = isz_i;
      }
    }
    if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits2,status2);CHKERRQ(ierr);}
    ierr = PetscFree(status2);CHKERRQ(ierr);
  }

  /* Build the enlarged index sets from the accumulated local data */
  for (i=0; i<imax; ++i) {
    ierr = ISCreateGeneral(PETSC_COMM_SELF,isz[i],data[i],PETSC_COPY_VALUES,is+i);CHKERRQ(ierr);
  }


  ierr = PetscFree(onodes2);CHKERRQ(ierr);
  ierr = PetscFree(olengths2);CHKERRQ(ierr);

  ierr = PetscFree(pa);CHKERRQ(ierr);
  ierr = PetscFree(rbuf2[0]);CHKERRQ(ierr);
  ierr = PetscFree(rbuf2);CHKERRQ(ierr);
  ierr = PetscFree(s_waits1);CHKERRQ(ierr);
  ierr = PetscFree(r_waits1);CHKERRQ(ierr);
  ierr = PetscFree(s_waits2);CHKERRQ(ierr);
  ierr = PetscFree(r_waits2);CHKERRQ(ierr);
  ierr = PetscFree5(table,data,isz,d_p,t_p);CHKERRQ(ierr);
  ierr = PetscFree(s_status);CHKERRQ(ierr);
  ierr = PetscFree(recv_status);CHKERRQ(ierr);
  ierr = PetscFree(xdata[0]);CHKERRQ(ierr);
  ierr = PetscFree(xdata);CHKERRQ(ierr);
  ierr = PetscFree(isz1);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
325 
326 /*
   MatIncreaseOverlap_MPIBAIJ_Local - Called by MatIncreaseOverlap, to do
328        the work on the local processor.
329 
330      Inputs:
331       C      - MAT_MPIBAIJ;
332       imax - total no of index sets processed at a time;
333       table  - an array of char - size = Mbs bits.
334 
335      Output:
336       isz    - array containing the count of the solution elements corresponding
337                to each index set;
338       data   - pointer to the solutions
339 */
340 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Local(Mat C,PetscInt imax,PetscBT *table,PetscInt *isz,PetscInt **data)
341 {
342   Mat_MPIBAIJ *c = (Mat_MPIBAIJ*)C->data;
343   Mat         A  = c->A,B = c->B;
344   Mat_SeqBAIJ *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)B->data;
345   PetscInt    start,end,val,max,rstart,cstart,*ai,*aj;
346   PetscInt    *bi,*bj,*garray,i,j,k,row,*data_i,isz_i;
347   PetscBT     table_i;
348 
349   PetscFunctionBegin;
350   rstart = c->rstartbs;
351   cstart = c->cstartbs;
352   ai     = a->i;
353   aj     = a->j;
354   bi     = b->i;
355   bj     = b->j;
356   garray = c->garray;
357 
358 
359   for (i=0; i<imax; i++) {
360     data_i  = data[i];
361     table_i = table[i];
362     isz_i   = isz[i];
363     for (j=0,max=isz[i]; j<max; j++) {
364       row   = data_i[j] - rstart;
365       start = ai[row];
366       end   = ai[row+1];
367       for (k=start; k<end; k++) { /* Amat */
368         val = aj[k] + cstart;
369         if (!PetscBTLookupSet(table_i,val)) data_i[isz_i++] = val;
370       }
371       start = bi[row];
372       end   = bi[row+1];
373       for (k=start; k<end; k++) { /* Bmat */
374         val = garray[bj[k]];
375         if (!PetscBTLookupSet(table_i,val)) data_i[isz_i++] = val;
376       }
377     }
378     isz[i] = isz_i;
379   }
380   PetscFunctionReturn(0);
381 }
382 /*
      MatIncreaseOverlap_MPIBAIJ_Receive - Process the received messages,
384          and return the output
385 
386          Input:
387            C    - the matrix
388            nrqr - no of messages being processed.
           rbuf - an array of pointers to the received requests
390 
391          Output:
392            xdata - array of messages to be sent back
393            isz1  - size of each message
394 
  For better efficiency perhaps we should malloc separately each xdata[i],
then if a realloc is required we need only copy the data for that one row
rather than all previous rows as it is now where a single large chunk of
memory is used.
399 
400 */
static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Receive(Mat C,PetscInt nrqr,PetscInt **rbuf,PetscInt **xdata,PetscInt * isz1)
{
  Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
  Mat            A  = c->A,B = c->B;
  Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)B->data;
  PetscErrorCode ierr;
  PetscInt       rstart,cstart,*ai,*aj,*bi,*bj,*garray,i,j,k;
  PetscInt       row,total_sz,ct,ct1,ct2,ct3,mem_estimate,oct2,l,start,end;
  PetscInt       val,max1,max2,Mbs,no_malloc =0,*tmp,new_estimate,ctr;
  PetscInt       *rbuf_i,kmax,rbuf_0;
  PetscBT        xtable;

  PetscFunctionBegin;
  Mbs    = c->Mbs;
  rstart = c->rstartbs;
  cstart = c->cstartbs;
  ai     = a->i;
  aj     = a->j;
  bi     = b->i;
  bj     = b->j;
  garray = c->garray;

  /* First pass over the requests: ct = total number of requested rows,
     total_sz = total number of row indices across all incoming messages */
  for (i=0,ct=0,total_sz=0; i<nrqr; ++i) {
    rbuf_i =  rbuf[i];
    rbuf_0 =  rbuf_i[0];
    ct    += rbuf_0;
    for (j=1; j<=rbuf_0; j++) total_sz += rbuf_i[2*j];
  }

  /* max1 estimates the output size from the average nonzeros per block row */
  if (c->Mbs) max1 = ct*(a->nz +b->nz)/c->Mbs;
  else        max1 = 1;
  mem_estimate = 3*((total_sz > max1 ? total_sz : max1)+1);
  /* All replies share one contiguous buffer starting at xdata[0];
     xdata[i] for i>0 points into it (see the re-basing loops below) */
  ierr         = PetscMalloc1(mem_estimate,&xdata[0]);CHKERRQ(ierr);
  ++no_malloc;
  ierr = PetscBTCreate(Mbs,&xtable);CHKERRQ(ierr);
  ierr = PetscMemzero(isz1,nrqr*sizeof(PetscInt));CHKERRQ(ierr);

  /* ct3 tracks total entries written into the shared buffer so far */
  ct3 = 0;
  for (i=0; i<nrqr; i++) { /* for each mesg from proc i */
    rbuf_i =  rbuf[i];
    rbuf_0 =  rbuf_i[0];
    ct1    =  2*rbuf_0+1;   /* read cursor: data starts past the header */
    ct2    =  ct1;          /* write cursor into xdata[i] */
    ct3   += ct1;           /* reserve room for this reply's header */
    for (j=1; j<=rbuf_0; j++) { /* for each IS from proc i*/
      ierr = PetscBTMemzero(Mbs,xtable);CHKERRQ(ierr);
      oct2 = ct2;
      kmax = rbuf_i[2*j];
      /* Copy the requested rows themselves (deduplicated via xtable) */
      for (k=0; k<kmax; k++,ct1++) {
        row = rbuf_i[ct1];
        if (!PetscBTLookupSet(xtable,row)) {
          /* Out of space: grow the shared buffer by 1.5x and re-base every
             xdata[ctr] pointer into the new allocation */
          if (!(ct3 < mem_estimate)) {
            new_estimate = (PetscInt)(1.5*mem_estimate)+1;
            ierr         = PetscMalloc1(new_estimate,&tmp);CHKERRQ(ierr);
            ierr         = PetscMemcpy(tmp,xdata[0],mem_estimate*sizeof(PetscInt));CHKERRQ(ierr);
            ierr         = PetscFree(xdata[0]);CHKERRQ(ierr);
            xdata[0]     = tmp;
            mem_estimate = new_estimate; ++no_malloc;
            for (ctr=1; ctr<=i; ctr++) xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];
          }
          xdata[i][ct2++] = row;
          ct3++;
        }
      }
      /* Expand each copied row by one level using the local A and B parts */
      for (k=oct2,max2=ct2; k<max2; k++)  {
        row   = xdata[i][k] - rstart;
        start = ai[row];
        end   = ai[row+1];
        for (l=start; l<end; l++) { /* diagonal-block columns */
          val = aj[l] + cstart;
          if (!PetscBTLookupSet(xtable,val)) {
            if (!(ct3 < mem_estimate)) {
              new_estimate = (PetscInt)(1.5*mem_estimate)+1;
              ierr         = PetscMalloc1(new_estimate,&tmp);CHKERRQ(ierr);
              ierr         = PetscMemcpy(tmp,xdata[0],mem_estimate*sizeof(PetscInt));CHKERRQ(ierr);
              ierr         = PetscFree(xdata[0]);CHKERRQ(ierr);
              xdata[0]     = tmp;
              mem_estimate = new_estimate; ++no_malloc;
              for (ctr=1; ctr<=i; ctr++) xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];
            }
            xdata[i][ct2++] = val;
            ct3++;
          }
        }
        start = bi[row];
        end   = bi[row+1];
        for (l=start; l<end; l++) { /* off-diagonal-block columns via garray */
          val = garray[bj[l]];
          if (!PetscBTLookupSet(xtable,val)) {
            if (!(ct3 < mem_estimate)) {
              new_estimate = (PetscInt)(1.5*mem_estimate)+1;
              ierr         = PetscMalloc1(new_estimate,&tmp);CHKERRQ(ierr);
              ierr         = PetscMemcpy(tmp,xdata[0],mem_estimate*sizeof(PetscInt));CHKERRQ(ierr);
              ierr         = PetscFree(xdata[0]);CHKERRQ(ierr);
              xdata[0]     = tmp;
              mem_estimate = new_estimate; ++no_malloc;
              for (ctr =1; ctr <=i; ctr++) xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];
            }
            xdata[i][ct2++] = val;
            ct3++;
          }
        }
      }
      /* Update the header*/
      xdata[i][2*j]   = ct2 - oct2; /* Undo the vector isz1 and use only a var*/
      xdata[i][2*j-1] = rbuf_i[2*j-1];
    }
    xdata[i][0] = rbuf_0;
    xdata[i+1]  = xdata[i] + ct2;   /* next reply starts right after this one */
    isz1[i]     = ct2; /* size of each message */
  }
  ierr = PetscBTDestroy(&xtable);CHKERRQ(ierr);
  ierr = PetscInfo3(C,"Allocated %D bytes, required %D, no of mallocs = %D\n",mem_estimate,ct3,no_malloc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
517 
518 PetscErrorCode MatGetSubMatrices_MPIBAIJ(Mat C,PetscInt ismax,const IS isrow[],const IS iscol[],MatReuse scall,Mat *submat[])
519 {
520   IS             *isrow_block,*iscol_block;
521   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
522   PetscErrorCode ierr;
523   PetscInt       nmax,nstages_local,nstages,i,pos,max_no,N=C->cmap->N,bs=C->rmap->bs;
524 
525   PetscFunctionBegin;
526   /* The compression and expansion should be avoided. Doesn't point
527      out errors, might change the indices, hence buggey */
528   ierr = PetscMalloc2(ismax+1,&isrow_block,ismax+1,&iscol_block);CHKERRQ(ierr);
529   ierr = ISCompressIndicesGeneral(N,C->rmap->n,bs,ismax,isrow,isrow_block);CHKERRQ(ierr);
530   ierr = ISCompressIndicesGeneral(N,C->cmap->n,bs,ismax,iscol,iscol_block);CHKERRQ(ierr);
531 
532   /* Allocate memory to hold all the submatrices */
533   if (scall == MAT_INITIAL_MATRIX) {
534     ierr = PetscMalloc1(ismax+1,submat);CHKERRQ(ierr);
535   }
536   /* Determine the number of stages through which submatrices are done */
537   nmax = 20*1000000 / (c->Nbs * sizeof(PetscInt));
538   if (!nmax) nmax = 1;
539   nstages_local = ismax/nmax + ((ismax % nmax) ? 1 : 0);
540 
541   /* Make sure every processor loops through the nstages */
542   ierr = MPIU_Allreduce(&nstages_local,&nstages,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)C));CHKERRQ(ierr);
543   for (i=0,pos=0; i<nstages; i++) {
544     if (pos+nmax <= ismax) max_no = nmax;
545     else if (pos == ismax) max_no = 0;
546     else                   max_no = ismax-pos;
547 
548     ierr = MatGetSubMatrices_MPIBAIJ_local(C,max_no,isrow_block+pos,iscol_block+pos,scall,*submat+pos);CHKERRQ(ierr);
549     pos += max_no;
550   }
551 
552   for (i=0; i<ismax; i++) {
553     ierr = ISDestroy(&isrow_block[i]);CHKERRQ(ierr);
554     ierr = ISDestroy(&iscol_block[i]);CHKERRQ(ierr);
555   }
556   ierr = PetscFree2(isrow_block,iscol_block);CHKERRQ(ierr);
557   PetscFunctionReturn(0);
558 }
559 
560 #if defined(PETSC_USE_CTABLE)
561 PetscErrorCode PetscGetProc(const PetscInt row, const PetscMPIInt size, const PetscInt proc_gnode[], PetscMPIInt *rank)
562 {
563   PetscInt       nGlobalNd = proc_gnode[size];
564   PetscMPIInt    fproc;
565   PetscErrorCode ierr;
566 
567   PetscFunctionBegin;
568   ierr = PetscMPIIntCast((PetscInt)(((float)row * (float)size / (float)nGlobalNd + 0.5)),&fproc);CHKERRQ(ierr);
569   if (fproc > size) fproc = size;
570   while (row < proc_gnode[fproc] || row >= proc_gnode[fproc+1]) {
571     if (row < proc_gnode[fproc]) fproc--;
572     else                         fproc++;
573   }
574   *rank = fproc;
575   PetscFunctionReturn(0);
576 }
577 #endif
578 
579 /* -------------------------------------------------------------------------*/
580 /* This code is used for BAIJ and SBAIJ matrices (unfortunate dependency) */
581 
/*
   MatDestroy_MPIBAIJ_MatGetSubmatrices - Destroy a submatrix produced by
   MatGetSubMatrices, releasing the communication scaffolding stored in
   its Mat_SubMat context.  Buffers shared among all submatrices of a call
   are freed only by the submatrix with id 0.
*/
PetscErrorCode MatDestroy_MPIBAIJ_MatGetSubmatrices(Mat C)
{
  PetscErrorCode ierr;
  Mat_SeqBAIJ    *c = (Mat_SeqBAIJ*)C->data;
  Mat_SubMat     *submatj = c->submatis1;
  PetscInt       i;

  PetscFunctionBegin;
  if (!submatj->id) { /* delete data that are linked only to submats[id=0] */
    ierr = PetscFree4(submatj->sbuf1,submatj->ptr,submatj->tmp,submatj->ctr);CHKERRQ(ierr);

    /* per-request send buffers from the row-data exchange */
    for (i=0; i<submatj->nrqr; ++i) {
      ierr = PetscFree(submatj->sbuf2[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree3(submatj->sbuf2,submatj->req_size,submatj->req_source1);CHKERRQ(ierr);

    if (submatj->rbuf1) {
      ierr = PetscFree(submatj->rbuf1[0]);CHKERRQ(ierr);
      ierr = PetscFree(submatj->rbuf1);CHKERRQ(ierr);
    }

    /* per-request receive buffers for the column data */
    for (i=0; i<submatj->nrqs; ++i) {
      ierr = PetscFree(submatj->rbuf3[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree3(submatj->req_source2,submatj->rbuf2,submatj->rbuf3);CHKERRQ(ierr);
    ierr = PetscFree(submatj->pa);CHKERRQ(ierr);
  }

#if defined(PETSC_USE_CTABLE)
  ierr = PetscTableDestroy((PetscTable*)&submatj->rmap);CHKERRQ(ierr);
  if (submatj->cmap_loc) {ierr = PetscFree(submatj->cmap_loc);CHKERRQ(ierr);}
  ierr = PetscFree(submatj->rmap_loc);CHKERRQ(ierr);
#else
  ierr = PetscFree(submatj->rmap);CHKERRQ(ierr);
#endif

  /* cmap is only allocated when specific columns were requested */
  if (!submatj->allcolumns) {
#if defined(PETSC_USE_CTABLE)
    ierr = PetscTableDestroy((PetscTable*)&submatj->cmap);CHKERRQ(ierr);
#else
    ierr = PetscFree(submatj->cmap);CHKERRQ(ierr);
#endif
  }
  /* chain to the original destroy routine saved when the submat was built */
  ierr = submatj->destroy(C);CHKERRQ(ierr);
  ierr = PetscFree(submatj->row2proc);CHKERRQ(ierr);

  ierr = PetscFree(submatj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
631 
632 PetscErrorCode MatGetSubMatrices_MPIBAIJ_local(Mat C,PetscInt ismax,const IS isrow[],const IS iscol[],MatReuse scall,Mat *submats)
633 {
634   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
635   Mat            A  = c->A;
636   Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)c->B->data,*subc;
637   const PetscInt **icol,**irow;
638   PetscInt       *nrow,*ncol,start;
639   PetscErrorCode ierr;
640   PetscMPIInt    rank,size,tag0,tag2,tag3,tag4,*w1,*w2,*w3,*w4,nrqr;
641   PetscInt       **sbuf1,**sbuf2,i,j,k,l,ct1,ct2,**rbuf1,row,proc=-1;
642   PetscInt       nrqs=0,msz,**ptr=NULL,*req_size=NULL,*ctr=NULL,*pa,*tmp=NULL,tcol;
643   PetscInt       **rbuf3=NULL,*req_source1=NULL,*req_source2,**sbuf_aj,**rbuf2=NULL,max1,max2;
644   PetscInt       **lens,is_no,ncols,*cols,mat_i,*mat_j,tmp2,jmax;
645 #if defined(PETSC_USE_CTABLE)
646   PetscTable     *cmap,cmap_i=NULL,*rmap,rmap_i;
647 #else
648   PetscInt       **cmap,*cmap_i=NULL,**rmap,*rmap_i;
649 #endif
650   const PetscInt *irow_i;
651   PetscInt       ctr_j,*sbuf1_j,*sbuf_aj_i,*rbuf1_i,kmax,*lens_i;
652   MPI_Request    *s_waits1,*r_waits1,*s_waits2,*r_waits2,*r_waits3;
653   MPI_Request    *r_waits4,*s_waits3,*s_waits4;
654   MPI_Status     *r_status1,*r_status2,*s_status1,*s_status3,*s_status2;
655   MPI_Status     *r_status3,*r_status4,*s_status4;
656   MPI_Comm       comm;
657   PetscScalar    **rbuf4,*rbuf4_i,**sbuf_aa,*vals,*mat_a,*imat_a,*sbuf_aa_i;
658   PetscMPIInt    *onodes1,*olengths1,end;
659   PetscInt       **row2proc,*row2proc_i,*imat_ilen,*imat_j,*imat_i;
660   Mat_SubMat     **smats,*smat_i;
661   PetscBool      *issorted,colflag,iscsorted=PETSC_TRUE;
662   PetscInt       *sbuf1_i,*rbuf2_i,*rbuf3_i,ilen;
663 
664   PetscInt       bs=C->rmap->bs,bs2=c->bs2,rstart = c->rstartbs;
665   PetscBool      ijonly=c->ijonly; /* private flag indicates only matrix data structures are requested */
666   PetscInt       nzA,nzB,*a_i=a->i,*b_i=b->i,*a_j = a->j,*b_j = b->j,ctmp,imark,*cworkA,*cworkB;
667   PetscScalar    *vworkA,*vworkB,*a_a = a->a,*b_a = b->a;
668   PetscInt       cstart = c->cstartbs,*bmap = c->garray;
669   PetscBool      *allrows,*allcolumns;
670 
671   PetscFunctionBegin;
672   ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
673   size = c->size;
674   rank = c->rank;
675 
676   ierr = PetscMalloc6(ismax,&smats,ismax,&row2proc,ismax,&cmap,ismax,&rmap,ismax,&allcolumns,ismax,&allrows);CHKERRQ(ierr);
677   ierr = PetscMalloc5(ismax,&irow,ismax,&icol,ismax,&nrow,ismax,&ncol,ismax,&issorted);CHKERRQ(ierr);
678 
679   for (i=0; i<ismax; i++) {
680     ierr = ISSorted(iscol[i],&issorted[i]);CHKERRQ(ierr);
681     if (!issorted[i]) iscsorted = issorted[i]; /* columns are not sorted! */
682     ierr = ISSorted(isrow[i],&issorted[i]);CHKERRQ(ierr);
683 
684     /* Check for special case: allcolumns */
685     ierr = ISIdentity(iscol[i],&colflag);CHKERRQ(ierr);
686     ierr = ISGetLocalSize(iscol[i],&ncol[i]);CHKERRQ(ierr);
687 
688     if (colflag && ncol[i] == c->Nbs) {
689       allcolumns[i] = PETSC_TRUE;
690       icol[i]       = NULL;
691       /* printf("[%d] allcolumns[%d] true\n",rank,i); */
692     } else {
693       allcolumns[i] = PETSC_FALSE;
694       ierr = ISGetIndices(iscol[i],&icol[i]);CHKERRQ(ierr);
695     }
696 
697     /* Check for special case: allrows */
698     ierr = ISIdentity(isrow[i],&colflag);CHKERRQ(ierr);
699     ierr = ISGetLocalSize(isrow[i],&nrow[i]);CHKERRQ(ierr);
700     if (colflag && nrow[i] == c->Mbs) {
701       allrows[i] = PETSC_TRUE;
702       irow[i]    = NULL;
703       /* printf("[%d] allrows[%d]\n",rank,i); */
704     } else {
705       allrows[i] = PETSC_FALSE;
706       ierr = ISGetIndices(isrow[i],&irow[i]);CHKERRQ(ierr);
707     }
708   }
709 
710   if (scall == MAT_REUSE_MATRIX) {
711     /* Assumes new rows are same length as the old rows */
712     for (i=0; i<ismax; i++) {
713       subc = (Mat_SeqBAIJ*)(submats[i]->data);
714       if (subc->mbs != nrow[i] || subc->nbs != ncol[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. wrong size");
715 
716       /* Initial matrix as if empty */
717       ierr = PetscMemzero(subc->ilen,subc->mbs*sizeof(PetscInt));CHKERRQ(ierr);
718 
719       /* Initial matrix as if empty */
720       submats[i]->factortype = C->factortype;
721 
722       smat_i   = subc->submatis1;
723       smats[i] = smat_i;
724 
725       nrqs        = smat_i->nrqs;
726       nrqr        = smat_i->nrqr;
727       rbuf1       = smat_i->rbuf1;
728       rbuf2       = smat_i->rbuf2;
729       rbuf3       = smat_i->rbuf3;
730       req_source2 = smat_i->req_source2;
731 
732       sbuf1     = smat_i->sbuf1;
733       sbuf2     = smat_i->sbuf2;
734       ptr       = smat_i->ptr;
735       tmp       = smat_i->tmp;
736       ctr       = smat_i->ctr;
737 
738       pa          = smat_i->pa;
739       req_size    = smat_i->req_size;
740       req_source1 = smat_i->req_source1;
741 
742       allcolumns[i] = smat_i->allcolumns;
743       allrows[i]    = smat_i->allrows;
744       row2proc[i]   = smat_i->row2proc;
745       rmap[i]       = smat_i->rmap;
746       cmap[i]       = smat_i->cmap;
747     }
748   } else { /* scall == MAT_INITIAL_MATRIX */
749     /* Get some new tags to keep the communication clean */
750     ierr = PetscObjectGetNewTag((PetscObject)C,&tag2);CHKERRQ(ierr);
751     ierr = PetscObjectGetNewTag((PetscObject)C,&tag3);CHKERRQ(ierr);
752 
753     /* evaluate communication - mesg to who, length of mesg, and buffer space
754      required. Based on this, buffers are allocated, and data copied into them*/
755     ierr = PetscCalloc4(size,&w1,size,&w2,size,&w3,size,&w4);CHKERRQ(ierr);   /* mesg size, initialize work vectors */
756 
757     for (i=0; i<ismax; i++) {
758       jmax   = nrow[i];
759       irow_i = irow[i];
760 
761       ierr   = PetscMalloc1(jmax,&row2proc_i);CHKERRQ(ierr);
762       row2proc[i] = row2proc_i;
763 
764       if (issorted[i]) proc = 0;
765       for (j=0; j<jmax; j++) {
766         if (!issorted[i]) proc = 0;
767         if (allrows[i]) row = j;
768         else row = irow_i[j];
769 
770         while (row >= c->rangebs[proc+1]) proc++;
771         w4[proc]++;
772         row2proc_i[j] = proc; /* map row index to proc */
773       }
774       for (j=0; j<size; j++) {
775         if (w4[j]) { w1[j] += w4[j];  w3[j]++; w4[j] = 0;}
776       }
777     }
778 
779     nrqs     = 0;              /* no of outgoing messages */
780     msz      = 0;              /* total mesg length (for all procs) */
781     w1[rank] = 0;              /* no mesg sent to self */
782     w3[rank] = 0;
783     for (i=0; i<size; i++) {
784       if (w1[i])  { w2[i] = 1; nrqs++;} /* there exists a message to proc i */
785     }
786     ierr = PetscMalloc1(nrqs+1,&pa);CHKERRQ(ierr); /*(proc -array)*/
787     for (i=0,j=0; i<size; i++) {
788       if (w1[i]) { pa[j] = i; j++; }
789     }
790 
791     /* Each message would have a header = 1 + 2*(no of IS) + data */
792     for (i=0; i<nrqs; i++) {
793       j      = pa[i];
794       w1[j] += w2[j] + 2* w3[j];
795       msz   += w1[j];
796     }
797     ierr = PetscInfo2(0,"Number of outgoing messages %D Total message length %D\n",nrqs,msz);CHKERRQ(ierr);
798 
799     /* Determine the number of messages to expect, their lengths, from from-ids */
800     ierr = PetscGatherNumberOfMessages(comm,w2,w1,&nrqr);CHKERRQ(ierr);
801     ierr = PetscGatherMessageLengths(comm,nrqs,nrqr,w1,&onodes1,&olengths1);CHKERRQ(ierr);
802 
803     /* Now post the Irecvs corresponding to these messages */
804     tag0 = ((PetscObject)C)->tag;
805     ierr = PetscPostIrecvInt(comm,tag0,nrqr,onodes1,olengths1,&rbuf1,&r_waits1);CHKERRQ(ierr);
806     /* printf("[%d] nrqs %d, nrqr %d\n",rank,nrqs,nrqr); */
807 
808     ierr = PetscFree(onodes1);CHKERRQ(ierr);
809     ierr = PetscFree(olengths1);CHKERRQ(ierr);
810 
811     /* Allocate Memory for outgoing messages */
812     ierr = PetscMalloc4(size,&sbuf1,size,&ptr,2*msz,&tmp,size,&ctr);CHKERRQ(ierr);
813     ierr = PetscMemzero(sbuf1,size*sizeof(PetscInt*));CHKERRQ(ierr);
814     ierr = PetscMemzero(ptr,size*sizeof(PetscInt*));CHKERRQ(ierr);
815 
816     {
817       PetscInt *iptr = tmp;
818       k    = 0;
819       for (i=0; i<nrqs; i++) {
820         j        = pa[i];
821         iptr    += k;
822         sbuf1[j] = iptr;
823         k        = w1[j];
824       }
825     }
826 
827     /* Form the outgoing messages. Initialize the header space */
828     for (i=0; i<nrqs; i++) {
829       j           = pa[i];
830       sbuf1[j][0] = 0;
831       ierr        = PetscMemzero(sbuf1[j]+1,2*w3[j]*sizeof(PetscInt));CHKERRQ(ierr);
832       ptr[j]      = sbuf1[j] + 2*w3[j] + 1;
833     }
834 
835     /* Parse the isrow and copy data into outbuf */
836     for (i=0; i<ismax; i++) {
837       row2proc_i = row2proc[i];
838       ierr   = PetscMemzero(ctr,size*sizeof(PetscInt));CHKERRQ(ierr);
839       irow_i = irow[i];
840       jmax   = nrow[i];
841       for (j=0; j<jmax; j++) {  /* parse the indices of each IS */
842         proc = row2proc_i[j];
843         if (allrows[i]) row = j;
844         else row = irow_i[j];
845 
846         if (proc != rank) { /* copy to the outgoing buf*/
847           ctr[proc]++;
848           *ptr[proc] = row;
849           ptr[proc]++;
850         }
851       }
852       /* Update the headers for the current IS */
853       for (j=0; j<size; j++) { /* Can Optimise this loop too */
854         if ((ctr_j = ctr[j])) {
855           sbuf1_j        = sbuf1[j];
856           k              = ++sbuf1_j[0];
857           sbuf1_j[2*k]   = ctr_j;
858           sbuf1_j[2*k-1] = i;
859         }
860       }
861     }
862 
863     /*  Now  post the sends */
864     ierr = PetscMalloc1(nrqs+1,&s_waits1);CHKERRQ(ierr);
865     for (i=0; i<nrqs; ++i) {
866       j    = pa[i];
867       ierr = MPI_Isend(sbuf1[j],w1[j],MPIU_INT,j,tag0,comm,s_waits1+i);CHKERRQ(ierr);
868     }
869 
870     /* Post Receives to capture the buffer size */
871     ierr = PetscMalloc1(nrqs+1,&r_waits2);CHKERRQ(ierr);
872     ierr = PetscMalloc3(nrqs+1,&req_source2,nrqs+1,&rbuf2,nrqs+1,&rbuf3);CHKERRQ(ierr);
873     rbuf2[0] = tmp + msz;
874     for (i=1; i<nrqs; ++i) {
875       rbuf2[i] = rbuf2[i-1]+w1[pa[i-1]];
876     }
877     for (i=0; i<nrqs; ++i) {
878       j    = pa[i];
879       ierr = MPI_Irecv(rbuf2[i],w1[j],MPIU_INT,j,tag2,comm,r_waits2+i);CHKERRQ(ierr);
880     }
881 
882     /* Send to other procs the buf size they should allocate */
883     /* Receive messages*/
884     ierr = PetscMalloc1(nrqr+1,&s_waits2);CHKERRQ(ierr);
885     ierr = PetscMalloc1(nrqr+1,&r_status1);CHKERRQ(ierr);
886     ierr = PetscMalloc3(nrqr,&sbuf2,nrqr,&req_size,nrqr,&req_source1);CHKERRQ(ierr);
887     {
888       PetscInt   *sAi = a->i,*sBi = b->i,id;
889       PetscInt   *sbuf2_i;
890 
891       ierr = MPI_Waitall(nrqr,r_waits1,r_status1);CHKERRQ(ierr);
892       for (i=0; i<nrqr; ++i) {
893         req_size[i] = 0;
894         rbuf1_i        = rbuf1[i];
895         start          = 2*rbuf1_i[0] + 1;
896         ierr           = MPI_Get_count(r_status1+i,MPIU_INT,&end);CHKERRQ(ierr);
897         ierr           = PetscMalloc1(end+1,&sbuf2[i]);CHKERRQ(ierr);
898         sbuf2_i        = sbuf2[i];
899         for (j=start; j<end; j++) {
900           id              = rbuf1_i[j] - rstart;
901           ncols           = sAi[id+1] - sAi[id] + sBi[id+1] - sBi[id];
902           sbuf2_i[j]      = ncols;
903           req_size[i] += ncols;
904         }
905         req_source1[i] = r_status1[i].MPI_SOURCE;
906         /* form the header */
907         sbuf2_i[0] = req_size[i];
908         for (j=1; j<start; j++) sbuf2_i[j] = rbuf1_i[j];
909 
910         ierr = MPI_Isend(sbuf2_i,end,MPIU_INT,req_source1[i],tag2,comm,s_waits2+i);CHKERRQ(ierr);
911       }
912     }
913     ierr = PetscFree(r_status1);CHKERRQ(ierr);
914     ierr = PetscFree(r_waits1);CHKERRQ(ierr);
915     ierr = PetscFree4(w1,w2,w3,w4);CHKERRQ(ierr);
916 
917     /* Receive messages*/
918     ierr = PetscMalloc1(nrqs+1,&r_waits3);CHKERRQ(ierr);
919     ierr = PetscMalloc1(nrqs+1,&r_status2);CHKERRQ(ierr);
920 
921     ierr = MPI_Waitall(nrqs,r_waits2,r_status2);CHKERRQ(ierr);
922     for (i=0; i<nrqs; ++i) {
923       ierr = PetscMalloc1(rbuf2[i][0]+1,&rbuf3[i]);CHKERRQ(ierr);
924       req_source2[i] = r_status2[i].MPI_SOURCE;
925       ierr = MPI_Irecv(rbuf3[i],rbuf2[i][0],MPIU_INT,req_source2[i],tag3,comm,r_waits3+i);CHKERRQ(ierr);
926     }
927     ierr = PetscFree(r_status2);CHKERRQ(ierr);
928     ierr = PetscFree(r_waits2);CHKERRQ(ierr);
929 
930     /* Wait on sends1 and sends2 */
931     ierr = PetscMalloc1(nrqs+1,&s_status1);CHKERRQ(ierr);
932     ierr = PetscMalloc1(nrqr+1,&s_status2);CHKERRQ(ierr);
933 
934     if (nrqs) {ierr = MPI_Waitall(nrqs,s_waits1,s_status1);CHKERRQ(ierr);}
935     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits2,s_status2);CHKERRQ(ierr);}
936     ierr = PetscFree(s_status1);CHKERRQ(ierr);
937     ierr = PetscFree(s_status2);CHKERRQ(ierr);
938     ierr = PetscFree(s_waits1);CHKERRQ(ierr);
939     ierr = PetscFree(s_waits2);CHKERRQ(ierr);
940 
941     /* Now allocate sending buffers for a->j, and send them off */
942     ierr = PetscMalloc1(nrqr+1,&sbuf_aj);CHKERRQ(ierr);
943     for (i=0,j=0; i<nrqr; i++) j += req_size[i];
944     ierr = PetscMalloc1(j+1,&sbuf_aj[0]);CHKERRQ(ierr);
945     for (i=1; i<nrqr; i++) sbuf_aj[i] = sbuf_aj[i-1] + req_size[i-1];
946 
947     ierr = PetscMalloc1(nrqr+1,&s_waits3);CHKERRQ(ierr);
948     {
949 
950       for (i=0; i<nrqr; i++) {
951         rbuf1_i   = rbuf1[i];
952         sbuf_aj_i = sbuf_aj[i];
953         ct1       = 2*rbuf1_i[0] + 1;
954         ct2       = 0;
955         for (j=1,max1=rbuf1_i[0]; j<=max1; j++) {
956           kmax = rbuf1[i][2*j];
957           for (k=0; k<kmax; k++,ct1++) {
958             row    = rbuf1_i[ct1] - rstart;
959             nzA    = a_i[row+1] - a_i[row]; nzB = b_i[row+1] - b_i[row];
960             ncols  = nzA + nzB;
961             cworkA = a_j + a_i[row]; cworkB = b_j + b_i[row];
962 
963             /* load the column indices for this row into cols */
964             cols = sbuf_aj_i + ct2;
965             for (l=0; l<nzB; l++) {
966               if ((ctmp = bmap[cworkB[l]]) < cstart) cols[l] = ctmp;
967               else break;
968             }
969             imark = l;
970             for (l=0; l<nzA; l++) {cols[imark+l] = cstart + cworkA[l];}
971             for (l=imark; l<nzB; l++) cols[nzA+l] = bmap[cworkB[l]];
972             ct2 += ncols;
973           }
974         }
975         ierr = MPI_Isend(sbuf_aj_i,req_size[i],MPIU_INT,req_source1[i],tag3,comm,s_waits3+i);CHKERRQ(ierr);
976       }
977     }
978     ierr = PetscMalloc2(nrqs+1,&r_status3,nrqr+1,&s_status3);CHKERRQ(ierr);
979 
980     /* create col map: global col of C -> local col of submatrices */
981     const PetscInt *icol_i;
982 #if defined(PETSC_USE_CTABLE)
983     for (i=0; i<ismax; i++) {
984       if (!allcolumns[i]) {
985         ierr = PetscTableCreate(ncol[i]+1,c->Nbs+1,&cmap[i]);CHKERRQ(ierr);
986 
987         jmax   = ncol[i];
988         icol_i = icol[i];
989         cmap_i = cmap[i];
990         for (j=0; j<jmax; j++) {
991           ierr = PetscTableAdd(cmap[i],icol_i[j]+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
992         }
993       } else cmap[i] = NULL;
994     }
995 #else
996     for (i=0; i<ismax; i++) {
997       if (!allcolumns[i]) {
998         ierr   = PetscCalloc1(c->Nbs,&cmap[i]);CHKERRQ(ierr);
999         jmax   = ncol[i];
1000         icol_i = icol[i];
1001         cmap_i = cmap[i];
1002         for (j=0; j<jmax; j++) cmap_i[icol_i[j]] = j+1;
1003       } else cmap[i] = NULL;
1004     }
1005 #endif
1006 
1007     /* Create lens which is required for MatCreate... */
1008     for (i=0,j=0; i<ismax; i++) j += nrow[i];
1009     ierr = PetscMalloc1(ismax,&lens);CHKERRQ(ierr);
1010 
1011     if (ismax) {
1012       ierr = PetscCalloc1(j,&lens[0]);CHKERRQ(ierr);
1013     }
1014     for (i=1; i<ismax; i++) lens[i] = lens[i-1] + nrow[i-1];
1015 
1016     /* Update lens from local data */
1017     for (i=0; i<ismax; i++) {
1018       row2proc_i = row2proc[i];
1019       jmax = nrow[i];
1020       if (!allcolumns[i]) cmap_i = cmap[i];
1021       irow_i = irow[i];
1022       lens_i = lens[i];
1023       for (j=0; j<jmax; j++) {
1024         if (allrows[i]) row = j;
1025         else row = irow_i[j]; /* global blocked row of C */
1026 
1027         proc = row2proc_i[j];
1028         if (proc == rank) {
1029           /* Get indices from matA and then from matB */
1030 #if defined(PETSC_USE_CTABLE)
1031           PetscInt   tt;
1032 #endif
1033           row    = row - rstart;
1034           nzA    = a_i[row+1] - a_i[row];
1035           nzB    = b_i[row+1] - b_i[row];
1036           cworkA =  a_j + a_i[row];
1037           cworkB = b_j + b_i[row];
1038 
1039           if (!allcolumns[i]) {
1040 #if defined(PETSC_USE_CTABLE)
1041             for (k=0; k<nzA; k++) {
1042               ierr = PetscTableFind(cmap_i,cstart+cworkA[k]+1,&tt);CHKERRQ(ierr);
1043               if (tt) lens_i[j]++;
1044             }
1045             for (k=0; k<nzB; k++) {
1046               ierr = PetscTableFind(cmap_i,bmap[cworkB[k]]+1,&tt);CHKERRQ(ierr);
1047               if (tt) lens_i[j]++;
1048             }
1049 
1050 #else
1051             for (k=0; k<nzA; k++) {
1052               if (cmap_i[cstart + cworkA[k]]) lens_i[j]++;
1053             }
1054             for (k=0; k<nzB; k++) {
1055               if (cmap_i[bmap[cworkB[k]]]) lens_i[j]++;
1056             }
1057 #endif
1058           } else { /* allcolumns */
1059             lens_i[j] = nzA + nzB;
1060           }
1061         }
1062       }
1063     }
1064 
1065     /* Create row map: global row of C -> local row of submatrices */
1066 #if defined(PETSC_USE_CTABLE)
1067     for (i=0; i<ismax; i++) {
1068       ierr   = PetscTableCreate(nrow[i]+1,c->Mbs+1,&rmap[i]);CHKERRQ(ierr);
1069       irow_i = irow[i];
1070       jmax   = nrow[i];
1071       for (j=0; j<jmax; j++) {
1072         if (allrows[i]) {
1073           ierr = PetscTableAdd(rmap[i],j+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
1074         } else {
1075           ierr = PetscTableAdd(rmap[i],irow_i[j]+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
1076         }
1077       }
1078     }
1079 #else
1080     for (i=0; i<ismax; i++) {
1081       ierr   = PetscCalloc1(c->Mbs,&rmap[i]);CHKERRQ(ierr);
1082       rmap_i = rmap[i];
1083       irow_i = irow[i];
1084       jmax   = nrow[i];
1085       for (j=0; j<jmax; j++) {
1086         if (allrows[i]) rmap_i[j] = j;
1087         else rmap_i[irow_i[j]] = j;
1088       }
1089     }
1090 #endif
1091 
1092     /* Update lens from offproc data */
1093     {
1094       PetscInt *rbuf2_i,*rbuf3_i,*sbuf1_i;
1095 
1096       ierr    = MPI_Waitall(nrqs,r_waits3,r_status3);CHKERRQ(ierr);
1097       for (tmp2=0; tmp2<nrqs; tmp2++) {
1098         sbuf1_i = sbuf1[pa[tmp2]];
1099         jmax    = sbuf1_i[0];
1100         ct1     = 2*jmax+1;
1101         ct2     = 0;
1102         rbuf2_i = rbuf2[tmp2];
1103         rbuf3_i = rbuf3[tmp2];
1104         for (j=1; j<=jmax; j++) {
1105           is_no  = sbuf1_i[2*j-1];
1106           max1   = sbuf1_i[2*j];
1107           lens_i = lens[is_no];
1108           if (!allcolumns[is_no]) cmap_i = cmap[is_no];
1109           rmap_i = rmap[is_no];
1110           for (k=0; k<max1; k++,ct1++) {
1111 #if defined(PETSC_USE_CTABLE)
1112             ierr = PetscTableFind(rmap_i,sbuf1_i[ct1]+1,&row);CHKERRQ(ierr);
1113             row--;
1114             if (allrows[is_no] && sbuf1_i[ct1] != row) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"sbuf1_i[ct1] %d != row %d",sbuf1_i[ct1],row);
1115 
1116             if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
1117 #else
1118             row = rmap_i[sbuf1_i[ct1]]; /* the val in the new matrix to be */
1119 #endif
1120             max2 = rbuf2_i[ct1];
1121             for (l=0; l<max2; l++,ct2++) {
1122               if (!allcolumns[is_no]) {
1123 #if defined(PETSC_USE_CTABLE)
1124                 ierr = PetscTableFind(cmap_i,rbuf3_i[ct2]+1,&tcol);CHKERRQ(ierr);
1125 #else
1126                 tcol = cmap_i[rbuf3_i[ct2]];
1127 #endif
1128                 if (tcol) lens_i[row]++;
1129               } else { /* allcolumns */
1130                 lens_i[row]++; /* lens_i[row] += max2 ? */
1131               }
1132             }
1133           }
1134         }
1135       }
1136     }
1137     ierr = PetscFree(r_waits3);CHKERRQ(ierr);
1138     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits3,s_status3);CHKERRQ(ierr);}
1139     ierr = PetscFree2(r_status3,s_status3);CHKERRQ(ierr);
1140     ierr = PetscFree(s_waits3);CHKERRQ(ierr);
1141 
1142     /* Create the submatrices */
1143     for (i=0; i<ismax; i++) {
1144       PetscInt bs_tmp;
1145       if (ijonly) bs_tmp = 1;
1146       else        bs_tmp = bs;
1147 
1148       ierr = MatCreate(PETSC_COMM_SELF,submats+i);CHKERRQ(ierr);
1149       ierr = MatSetSizes(submats[i],nrow[i]*bs_tmp,ncol[i]*bs_tmp,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1150 
1151       ierr = MatSetType(submats[i],((PetscObject)A)->type_name);CHKERRQ(ierr);
1152       ierr = MatSeqBAIJSetPreallocation(submats[i],bs_tmp,0,lens[i]);CHKERRQ(ierr);
1153       ierr = MatSeqSBAIJSetPreallocation(submats[i],bs_tmp,0,lens[i]);CHKERRQ(ierr); /* this subroutine is used by SBAIJ routines */
1154 
1155       /* create struct Mat_SubMat and attached it to submat */
1156       ierr = PetscNew(&smat_i);CHKERRQ(ierr);
1157       subc = (Mat_SeqBAIJ*)submats[i]->data;
1158       subc->submatis1 = smat_i;
1159       smats[i]        = smat_i;
1160 
1161       smat_i->destroy          = submats[i]->ops->destroy;
1162       submats[i]->ops->destroy = MatDestroy_MPIBAIJ_MatGetSubmatrices;
1163       submats[i]->factortype   = C->factortype;
1164 
1165       smat_i->id          = i;
1166       smat_i->nrqs        = nrqs;
1167       smat_i->nrqr        = nrqr;
1168       smat_i->rbuf1       = rbuf1;
1169       smat_i->rbuf2       = rbuf2;
1170       smat_i->rbuf3       = rbuf3;
1171       smat_i->sbuf2       = sbuf2;
1172       smat_i->req_source2 = req_source2;
1173 
1174       smat_i->sbuf1       = sbuf1;
1175       smat_i->ptr         = ptr;
1176       smat_i->tmp         = tmp;
1177       smat_i->ctr         = ctr;
1178 
1179       smat_i->pa           = pa;
1180       smat_i->req_size     = req_size;
1181       smat_i->req_source1  = req_source1;
1182 
1183       smat_i->allcolumns  = allcolumns[i];
1184       smat_i->allrows     = allrows[i];
1185       smat_i->singleis    = PETSC_FALSE;
1186       smat_i->row2proc    = row2proc[i];
1187       smat_i->rmap        = rmap[i];
1188       smat_i->cmap        = cmap[i];
1189     }
1190 
1191     if (ismax) {ierr = PetscFree(lens[0]);CHKERRQ(ierr);}
1192     ierr = PetscFree(lens);CHKERRQ(ierr);
1193     ierr = PetscFree(sbuf_aj[0]);CHKERRQ(ierr);
1194     ierr = PetscFree(sbuf_aj);CHKERRQ(ierr);
1195 
1196   } /* endof scall == MAT_INITIAL_MATRIX */
1197 
1198   /* Post recv matrix values */
1199   ierr = PetscObjectGetNewTag((PetscObject)C,&tag4);CHKERRQ(ierr);
1200   ierr = PetscMalloc1(nrqs+1,&rbuf4);CHKERRQ(ierr);
1201   ierr = PetscMalloc1(nrqs+1,&r_waits4);CHKERRQ(ierr);
1202   ierr = PetscMalloc1(nrqs+1,&r_status4);CHKERRQ(ierr);
1203   ierr = PetscMalloc1(nrqr+1,&s_status4);CHKERRQ(ierr);
1204   for (i=0; i<nrqs; ++i) {
1205     ierr = PetscMalloc1(rbuf2[i][0]*bs2,&rbuf4[i]);CHKERRQ(ierr);
1206     ierr = MPI_Irecv(rbuf4[i],rbuf2[i][0]*bs2,MPIU_SCALAR,req_source2[i],tag4,comm,r_waits4+i);CHKERRQ(ierr);
1207   }
1208 
1209   /* Allocate sending buffers for a->a, and send them off */
1210   ierr = PetscMalloc1(nrqr+1,&sbuf_aa);CHKERRQ(ierr);
1211   for (i=0,j=0; i<nrqr; i++) j += req_size[i];
1212 
1213   ierr = PetscMalloc1((j+1)*bs2,&sbuf_aa[0]);CHKERRQ(ierr);
1214   for (i=1; i<nrqr; i++) sbuf_aa[i] = sbuf_aa[i-1] + req_size[i-1]*bs2;
1215 
1216   ierr = PetscMalloc1(nrqr+1,&s_waits4);CHKERRQ(ierr);
1217 
1218   for (i=0; i<nrqr; i++) {
1219     rbuf1_i   = rbuf1[i];
1220     sbuf_aa_i = sbuf_aa[i];
1221     ct1       = 2*rbuf1_i[0]+1;
1222     ct2       = 0;
1223     for (j=1,max1=rbuf1_i[0]; j<=max1; j++) {
1224       kmax = rbuf1_i[2*j];
1225       for (k=0; k<kmax; k++,ct1++) {
1226         row    = rbuf1_i[ct1] - rstart;
1227         nzA    = a_i[row+1] - a_i[row];
1228         nzB    = b_i[row+1] - b_i[row];
1229         ncols  = nzA + nzB;
1230         cworkB = b_j + b_i[row];
1231         vworkA = a_a + a_i[row]*bs2;
1232         vworkB = b_a + b_i[row]*bs2;
1233 
1234         /* load the column values for this row into vals*/
1235         vals = sbuf_aa_i+ct2*bs2;
1236         for (l=0; l<nzB; l++) {
1237           if ((bmap[cworkB[l]]) < cstart) {
1238             ierr = PetscMemcpy(vals+l*bs2,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1239           } else break;
1240         }
1241         imark = l;
1242         for (l=0; l<nzA; l++) {
1243           ierr = PetscMemcpy(vals+(imark+l)*bs2,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);//error in proc[1]???
1244         }
1245         for (l=imark; l<nzB; l++) {
1246           ierr = PetscMemcpy(vals+(nzA+l)*bs2,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1247         }
1248 
1249         ct2 += ncols;
1250       }
1251     }
1252     ierr = MPI_Isend(sbuf_aa_i,req_size[i]*bs2,MPIU_SCALAR,req_source1[i],tag4,comm,s_waits4+i);CHKERRQ(ierr);
1253   }
1254 
1255   if (!ismax) {
1256     ierr = PetscFree(rbuf1[0]);CHKERRQ(ierr);
1257     ierr = PetscFree(rbuf1);CHKERRQ(ierr);
1258   }
1259 
1260   /* Assemble the matrices */
1261   /* First assemble the local rows */
1262   for (i=0; i<ismax; i++) {
1263     row2proc_i = row2proc[i];
1264     subc      = (Mat_SeqBAIJ*)submats[i]->data;
1265     imat_ilen = subc->ilen;
1266     imat_j    = subc->j;
1267     imat_i    = subc->i;
1268     imat_a    = subc->a;
1269 
1270     if (!allcolumns[i]) cmap_i = cmap[i];
1271     rmap_i = rmap[i];
1272     irow_i = irow[i];
1273     jmax   = nrow[i];
1274     for (j=0; j<jmax; j++) {
1275       if (allrows[i]) row = j;
1276       else row  = irow_i[j];
1277       proc = row2proc_i[j];
1278 
1279       if (proc == rank) {
1280 
1281         row    = row - rstart;
1282         nzA    = a_i[row+1] - a_i[row];
1283         nzB    = b_i[row+1] - b_i[row];
1284         cworkA = a_j + a_i[row];
1285         cworkB = b_j + b_i[row];
1286         if (!ijonly) {
1287           vworkA = a_a + a_i[row]*bs2;
1288           vworkB = b_a + b_i[row]*bs2;
1289         }
1290 #if defined(PETSC_USE_CTABLE)
1291         PetscInt Crow = row+rstart;
1292         ierr = PetscTableFind(rmap_i,row+rstart+1,&row);CHKERRQ(ierr);
1293         row--;
1294         if (allrows[i] && Crow != row) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Crow %d != row %d",Crow,row);
1295 
1296         if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
1297 #else
1298         row = rmap_i[row + rstart];
1299 #endif
1300         mat_i = imat_i[row];
1301         if (!ijonly) mat_a = imat_a + mat_i*bs2;
1302         mat_j    = imat_j + mat_i;
1303         ilen = imat_ilen[row];
1304 
1305         /* load the column indices for this row into cols*/
1306         if (!allcolumns[i]) {
1307           for (l=0; l<nzB; l++) {
1308             if ((ctmp = bmap[cworkB[l]]) < cstart) {
1309 #if defined(PETSC_USE_CTABLE)
1310               ierr = PetscTableFind(cmap_i,ctmp+1,&tcol);CHKERRQ(ierr);
1311               if (tcol) {
1312 #else
1313               if ((tcol = cmap_i[ctmp])) {
1314 #endif
1315                 *mat_j++ = tcol - 1;
1316                 ierr     = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1317                 mat_a   += bs2;
1318                 ilen++;
1319               }
1320             } else break;
1321           }
1322           imark = l;
1323           for (l=0; l<nzA; l++) {
1324 #if defined(PETSC_USE_CTABLE)
1325             ierr = PetscTableFind(cmap_i,cstart+cworkA[l]+1,&tcol);CHKERRQ(ierr);
1326             if (tcol) {
1327 #else
1328             if ((tcol = cmap_i[cstart + cworkA[l]])) {
1329 #endif
1330               *mat_j++ = tcol - 1;
1331               if (!ijonly) {
1332                 ierr   = PetscMemcpy(mat_a,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1333                 mat_a += bs2;
1334               }
1335               ilen++;
1336             }
1337           }
1338           for (l=imark; l<nzB; l++) {
1339 #if defined(PETSC_USE_CTABLE)
1340             ierr = PetscTableFind(cmap_i,bmap[cworkB[l]]+1,&tcol);CHKERRQ(ierr);
1341             if (tcol) {
1342 #else
1343             if ((tcol = cmap_i[bmap[cworkB[l]]])) {
1344 #endif
1345               *mat_j++ = tcol - 1;
1346               if (!ijonly) {
1347                 ierr   = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1348                 mat_a += bs2;
1349               }
1350               ilen++;
1351             }
1352           }
1353         } else { /* allcolumns */
1354           for (l=0; l<nzB; l++) {
1355             if ((ctmp = bmap[cworkB[l]]) < cstart) {
1356               *mat_j++ = ctmp;
1357               ierr     = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1358               mat_a   += bs2;
1359               ilen++;
1360             } else break;
1361           }
1362           imark = l;
1363           for (l=0; l<nzA; l++) {
1364             *mat_j++ = cstart+cworkA[l];
1365             if (!ijonly) {
1366               ierr   = PetscMemcpy(mat_a,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1367               mat_a += bs2;
1368             }
1369             ilen++;
1370           }
1371           for (l=imark; l<nzB; l++) {
1372             *mat_j++ = bmap[cworkB[l]];
1373             if (!ijonly) {
1374               ierr   = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1375               mat_a += bs2;
1376             }
1377             ilen++;
1378           }
1379         }
1380         imat_ilen[row] = ilen;
1381       }
1382     }
1383   }
1384 
1385   /* Now assemble the off proc rows */
1386   if (!ijonly) {
1387     ierr = MPI_Waitall(nrqs,r_waits4,r_status4);CHKERRQ(ierr);
1388   }
1389   for (tmp2=0; tmp2<nrqs; tmp2++) {
1390     sbuf1_i = sbuf1[pa[tmp2]];
1391     jmax    = sbuf1_i[0];
1392     ct1     = 2*jmax + 1;
1393     ct2     = 0;
1394     rbuf2_i = rbuf2[tmp2];
1395     rbuf3_i = rbuf3[tmp2];
1396     if (!ijonly) rbuf4_i = rbuf4[tmp2];
1397     for (j=1; j<=jmax; j++) {
1398       is_no     = sbuf1_i[2*j-1];
1399       rmap_i    = rmap[is_no];
1400       if (!allcolumns[is_no]) cmap_i = cmap[is_no];
1401       subc      = (Mat_SeqBAIJ*)submats[is_no]->data;
1402       imat_ilen = subc->ilen;
1403       imat_j    = subc->j;
1404       imat_i    = subc->i;
1405       if (!ijonly) imat_a    = subc->a;
1406       max1      = sbuf1_i[2*j];
1407       for (k=0; k<max1; k++,ct1++) { /* for each recved block row */
1408         row = sbuf1_i[ct1];
1409 #if defined(PETSC_USE_CTABLE)
1410         PetscInt Crow = row;
1411         ierr = PetscTableFind(rmap_i,row+1,&row);CHKERRQ(ierr);
1412         row--;
1413         if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
1414 
1415         if (allrows[is_no] && Crow != row) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Crow %d != row %d",Crow,row);
1416 #else
1417         row = rmap_i[row];
1418 #endif
1419         ilen  = imat_ilen[row];
1420         mat_i = imat_i[row];
1421         if (!ijonly) mat_a = imat_a + mat_i*bs2;
1422         mat_j = imat_j + mat_i;
1423         max2  = rbuf2_i[ct1];
1424         if (!allcolumns[is_no]) {
1425           for (l=0; l<max2; l++,ct2++) {
1426 #if defined(PETSC_USE_CTABLE)
1427             ierr = PetscTableFind(cmap_i,rbuf3_i[ct2]+1,&tcol);CHKERRQ(ierr);
1428 #else
1429             tcol = cmap_i[rbuf3_i[ct2]];
1430 #endif
1431             if (tcol) {
1432               *mat_j++ = tcol - 1;
1433               if (!ijonly) {
1434                 ierr   = PetscMemcpy(mat_a,rbuf4_i+ct2*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1435                 mat_a += bs2;
1436               }
1437               //*mat_a++ = rbuf4_i[ct2];
1438               ilen++;
1439             }
1440           }
1441         } else { /* allcolumns */
1442           for (l=0; l<max2; l++,ct2++) {
1443             *mat_j++ = rbuf3_i[ct2]; /* same global column index of C */
1444             //*mat_a++ = rbuf4_i[ct2];
1445             if (!ijonly) {
1446               ierr   = PetscMemcpy(mat_a,rbuf4_i+ct2*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1447               mat_a += bs2;
1448             }
1449             ilen++;
1450           }
1451         }
1452         imat_ilen[row] = ilen;
1453       }
1454     }
1455   }
1456 
1457   if (!iscsorted) { /* sort column indices of the rows */
1458     MatScalar *work;
1459 
1460     ierr = PetscMalloc1(bs2,&work);CHKERRQ(ierr);
1461     for (i=0; i<ismax; i++) {
1462       subc      = (Mat_SeqBAIJ*)submats[i]->data;
1463       imat_ilen = subc->ilen;
1464       imat_j    = subc->j;
1465       imat_i    = subc->i;
1466       if (ijonly) imat_a = subc->a;
1467 
1468       if (allcolumns[i]) continue;
1469       jmax = nrow[i];
1470       for (j=0; j<jmax; j++) {
1471         ilen  = imat_ilen[j];
1472         mat_i = imat_i[j];
1473         mat_j = imat_j + mat_i;
1474         if (ijonly) {
1475           ierr = PetscSortInt(ilen,mat_j);CHKERRQ(ierr);
1476         } else {
1477           mat_a = imat_a + mat_i;
1478           ierr  = PetscSortIntWithDataArray(ilen,mat_j,mat_a,bs2*sizeof(MatScalar),work);CHKERRQ(ierr);
1479         }
1480       }
1481     }
1482     ierr = PetscFree(work);CHKERRQ(ierr);
1483   }
1484 
1485   ierr = PetscFree(r_status4);CHKERRQ(ierr);
1486   ierr = PetscFree(r_waits4);CHKERRQ(ierr);
1487   if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits4,s_status4);CHKERRQ(ierr);}
1488   ierr = PetscFree(s_waits4);CHKERRQ(ierr);
1489   ierr = PetscFree(s_status4);CHKERRQ(ierr);
1490 
1491   /* Restore the indices */
1492   for (i=0; i<ismax; i++) {
1493     if (!allrows[i]) {
1494       ierr = ISRestoreIndices(isrow[i],irow+i);CHKERRQ(ierr);
1495     }
1496     if (!allcolumns[i]) {
1497       ierr = ISRestoreIndices(iscol[i],icol+i);CHKERRQ(ierr);
1498     }
1499   }
1500 
1501   for (i=0; i<ismax; i++) {
1502     ierr = MatAssemblyBegin(submats[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1503     ierr = MatAssemblyEnd(submats[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1504   }
1505 
1506   /* Destroy allocated memory */
1507   if (!ismax) {
1508     ierr = PetscFree(pa);CHKERRQ(ierr);
1509 
1510     ierr = PetscFree4(sbuf1,ptr,tmp,ctr);CHKERRQ(ierr);
1511     for (i=0; i<nrqr; ++i) {
1512       ierr = PetscFree(sbuf2[i]);CHKERRQ(ierr);
1513     }
1514     for (i=0; i<nrqs; ++i) {
1515       ierr = PetscFree(rbuf3[i]);CHKERRQ(ierr);
1516     }
1517 
1518     ierr = PetscFree3(sbuf2,req_size,req_source1);CHKERRQ(ierr);
1519     ierr = PetscFree3(req_source2,rbuf2,rbuf3);CHKERRQ(ierr);
1520   }
1521 
1522   ierr = PetscFree(sbuf_aa[0]);CHKERRQ(ierr);
1523   ierr = PetscFree(sbuf_aa);CHKERRQ(ierr);
1524   ierr = PetscFree5(irow,icol,nrow,ncol,issorted);CHKERRQ(ierr);
1525 
1526   for (i=0; i<nrqs; ++i) {
1527     ierr = PetscFree(rbuf4[i]);CHKERRQ(ierr);
1528   }
1529   ierr = PetscFree(rbuf4);CHKERRQ(ierr);
1530 
1531   ierr = PetscFree6(smats,row2proc,cmap,rmap,allcolumns,allrows);CHKERRQ(ierr);
1532   PetscFunctionReturn(0);
1533 }
1534 
1535 //------------ end of new implementation -------------
1536 
1537 PetscErrorCode MatGetSubMatrices_MPIBAIJ_local_old(Mat C,PetscInt ismax,const IS isrow[],const IS iscol[],MatReuse scall,PetscBool *allrows,PetscBool *allcolumns,Mat *submats)
1538 {
1539   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
1540   Mat            A  = c->A;
1541   Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)c->B->data,*mat;
1542   const PetscInt **irow,**icol,*irow_i;
1543   PetscInt       *nrow,*ncol,*w3,*w4,start;
1544   PetscErrorCode ierr;
1545   PetscMPIInt    size,tag0,tag1,tag2,tag3,*w1,*w2,nrqr,idex,end,proc;
1546   PetscInt       **sbuf1,**sbuf2,rank,i,j,k,l,ct1,ct2,**rbuf1,row;
1547   PetscInt       nrqs,msz,**ptr,*req_size,*ctr,*pa,*tmp,tcol;
1548   PetscInt       **rbuf3,*req_source,**sbuf_aj,**rbuf2,max1,max2;
1549   PetscInt       **lens,is_no,ncols,*cols,mat_i,*mat_j,tmp2,jmax;
1550   PetscInt       ctr_j,*sbuf1_j,*sbuf_aj_i,*rbuf1_i,kmax,*lens_i;
1551   PetscInt       bs     =C->rmap->bs,bs2=c->bs2,*a_j=a->j,*b_j=b->j,*cworkA,*cworkB;
1552   PetscInt       cstart = c->cstartbs,nzA,nzB,*a_i=a->i,*b_i=b->i,imark;
1553   PetscInt       *bmap  = c->garray,ctmp,rstart=c->rstartbs;
1554   MPI_Request    *s_waits1,*r_waits1,*s_waits2,*r_waits2,*r_waits3,*s_waits3;
1555   MPI_Status     *r_status1,*r_status2,*s_status1,*s_status3,*s_status2,*r_status3;
1556   MPI_Comm       comm;
1557   PetscBool      flag;
1558   PetscMPIInt    *onodes1,*olengths1;
1559   PetscBool      ijonly=c->ijonly; /* private flag indicates only matrix data structures are requested */
1560 
1561   /* variables below are used for the matrix numerical values - case of !ijonly */
1562   MPI_Request *r_waits4,*s_waits4;
1563   MPI_Status  *r_status4,*s_status4;
1564   MatScalar   **rbuf4,**sbuf_aa,*vals,*mat_a = NULL,*sbuf_aa_i,*vworkA = NULL,*vworkB = NULL;
1565   MatScalar   *a_a=a->a,*b_a=b->a;
1566 
1567 #if defined(PETSC_USE_CTABLE)
1568   PetscInt   tt;
1569   PetscTable *rmap,*cmap,rmap_i,cmap_i=NULL;
1570 #else
1571   PetscInt **cmap,*cmap_i=NULL,*rtable,*rmap_i,**rmap, Mbs = c->Mbs;
1572 #endif
1573 
1574   PetscFunctionBegin;
1575   ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
1576   tag0 = ((PetscObject)C)->tag;
1577   size = c->size;
1578   rank = c->rank;
1579   //if (!rank) printf("MatGetSubMatrices_MPIBAIJ scall %d, bs %d\n",scall,bs);
1580 
1581   /* Get some new tags to keep the communication clean */
1582   ierr = PetscObjectGetNewTag((PetscObject)C,&tag1);CHKERRQ(ierr);
1583   ierr = PetscObjectGetNewTag((PetscObject)C,&tag2);CHKERRQ(ierr);
1584   ierr = PetscObjectGetNewTag((PetscObject)C,&tag3);CHKERRQ(ierr);
1585 
1586 #if defined(PETSC_USE_CTABLE)
1587   ierr = PetscMalloc4(ismax,&irow,ismax,&icol,ismax,&nrow,ismax,&ncol);CHKERRQ(ierr);
1588 #else
1589   ierr = PetscMalloc5(ismax,&irow,ismax,&icol,ismax,&nrow,ismax,&ncol,Mbs+1,&rtable);CHKERRQ(ierr);
1590   /* Create hash table for the mapping :row -> proc*/
1591   for (i=0,j=0; i<size; i++) {
1592     jmax = C->rmap->range[i+1]/bs;
1593     for (; j<jmax; j++) rtable[j] = i;
1594   }
1595 #endif
1596 
1597   for (i=0; i<ismax; i++) {
1598     if (allrows[i]) {
1599       irow[i] = NULL;
1600       nrow[i] = C->rmap->N/bs;
1601     } else {
1602       ierr = ISGetIndices(isrow[i],&irow[i]);CHKERRQ(ierr);
1603       ierr = ISGetLocalSize(isrow[i],&nrow[i]);CHKERRQ(ierr);
1604     }
1605 
1606     if (allcolumns[i]) {
1607       icol[i] = NULL;
1608       ncol[i] = C->cmap->N/bs;
1609     } else {
1610       ierr = ISGetIndices(iscol[i],&icol[i]);CHKERRQ(ierr);
1611       ierr = ISGetLocalSize(iscol[i],&ncol[i]);CHKERRQ(ierr);
1612     }
1613   }
1614 
1615   /* evaluate communication - mesg to who,length of mesg,and buffer space
1616      required. Based on this, buffers are allocated, and data copied into them*/
1617   ierr = PetscCalloc4(size,&w1,size,&w2,size,&w3,size,&w4);CHKERRQ(ierr);
1618   for (i=0; i<ismax; i++) {
1619     ierr   = PetscMemzero(w4,size*sizeof(PetscInt));CHKERRQ(ierr); /* initialise work vector*/
1620     jmax   = nrow[i];
1621     irow_i = irow[i];
1622     for (j=0; j<jmax; j++) {
1623       if (allrows[i]) row = j;
1624       else row = irow_i[j];
1625 
1626 #if defined(PETSC_USE_CTABLE)
1627       ierr = PetscGetProc(row,size,c->rangebs,&proc);CHKERRQ(ierr);
1628 #else
1629       proc = rtable[row];
1630 #endif
1631       w4[proc]++;
1632     }
1633     for (j=0; j<size; j++) {
1634       if (w4[j]) { w1[j] += w4[j];  w3[j]++;}
1635     }
1636   }
1637 
1638   nrqs     = 0;              /* no of outgoing messages */
1639   msz      = 0;              /* total mesg length for all proc */
1640   w1[rank] = 0;              /* no mesg sent to intself */
1641   w3[rank] = 0;
1642   for (i=0; i<size; i++) {
1643     if (w1[i])  { w2[i] = 1; nrqs++;} /* there exists a message to proc i */
1644   }
1645   ierr = PetscMalloc1(nrqs+1,&pa);CHKERRQ(ierr); /*(proc -array)*/
1646   for (i=0,j=0; i<size; i++) {
1647     if (w1[i]) { pa[j] = i; j++; }
1648   }
1649 
1650   /* Each message would have a header = 1 + 2*(no of IS) + data */
1651   for (i=0; i<nrqs; i++) {
1652     j     = pa[i];
1653     w1[j] += w2[j] + 2* w3[j];
1654     msz   += w1[j];
1655   }
1656 
1657   /* Determine the number of messages to expect, their lengths, and their from-ids */
1658   ierr = PetscGatherNumberOfMessages(comm,w2,w1,&nrqr);CHKERRQ(ierr);
1659   ierr = PetscGatherMessageLengths(comm,nrqs,nrqr,w1,&onodes1,&olengths1);CHKERRQ(ierr);
1660 
1661   /* Now post the Irecvs corresponding to these messages */
1662   ierr = PetscPostIrecvInt(comm,tag0,nrqr,onodes1,olengths1,&rbuf1,&r_waits1);CHKERRQ(ierr);
1663 
1664   ierr = PetscFree(onodes1);CHKERRQ(ierr);
1665   ierr = PetscFree(olengths1);CHKERRQ(ierr);
1666 
1667   /* Allocate Memory for outgoing messages */
1668   ierr = PetscMalloc4(size,&sbuf1,size,&ptr,2*msz,&tmp,size,&ctr);CHKERRQ(ierr);
1669   ierr = PetscMemzero(sbuf1,size*sizeof(PetscInt*));CHKERRQ(ierr);
1670   ierr = PetscMemzero(ptr,size*sizeof(PetscInt*));CHKERRQ(ierr);
1671   {
1672     PetscInt *iptr = tmp,ict = 0;
1673     for (i=0; i<nrqs; i++) {
1674       j        = pa[i];
1675       iptr    += ict;
1676       sbuf1[j] = iptr;
1677       ict      = w1[j];
1678     }
1679   }
1680 
1681   /* Form the outgoing messages */
1682   /* Initialise the header space */
1683   for (i=0; i<nrqs; i++) {
1684     j           = pa[i];
1685     sbuf1[j][0] = 0;
1686     ierr        = PetscMemzero(sbuf1[j]+1,2*w3[j]*sizeof(PetscInt));CHKERRQ(ierr);
1687     ptr[j]      = sbuf1[j] + 2*w3[j] + 1;
1688   }
1689 
1690   /* Parse the isrow and copy data into outbuf */
1691   for (i=0; i<ismax; i++) {
1692     ierr   = PetscMemzero(ctr,size*sizeof(PetscInt));CHKERRQ(ierr);
1693     irow_i = irow[i];
1694     jmax   = nrow[i];
1695     for (j=0; j<jmax; j++) {  /* parse the indices of each IS */
1696       if (allrows[i]) row = j;
1697       else row = irow_i[j];
1698 
1699 #if defined(PETSC_USE_CTABLE)
1700       ierr = PetscGetProc(row,size,c->rangebs,&proc);CHKERRQ(ierr);
1701 #else
1702       proc = rtable[row];
1703 #endif
1704       if (proc != rank) { /* copy to the outgoing buf*/
1705         ctr[proc]++;
1706         *ptr[proc] = row;
1707         ptr[proc]++;
1708       }
1709     }
1710     /* Update the headers for the current IS */
1711     for (j=0; j<size; j++) { /* Can Optimise this loop too */
1712       if ((ctr_j = ctr[j])) {
1713         sbuf1_j        = sbuf1[j];
1714         k              = ++sbuf1_j[0];
1715         sbuf1_j[2*k]   = ctr_j;
1716         sbuf1_j[2*k-1] = i;
1717       }
1718     }
1719   }
1720 
1721   /*  Now  post the sends */
1722   ierr = PetscMalloc1(nrqs+1,&s_waits1);CHKERRQ(ierr);
1723   for (i=0; i<nrqs; ++i) {
1724     j    = pa[i];
1725     ierr = MPI_Isend(sbuf1[j],w1[j],MPIU_INT,j,tag0,comm,s_waits1+i);CHKERRQ(ierr);
1726   }
1727 
1728   /* Post Receives to capture the buffer size */
1729   ierr     = PetscMalloc1(nrqs+1,&r_waits2);CHKERRQ(ierr);
1730   ierr     = PetscMalloc1(nrqs+1,&rbuf2);CHKERRQ(ierr);
1731   rbuf2[0] = tmp + msz;
1732   for (i=1; i<nrqs; ++i) {
1733     rbuf2[i] = rbuf2[i-1]+w1[pa[i-1]];
1734   }
1735   for (i=0; i<nrqs; ++i) {
1736     j        = pa[i];
1737     ierr     = MPI_Irecv(rbuf2[i],w1[j],MPIU_INT,j,tag1,comm,r_waits2+i);CHKERRQ(ierr);
1738   }
1739 
1740   /* Send to other procs the buf size they should allocate */
1741 
1742   /* Receive messages*/
1743   ierr = PetscMalloc1(nrqr+1,&s_waits2);CHKERRQ(ierr);
1744   ierr = PetscMalloc1(nrqr+1,&r_status1);CHKERRQ(ierr);
1745   ierr = PetscMalloc3(nrqr+1,&sbuf2,nrqr,&req_size,nrqr,&req_source);CHKERRQ(ierr);
1746   {
1747     Mat_SeqBAIJ *sA  = (Mat_SeqBAIJ*)c->A->data,*sB = (Mat_SeqBAIJ*)c->B->data;
1748     PetscInt    *sAi = sA->i,*sBi = sB->i,id,*sbuf2_i;
1749 
1750     for (i=0; i<nrqr; ++i) {
1751       ierr = MPI_Waitany(nrqr,r_waits1,&idex,r_status1+i);CHKERRQ(ierr);
1752 
1753       req_size[idex] = 0;
1754       rbuf1_i        = rbuf1[idex];
1755       start          = 2*rbuf1_i[0] + 1;
1756       ierr           = MPI_Get_count(r_status1+i,MPIU_INT,&end);CHKERRQ(ierr);
1757       ierr           = PetscMalloc1(end,&sbuf2[idex]);CHKERRQ(ierr);
1758       sbuf2_i        = sbuf2[idex];
1759       for (j=start; j<end; j++) {
1760         id              = rbuf1_i[j] - rstart;
1761         ncols           = sAi[id+1] - sAi[id] + sBi[id+1] - sBi[id];
1762         sbuf2_i[j]      = ncols;
1763         req_size[idex] += ncols;
1764       }
1765       req_source[idex] = r_status1[i].MPI_SOURCE;
1766       /* form the header */
1767       sbuf2_i[0] = req_size[idex];
1768       for (j=1; j<start; j++) sbuf2_i[j] = rbuf1_i[j];
1769       ierr = MPI_Isend(sbuf2_i,end,MPIU_INT,req_source[idex],tag1,comm,s_waits2+i);CHKERRQ(ierr);
1770     }
1771   }
1772   ierr = PetscFree(r_status1);CHKERRQ(ierr);
1773   ierr = PetscFree(r_waits1);CHKERRQ(ierr);
1774 
1775   /*  recv buffer sizes */
1776   /* Receive messages*/
1777   ierr = PetscMalloc1(nrqs+1,&rbuf3);CHKERRQ(ierr);
1778   ierr = PetscMalloc1(nrqs+1,&r_waits3);CHKERRQ(ierr);
1779   ierr = PetscMalloc1(nrqs+1,&r_status2);CHKERRQ(ierr);
1780   if (!ijonly) {
1781     ierr = PetscMalloc1(nrqs+1,&rbuf4);CHKERRQ(ierr);
1782     ierr = PetscMalloc1(nrqs+1,&r_waits4);CHKERRQ(ierr);
1783   }
1784 
1785   for (i=0; i<nrqs; ++i) {
1786     ierr = MPI_Waitany(nrqs,r_waits2,&idex,r_status2+i);CHKERRQ(ierr);
1787     ierr = PetscMalloc1(rbuf2[idex][0],&rbuf3[idex]);CHKERRQ(ierr);
1788     ierr = MPI_Irecv(rbuf3[idex],rbuf2[idex][0],MPIU_INT,r_status2[i].MPI_SOURCE,tag2,comm,r_waits3+idex);CHKERRQ(ierr);
1789     if (!ijonly) {
1790       ierr = PetscMalloc1(rbuf2[idex][0]*bs2,&rbuf4[idex]);CHKERRQ(ierr);
1791       ierr = MPI_Irecv(rbuf4[idex],rbuf2[idex][0]*bs2,MPIU_MATSCALAR,r_status2[i].MPI_SOURCE,tag3,comm,r_waits4+idex);CHKERRQ(ierr);
1792     }
1793   }
1794   ierr = PetscFree(r_status2);CHKERRQ(ierr);
1795   ierr = PetscFree(r_waits2);CHKERRQ(ierr);
1796 
1797   /* Wait on sends1 and sends2 */
1798   ierr = PetscMalloc1(nrqs+1,&s_status1);CHKERRQ(ierr);
1799   ierr = PetscMalloc1(nrqr+1,&s_status2);CHKERRQ(ierr);
1800 
1801   if (nrqs) {ierr = MPI_Waitall(nrqs,s_waits1,s_status1);CHKERRQ(ierr);}
1802   if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits2,s_status2);CHKERRQ(ierr);}
1803   ierr = PetscFree(s_status1);CHKERRQ(ierr);
1804   ierr = PetscFree(s_status2);CHKERRQ(ierr);
1805   ierr = PetscFree(s_waits1);CHKERRQ(ierr);
1806   ierr = PetscFree(s_waits2);CHKERRQ(ierr);
1807 
1808   /* Now allocate buffers for a->j, and send them off */
1809   ierr = PetscMalloc1(nrqr+1,&sbuf_aj);CHKERRQ(ierr);
1810   for (i=0,j=0; i<nrqr; i++) j += req_size[i];
1811   ierr = PetscMalloc1(j+1,&sbuf_aj[0]);CHKERRQ(ierr);
1812   for (i=1; i<nrqr; i++) sbuf_aj[i] = sbuf_aj[i-1] + req_size[i-1];
1813 
1814   ierr = PetscMalloc1(nrqr+1,&s_waits3);CHKERRQ(ierr);
1815   {
1816     for (i=0; i<nrqr; i++) {
1817       rbuf1_i   = rbuf1[i];
1818       sbuf_aj_i = sbuf_aj[i];
1819       ct1       = 2*rbuf1_i[0] + 1;
1820       ct2       = 0;
1821       for (j=1,max1=rbuf1_i[0]; j<=max1; j++) {
1822         kmax = rbuf1[i][2*j];
1823         for (k=0; k<kmax; k++,ct1++) {
1824           row    = rbuf1_i[ct1] - rstart;
1825           nzA    = a_i[row+1] - a_i[row];
1826           nzB    = b_i[row+1] - b_i[row];
1827           ncols  = nzA + nzB;
1828           cworkA = a_j + a_i[row];
1829           cworkB = b_j + b_i[row];
1830 
1831           /* load the column indices for this row into cols*/
1832           cols = sbuf_aj_i + ct2;
1833           for (l=0; l<nzB; l++) {
1834             if ((ctmp = bmap[cworkB[l]]) < cstart) cols[l] = ctmp;
1835             else break;
1836           }
1837           imark = l;
1838           for (l=0; l<nzA; l++)   cols[imark+l] = cstart + cworkA[l];
1839           for (l=imark; l<nzB; l++) cols[nzA+l] = bmap[cworkB[l]];
1840           ct2 += ncols;
1841         }
1842       }
1843       ierr = MPI_Isend(sbuf_aj_i,req_size[i],MPIU_INT,req_source[i],tag2,comm,s_waits3+i);CHKERRQ(ierr);
1844     }
1845   }
1846   ierr = PetscMalloc1(nrqs+1,&r_status3);CHKERRQ(ierr);
1847   ierr = PetscMalloc1(nrqr+1,&s_status3);CHKERRQ(ierr);
1848 
1849   /* Allocate buffers for a->a, and send them off */
1850   if (!ijonly) {
1851     ierr = PetscMalloc1(nrqr+1,&sbuf_aa);CHKERRQ(ierr);
1852     for (i=0,j=0; i<nrqr; i++) j += req_size[i];
1853     ierr = PetscMalloc1((j+1)*bs2,&sbuf_aa[0]);CHKERRQ(ierr);
1854     for (i=1; i<nrqr; i++) sbuf_aa[i] = sbuf_aa[i-1] + req_size[i-1]*bs2;
1855 
1856     ierr = PetscMalloc1(nrqr+1,&s_waits4);CHKERRQ(ierr);
1857     {
1858       for (i=0; i<nrqr; i++) {
1859         rbuf1_i   = rbuf1[i];
1860         sbuf_aa_i = sbuf_aa[i];
1861         ct1       = 2*rbuf1_i[0]+1;
1862         ct2       = 0;
1863         for (j=1,max1=rbuf1_i[0]; j<=max1; j++) {
1864           kmax = rbuf1_i[2*j];
1865           for (k=0; k<kmax; k++,ct1++) {
1866             row    = rbuf1_i[ct1] - rstart;
1867             nzA    = a_i[row+1] - a_i[row];
1868             nzB    = b_i[row+1] - b_i[row];
1869             ncols  = nzA + nzB;
1870             cworkB = b_j + b_i[row];
1871             vworkA = a_a + a_i[row]*bs2;
1872             vworkB = b_a + b_i[row]*bs2;
1873 
1874             /* load the column values for this row into vals*/
1875             vals = sbuf_aa_i+ct2*bs2;
1876             for (l=0; l<nzB; l++) {
1877               if ((bmap[cworkB[l]]) < cstart) {
1878                 ierr = PetscMemcpy(vals+l*bs2,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1879               } else break;
1880             }
1881             imark = l;
1882             for (l=0; l<nzA; l++) {
1883               ierr = PetscMemcpy(vals+(imark+l)*bs2,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1884             }
1885             for (l=imark; l<nzB; l++) {
1886               ierr = PetscMemcpy(vals+(nzA+l)*bs2,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1887             }
1888             ct2 += ncols;
1889           }
1890         }
1891         ierr = MPI_Isend(sbuf_aa_i,req_size[i]*bs2,MPIU_MATSCALAR,req_source[i],tag3,comm,s_waits4+i);CHKERRQ(ierr);
1892       }
1893     }
1894     ierr = PetscMalloc1(nrqs+1,&r_status4);CHKERRQ(ierr);
1895     ierr = PetscMalloc1(nrqr+1,&s_status4);CHKERRQ(ierr);
1896   }
1897   ierr = PetscFree(rbuf1[0]);CHKERRQ(ierr);
1898   ierr = PetscFree(rbuf1);CHKERRQ(ierr);
1899 
1900   /* Form the matrix */
1901   /* create col map: global col of C -> local col of submatrices */
1902   {
1903     const PetscInt *icol_i;
1904 #if defined(PETSC_USE_CTABLE)
1905     ierr = PetscMalloc1(1+ismax,&cmap);CHKERRQ(ierr);
1906     for (i=0; i<ismax; i++) {
1907       if (!allcolumns[i]) {
1908         ierr   = PetscTableCreate(ncol[i]+1,c->Nbs+1,&cmap[i]);CHKERRQ(ierr);
1909         jmax   = ncol[i];
1910         icol_i = icol[i];
1911         cmap_i = cmap[i];
1912         for (j=0; j<jmax; j++) {
1913           ierr = PetscTableAdd(cmap_i,icol_i[j]+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
1914         }
1915       } else {
1916         cmap[i] = NULL;
1917       }
1918     }
1919 #else
1920     ierr = PetscMalloc1(ismax,&cmap);CHKERRQ(ierr);
1921     for (i=0; i<ismax; i++) {
1922       if (!allcolumns[i]) {
1923         ierr   = PetscCalloc1(c->Nbs,&cmap[i]);CHKERRQ(ierr);
1924         jmax   = ncol[i];
1925         icol_i = icol[i];
1926         cmap_i = cmap[i];
1927         for (j=0; j<jmax; j++) {
1928           cmap_i[icol_i[j]] = j+1;
1929         }
1930       } else { /* allcolumns[i] */
1931         cmap[i] = NULL;
1932       }
1933     }
1934 #endif
1935   }
1936 
1937   /* Create lens which is required for MatCreate... */
1938   for (i=0,j=0; i<ismax; i++) j += nrow[i];
1939   ierr    = PetscMalloc((1+ismax)*sizeof(PetscInt*)+ j*sizeof(PetscInt),&lens);CHKERRQ(ierr);
1940   lens[0] = (PetscInt*)(lens + ismax);
1941   ierr    = PetscMemzero(lens[0],j*sizeof(PetscInt));CHKERRQ(ierr);
1942   for (i=1; i<ismax; i++) lens[i] = lens[i-1] + nrow[i-1];
1943 
1944   /* Update lens from local data */
1945   for (i=0; i<ismax; i++) {
1946     jmax = nrow[i];
1947     if (!allcolumns[i]) cmap_i = cmap[i];
1948     irow_i = irow[i];
1949     lens_i = lens[i];
1950     for (j=0; j<jmax; j++) {
1951       if (allrows[i]) row = j;
1952       else row = irow_i[j];
1953 
1954 #if defined(PETSC_USE_CTABLE)
1955       ierr = PetscGetProc(row,size,c->rangebs,&proc);CHKERRQ(ierr);
1956 #else
1957       proc = rtable[row];
1958 #endif
1959       if (proc == rank) {
1960         /* Get indices from matA and then from matB */
1961         row    = row - rstart;
1962         nzA    = a_i[row+1] - a_i[row];
1963         nzB    = b_i[row+1] - b_i[row];
1964         cworkA =  a_j + a_i[row];
1965         cworkB = b_j + b_i[row];
1966         if (!allcolumns[i]) {
1967 #if defined(PETSC_USE_CTABLE)
1968           for (k=0; k<nzA; k++) {
1969             ierr = PetscTableFind(cmap_i,cstart+cworkA[k]+1,&tt);CHKERRQ(ierr);
1970             if (tt) lens_i[j]++;
1971           }
1972           for (k=0; k<nzB; k++) {
1973             ierr = PetscTableFind(cmap_i,bmap[cworkB[k]]+1,&tt);CHKERRQ(ierr);
1974             if (tt) lens_i[j]++;
1975           }
1976 
1977 #else
1978           for (k=0; k<nzA; k++) {
1979             if (cmap_i[cstart + cworkA[k]]) lens_i[j]++;
1980           }
1981           for (k=0; k<nzB; k++) {
1982             if (cmap_i[bmap[cworkB[k]]]) lens_i[j]++;
1983           }
1984 #endif
1985         } else { /* allcolumns */
1986           lens_i[j] = nzA + nzB;
1987         }
1988       }
1989     }
1990   }
1991 #if defined(PETSC_USE_CTABLE)
1992   /* Create row map*/
1993   ierr = PetscMalloc1(1+ismax,&rmap);CHKERRQ(ierr);
1994   for (i=0; i<ismax; i++) {
1995     ierr = PetscTableCreate(nrow[i]+1,c->Mbs+1,&rmap[i]);CHKERRQ(ierr);
1996   }
1997 #else
1998   /* Create row map*/
1999   ierr    = PetscMalloc((1+ismax)*sizeof(PetscInt*)+ ismax*Mbs*sizeof(PetscInt),&rmap);CHKERRQ(ierr);
2000   rmap[0] = (PetscInt*)(rmap + ismax);
2001   ierr    = PetscMemzero(rmap[0],ismax*Mbs*sizeof(PetscInt));CHKERRQ(ierr);
2002   for (i=1; i<ismax; i++) rmap[i] = rmap[i-1] + Mbs;
2003 #endif
2004   for (i=0; i<ismax; i++) {
2005     irow_i = irow[i];
2006     jmax   = nrow[i];
2007 #if defined(PETSC_USE_CTABLE)
2008     rmap_i = rmap[i];
2009     for (j=0; j<jmax; j++) {
2010       if (allrows[i]) {
2011         ierr = PetscTableAdd(rmap_i,j+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
2012       } else {
2013         ierr = PetscTableAdd(rmap_i,irow_i[j]+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
2014       }
2015     }
2016 #else
2017     rmap_i = rmap[i];
2018     for (j=0; j<jmax; j++) {
2019       if (allrows[i]) rmap_i[j] = j;
2020       else rmap_i[irow_i[j]] = j;
2021     }
2022 #endif
2023   }
2024 
2025   /* Update lens from offproc data */
2026   {
2027     PetscInt    *rbuf2_i,*rbuf3_i,*sbuf1_i;
2028     PetscMPIInt ii;
2029 
2030     for (tmp2=0; tmp2<nrqs; tmp2++) {
2031       ierr    = MPI_Waitany(nrqs,r_waits3,&ii,r_status3+tmp2);CHKERRQ(ierr);
2032       idex    = pa[ii];
2033       sbuf1_i = sbuf1[idex];
2034       jmax    = sbuf1_i[0];
2035       ct1     = 2*jmax+1;
2036       ct2     = 0;
2037       rbuf2_i = rbuf2[ii];
2038       rbuf3_i = rbuf3[ii];
2039       for (j=1; j<=jmax; j++) {
2040         is_no  = sbuf1_i[2*j-1];
2041         max1   = sbuf1_i[2*j];
2042         lens_i = lens[is_no];
2043         if (!allcolumns[is_no]) cmap_i = cmap[is_no];
2044         rmap_i = rmap[is_no];
2045         for (k=0; k<max1; k++,ct1++) {
2046 #if defined(PETSC_USE_CTABLE)
2047           ierr = PetscTableFind(rmap_i,sbuf1_i[ct1]+1,&row);CHKERRQ(ierr);
2048           row--;
2049           if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
2050 #else
2051           row = rmap_i[sbuf1_i[ct1]];  /* the val in the new matrix to be */
2052 #endif
2053           max2 = rbuf2_i[ct1];
2054           for (l=0; l<max2; l++,ct2++) {
2055             if (!allcolumns[is_no]) {
2056 #if defined(PETSC_USE_CTABLE)
2057               ierr = PetscTableFind(cmap_i,rbuf3_i[ct2]+1,&tt);CHKERRQ(ierr);
2058               if (tt) lens_i[row]++;
2059 #else
2060               if (cmap_i[rbuf3_i[ct2]]) lens_i[row]++;
2061 #endif
2062             } else { /* allcolumns */
2063               lens_i[row]++;
2064             }
2065           }
2066         }
2067       }
2068     }
2069   }
2070   ierr = PetscFree(r_status3);CHKERRQ(ierr);
2071   ierr = PetscFree(r_waits3);CHKERRQ(ierr);
2072   if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits3,s_status3);CHKERRQ(ierr);}
2073   ierr = PetscFree(s_status3);CHKERRQ(ierr);
2074   ierr = PetscFree(s_waits3);CHKERRQ(ierr);
2075 
2076   /* Create the submatrices */
2077   if (scall == MAT_REUSE_MATRIX) {
2078     if (ijonly) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_REUSE_MATRIX and ijonly is not supported yet");
2079 
2080     for (i=0; i<ismax; i++) {
2081       mat = (Mat_SeqBAIJ*)(submats[i]->data);
2082       if ((mat->mbs != nrow[i]) || (mat->nbs != ncol[i] || C->rmap->bs != bs)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. wrong size");
2083       ierr = PetscMemcmp(mat->ilen,lens[i],mat->mbs *sizeof(PetscInt),&flag);CHKERRQ(ierr);
2084       if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Cannot reuse matrix. wrong no of nonzeros");
2085       /* Initial matrix as if empty */
2086       ierr = PetscMemzero(mat->ilen,mat->mbs*sizeof(PetscInt));CHKERRQ(ierr);
2087 
2088       submats[i]->factortype = C->factortype;
2089     }
2090   } else {
2091     PetscInt bs_tmp;
2092     if (ijonly) bs_tmp = 1;
2093     else        bs_tmp = bs;
2094     for (i=0; i<ismax; i++) {
2095       ierr = MatCreate(PETSC_COMM_SELF,submats+i);CHKERRQ(ierr);
2096       ierr = MatSetSizes(submats[i],nrow[i]*bs_tmp,ncol[i]*bs_tmp,nrow[i]*bs_tmp,ncol[i]*bs_tmp);CHKERRQ(ierr);
2097       ierr = MatSetType(submats[i],((PetscObject)A)->type_name);CHKERRQ(ierr);
2098       ierr = MatSeqBAIJSetPreallocation(submats[i],bs_tmp,0,lens[i]);CHKERRQ(ierr);
2099       ierr = MatSeqSBAIJSetPreallocation(submats[i],bs_tmp,0,lens[i]);CHKERRQ(ierr); /* this subroutine is used by SBAIJ routines */
2100     }
2101   }
2102 
2103   /* Assemble the matrices */
2104   /* First assemble the local rows */
2105   {
2106     PetscInt  ilen_row,*imat_ilen,*imat_j,*imat_i;
2107     MatScalar *imat_a = NULL;
2108 
2109     for (i=0; i<ismax; i++) {
2110       mat       = (Mat_SeqBAIJ*)submats[i]->data;
2111       imat_ilen = mat->ilen;
2112       imat_j    = mat->j;
2113       imat_i    = mat->i;
2114       if (!ijonly) imat_a = mat->a;
2115       if (!allcolumns[i]) cmap_i = cmap[i];
2116       rmap_i = rmap[i];
2117       irow_i = irow[i];
2118       jmax   = nrow[i];
2119       for (j=0; j<jmax; j++) {
2120         if (allrows[i]) row = j;
2121         else row = irow_i[j];
2122 
2123 #if defined(PETSC_USE_CTABLE)
2124         ierr = PetscGetProc(row,size,c->rangebs,&proc);CHKERRQ(ierr);
2125 #else
2126         proc = rtable[row];
2127 #endif
2128         if (proc == rank) {
2129           row    = row - rstart;
2130           nzA    = a_i[row+1] - a_i[row];
2131           nzB    = b_i[row+1] - b_i[row];
2132           cworkA = a_j + a_i[row];
2133           cworkB = b_j + b_i[row];
2134           if (!ijonly) {
2135             vworkA = a_a + a_i[row]*bs2;
2136             vworkB = b_a + b_i[row]*bs2;
2137           }
2138 #if defined(PETSC_USE_CTABLE)
2139           ierr = PetscTableFind(rmap_i,row+rstart+1,&row);CHKERRQ(ierr);
2140           row--;
2141           if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
2142 #else
2143           row = rmap_i[row + rstart];
2144 #endif
2145           mat_i = imat_i[row];
2146           if (!ijonly) mat_a = imat_a + mat_i*bs2;
2147           mat_j    = imat_j + mat_i;
2148           ilen_row = imat_ilen[row];
2149 
2150           /* load the column indices for this row into cols*/
2151           if (!allcolumns[i]) {
2152             for (l=0; l<nzB; l++) {
2153               if ((ctmp = bmap[cworkB[l]]) < cstart) {
2154 #if defined(PETSC_USE_CTABLE)
2155                 ierr = PetscTableFind(cmap_i,ctmp+1,&tcol);CHKERRQ(ierr);
2156                 if (tcol) {
2157 #else
2158                 if ((tcol = cmap_i[ctmp])) {
2159 #endif
2160                   *mat_j++ = tcol - 1;
2161                   ierr     = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
2162                   mat_a   += bs2;
2163                   ilen_row++;
2164                 }
2165               } else break;
2166             }
2167             imark = l;
2168             for (l=0; l<nzA; l++) {
2169 #if defined(PETSC_USE_CTABLE)
2170               ierr = PetscTableFind(cmap_i,cstart+cworkA[l]+1,&tcol);CHKERRQ(ierr);
2171               if (tcol) {
2172 #else
2173               if ((tcol = cmap_i[cstart + cworkA[l]])) {
2174 #endif
2175                 *mat_j++ = tcol - 1;
2176                 if (!ijonly) {
2177                   ierr   = PetscMemcpy(mat_a,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
2178                   mat_a += bs2;
2179                 }
2180                 ilen_row++;
2181               }
2182             }
2183             for (l=imark; l<nzB; l++) {
2184 #if defined(PETSC_USE_CTABLE)
2185               ierr = PetscTableFind(cmap_i,bmap[cworkB[l]]+1,&tcol);CHKERRQ(ierr);
2186               if (tcol) {
2187 #else
2188               if ((tcol = cmap_i[bmap[cworkB[l]]])) {
2189 #endif
2190                 *mat_j++ = tcol - 1;
2191                 if (!ijonly) {
2192                   ierr   = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
2193                   mat_a += bs2;
2194                 }
2195                 ilen_row++;
2196               }
2197             }
2198           } else { /* allcolumns */
2199             for (l=0; l<nzB; l++) {
2200               if ((ctmp = bmap[cworkB[l]]) < cstart) {
2201                 *mat_j++ = ctmp;
2202                 ierr     = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
2203                 mat_a   += bs2;
2204                 ilen_row++;
2205               } else break;
2206             }
2207             imark = l;
2208             for (l=0; l<nzA; l++) {
2209               *mat_j++ = cstart+cworkA[l];
2210               if (!ijonly) {
2211                 ierr   = PetscMemcpy(mat_a,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
2212                 mat_a += bs2;
2213               }
2214               ilen_row++;
2215             }
2216             for (l=imark; l<nzB; l++) {
2217               *mat_j++ = bmap[cworkB[l]];
2218               if (!ijonly) {
2219                 ierr   = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
2220                 mat_a += bs2;
2221               }
2222               ilen_row++;
2223             }
2224           }
2225           imat_ilen[row] = ilen_row;
2226         }
2227       }
2228     }
2229   }
2230 
2231   /*   Now assemble the off proc rows*/
2232   {
2233     PetscInt    *sbuf1_i,*rbuf2_i,*rbuf3_i,*imat_ilen,ilen;
2234     PetscInt    *imat_j,*imat_i;
2235     MatScalar   *imat_a = NULL,*rbuf4_i = NULL;
2236     PetscMPIInt ii;
2237 
2238     for (tmp2=0; tmp2<nrqs; tmp2++) {
2239       if (ijonly) ii = tmp2;
2240       else {
2241         ierr = MPI_Waitany(nrqs,r_waits4,&ii,r_status4+tmp2);CHKERRQ(ierr);
2242       }
2243       idex    = pa[ii];
2244       sbuf1_i = sbuf1[idex];
2245       jmax    = sbuf1_i[0];
2246       ct1     = 2*jmax + 1;
2247       ct2     = 0;
2248       rbuf2_i = rbuf2[ii];
2249       rbuf3_i = rbuf3[ii];
2250       if (!ijonly) rbuf4_i = rbuf4[ii];
2251       for (j=1; j<=jmax; j++) {
2252         is_no = sbuf1_i[2*j-1];
2253         if (!allcolumns[is_no]) cmap_i = cmap[is_no];
2254         rmap_i    = rmap[is_no];
2255         mat       = (Mat_SeqBAIJ*)submats[is_no]->data;
2256         imat_ilen = mat->ilen;
2257         imat_j    = mat->j;
2258         imat_i    = mat->i;
2259         if (!ijonly) imat_a = mat->a;
2260         max1 = sbuf1_i[2*j];
2261         for (k=0; k<max1; k++,ct1++) {
2262           row = sbuf1_i[ct1];
2263 #if defined(PETSC_USE_CTABLE)
2264           ierr = PetscTableFind(rmap_i,row+1,&row);CHKERRQ(ierr);
2265           row--;
2266           if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
2267 #else
2268           row = rmap_i[row];
2269 #endif
2270           ilen  = imat_ilen[row];
2271           mat_i = imat_i[row];
2272           if (!ijonly) mat_a = imat_a + mat_i*bs2;
2273           mat_j = imat_j + mat_i;
2274           max2  = rbuf2_i[ct1];
2275 
2276           if (!allcolumns[is_no]) {
2277             for (l=0; l<max2; l++,ct2++) {
2278 #if defined(PETSC_USE_CTABLE)
2279               ierr = PetscTableFind(cmap_i,rbuf3_i[ct2]+1,&tcol);CHKERRQ(ierr);
2280               if (tcol) {
2281 #else
2282               if ((tcol = cmap_i[rbuf3_i[ct2]])) {
2283 #endif
2284                 *mat_j++ = tcol - 1;
2285                 if (!ijonly) {
2286                   ierr   = PetscMemcpy(mat_a,rbuf4_i+ct2*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
2287                   mat_a += bs2;
2288                 }
2289                 ilen++;
2290               }
2291             }
2292           } else { /* allcolumns */
2293             for (l=0; l<max2; l++,ct2++) {
2294               *mat_j++ = rbuf3_i[ct2];
2295               if (!ijonly) {
2296                 ierr   = PetscMemcpy(mat_a,rbuf4_i+ct2*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
2297                 mat_a += bs2;
2298               }
2299               ilen++;
2300             }
2301           }
2302           imat_ilen[row] = ilen;
2303         }
2304       }
2305     }
2306   }
2307   /* sort the rows */
2308   {
2309     PetscInt  ilen_row,*imat_ilen,*imat_j,*imat_i;
2310     MatScalar *imat_a = NULL;
2311     MatScalar *work;
2312 
2313     ierr = PetscMalloc1(bs2,&work);CHKERRQ(ierr);
2314     for (i=0; i<ismax; i++) {
2315       mat       = (Mat_SeqBAIJ*)submats[i]->data;
2316       imat_ilen = mat->ilen;
2317       imat_j    = mat->j;
2318       imat_i    = mat->i;
2319       if (!ijonly) imat_a = mat->a;
2320       if (allcolumns[i]) continue;
2321       jmax   = nrow[i];
2322       for (j=0; j<jmax; j++) {
2323         mat_i = imat_i[j];
2324         if (!ijonly) mat_a = imat_a + mat_i*bs2;
2325         mat_j    = imat_j + mat_i;
2326         ilen_row = imat_ilen[j];
2327         if (!ijonly) {ierr = PetscSortIntWithDataArray(ilen_row,mat_j,mat_a,bs2*sizeof(MatScalar),work);CHKERRQ(ierr);}
2328         else {ierr = PetscSortInt(ilen_row,mat_j);CHKERRQ(ierr);}
2329       }
2330     }
2331     ierr = PetscFree(work);CHKERRQ(ierr);
2332   }
2333   if (!ijonly) {
2334     ierr = PetscFree(r_status4);CHKERRQ(ierr);
2335     ierr = PetscFree(r_waits4);CHKERRQ(ierr);
2336     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits4,s_status4);CHKERRQ(ierr);}
2337     ierr = PetscFree(s_waits4);CHKERRQ(ierr);
2338     ierr = PetscFree(s_status4);CHKERRQ(ierr);
2339   }
2340 
2341   /* Restore the indices */
2342   for (i=0; i<ismax; i++) {
2343     if (!allrows[i]) {
2344       ierr = ISRestoreIndices(isrow[i],irow+i);CHKERRQ(ierr);
2345     }
2346     if (!allcolumns[i]) {
2347       ierr = ISRestoreIndices(iscol[i],icol+i);CHKERRQ(ierr);
2348     }
2349   }
2350 
2351   /* Destroy allocated memory */
2352 #if defined(PETSC_USE_CTABLE)
2353   ierr = PetscFree4(irow,icol,nrow,ncol);CHKERRQ(ierr);
2354 #else
2355   ierr = PetscFree5(irow,icol,nrow,ncol,rtable);CHKERRQ(ierr);
2356 #endif
2357   ierr = PetscFree4(w1,w2,w3,w4);CHKERRQ(ierr);
2358   ierr = PetscFree(pa);CHKERRQ(ierr);
2359 
2360   ierr = PetscFree4(sbuf1,ptr,tmp,ctr);CHKERRQ(ierr);
2361   ierr = PetscFree(sbuf1);CHKERRQ(ierr);
2362   ierr = PetscFree(rbuf2);CHKERRQ(ierr);
2363   for (i=0; i<nrqr; ++i) {
2364     ierr = PetscFree(sbuf2[i]);CHKERRQ(ierr);
2365   }
2366   for (i=0; i<nrqs; ++i) {
2367     ierr = PetscFree(rbuf3[i]);CHKERRQ(ierr);
2368   }
2369   ierr = PetscFree3(sbuf2,req_size,req_source);CHKERRQ(ierr);
2370   ierr = PetscFree(rbuf3);CHKERRQ(ierr);
2371   ierr = PetscFree(sbuf_aj[0]);CHKERRQ(ierr);
2372   ierr = PetscFree(sbuf_aj);CHKERRQ(ierr);
2373   if (!ijonly) {
2374     for (i=0; i<nrqs; ++i) {ierr = PetscFree(rbuf4[i]);CHKERRQ(ierr);}
2375     ierr = PetscFree(rbuf4);CHKERRQ(ierr);
2376     ierr = PetscFree(sbuf_aa[0]);CHKERRQ(ierr);
2377     ierr = PetscFree(sbuf_aa);CHKERRQ(ierr);
2378   }
2379 
2380 #if defined(PETSC_USE_CTABLE)
2381   for (i=0; i<ismax; i++) {
2382     ierr = PetscTableDestroy((PetscTable*)&rmap[i]);CHKERRQ(ierr);
2383   }
2384 #endif
2385   ierr = PetscFree(rmap);CHKERRQ(ierr);
2386 
2387   for (i=0; i<ismax; i++) {
2388     if (!allcolumns[i]) {
2389 #if defined(PETSC_USE_CTABLE)
2390       ierr = PetscTableDestroy((PetscTable*)&cmap[i]);CHKERRQ(ierr);
2391 #else
2392       ierr = PetscFree(cmap[i]);CHKERRQ(ierr);
2393 #endif
2394     }
2395   }
2396   ierr = PetscFree(cmap);CHKERRQ(ierr);
2397   ierr = PetscFree(lens);CHKERRQ(ierr);
2398 
2399   for (i=0; i<ismax; i++) {
2400     ierr = MatAssemblyBegin(submats[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2401     ierr = MatAssemblyEnd(submats[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2402   }
2403 
2404   c->ijonly = PETSC_FALSE; /* set back to the default */
2405   PetscFunctionReturn(0);
2406 }
2407 
2408