xref: /petsc/src/mat/impls/baij/mpi/baijov.c (revision 82094794e2b9d059cc8370a7dbd4702a5e945ede)
1 #define PETSCMAT_DLL
2 
3 /*
4    Routines to compute overlapping regions of a parallel MPI matrix
5   and to find submatrices that are shared across processors.
6 */
7 #include "../src/mat/impls/baij/mpi/mpibaij.h"
8 #include "petscbt.h"
9 
10 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Once(Mat,PetscInt,IS *);
11 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Local(Mat,PetscInt,char **,PetscInt*,PetscInt**);
12 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Receive(Mat,PetscInt,PetscInt **,PetscInt**,PetscInt*);
13 EXTERN PetscErrorCode MatGetRow_MPIBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**);
14 EXTERN PetscErrorCode MatRestoreRow_MPIBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**);
15 
16 #undef __FUNCT__
17 #define __FUNCT__ "MatIncreaseOverlap_MPIBAIJ"
18 PetscErrorCode MatIncreaseOverlap_MPIBAIJ(Mat C,PetscInt imax,IS is[],PetscInt ov)
19 {
20   PetscErrorCode ierr;
21   PetscInt       i,N=C->cmap->N, bs=C->rmap->bs;
22   IS             *is_new;
23 
24   PetscFunctionBegin;
25   ierr = PetscMalloc(imax*sizeof(IS),&is_new);CHKERRQ(ierr);
26   /* Convert the indices into block format */
27   ierr = ISCompressIndicesGeneral(N,bs,imax,is,is_new);CHKERRQ(ierr);
28   if (ov < 0){ SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap specified\n");}
29   for (i=0; i<ov; ++i) {
30     ierr = MatIncreaseOverlap_MPIBAIJ_Once(C,imax,is_new);CHKERRQ(ierr);
31   }
32   for (i=0; i<imax; i++) {ierr = ISDestroy(is[i]);CHKERRQ(ierr);}
33   ierr = ISExpandIndicesGeneral(N,bs,imax,is_new,is);CHKERRQ(ierr);
34   for (i=0; i<imax; i++) {ierr = ISDestroy(is_new[i]);CHKERRQ(ierr);}
35   ierr = PetscFree(is_new);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
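/*
   Illustrative usage (a sketch only; A, nlocal and rows are placeholders, not part
   of this file): the routine above is reached through the public MatIncreaseOverlap()
   interface, e.g. when PCASM builds overlapping subdomains.  The caller passes index
   sets in the usual (unblocked) row numbering; they are compressed to block indices
   internally, as done above with ISCompressIndicesGeneral().

     IS is[1];
     ierr = ISCreateGeneral(PETSC_COMM_SELF,nlocal,rows,&is[0]);CHKERRQ(ierr);
     ierr = MatIncreaseOverlap(A,1,is,2);CHKERRQ(ierr);   (grow the sets by 2 levels)
     ierr = ISDestroy(is[0]);CHKERRQ(ierr);
*/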
39 /*
40   Sample message format:
41   If a processor A wants processor B to process some elements corresponding
42   to index sets is[1], is[5]
43   mesg [0] = 2   (no of index sets in the mesg)
44   -----------
45   mesg [1] = 1 => is[1]
46   mesg [2] = sizeof(is[1]);
47   -----------
48   mesg [3] = 5  => is[5]
49   mesg [4] = sizeof(is[5]);
50   -----------
51   mesg [5]
52   mesg [n]  data(is[1])
53   -----------
54   mesg[n+1]
55   mesg[m]  data(is[5])
56   -----------
57 
58   Notes:
59   nrqs - no of requests sent (or to be sent out)
60   nrqr - no of requests received (which have to be, or which have already been, processed)
61 */
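/*
  A concrete instance of the layout above (illustrative only): if is[1] holds the
  block rows {3,7} and is[5] holds {2}, the message sent to processor B is

    mesg = { 2,   1, 2,   5, 1,   3, 7,   2 }
            #IS  id,len  id,len  is[1]   is[5]
*/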
62 #undef __FUNCT__
63 #define __FUNCT__ "MatIncreaseOverlap_MPIBAIJ_Once"
64 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Once(Mat C,PetscInt imax,IS is[])
65 {
66   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
67   const PetscInt **idx,*idx_i;
68   PetscInt       *n,*w3,*w4,*rtable,**data,len;
69   PetscErrorCode ierr;
70   PetscMPIInt    size,rank,tag1,tag2,*w2,*w1,nrqr;
71   PetscInt       Mbs,i,j,k,**rbuf,row,proc,nrqs,msz,**outdat,**ptr;
72   PetscInt       *ctr,*pa,*tmp,*isz,*isz1,**xdata,**rbuf2;
73   PetscMPIInt    *onodes1,*olengths1,*onodes2,*olengths2;
74   PetscBT        *table;
75   MPI_Comm       comm;
76   MPI_Request    *s_waits1,*r_waits1,*s_waits2,*r_waits2;
77   MPI_Status     *s_status,*recv_status;
78 
79   PetscFunctionBegin;
80   comm   = ((PetscObject)C)->comm;
81   size   = c->size;
82   rank   = c->rank;
83   Mbs    = c->Mbs;
84 
85   ierr = PetscObjectGetNewTag((PetscObject)C,&tag1);CHKERRQ(ierr);
86   ierr = PetscObjectGetNewTag((PetscObject)C,&tag2);CHKERRQ(ierr);
87 
88   ierr   = PetscMalloc((imax+1)*sizeof(PetscInt*)+ (imax + Mbs)*sizeof(PetscInt),&idx);CHKERRQ(ierr);
89   n      = (PetscInt*)(idx + imax);
90   rtable = n + imax;
91 
92   for (i=0; i<imax; i++) {
93     ierr = ISGetIndices(is[i],&idx[i]);CHKERRQ(ierr);
94     ierr = ISGetLocalSize(is[i],&n[i]);CHKERRQ(ierr);
95   }
96 
97   /* Create a lookup table for the mapping: row -> proc */
98   for (i=0,j=0; i<size; i++) {
99     len = c->rangebs[i+1];
100     for (; j<len; j++) {
101       rtable[j] = i;
102     }
103   }
104 
105   /* Evaluate communication - mesg to whom, length of mesg, and buffer space
106      required. Based on this, buffers are allocated, and data copied into them */
107   ierr = PetscMalloc(size*2*sizeof(PetscInt)+size*2*sizeof(PetscMPIInt),&w1);CHKERRQ(ierr);/*  mesg size */
108   w2   = w1 + size;                /* if w2[i] marked, then a message to proc i*/
109   w3   = (PetscInt*) (w2 + size);   /* no of IS that need to be sent to proc i */
110   w4   = w3 + size;                /* temp work space used in determining w1, w2, w3 */
111   ierr = PetscMemzero(w1,size*sizeof(PetscInt)+2*size*sizeof(PetscMPIInt));CHKERRQ(ierr); /* initialise work vector*/
112   for (i=0; i<imax; i++) {
113     ierr  = PetscMemzero(w4,size*sizeof(PetscInt));CHKERRQ(ierr); /* initialise work vector*/
114     idx_i = idx[i];
115     len   = n[i];
116     for (j=0; j<len; j++) {
117       row  = idx_i[j];
118       if (row < 0) {
119         SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Index set cannot have negative entries");
120       }
121       proc = rtable[row];
122       w4[proc]++;
123     }
124     for (j=0; j<size; j++){
125       if (w4[j]) { w1[j] += w4[j]; w3[j]++;}
126     }
127   }
128 
129   nrqs     = 0;              /* no of outgoing messages */
130   msz      = 0;              /* total mesg length (for all procs) */
131   w1[rank] = 0;              /* no mesg sent to itself */
132   w3[rank] = 0;
133   for (i=0; i<size; i++) {
134     if (w1[i])  {w2[i] = 1; nrqs++;} /* there exists a message to proc i */
135   }
136   /* pa - is list of processors to communicate with */
137   ierr = PetscMalloc((nrqs+1)*sizeof(PetscInt),&pa);CHKERRQ(ierr);
138   for (i=0,j=0; i<size; i++) {
139     if (w1[i]) {pa[j] = i; j++;}
140   }
141 
142   /* Each message would have a header = 1 + 2*(no of IS) + data */
143   for (i=0; i<nrqs; i++) {
144     j      = pa[i];
145     w1[j] += w2[j] + 2*w3[j];
146     msz   += w1[j];
147   }
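  /* At this point w1[j] is the full length of the message destined for proc j
     (the 1 + 2*w3[j] header entries plus the row indices), and msz is the sum
     of these lengths over all destination procs */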
148 
149   /* Determine the number of messages to expect, their lengths, and the from-ids */
150   ierr = PetscGatherNumberOfMessages(comm,w2,w1,&nrqr);CHKERRQ(ierr);
151   ierr = PetscGatherMessageLengths(comm,nrqs,nrqr,w1,&onodes1,&olengths1);CHKERRQ(ierr);
152 
153   /* Now post the Irecvs corresponding to these messages */
154   ierr = PetscPostIrecvInt(comm,tag1,nrqr,onodes1,olengths1,&rbuf,&r_waits1);CHKERRQ(ierr);
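  /* nrqr is the number of procs that will send a request to this proc; onodes1[]
     and olengths1[] give their ranks and message lengths, and rbuf[i]/r_waits1[i]
     are the receive buffer and request posted for each of them */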
155 
156   /* Allocate Memory for outgoing messages */
157   ierr = PetscMalloc(2*size*sizeof(PetscInt*) + (size+msz)*sizeof(PetscInt),&outdat);CHKERRQ(ierr);
158   ptr    = outdat + size;     /* Pointers to the data in outgoing buffers */
159   ierr   = PetscMemzero(outdat,2*size*sizeof(PetscInt*));CHKERRQ(ierr);
160   tmp    = (PetscInt*)(outdat + 2*size);
161   ctr    = tmp + msz;
162 
163   {
164     PetscInt *iptr = tmp,ict  = 0;
165     for (i=0; i<nrqs; i++) {
166       j         = pa[i];
167       iptr     +=  ict;
168       outdat[j] = iptr;
169       ict       = w1[j];
170     }
171   }
172 
173   /* Form the outgoing messages */
174   /*plug in the headers*/
175   for (i=0; i<nrqs; i++) {
176     j            = pa[i];
177     outdat[j][0] = 0;
178     ierr = PetscMemzero(outdat[j]+1,2*w3[j]*sizeof(PetscInt));CHKERRQ(ierr);
179     ptr[j]       = outdat[j] + 2*w3[j] + 1;
180   }
181 
182   /* Memory for doing local proc's work*/
183   {
184     PetscInt  *d_p;
185     char      *t_p;
186 
187     ierr = PetscMalloc((imax)*(sizeof(PetscBT) + sizeof(PetscInt*)+ sizeof(PetscInt)) +
188       (Mbs)*imax*sizeof(PetscInt)  + (Mbs/PETSC_BITS_PER_BYTE+1)*imax*sizeof(char),&table);CHKERRQ(ierr);
189     ierr = PetscMemzero(table,(imax)*(sizeof(PetscBT) + sizeof(PetscInt*)+ sizeof(PetscInt)) +
190       (Mbs)*imax*sizeof(PetscInt)  + (Mbs/PETSC_BITS_PER_BYTE+1)*imax*sizeof(char));CHKERRQ(ierr);
191     data = (PetscInt **)(table + imax);
192     isz  = (PetscInt  *)(data  + imax);
193     d_p  = (PetscInt  *)(isz   + imax);
194     t_p  = (char *)(d_p   + Mbs*imax);
195     for (i=0; i<imax; i++) {
196       table[i] = t_p + (Mbs/PETSC_BITS_PER_BYTE+1)*i;
197       data[i]  = d_p + (Mbs)*i;
198     }
199   }
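  /* For each index set i: table[i] is a bit array over the Mbs block rows marking
     which rows have already been recorded, data[i] is the growing list of those
     rows, and isz[i] is the current number of entries in data[i] */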
200 
201   /* Parse the IS and update local tables and the outgoing buf with the data*/
202   {
203     PetscInt     n_i,*data_i,isz_i,*outdat_j,ctr_j;
204     PetscBT table_i;
205 
206     for (i=0; i<imax; i++) {
207       ierr    = PetscMemzero(ctr,size*sizeof(PetscInt));CHKERRQ(ierr);
208       n_i     = n[i];
209       table_i = table[i];
210       idx_i   = idx[i];
211       data_i  = data[i];
212       isz_i   = isz[i];
213       for (j=0;  j<n_i; j++) {  /* parse the indices of each IS */
214         row  = idx_i[j];
215         proc = rtable[row];
216         if (proc != rank) { /* copy to the outgoing buffer */
217           ctr[proc]++;
218           *ptr[proc] = row;
219           ptr[proc]++;
220         }
221         else { /* Update the local table */
222           if (!PetscBTLookupSet(table_i,row)) { data_i[isz_i++] = row;}
223         }
224       }
225       /* Update the headers for the current IS */
226       for (j=0; j<size; j++) { /* Can Optimise this loop by using pa[] */
227         if ((ctr_j = ctr[j])) {
228           outdat_j        = outdat[j];
229           k               = ++outdat_j[0];
230           outdat_j[2*k]   = ctr_j;
231           outdat_j[2*k-1] = i;
232         }
233       }
234       isz[i] = isz_i;
235     }
236   }
237 
238   /*  Now  post the sends */
239   ierr = PetscMalloc((nrqs+1)*sizeof(MPI_Request),&s_waits1);CHKERRQ(ierr);
240   for (i=0; i<nrqs; ++i) {
241     j    = pa[i];
242     ierr = MPI_Isend(outdat[j],w1[j],MPIU_INT,j,tag1,comm,s_waits1+i);CHKERRQ(ierr);
243   }
244 
245   /* No longer need the original indices*/
246   for (i=0; i<imax; ++i) {
247     ierr = ISRestoreIndices(is[i],idx+i);CHKERRQ(ierr);
248   }
249   ierr = PetscFree(idx);CHKERRQ(ierr);
250 
251   for (i=0; i<imax; ++i) {
252     ierr = ISDestroy(is[i]);CHKERRQ(ierr);
253   }
254 
255   /* Do Local work*/
256   ierr = MatIncreaseOverlap_MPIBAIJ_Local(C,imax,table,isz,data);CHKERRQ(ierr);
257 
258   /* Receive messages*/
259   ierr = PetscMalloc((nrqr+1)*sizeof(MPI_Status),&recv_status);CHKERRQ(ierr);
260   if (nrqr) {ierr = MPI_Waitall(nrqr,r_waits1,recv_status);CHKERRQ(ierr);}
261 
262   ierr = PetscMalloc((nrqs+1)*sizeof(MPI_Status),&s_status);CHKERRQ(ierr);
263   if (nrqs) {ierr = MPI_Waitall(nrqs,s_waits1,s_status);CHKERRQ(ierr);}
264 
265   /* Phase 1 sends are complete - deallocate buffers */
266   ierr = PetscFree(outdat);CHKERRQ(ierr);
267   ierr = PetscFree(w1);CHKERRQ(ierr);
268 
269   ierr = PetscMalloc((nrqr+1)*sizeof(PetscInt*),&xdata);CHKERRQ(ierr);
270   ierr = PetscMalloc((nrqr+1)*sizeof(PetscInt),&isz1);CHKERRQ(ierr);
271   ierr = MatIncreaseOverlap_MPIBAIJ_Receive(C,nrqr,rbuf,xdata,isz1);CHKERRQ(ierr);
272   ierr = PetscFree(rbuf);CHKERRQ(ierr);
273 
274   /* Send the data back*/
275   /* Do a global reduction to know the buffer space req for incoming messages*/
276   {
277     PetscMPIInt *rw1;
278 
279     ierr = PetscMalloc(size*sizeof(PetscInt),&rw1);CHKERRQ(ierr);
280     ierr = PetscMemzero(rw1,size*sizeof(PetscInt));CHKERRQ(ierr);
281 
282     for (i=0; i<nrqr; ++i) {
283       proc      = recv_status[i].MPI_SOURCE;
284       if (proc != onodes1[i]) SETERRQ(PETSC_ERR_PLIB,"MPI_SOURCE mismatch");
285       rw1[proc] = isz1[i];
286     }
287 
288     ierr = PetscFree(onodes1);CHKERRQ(ierr);
289     ierr = PetscFree(olengths1);CHKERRQ(ierr);
290 
291     /* Determine the number of messages to expect, their lengths, and the from-ids */
292     ierr = PetscGatherMessageLengths(comm,nrqr,nrqs,rw1,&onodes2,&olengths2);CHKERRQ(ierr);
293     ierr = PetscFree(rw1);CHKERRQ(ierr);
294   }
295   /* Now post the Irecvs corresponding to these messages */
296   ierr = PetscPostIrecvInt(comm,tag2,nrqs,onodes2,olengths2,&rbuf2,&r_waits2);CHKERRQ(ierr);
297 
298   /*  Now  post the sends */
299   ierr = PetscMalloc((nrqr+1)*sizeof(MPI_Request),&s_waits2);CHKERRQ(ierr);
300   for (i=0; i<nrqr; ++i) {
301     j    = recv_status[i].MPI_SOURCE;
302     ierr = MPI_Isend(xdata[i],isz1[i],MPIU_INT,j,tag2,comm,s_waits2+i);CHKERRQ(ierr);
303   }
304 
305   /* receive work done on other processors*/
306   {
307     PetscMPIInt idex;
308     PetscInt    is_no,ct1,max,*rbuf2_i,isz_i,*data_i,jmax;
309     PetscBT     table_i;
310     MPI_Status  *status2;
311 
312     ierr = PetscMalloc((PetscMax(nrqr,nrqs)+1)*sizeof(MPI_Status),&status2);CHKERRQ(ierr);
313 
314     for (i=0; i<nrqs; ++i) {
315       ierr = MPI_Waitany(nrqs,r_waits2,&idex,status2+i);CHKERRQ(ierr);
316       /* Process the message*/
317       rbuf2_i = rbuf2[idex];
318       ct1     = 2*rbuf2_i[0]+1;
319       jmax    = rbuf2[idex][0];
320       for (j=1; j<=jmax; j++) {
321         max     = rbuf2_i[2*j];
322         is_no   = rbuf2_i[2*j-1];
323         isz_i   = isz[is_no];
324         data_i  = data[is_no];
325         table_i = table[is_no];
326         for (k=0; k<max; k++,ct1++) {
327           row = rbuf2_i[ct1];
328           if (!PetscBTLookupSet(table_i,row)) { data_i[isz_i++] = row;}
329         }
330         isz[is_no] = isz_i;
331       }
332     }
333     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits2,status2);CHKERRQ(ierr);}
334     ierr = PetscFree(status2);CHKERRQ(ierr);
335   }
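  /* data[i][0..isz[i]-1] now holds the block rows of is[i] plus one level of
     overlap, combining the local contribution with the rows returned by the
     other procs; wrap each list back up as an index set */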
336 
337   for (i=0; i<imax; ++i) {
338     ierr = ISCreateGeneral(PETSC_COMM_SELF,isz[i],data[i],is+i);CHKERRQ(ierr);
339   }
340 
341 
342   ierr = PetscFree(onodes2);CHKERRQ(ierr);
343   ierr = PetscFree(olengths2);CHKERRQ(ierr);
344 
345   ierr = PetscFree(pa);CHKERRQ(ierr);
346   ierr = PetscFree(rbuf2);CHKERRQ(ierr);
347   ierr = PetscFree(s_waits1);CHKERRQ(ierr);
348   ierr = PetscFree(r_waits1);CHKERRQ(ierr);
349   ierr = PetscFree(s_waits2);CHKERRQ(ierr);
350   ierr = PetscFree(r_waits2);CHKERRQ(ierr);
351   ierr = PetscFree(table);CHKERRQ(ierr);
352   ierr = PetscFree(s_status);CHKERRQ(ierr);
353   ierr = PetscFree(recv_status);CHKERRQ(ierr);
354   ierr = PetscFree(xdata[0]);CHKERRQ(ierr);
355   ierr = PetscFree(xdata);CHKERRQ(ierr);
356   ierr = PetscFree(isz1);CHKERRQ(ierr);
357   PetscFunctionReturn(0);
358 }
359 
360 #undef __FUNCT__
361 #define __FUNCT__ "MatIncreaseOverlap_MPIBAIJ_Local"
362 /*
363    MatIncreaseOverlap_MPIBAIJ_Local - Called by MatIncreaseOverlap, to do
364        the work on the local processor.
365 
366      Inputs:
367       C      - MAT_MPIBAIJ;
368       imax - total no of index sets processed at a time;
369       table  - an array of char - size = Mbs bits.
370 
371      Output:
372       isz    - array containing the count of the solution elements corresponding
373                to each index set;
374       data   - pointer to the solutions
375 */
376 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Local(Mat C,PetscInt imax,PetscBT *table,PetscInt *isz,PetscInt **data)
377 {
378   Mat_MPIBAIJ *c = (Mat_MPIBAIJ*)C->data;
379   Mat         A = c->A,B = c->B;
380   Mat_SeqBAIJ *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)B->data;
381   PetscInt    start,end,val,max,rstart,cstart,*ai,*aj;
382   PetscInt    *bi,*bj,*garray,i,j,k,row,*data_i,isz_i;
383   PetscBT     table_i;
384 
385   PetscFunctionBegin;
386   rstart = c->rstartbs;
387   cstart = c->cstartbs;
388   ai     = a->i;
389   aj     = a->j;
390   bi     = b->i;
391   bj     = b->j;
392   garray = c->garray;
393 
394 
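  /* A is the "diagonal" block of the parallel matrix (its column indices are local
     and are shifted by cstart to global block numbering), while B is the
     off-diagonal block whose compressed column indices are mapped to global block
     columns through garray */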
395   for (i=0; i<imax; i++) {
396     data_i  = data[i];
397     table_i = table[i];
398     isz_i   = isz[i];
399     for (j=0,max=isz[i]; j<max; j++) {
400       row   = data_i[j] - rstart;
401       start = ai[row];
402       end   = ai[row+1];
403       for (k=start; k<end; k++) { /* Amat */
404         val = aj[k] + cstart;
405         if (!PetscBTLookupSet(table_i,val)) { data_i[isz_i++] = val;}
406       }
407       start = bi[row];
408       end   = bi[row+1];
409       for (k=start; k<end; k++) { /* Bmat */
410         val = garray[bj[k]];
411         if (!PetscBTLookupSet(table_i,val)) { data_i[isz_i++] = val;}
412       }
413     }
414     isz[i] = isz_i;
415   }
416   PetscFunctionReturn(0);
417 }
418 #undef __FUNCT__
419 #define __FUNCT__ "MatIncreaseOverlap_MPIBAIJ_Receive"
420 /*
421       MatIncreaseOverlap_MPIBAIJ_Receive - Process the received messages,
422          and return the output
423 
424          Input:
425            C    - the matrix
426            nrqr - no of messages being processed.
427            rbuf - an array of pointers to the received requests
428 
429          Output:
430            xdata - array of messages to be sent back
431            isz1  - size of each message
432 
433   For better efficiency perhaps we should malloc each xdata[i] separately;
434 then, if a realloc is required, we need only copy the data for that one row
435 rather than for all previous rows, as happens now where a single large chunk
436 of memory is used.
437 
438 */
439 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Receive(Mat C,PetscInt nrqr,PetscInt **rbuf,PetscInt **xdata,PetscInt * isz1)
440 {
441   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
442   Mat            A = c->A,B = c->B;
443   Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)B->data;
444   PetscErrorCode ierr;
445   PetscInt       rstart,cstart,*ai,*aj,*bi,*bj,*garray,i,j,k;
446   PetscInt       row,total_sz,ct,ct1,ct2,ct3,mem_estimate,oct2,l,start,end;
447   PetscInt       val,max1,max2,rank,Mbs,no_malloc =0,*tmp,new_estimate,ctr;
448   PetscInt       *rbuf_i,kmax,rbuf_0;
449   PetscBT        xtable;
450 
451   PetscFunctionBegin;
452   rank   = c->rank;
453   Mbs    = c->Mbs;
454   rstart = c->rstartbs;
455   cstart = c->cstartbs;
456   ai     = a->i;
457   aj     = a->j;
458   bi     = b->i;
459   bj     = b->j;
460   garray = c->garray;
461 
462 
463   for (i=0,ct=0,total_sz=0; i<nrqr; ++i) {
464     rbuf_i  =  rbuf[i];
465     rbuf_0  =  rbuf_i[0];
466     ct     += rbuf_0;
467     for (j=1; j<=rbuf_0; j++) { total_sz += rbuf_i[2*j]; }
468   }
469 
470   if (c->Mbs) max1 = ct*(a->nz +b->nz)/c->Mbs;
471   else        max1 = 1;
472   mem_estimate = 3*((total_sz > max1 ? total_sz : max1)+1);
473   ierr         = PetscMalloc(mem_estimate*sizeof(PetscInt),&xdata[0]);CHKERRQ(ierr);
474   ++no_malloc;
475   ierr         = PetscBTCreate(Mbs,xtable);CHKERRQ(ierr);
476   ierr         = PetscMemzero(isz1,nrqr*sizeof(PetscInt));CHKERRQ(ierr);
477 
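  /* xdata[0] is sized by a heuristic (the total request size, or an estimate based
     on the average number of nonzero blocks per block row, whichever is larger);
     whenever ct3 would exceed mem_estimate the buffer is grown by a factor of 1.5
     and the xdata[] pointers are re-based into the new allocation */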
478   ct3 = 0;
479   for (i=0; i<nrqr; i++) { /* for each mesg from proc i */
480     rbuf_i =  rbuf[i];
481     rbuf_0 =  rbuf_i[0];
482     ct1    =  2*rbuf_0+1;
483     ct2    =  ct1;
484     ct3    += ct1;
485     for (j=1; j<=rbuf_0; j++) { /* for each IS from proc i*/
486       ierr = PetscBTMemzero(Mbs,xtable);CHKERRQ(ierr);
487       oct2 = ct2;
488       kmax = rbuf_i[2*j];
489       for (k=0; k<kmax; k++,ct1++) {
490         row = rbuf_i[ct1];
491         if (!PetscBTLookupSet(xtable,row)) {
492           if (!(ct3 < mem_estimate)) {
493             new_estimate = (PetscInt)(1.5*mem_estimate)+1;
494             ierr = PetscMalloc(new_estimate * sizeof(PetscInt),&tmp);CHKERRQ(ierr);
495             ierr = PetscMemcpy(tmp,xdata[0],mem_estimate*sizeof(PetscInt));CHKERRQ(ierr);
496             ierr = PetscFree(xdata[0]);CHKERRQ(ierr);
497             xdata[0]     = tmp;
498             mem_estimate = new_estimate; ++no_malloc;
499             for (ctr=1; ctr<=i; ctr++) { xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];}
500           }
501           xdata[i][ct2++] = row;
502           ct3++;
503         }
504       }
505       for (k=oct2,max2=ct2; k<max2; k++)  {
506         row   = xdata[i][k] - rstart;
507         start = ai[row];
508         end   = ai[row+1];
509         for (l=start; l<end; l++) {
510           val = aj[l] + cstart;
511           if (!PetscBTLookupSet(xtable,val)) {
512             if (!(ct3 < mem_estimate)) {
513               new_estimate = (PetscInt)(1.5*mem_estimate)+1;
514               ierr = PetscMalloc(new_estimate * sizeof(PetscInt),&tmp);CHKERRQ(ierr);
515               ierr = PetscMemcpy(tmp,xdata[0],mem_estimate*sizeof(PetscInt));CHKERRQ(ierr);
516               ierr = PetscFree(xdata[0]);CHKERRQ(ierr);
517               xdata[0]     = tmp;
518               mem_estimate = new_estimate; ++no_malloc;
519               for (ctr=1; ctr<=i; ctr++) { xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];}
520             }
521             xdata[i][ct2++] = val;
522             ct3++;
523           }
524         }
525         start = bi[row];
526         end   = bi[row+1];
527         for (l=start; l<end; l++) {
528           val = garray[bj[l]];
529           if (!PetscBTLookupSet(xtable,val)) {
530             if (!(ct3 < mem_estimate)) {
531               new_estimate = (PetscInt)(1.5*mem_estimate)+1;
532               ierr = PetscMalloc(new_estimate * sizeof(PetscInt),&tmp);CHKERRQ(ierr);
533               ierr = PetscMemcpy(tmp,xdata[0],mem_estimate*sizeof(PetscInt));CHKERRQ(ierr);
534               ierr = PetscFree(xdata[0]);CHKERRQ(ierr);
535               xdata[0]     = tmp;
536               mem_estimate = new_estimate; ++no_malloc;
537               for (ctr =1; ctr <=i; ctr++) { xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];}
538             }
539             xdata[i][ct2++] = val;
540             ct3++;
541           }
542         }
543       }
544       /* Update the header*/
545       xdata[i][2*j]   = ct2 - oct2; /* Undo the vector isz1 and use only a var*/
546       xdata[i][2*j-1] = rbuf_i[2*j-1];
547     }
548     xdata[i][0] = rbuf_0;
549     xdata[i+1]  = xdata[i] + ct2;
550     isz1[i]     = ct2; /* size of each message */
551   }
552   ierr = PetscBTDestroy(xtable);CHKERRQ(ierr);
553   ierr = PetscInfo3(C,"Allocated %D bytes, required %D, no of mallocs = %D\n",mem_estimate,ct3,no_malloc);CHKERRQ(ierr);
554   PetscFunctionReturn(0);
555 }
556 
557 static PetscErrorCode MatGetSubMatrices_MPIBAIJ_local(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *);
558 
559 #undef __FUNCT__
560 #define __FUNCT__ "MatGetSubMatrices_MPIBAIJ"
561 PetscErrorCode MatGetSubMatrices_MPIBAIJ(Mat C,PetscInt ismax,const IS isrow[],const IS iscol[],MatReuse scall,Mat *submat[])
562 {
563   IS             *isrow_new,*iscol_new;
564   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
565   PetscErrorCode ierr;
566   PetscInt       nmax,nstages_local,nstages,i,pos,max_no,N=C->cmap->N,bs=C->rmap->bs;
567 
568   PetscFunctionBegin;
569   /* The compression and expansion should be avoided. It does not point
570      out errors and might change the indices, hence it is bug-prone */
571 
572   ierr = PetscMalloc(2*(ismax+1)*sizeof(IS),&isrow_new);CHKERRQ(ierr);
573   iscol_new = isrow_new + ismax;
574   ierr = ISCompressIndicesSorted(N,bs,ismax,isrow,isrow_new);CHKERRQ(ierr);
575   ierr = ISCompressIndicesSorted(N,bs,ismax,iscol,iscol_new);CHKERRQ(ierr);
576 
577   /* Allocate memory to hold all the submatrices */
578   if (scall != MAT_REUSE_MATRIX) {
579     ierr = PetscMalloc((ismax+1)*sizeof(Mat),submat);CHKERRQ(ierr);
580   }
581   /* Determine the number of stages through which submatrices are done */
582   nmax          = 20*1000000 / (c->Nbs * sizeof(PetscInt));
583   if (!nmax) nmax = 1;
584   nstages_local = ismax/nmax + ((ismax % nmax)?1:0);
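  /* nmax caps the number of index sets handled per stage so that the per-IS work
     arrays of size Nbs (e.g. the column maps used in the non-CTABLE case) stay
     within roughly 20 MB of PetscInt storage */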
585 
586   /* Make sure every processor loops through the nstages */
587   ierr = MPI_Allreduce(&nstages_local,&nstages,1,MPIU_INT,MPI_MAX,((PetscObject)C)->comm);CHKERRQ(ierr);
588   for (i=0,pos=0; i<nstages; i++) {
589     if (pos+nmax <= ismax) max_no = nmax;
590     else if (pos == ismax) max_no = 0;
591     else                   max_no = ismax-pos;
592     ierr = MatGetSubMatrices_MPIBAIJ_local(C,max_no,isrow_new+pos,iscol_new+pos,scall,*submat+pos);CHKERRQ(ierr);
593     pos += max_no;
594   }
595 
596   for (i=0; i<ismax; i++) {
597     ierr = ISDestroy(isrow_new[i]);CHKERRQ(ierr);
598     ierr = ISDestroy(iscol_new[i]);CHKERRQ(ierr);
599   }
600   ierr = PetscFree(isrow_new);CHKERRQ(ierr);
601   PetscFunctionReturn(0);
602 }
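/*
   Illustrative usage (a sketch only; m, n and first are placeholders): the routine
   above is the MATMPIBAIJ back-end of MatGetSubMatrices().  Each process requests
   its own list of sequential submatrices; the column index sets must be sorted.

     IS  isrow,iscol;
     Mat *submat;
     ierr = ISCreateStride(PETSC_COMM_SELF,m,first,1,&isrow);CHKERRQ(ierr);
     ierr = ISCreateStride(PETSC_COMM_SELF,n,first,1,&iscol);CHKERRQ(ierr);
     ierr = MatGetSubMatrices(A,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&submat);CHKERRQ(ierr);
     (use submat[0], then free it with MatDestroyMatrices(1,&submat))
*/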
603 
604 #if defined (PETSC_USE_CTABLE)
605 #undef __FUNCT__
606 #define __FUNCT__ "PetscGetProc"
607 PetscErrorCode PetscGetProc(const PetscInt row, const PetscMPIInt size, const PetscInt proc_gnode[], PetscMPIInt *rank)
608 {
609   PetscInt    nGlobalNd = proc_gnode[size];
610   PetscMPIInt fproc = PetscMPIIntCast( (PetscInt)(((float)row * (float)size / (float)nGlobalNd + 0.5)));
611 
612   PetscFunctionBegin;
613   if (fproc > size) fproc = size;
614   while (row < proc_gnode[fproc] || row >= proc_gnode[fproc+1]) {
615     if (row < proc_gnode[fproc]) fproc--;
616     else                         fproc++;
617   }
618   *rank = fproc;
619   PetscFunctionReturn(0);
620 }
621 #endif
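/*
   PetscGetProc() guesses the owning process by linear interpolation
   (row*size/nGlobalNd) and then walks the guess up or down until row lies in
   [proc_gnode[fproc], proc_gnode[fproc+1]); for a roughly uniform distribution
   of block rows the walk terminates after a step or two.
*/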
622 
623 /* -------------------------------------------------------------------------*/
624 /* This code is used for BAIJ and SBAIJ matrices (unfortunate dependency) */
625 #undef __FUNCT__
626 #define __FUNCT__ "MatGetSubMatrices_MPIBAIJ_local"
627 static PetscErrorCode MatGetSubMatrices_MPIBAIJ_local(Mat C,PetscInt ismax,const IS isrow[],const IS iscol[],MatReuse scall,Mat *submats)
628 {
629   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
630   Mat            A = c->A;
631   Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)c->B->data,*mat;
632   const PetscInt **irow,**icol,*irow_i;
633   PetscInt       *nrow,*ncol,*w3,*w4,start;
634   PetscErrorCode ierr;
635   PetscMPIInt    size,tag0,tag1,tag2,tag3,*w1,*w2,nrqr,idex,end,proc;
636   PetscInt       **sbuf1,**sbuf2,rank,i,j,k,l,ct1,ct2,**rbuf1,row;
637   PetscInt       nrqs,msz,**ptr,*req_size,*ctr,*pa,*tmp,tcol;
638   PetscInt       **rbuf3,*req_source,**sbuf_aj,**rbuf2,max1,max2;
639   PetscInt       **lens,is_no,ncols,*cols,mat_i,*mat_j,tmp2,jmax;
640   PetscInt       ctr_j,*sbuf1_j,*sbuf_aj_i,*rbuf1_i,kmax,*lens_i;
641   PetscInt       bs=C->rmap->bs,bs2=c->bs2,*a_j=a->j,*b_j=b->j,*cworkA,*cworkB;
642   PetscInt       cstart = c->cstartbs,nzA,nzB,*a_i=a->i,*b_i=b->i,imark;
643   PetscInt       *bmap = c->garray,ctmp,rstart=c->rstartbs;
644   MPI_Request    *s_waits1,*r_waits1,*s_waits2,*r_waits2,*r_waits3;
645   MPI_Request    *r_waits4,*s_waits3,*s_waits4;
646   MPI_Status     *r_status1,*r_status2,*s_status1,*s_status3,*s_status2;
647   MPI_Status     *r_status3,*r_status4,*s_status4;
648   MPI_Comm       comm;
649   MatScalar      **rbuf4,**sbuf_aa,*vals,*mat_a,*sbuf_aa_i,*vworkA,*vworkB;
650   MatScalar      *a_a=a->a,*b_a=b->a;
651   PetscTruth     flag,sorted;
652   PetscMPIInt    *onodes1,*olengths1;
653 
654 #if defined (PETSC_USE_CTABLE)
655   PetscInt tt;
656   PetscTable  *rowmaps,*colmaps,lrow1_grow1,lcol1_gcol1;
657 #else
658   PetscInt    **cmap,*cmap_i,*rtable,*rmap_i,**rmap, Mbs = c->Mbs;
659 #endif
660 
661   PetscFunctionBegin;
662   comm   = ((PetscObject)C)->comm;
663   tag0   = ((PetscObject)C)->tag;
664   size   = c->size;
665   rank   = c->rank;
666 
667   /* Get some new tags to keep the communication clean */
668   ierr = PetscObjectGetNewTag((PetscObject)C,&tag1);CHKERRQ(ierr);
669   ierr = PetscObjectGetNewTag((PetscObject)C,&tag2);CHKERRQ(ierr);
670   ierr = PetscObjectGetNewTag((PetscObject)C,&tag3);CHKERRQ(ierr);
671 
672   /* Check if the col indices are sorted */
673   for (i=0; i<ismax; i++) {
674     ierr = ISSorted(iscol[i],&sorted);CHKERRQ(ierr);
675     if (!sorted) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"IS is not sorted");
676   }
677 
678 #if defined(PETSC_USE_CTABLE)
679   ierr = PetscMalloc((2*ismax+1)*(sizeof(PetscInt*)+ sizeof(PetscInt)),&irow);CHKERRQ(ierr);
680 #else
681   ierr = PetscMalloc((2*ismax+1)*(sizeof(PetscInt*)+ sizeof(PetscInt)) +  (Mbs+1)*sizeof(PetscInt),&irow);CHKERRQ(ierr);
682 #endif
683 
684   icol = irow + ismax;
685   nrow = (PetscInt*)(icol + ismax);
686   ncol = nrow + ismax;
687 #if !defined (PETSC_USE_CTABLE)
688   rtable = ncol + ismax;
689   /* Create a lookup table for the mapping: row -> proc */
690   for (i=0,j=0; i<size; i++) {
691     jmax = c->rowners[i+1];
692     for (; j<jmax; j++) {
693       rtable[j] = i;
694     }
695   }
696 #endif
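  /* With PETSC_USE_CTABLE the row -> proc lookup is instead done on demand by
     PetscGetProc() on c->rangebs, avoiding the Mbs-sized rtable built above */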
697 
698   for (i=0; i<ismax; i++) {
699     ierr = ISGetIndices(isrow[i],&irow[i]);CHKERRQ(ierr);
700     ierr = ISGetIndices(iscol[i],&icol[i]);CHKERRQ(ierr);
701     ierr = ISGetLocalSize(isrow[i],&nrow[i]);CHKERRQ(ierr);
702     ierr = ISGetLocalSize(iscol[i],&ncol[i]);CHKERRQ(ierr);
703   }
704 
705   /* Evaluate communication - mesg to whom, length of mesg, and buffer space
706      required. Based on this, buffers are allocated, and data copied into them */
707   ierr = PetscMalloc(size*2*sizeof(PetscInt) + size*2*sizeof(PetscMPIInt),&w1);CHKERRQ(ierr); /* mesg size */
708   w2   = w1 + size;               /* if w2[i] marked, then a message to proc i*/
709   w3   = (PetscInt*)(w2 + size);  /* no of IS that need to be sent to proc i */
710   w4   = w3 + size;                /* temp work space used in determining w1, w2, w3 */
711   ierr = PetscMemzero(w1,size*sizeof(PetscInt)+2*size*sizeof(PetscMPIInt));CHKERRQ(ierr); /* initialise work vector*/
712   for (i=0; i<ismax; i++) {
713     ierr   = PetscMemzero(w4,size*sizeof(PetscInt));CHKERRQ(ierr); /* initialise work vector*/
714     jmax   = nrow[i];
715     irow_i = irow[i];
716     for (j=0; j<jmax; j++) {
717       row  = irow_i[j];
718 #if defined (PETSC_USE_CTABLE)
719       ierr = PetscGetProc(row,size,c->rangebs,&proc);CHKERRQ(ierr);
720 #else
721       proc = rtable[row];
722 #endif
723       w4[proc]++;
724     }
725     for (j=0; j<size; j++) {
726       if (w4[j]) { w1[j] += w4[j];  w3[j]++;}
727     }
728   }
729 
730   nrqs     = 0;              /* no of outgoing messages */
731   msz      = 0;              /* total mesg length for all procs */
732   w1[rank] = 0;              /* no mesg sent to itself */
733   w3[rank] = 0;
734   for (i=0; i<size; i++) {
735     if (w1[i])  { w2[i] = 1; nrqs++;} /* there exists a message to proc i */
736   }
737   ierr = PetscMalloc((nrqs+1)*sizeof(PetscInt),&pa);CHKERRQ(ierr); /*(proc -array)*/
738   for (i=0,j=0; i<size; i++) {
739     if (w1[i]) { pa[j] = i; j++; }
740   }
741 
742   /* Each message would have a header = 1 + 2*(no of IS) + data */
743   for (i=0; i<nrqs; i++) {
744     j     = pa[i];
745     w1[j] += w2[j] + 2* w3[j];
746     msz   += w1[j];
747   }
748 
749   /* Determine the number of messages to expect, their lengths, and the from-ids */
750   ierr = PetscGatherNumberOfMessages(comm,w2,w1,&nrqr);CHKERRQ(ierr);
751   ierr = PetscGatherMessageLengths(comm,nrqs,nrqr,w1,&onodes1,&olengths1);CHKERRQ(ierr);
752 
753   /* Now post the Irecvs corresponding to these messages */
754   ierr = PetscPostIrecvInt(comm,tag0,nrqr,onodes1,olengths1,&rbuf1,&r_waits1);CHKERRQ(ierr);
755 
756   ierr = PetscFree(onodes1);CHKERRQ(ierr);
757   ierr = PetscFree(olengths1);CHKERRQ(ierr);
758 
759   /* Allocate Memory for outgoing messages */
760   ierr = PetscMalloc(2*size*sizeof(PetscInt*) + 2*msz*sizeof(PetscInt) + size*sizeof(PetscInt),&sbuf1);CHKERRQ(ierr);
761   ptr  = sbuf1 + size;   /* Pointers to the data in outgoing buffers */
762   ierr = PetscMemzero(sbuf1,2*size*sizeof(PetscInt*));CHKERRQ(ierr);
763   /* allocate memory for outgoing data + buf to receive the first reply */
764   tmp  = (PetscInt*)(ptr + size);
765   ctr  = tmp + 2*msz;
766 
767   {
768     PetscInt *iptr = tmp,ict = 0;
769     for (i=0; i<nrqs; i++) {
770       j         = pa[i];
771       iptr     += ict;
772       sbuf1[j]  = iptr;
773       ict       = w1[j];
774     }
775   }
776 
777   /* Form the outgoing messages */
778   /* Initialise the header space */
779   for (i=0; i<nrqs; i++) {
780     j           = pa[i];
781     sbuf1[j][0] = 0;
782     ierr        = PetscMemzero(sbuf1[j]+1,2*w3[j]*sizeof(PetscInt));CHKERRQ(ierr);
783     ptr[j]      = sbuf1[j] + 2*w3[j] + 1;
784   }
785 
786   /* Parse the isrow and copy data into outbuf */
787   for (i=0; i<ismax; i++) {
788     ierr   = PetscMemzero(ctr,size*sizeof(PetscInt));CHKERRQ(ierr);
789     irow_i = irow[i];
790     jmax   = nrow[i];
791     for (j=0; j<jmax; j++) {  /* parse the indices of each IS */
792       row  = irow_i[j];
793 #if defined (PETSC_USE_CTABLE)
794       ierr = PetscGetProc(row,size,c->rangebs,&proc);CHKERRQ(ierr);
795 #else
796       proc = rtable[row];
797 #endif
798       if (proc != rank) { /* copy to the outgoing buf*/
799         ctr[proc]++;
800         *ptr[proc] = row;
801         ptr[proc]++;
802       }
803     }
804     /* Update the headers for the current IS */
805     for (j=0; j<size; j++) { /* Can Optimise this loop too */
806       if ((ctr_j = ctr[j])) {
807         sbuf1_j        = sbuf1[j];
808         k              = ++sbuf1_j[0];
809         sbuf1_j[2*k]   = ctr_j;
810         sbuf1_j[2*k-1] = i;
811       }
812     }
813   }
814 
815   /*  Now  post the sends */
816   ierr = PetscMalloc((nrqs+1)*sizeof(MPI_Request),&s_waits1);CHKERRQ(ierr);
817   for (i=0; i<nrqs; ++i) {
818     j = pa[i];
819     ierr = MPI_Isend(sbuf1[j],w1[j],MPIU_INT,j,tag0,comm,s_waits1+i);CHKERRQ(ierr);
820   }
821 
822   /* Post Receives to capture the buffer size */
823   ierr     = PetscMalloc((nrqs+1)*sizeof(MPI_Request),&r_waits2);CHKERRQ(ierr);
824   ierr     = PetscMalloc((nrqs+1)*sizeof(PetscInt*),&rbuf2);CHKERRQ(ierr);
825   rbuf2[0] = tmp + msz;
826   for (i=1; i<nrqs; ++i) {
827     j        = pa[i];
828     rbuf2[i] = rbuf2[i-1]+w1[pa[i-1]];
829   }
830   for (i=0; i<nrqs; ++i) {
831     j    = pa[i];
832     ierr = MPI_Irecv(rbuf2[i],w1[j],MPIU_INT,j,tag1,comm,r_waits2+i);CHKERRQ(ierr);
833   }
834 
835   /* Send to other procs the buf size they should allocate */
836 
837   /* Receive messages*/
838   ierr       = PetscMalloc((nrqr+1)*sizeof(MPI_Request),&s_waits2);CHKERRQ(ierr);
839   ierr       = PetscMalloc((nrqr+1)*sizeof(MPI_Status),&r_status1);CHKERRQ(ierr);
840   ierr       = PetscMalloc(2*nrqr*sizeof(PetscInt) + (nrqr+1)*sizeof(PetscInt*),&sbuf2);CHKERRQ(ierr);
841   req_size   = (PetscInt*)(sbuf2 + nrqr);
842   req_source = req_size + nrqr;
843 
844   {
845     Mat_SeqBAIJ *sA = (Mat_SeqBAIJ*)c->A->data,*sB = (Mat_SeqBAIJ*)c->B->data;
846     PetscInt    *sAi = sA->i,*sBi = sB->i,id,*sbuf2_i;
847 
848     for (i=0; i<nrqr; ++i) {
849       ierr = MPI_Waitany(nrqr,r_waits1,&idex,r_status1+i);CHKERRQ(ierr);
850       req_size[idex] = 0;
851       rbuf1_i         = rbuf1[idex];
852       start           = 2*rbuf1_i[0] + 1;
853       ierr            = MPI_Get_count(r_status1+i,MPIU_INT,&end);CHKERRQ(ierr);
854       ierr            = PetscMalloc(end*sizeof(PetscInt),&sbuf2[idex]);CHKERRQ(ierr);
855       sbuf2_i         = sbuf2[idex];
856       for (j=start; j<end; j++) {
857         id               = rbuf1_i[j] - rstart;
858         ncols            = sAi[id+1] - sAi[id] + sBi[id+1] - sBi[id];
859         sbuf2_i[j]       = ncols;
860         req_size[idex] += ncols;
861       }
862       req_source[idex] = r_status1[i].MPI_SOURCE;
863       /* form the header */
864       sbuf2_i[0]   = req_size[idex];
865       for (j=1; j<start; j++) { sbuf2_i[j] = rbuf1_i[j]; }
866       ierr = MPI_Isend(sbuf2_i,end,MPIU_INT,req_source[idex],tag1,comm,s_waits2+i);CHKERRQ(ierr);
867     }
868   }
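  /* Reply protocol: each request is answered in three phases - the per-row column
     counts just sent with tag1, then the column indices with tag2 (sbuf_aj below),
     and finally the numerical blocks with tag3 (sbuf_aa) */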
869   ierr = PetscFree(r_status1);CHKERRQ(ierr);
870   ierr = PetscFree(r_waits1);CHKERRQ(ierr);
871 
872   /*  recv buffer sizes */
873   /* Receive messages*/
874 
875   ierr = PetscMalloc((nrqs+1)*sizeof(PetscInt*),&rbuf3);CHKERRQ(ierr);
876   ierr = PetscMalloc((nrqs+1)*sizeof(MatScalar*),&rbuf4);CHKERRQ(ierr);
877   ierr = PetscMalloc((nrqs+1)*sizeof(MPI_Request),&r_waits3);CHKERRQ(ierr);
878   ierr = PetscMalloc((nrqs+1)*sizeof(MPI_Request),&r_waits4);CHKERRQ(ierr);
879   ierr = PetscMalloc((nrqs+1)*sizeof(MPI_Status),&r_status2);CHKERRQ(ierr);
880 
881   for (i=0; i<nrqs; ++i) {
882     ierr = MPI_Waitany(nrqs,r_waits2,&idex,r_status2+i);CHKERRQ(ierr);
883     ierr = PetscMalloc(rbuf2[idex][0]*sizeof(PetscInt),&rbuf3[idex]);CHKERRQ(ierr);
884     ierr = PetscMalloc(rbuf2[idex][0]*bs2*sizeof(MatScalar),&rbuf4[idex]);CHKERRQ(ierr);
885     ierr = MPI_Irecv(rbuf3[idex],rbuf2[idex][0],MPIU_INT,r_status2[i].MPI_SOURCE,tag2,comm,r_waits3+idex);CHKERRQ(ierr);
886     ierr = MPI_Irecv(rbuf4[idex],rbuf2[idex][0]*bs2,MPIU_MATSCALAR,r_status2[i].MPI_SOURCE,tag3,comm,r_waits4+idex);CHKERRQ(ierr);
887   }
888   ierr = PetscFree(r_status2);CHKERRQ(ierr);
889   ierr = PetscFree(r_waits2);CHKERRQ(ierr);
890 
891   /* Wait on sends1 and sends2 */
892   ierr = PetscMalloc((nrqs+1)*sizeof(MPI_Status),&s_status1);CHKERRQ(ierr);
893   ierr = PetscMalloc((nrqr+1)*sizeof(MPI_Status),&s_status2);CHKERRQ(ierr);
894 
895   if (nrqs) {ierr = MPI_Waitall(nrqs,s_waits1,s_status1);CHKERRQ(ierr);}
896   if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits2,s_status2);CHKERRQ(ierr);}
897   ierr = PetscFree(s_status1);CHKERRQ(ierr);
898   ierr = PetscFree(s_status2);CHKERRQ(ierr);
899   ierr = PetscFree(s_waits1);CHKERRQ(ierr);
900   ierr = PetscFree(s_waits2);CHKERRQ(ierr);
901 
902   /* Now allocate buffers for a->j, and send them off */
903   ierr = PetscMalloc((nrqr+1)*sizeof(PetscInt*),&sbuf_aj);CHKERRQ(ierr);
904   for (i=0,j=0; i<nrqr; i++) j += req_size[i];
905   ierr = PetscMalloc((j+1)*sizeof(PetscInt),&sbuf_aj[0]);CHKERRQ(ierr);
906   for (i=1; i<nrqr; i++)  sbuf_aj[i] = sbuf_aj[i-1] + req_size[i-1];
907 
908   ierr = PetscMalloc((nrqr+1)*sizeof(MPI_Request),&s_waits3);CHKERRQ(ierr);
909   {
910      for (i=0; i<nrqr; i++) {
911       rbuf1_i   = rbuf1[i];
912       sbuf_aj_i = sbuf_aj[i];
913       ct1       = 2*rbuf1_i[0] + 1;
914       ct2       = 0;
915       for (j=1,max1=rbuf1_i[0]; j<=max1; j++) {
916         kmax = rbuf1[i][2*j];
917         for (k=0; k<kmax; k++,ct1++) {
918           row    = rbuf1_i[ct1] - rstart;
919           nzA    = a_i[row+1] - a_i[row];     nzB = b_i[row+1] - b_i[row];
920           ncols  = nzA + nzB;
921           cworkA = a_j + a_i[row]; cworkB = b_j + b_i[row];
922 
923           /* load the column indices for this row into cols*/
924           cols  = sbuf_aj_i + ct2;
925           for (l=0; l<nzB; l++) {
926             if ((ctmp = bmap[cworkB[l]]) < cstart)  cols[l] = ctmp;
927             else break;
928           }
929           imark = l;
930           for (l=0; l<nzA; l++)   cols[imark+l] = cstart + cworkA[l];
931           for (l=imark; l<nzB; l++) cols[nzA+l] = bmap[cworkB[l]];
932           ct2 += ncols;
933         }
934       }
935       ierr = MPI_Isend(sbuf_aj_i,req_size[i],MPIU_INT,req_source[i],tag2,comm,s_waits3+i);CHKERRQ(ierr);
936     }
937   }
938   ierr = PetscMalloc((nrqs+1)*sizeof(MPI_Status),&r_status3);CHKERRQ(ierr);
939   ierr = PetscMalloc((nrqr+1)*sizeof(MPI_Status),&s_status3);CHKERRQ(ierr);
940 
941   /* Allocate buffers for a->a, and send them off */
942   ierr = PetscMalloc((nrqr+1)*sizeof(MatScalar *),&sbuf_aa);CHKERRQ(ierr);
943   for (i=0,j=0; i<nrqr; i++) j += req_size[i];
944   ierr = PetscMalloc((j+1)*bs2*sizeof(MatScalar),&sbuf_aa[0]);CHKERRQ(ierr);
945   for (i=1; i<nrqr; i++)  sbuf_aa[i] = sbuf_aa[i-1] + req_size[i-1]*bs2;
946 
947   ierr = PetscMalloc((nrqr+1)*sizeof(MPI_Request),&s_waits4);CHKERRQ(ierr);
948   {
949     for (i=0; i<nrqr; i++) {
950       rbuf1_i   = rbuf1[i];
951       sbuf_aa_i = sbuf_aa[i];
952       ct1       = 2*rbuf1_i[0]+1;
953       ct2       = 0;
954       for (j=1,max1=rbuf1_i[0]; j<=max1; j++) {
955         kmax = rbuf1_i[2*j];
956         for (k=0; k<kmax; k++,ct1++) {
957           row    = rbuf1_i[ct1] - rstart;
958           nzA    = a_i[row+1] - a_i[row];     nzB = b_i[row+1] - b_i[row];
959           ncols  = nzA + nzB;
960           cworkA = a_j + a_i[row];     cworkB = b_j + b_i[row];
961           vworkA = a_a + a_i[row]*bs2; vworkB = b_a + b_i[row]*bs2;
962 
963           /* load the column values for this row into vals*/
964           vals  = sbuf_aa_i+ct2*bs2;
965           for (l=0; l<nzB; l++) {
966             if ((bmap[cworkB[l]]) < cstart) {
967               ierr = PetscMemcpy(vals+l*bs2,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
968             }
969             else break;
970           }
971           imark = l;
972           for (l=0; l<nzA; l++) {
973             ierr = PetscMemcpy(vals+(imark+l)*bs2,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
974           }
975           for (l=imark; l<nzB; l++) {
976             ierr = PetscMemcpy(vals+(nzA+l)*bs2,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
977           }
978           ct2 += ncols;
979         }
980       }
981       ierr = MPI_Isend(sbuf_aa_i,req_size[i]*bs2,MPIU_MATSCALAR,req_source[i],tag3,comm,s_waits4+i);CHKERRQ(ierr);
982     }
983   }
984   ierr = PetscMalloc((nrqs+1)*sizeof(MPI_Status),&r_status4);CHKERRQ(ierr);
985   ierr = PetscMalloc((nrqr+1)*sizeof(MPI_Status),&s_status4);CHKERRQ(ierr);
986   ierr = PetscFree(rbuf1);CHKERRQ(ierr);
987 
988   /* Form the matrix */
989   /* create col map */
990   {
991     const PetscInt *icol_i;
992 #if defined (PETSC_USE_CTABLE)
993     /* Create row map*/
994     ierr = PetscMalloc((1+ismax)*sizeof(PetscTable),&colmaps);CHKERRQ(ierr);
995     for (i=0; i<ismax; i++) {
996       ierr = PetscTableCreate(ncol[i]+1,&colmaps[i]);CHKERRQ(ierr);
997     }
998 #else
999     ierr    = PetscMalloc((1+ismax)*sizeof(PetscInt*)+ ismax*c->Nbs*sizeof(PetscInt),&cmap);CHKERRQ(ierr);
1000     cmap[0] = (PetscInt*)(cmap + ismax);
1001     ierr    = PetscMemzero(cmap[0],(1+ismax*c->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
1002     for (i=1; i<ismax; i++) { cmap[i] = cmap[i-1] + c->Nbs; }
1003 #endif
1004     for (i=0; i<ismax; i++) {
1005       jmax   = ncol[i];
1006       icol_i = icol[i];
1007 #if defined (PETSC_USE_CTABLE)
1008       lcol1_gcol1 = colmaps[i];
1009       for (j=0; j<jmax; j++) {
1010 	ierr = PetscTableAdd(lcol1_gcol1,icol_i[j]+1,j+1);CHKERRQ(ierr);
1011       }
1012 #else
1013       cmap_i = cmap[i];
1014       for (j=0; j<jmax; j++) {
1015         cmap_i[icol_i[j]] = j+1;
1016       }
1017 #endif
1018     }
1019   }
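  /* cmap[i][col] (or the colmaps[i] table) now stores localcol+1 for every global
     block column selected by iscol[i] and 0 (or a table miss) otherwise; the +1
     shift is undone later via "tcol - 1" when the columns are copied */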
1020 
1021   /* Create lens which is required for MatCreate... */
1022   for (i=0,j=0; i<ismax; i++) { j += nrow[i]; }
1023   ierr    = PetscMalloc((1+ismax)*sizeof(PetscInt*)+ j*sizeof(PetscInt),&lens);CHKERRQ(ierr);
1024   lens[0] = (PetscInt*)(lens + ismax);
1025   ierr    = PetscMemzero(lens[0],j*sizeof(PetscInt));CHKERRQ(ierr);
1026   for (i=1; i<ismax; i++) { lens[i] = lens[i-1] + nrow[i-1]; }
1027 
1028   /* Update lens from local data */
1029   for (i=0; i<ismax; i++) {
1030     jmax   = nrow[i];
1031 #if defined (PETSC_USE_CTABLE)
1032     lcol1_gcol1 = colmaps[i];
1033 #else
1034     cmap_i = cmap[i];
1035 #endif
1036     irow_i = irow[i];
1037     lens_i = lens[i];
1038     for (j=0; j<jmax; j++) {
1039       row  = irow_i[j];
1040 #if defined (PETSC_USE_CTABLE)
1041       ierr = PetscGetProc(row,size,c->rangebs,&proc);CHKERRQ(ierr);
1042 #else
1043       proc = rtable[row];
1044 #endif
1045       if (proc == rank) {
1046         /* Get indices from matA and then from matB */
1047         row    = row - rstart;
1048         nzA    = a_i[row+1] - a_i[row];     nzB = b_i[row+1] - b_i[row];
1049         cworkA =  a_j + a_i[row]; cworkB = b_j + b_i[row];
1050 #if defined (PETSC_USE_CTABLE)
1051        for (k=0; k<nzA; k++) {
1052 	 ierr = PetscTableFind(lcol1_gcol1,cstart+cworkA[k]+1,&tt);CHKERRQ(ierr);
1053           if (tt) { lens_i[j]++; }
1054         }
1055         for (k=0; k<nzB; k++) {
1056 	  ierr = PetscTableFind(lcol1_gcol1,bmap[cworkB[k]]+1,&tt);CHKERRQ(ierr);
1057           if (tt) { lens_i[j]++; }
1058         }
1059 #else
1060         for (k=0; k<nzA; k++) {
1061           if (cmap_i[cstart + cworkA[k]]) { lens_i[j]++; }
1062         }
1063         for (k=0; k<nzB; k++) {
1064           if (cmap_i[bmap[cworkB[k]]]) { lens_i[j]++; }
1065         }
1066 #endif
1067       }
1068     }
1069   }
1070 #if defined (PETSC_USE_CTABLE)
1071   /* Create row map*/
1072   ierr = PetscMalloc((1+ismax)*sizeof(PetscTable),&rowmaps);CHKERRQ(ierr);
1073   for (i=0; i<ismax; i++){
1074     ierr = PetscTableCreate(nrow[i]+1,&rowmaps[i]);CHKERRQ(ierr);
1075   }
1076 #else
1077   /* Create row map*/
1078   ierr    = PetscMalloc((1+ismax)*sizeof(PetscInt*)+ ismax*Mbs*sizeof(PetscInt),&rmap);CHKERRQ(ierr);
1079   rmap[0] = (PetscInt*)(rmap + ismax);
1080   ierr    = PetscMemzero(rmap[0],ismax*Mbs*sizeof(PetscInt));CHKERRQ(ierr);
1081   for (i=1; i<ismax; i++) { rmap[i] = rmap[i-1] + Mbs;}
1082 #endif
1083   for (i=0; i<ismax; i++) {
1084     irow_i = irow[i];
1085     jmax   = nrow[i];
1086 #if defined (PETSC_USE_CTABLE)
1087     lrow1_grow1 = rowmaps[i];
1088     for (j=0; j<jmax; j++) {
1089       ierr = PetscTableAdd(lrow1_grow1,irow_i[j]+1,j+1);CHKERRQ(ierr);
1090     }
1091 #else
1092     rmap_i = rmap[i];
1093     for (j=0; j<jmax; j++) {
1094       rmap_i[irow_i[j]] = j;
1095     }
1096 #endif
1097   }
1098 
1099   /* Update lens from offproc data */
1100   {
1101     PetscInt    *rbuf2_i,*rbuf3_i,*sbuf1_i;
1102     PetscMPIInt ii;
1103 
1104     for (tmp2=0; tmp2<nrqs; tmp2++) {
1105       ierr    = MPI_Waitany(nrqs,r_waits3,&ii,r_status3+tmp2);CHKERRQ(ierr);
1106       idex   = pa[ii];
1107       sbuf1_i = sbuf1[idex];
1108       jmax    = sbuf1_i[0];
1109       ct1     = 2*jmax+1;
1110       ct2     = 0;
1111       rbuf2_i = rbuf2[ii];
1112       rbuf3_i = rbuf3[ii];
1113       for (j=1; j<=jmax; j++) {
1114         is_no   = sbuf1_i[2*j-1];
1115         max1    = sbuf1_i[2*j];
1116         lens_i  = lens[is_no];
1117 #if defined (PETSC_USE_CTABLE)
1118 	lcol1_gcol1 = colmaps[is_no];
1119 	lrow1_grow1 = rowmaps[is_no];
1120 #else
1121         cmap_i  = cmap[is_no];
1122 	rmap_i  = rmap[is_no];
1123 #endif
1124         for (k=0; k<max1; k++,ct1++) {
1125 #if defined (PETSC_USE_CTABLE)
1126 	  ierr = PetscTableFind(lrow1_grow1,sbuf1_i[ct1]+1,&row);CHKERRQ(ierr);
1127           row--;
1128           if (row < 0) { SETERRQ(PETSC_ERR_PLIB,"row not found in table"); }
1129 #else
1130           row  = rmap_i[sbuf1_i[ct1]]; /* the val in the new matrix to be */
1131 #endif
1132           max2 = rbuf2_i[ct1];
1133           for (l=0; l<max2; l++,ct2++) {
1134 #if defined (PETSC_USE_CTABLE)
1135 	    ierr = PetscTableFind(lcol1_gcol1,rbuf3_i[ct2]+1,&tt);CHKERRQ(ierr);
1136 	    if (tt) {
1137               lens_i[row]++;
1138             }
1139 #else
1140             if (cmap_i[rbuf3_i[ct2]]) {
1141               lens_i[row]++;
1142             }
1143 #endif
1144           }
1145         }
1146       }
1147     }
1148   }
1149   ierr = PetscFree(r_status3);CHKERRQ(ierr);
1150   ierr = PetscFree(r_waits3);CHKERRQ(ierr);
1151   if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits3,s_status3);CHKERRQ(ierr);}
1152   ierr = PetscFree(s_status3);CHKERRQ(ierr);
1153   ierr = PetscFree(s_waits3);CHKERRQ(ierr);
1154 
1155   /* Create the submatrices */
1156   if (scall == MAT_REUSE_MATRIX) {
1157     /*
1158         Assumes new rows are the same length as the old rows, hence a potential bug!
1159     */
1160     for (i=0; i<ismax; i++) {
1161       mat = (Mat_SeqBAIJ *)(submats[i]->data);
1162       if ((mat->mbs != nrow[i]) || (mat->nbs != ncol[i] || C->rmap->bs != bs)) {
1163         SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. wrong size");
1164       }
1165       ierr = PetscMemcmp(mat->ilen,lens[i],mat->mbs *sizeof(PetscInt),&flag);CHKERRQ(ierr);
1166       if (!flag) {
1167         SETERRQ(PETSC_ERR_ARG_INCOMP,"Cannot reuse matrix. wrong no of nonzeros");
1168       }
1169       /* Initialize matrix as if empty */
1170       ierr = PetscMemzero(mat->ilen,mat->mbs*sizeof(PetscInt));CHKERRQ(ierr);
1171       submats[i]->factor = C->factor;
1172     }
1173   } else {
1174     for (i=0; i<ismax; i++) {
1175       ierr = MatCreate(PETSC_COMM_SELF,submats+i);CHKERRQ(ierr);
1176       ierr = MatSetSizes(submats[i],nrow[i]*bs,ncol[i]*bs,nrow[i]*bs,ncol[i]*bs);CHKERRQ(ierr);
1177       ierr = MatSetType(submats[i],((PetscObject)A)->type_name);CHKERRQ(ierr);
1178       ierr = MatSeqBAIJSetPreallocation(submats[i],C->rmap->bs,0,lens[i]);CHKERRQ(ierr);
1179       ierr = MatSeqSBAIJSetPreallocation(submats[i],C->rmap->bs,0,lens[i]);CHKERRQ(ierr);
1180     }
1181   }
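  /* Each sequential submatrix is preallocated with the exact block-row lengths in
     lens[i]; both the BAIJ and SBAIJ preallocation routines are called because this
     code is shared with the SBAIJ case, and the call that does not match the actual
     matrix type is expected to be a no-op */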
1182 
1183   /* Assemble the matrices */
1184   /* First assemble the local rows */
1185   {
1186     PetscInt       ilen_row,*imat_ilen,*imat_j,*imat_i;
1187     MatScalar *imat_a;
1188 
1189     for (i=0; i<ismax; i++) {
1190       mat       = (Mat_SeqBAIJ*)submats[i]->data;
1191       imat_ilen = mat->ilen;
1192       imat_j    = mat->j;
1193       imat_i    = mat->i;
1194       imat_a    = mat->a;
1195 
1196 #if defined (PETSC_USE_CTABLE)
1197       lcol1_gcol1 = colmaps[i];
1198       lrow1_grow1 = rowmaps[i];
1199 #else
1200       cmap_i    = cmap[i];
1201       rmap_i    = rmap[i];
1202 #endif
1203       irow_i    = irow[i];
1204       jmax      = nrow[i];
1205       for (j=0; j<jmax; j++) {
1206         row      = irow_i[j];
1207 #if defined (PETSC_USE_CTABLE)
1208 	ierr = PetscGetProc(row,size,c->rangebs,&proc);CHKERRQ(ierr);
1209 #else
1210 	proc = rtable[row];
1211 #endif
1212         if (proc == rank) {
1213           row      = row - rstart;
1214           nzA      = a_i[row+1] - a_i[row];
1215           nzB      = b_i[row+1] - b_i[row];
1216           cworkA   = a_j + a_i[row];
1217           cworkB   = b_j + b_i[row];
1218           vworkA   = a_a + a_i[row]*bs2;
1219           vworkB   = b_a + b_i[row]*bs2;
1220 #if defined (PETSC_USE_CTABLE)
1221 	  ierr = PetscTableFind(lrow1_grow1,row+rstart+1,&row);CHKERRQ(ierr);
1222           row--;
1223           if (row < 0) { SETERRQ(PETSC_ERR_PLIB,"row not found in table"); }
1224 #else
1225           row      = rmap_i[row + rstart];
1226 #endif
1227           mat_i    = imat_i[row];
1228           mat_a    = imat_a + mat_i*bs2;
1229           mat_j    = imat_j + mat_i;
1230           ilen_row = imat_ilen[row];
1231 
1232           /* load the column indices for this row into cols*/
1233           for (l=0; l<nzB; l++) {
1234 	    if ((ctmp = bmap[cworkB[l]]) < cstart) {
1235 #if defined (PETSC_USE_CTABLE)
1236 	      ierr = PetscTableFind(lcol1_gcol1,ctmp+1,&tcol);CHKERRQ(ierr);
1237 	      if (tcol) {
1238 #else
1239               if ((tcol = cmap_i[ctmp])) {
1240 #endif
1241                 *mat_j++ = tcol - 1;
1242                 ierr     = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1243                 mat_a   += bs2;
1244                 ilen_row++;
1245               }
1246             } else break;
1247           }
1248           imark = l;
1249           for (l=0; l<nzA; l++) {
1250 #if defined (PETSC_USE_CTABLE)
1251 	    ierr = PetscTableFind(lcol1_gcol1,cstart+cworkA[l]+1,&tcol);CHKERRQ(ierr);
1252 	    if (tcol) {
1253 #else
1254 	    if ((tcol = cmap_i[cstart + cworkA[l]])) {
1255 #endif
1256               *mat_j++ = tcol - 1;
1257               ierr     = PetscMemcpy(mat_a,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1258               mat_a   += bs2;
1259               ilen_row++;
1260             }
1261           }
1262           for (l=imark; l<nzB; l++) {
1263 #if defined (PETSC_USE_CTABLE)
1264 	    ierr = PetscTableFind(lcol1_gcol1,bmap[cworkB[l]]+1,&tcol);CHKERRQ(ierr);
1265 	    if (tcol) {
1266 #else
1267             if ((tcol = cmap_i[bmap[cworkB[l]]])) {
1268 #endif
1269               *mat_j++ = tcol - 1;
1270               ierr     = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1271               mat_a   += bs2;
1272               ilen_row++;
1273             }
1274           }
1275           imat_ilen[row] = ilen_row;
1276         }
1277       }
1278 
1279     }
1280   }
1281 
1282   /*   Now assemble the off proc rows*/
1283   {
1284     PetscInt    *sbuf1_i,*rbuf2_i,*rbuf3_i,*imat_ilen,ilen;
1285     PetscInt    *imat_j,*imat_i;
1286     MatScalar   *imat_a,*rbuf4_i;
1287     PetscMPIInt ii;
1288 
1289     for (tmp2=0; tmp2<nrqs; tmp2++) {
1290       ierr    = MPI_Waitany(nrqs,r_waits4,&ii,r_status4+tmp2);CHKERRQ(ierr);
1291       idex   = pa[ii];
1292       sbuf1_i = sbuf1[idex];
1293       jmax    = sbuf1_i[0];
1294       ct1     = 2*jmax + 1;
1295       ct2     = 0;
1296       rbuf2_i = rbuf2[ii];
1297       rbuf3_i = rbuf3[ii];
1298       rbuf4_i = rbuf4[ii];
1299       for (j=1; j<=jmax; j++) {
1300         is_no     = sbuf1_i[2*j-1];
1301 #if defined (PETSC_USE_CTABLE)
1302 	lrow1_grow1 = rowmaps[is_no];
1303 	lcol1_gcol1 = colmaps[is_no];
1304 #else
1305         rmap_i    = rmap[is_no];
1306         cmap_i    = cmap[is_no];
1307 #endif
1308         mat       = (Mat_SeqBAIJ*)submats[is_no]->data;
1309         imat_ilen = mat->ilen;
1310         imat_j    = mat->j;
1311         imat_i    = mat->i;
1312         imat_a    = mat->a;
1313         max1      = sbuf1_i[2*j];
1314         for (k=0; k<max1; k++,ct1++) {
1315           row   = sbuf1_i[ct1];
1316 #if defined (PETSC_USE_CTABLE)
1317 	  ierr = PetscTableFind(lrow1_grow1,row+1,&row);CHKERRQ(ierr);
1318           row--;
1319           if(row < 0) { SETERRQ(PETSC_ERR_PLIB,"row not found in table"); }
1320 #else
1321           row   = rmap_i[row];
1322 #endif
1323           ilen  = imat_ilen[row];
1324           mat_i = imat_i[row];
1325           mat_a = imat_a + mat_i*bs2;
1326           mat_j = imat_j + mat_i;
1327           max2 = rbuf2_i[ct1];
1328           for (l=0; l<max2; l++,ct2++) {
1329 #if defined (PETSC_USE_CTABLE)
1330 	    ierr = PetscTableFind(lcol1_gcol1,rbuf3_i[ct2]+1,&tcol);CHKERRQ(ierr);
1331 	    if (tcol) {
1332 #else
1333 	    if ((tcol = cmap_i[rbuf3_i[ct2]])) {
1334 #endif
1335               *mat_j++    = tcol - 1;
1336               /* *mat_a++= rbuf4_i[ct2]; */
1337               ierr        = PetscMemcpy(mat_a,rbuf4_i+ct2*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1338               mat_a      += bs2;
1339               ilen++;
1340             }
1341           }
1342           imat_ilen[row] = ilen;
1343         }
1344       }
1345     }
1346   }
1347   ierr = PetscFree(r_status4);CHKERRQ(ierr);
1348   ierr = PetscFree(r_waits4);CHKERRQ(ierr);
1349   if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits4,s_status4);CHKERRQ(ierr);}
1350   ierr = PetscFree(s_waits4);CHKERRQ(ierr);
1351   ierr = PetscFree(s_status4);CHKERRQ(ierr);
1352 
1353   /* Restore the indices */
1354   for (i=0; i<ismax; i++) {
1355     ierr = ISRestoreIndices(isrow[i],irow+i);CHKERRQ(ierr);
1356     ierr = ISRestoreIndices(iscol[i],icol+i);CHKERRQ(ierr);
1357   }
1358 
1359   /* Destroy allocated memory */
1360   ierr = PetscFree(irow);CHKERRQ(ierr);
1361   ierr = PetscFree(w1);CHKERRQ(ierr);
1362   ierr = PetscFree(pa);CHKERRQ(ierr);
1363 
1364   ierr = PetscFree(sbuf1);CHKERRQ(ierr);
1365   ierr = PetscFree(rbuf2);CHKERRQ(ierr);
1366   for (i=0; i<nrqr; ++i) {
1367     ierr = PetscFree(sbuf2[i]);CHKERRQ(ierr);
1368   }
1369   for (i=0; i<nrqs; ++i) {
1370     ierr = PetscFree(rbuf3[i]);CHKERRQ(ierr);
1371     ierr = PetscFree(rbuf4[i]);CHKERRQ(ierr);
1372   }
1373 
1374   ierr = PetscFree(sbuf2);CHKERRQ(ierr);
1375   ierr = PetscFree(rbuf3);CHKERRQ(ierr);
1376   ierr = PetscFree(rbuf4);CHKERRQ(ierr);
1377   ierr = PetscFree(sbuf_aj[0]);CHKERRQ(ierr);
1378   ierr = PetscFree(sbuf_aj);CHKERRQ(ierr);
1379   ierr = PetscFree(sbuf_aa[0]);CHKERRQ(ierr);
1380   ierr = PetscFree(sbuf_aa);CHKERRQ(ierr);
1381 
1382 #if defined (PETSC_USE_CTABLE)
1383   for (i=0; i<ismax; i++){
1384     ierr = PetscTableDestroy(rowmaps[i]);CHKERRQ(ierr);
1385     ierr = PetscTableDestroy(colmaps[i]);CHKERRQ(ierr);
1386   }
1387   ierr = PetscFree(colmaps);CHKERRQ(ierr);
1388   ierr = PetscFree(rowmaps);CHKERRQ(ierr);
1389 #else
1390   ierr = PetscFree(rmap);CHKERRQ(ierr);
1391   ierr = PetscFree(cmap);CHKERRQ(ierr);
1392 #endif
1393   ierr = PetscFree(lens);CHKERRQ(ierr);
1394 
1395   for (i=0; i<ismax; i++) {
1396     ierr = MatAssemblyBegin(submats[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1397     ierr = MatAssemblyEnd(submats[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1398   }
1399   PetscFunctionReturn(0);
1400 }
1401 
1402