xref: /petsc/src/mat/impls/baij/mpi/baijov.c (revision a906b49b2a5591d6d10cd79ed4ebe774a5561b3a)
1 
2 /*
3    Routines to compute overlapping regions of a parallel MPI matrix
4   and to find submatrices that are shared across processors.
5 */
6 #include <../src/mat/impls/baij/mpi/mpibaij.h>
7 #include <petscbt.h>
8 
9 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Local(Mat,PetscInt,char**,PetscInt*,PetscInt**);
10 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Receive(Mat,PetscInt,PetscInt**,PetscInt**,PetscInt*);
11 extern PetscErrorCode MatGetRow_MPIBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**);
12 extern PetscErrorCode MatRestoreRow_MPIBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**);
13 
14 #undef __FUNCT__
15 #define __FUNCT__ "MatIncreaseOverlap_MPIBAIJ"
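/*
   A minimal usage sketch (an illustration only, not part of this file): callers use the
   public MatIncreaseOverlap() interface, which dispatches to this routine for MATMPIBAIJ
   matrices.  Assuming an assembled MATMPIBAIJ matrix C, one might write

       IS       is[1];
       PetscInt rstart,rend;

       ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);
       ierr = ISCreateStride(PETSC_COMM_SELF,rend-rstart,rstart,1,&is[0]);CHKERRQ(ierr);
       ierr = MatIncreaseOverlap(C,1,is,1);CHKERRQ(ierr);
       ierr = ISDestroy(&is[0]);CHKERRQ(ierr);

   after which is[0] also contains the rows that overlap the original rows by one level.
   The index sets hold unblocked (point) row indices; the routine below converts them to
   block indices, grows the overlap ov times, and converts back.
*/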
16 PetscErrorCode MatIncreaseOverlap_MPIBAIJ(Mat C,PetscInt imax,IS is[],PetscInt ov)
17 {
18   PetscErrorCode ierr;
19   PetscInt       i,N=C->cmap->N, bs=C->rmap->bs;
20   IS             *is_new;
21 
22   PetscFunctionBegin;
23   ierr = PetscMalloc1(imax,&is_new);CHKERRQ(ierr);
24   /* Convert the indices into block format */
25   ierr = ISCompressIndicesGeneral(N,C->rmap->n,bs,imax,is,is_new);CHKERRQ(ierr);
26   if (ov < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap specified\n");
27   for (i=0; i<ov; ++i) {
28     ierr = MatIncreaseOverlap_MPIBAIJ_Once(C,imax,is_new);CHKERRQ(ierr);
29   }
30   for (i=0; i<imax; i++) {ierr = ISDestroy(&is[i]);CHKERRQ(ierr);}
31   ierr = ISExpandIndicesGeneral(N,N,bs,imax,is_new,is);CHKERRQ(ierr);
32   for (i=0; i<imax; i++) {ierr = ISDestroy(&is_new[i]);CHKERRQ(ierr);}
33   ierr = PetscFree(is_new);CHKERRQ(ierr);
34   PetscFunctionReturn(0);
35 }
36 
37 /*
38   Sample message format:
39   If processor A wants processor B to process some elements corresponding
40   to index sets is[1] and is[5], the message looks like
41   mesg [0] = 2   (number of index sets in the message)
42   -----------
43   mesg [1] = 1              => is[1]
44   mesg [2] = size of is[1]  (number of indices of is[1] in this message)
45   -----------
46   mesg [3] = 5              => is[5]
47   mesg [4] = size of is[5]  (number of indices of is[5] in this message)
48   -----------
49   mesg [5]
50   mesg [n]  data(is[1])
51   -----------
52   mesg[n+1]
53   mesg[m]  data(is[5])
54   -----------
55 
56   Notes:
57   nrqs - number of requests sent (or to be sent out)
58   nrqr - number of requests received (which have to be, or have been, processed)
59 */
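/*
   A concrete example of the layout above (illustration only): if this process sends
   3 indices belonging to is[1] and 2 indices belonging to is[5] to some other process,
   the outgoing buffer holds

       { 2,  1, 3,  5, 2,  r0, r1, r2,  s0, s1 }

   i.e. mesg[0] = 2 index sets, the pairs (1,3) and (5,2) describing which IS and how many
   of its block-row indices follow, and then the 3 + 2 block-row indices themselves.
*/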
60 #undef __FUNCT__
61 #define __FUNCT__ "MatIncreaseOverlap_MPIBAIJ_Once"
62 PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Once(Mat C,PetscInt imax,IS is[])
63 {
64   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
65   const PetscInt **idx,*idx_i;
66   PetscInt       *n,*w3,*w4,**data,len;
67   PetscErrorCode ierr;
68   PetscMPIInt    size,rank,tag1,tag2,*w2,*w1,nrqr;
69   PetscInt       Mbs,i,j,k,**rbuf,row,proc=-1,nrqs,msz,**outdat,**ptr;
70   PetscInt       *ctr,*pa,*tmp,*isz,*isz1,**xdata,**rbuf2,*d_p;
71   PetscMPIInt    *onodes1,*olengths1,*onodes2,*olengths2;
72   PetscBT        *table;
73   MPI_Comm       comm;
74   MPI_Request    *s_waits1,*r_waits1,*s_waits2,*r_waits2;
75   MPI_Status     *s_status,*recv_status;
76   char           *t_p;
77 
78   PetscFunctionBegin;
79   ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
80   size = c->size;
81   rank = c->rank;
82   Mbs  = c->Mbs;
83 
84   ierr = PetscObjectGetNewTag((PetscObject)C,&tag1);CHKERRQ(ierr);
85   ierr = PetscObjectGetNewTag((PetscObject)C,&tag2);CHKERRQ(ierr);
86 
87   ierr = PetscMalloc2(imax+1,&idx,imax,&n);CHKERRQ(ierr);
88 
89   for (i=0; i<imax; i++) {
90     ierr = ISGetIndices(is[i],&idx[i]);CHKERRQ(ierr);
91     ierr = ISGetLocalSize(is[i],&n[i]);CHKERRQ(ierr);
92   }
93 
94   /* Evaluate communication - to whom messages are sent, their lengths, and the buffer
95      space required. Based on this, buffers are allocated and data is copied into them */
96   ierr = PetscMalloc4(size,&w1,size,&w2,size,&w3,size,&w4);CHKERRQ(ierr);
97   ierr = PetscMemzero(w1,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
98   ierr = PetscMemzero(w2,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
99   ierr = PetscMemzero(w3,size*sizeof(PetscInt));CHKERRQ(ierr);
100   for (i=0; i<imax; i++) {
101     ierr  = PetscMemzero(w4,size*sizeof(PetscInt));CHKERRQ(ierr); /* initialise work vector*/
102     idx_i = idx[i];
103     len   = n[i];
104     for (j=0; j<len; j++) {
105       row = idx_i[j];
106       if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index set cannot have negative entries");
107       ierr = PetscLayoutFindOwner(C->rmap,row*C->rmap->bs,&proc);CHKERRQ(ierr);
108       w4[proc]++;
109     }
110     for (j=0; j<size; j++) {
111       if (w4[j]) { w1[j] += w4[j]; w3[j]++;}
112     }
113   }
114 
115   nrqs     = 0;              /* no of outgoing messages */
116   msz      = 0;              /* total mesg length (for all procs) */
117   w1[rank] = 0;              /* no mesg sent to itself */
118   w3[rank] = 0;
119   for (i=0; i<size; i++) {
120     if (w1[i])  {w2[i] = 1; nrqs++;} /* there exists a message to proc i */
121   }
122   /* pa - is list of processors to communicate with */
123   ierr = PetscMalloc1((nrqs+1),&pa);CHKERRQ(ierr);
124   for (i=0,j=0; i<size; i++) {
125     if (w1[i]) {pa[j] = i; j++;}
126   }
127 
128   /* Each message would have a header = 1 + 2*(no of IS) + data */
129   for (i=0; i<nrqs; i++) {
130     j      = pa[i];
131     w1[j] += w2[j] + 2*w3[j];
132     msz   += w1[j];
133   }
134 
135   /* Determine the number of messages to expect, their lengths, and the from-ids */
136   ierr = PetscGatherNumberOfMessages(comm,w2,w1,&nrqr);CHKERRQ(ierr);
137   ierr = PetscGatherMessageLengths(comm,nrqs,nrqr,w1,&onodes1,&olengths1);CHKERRQ(ierr);
138 
139   /* Now post the Irecvs corresponding to these messages */
140   ierr = PetscPostIrecvInt(comm,tag1,nrqr,onodes1,olengths1,&rbuf,&r_waits1);CHKERRQ(ierr);
141 
142   /* Allocate Memory for outgoing messages */
143   ierr = PetscMalloc4(size,&outdat,size,&ptr,msz,&tmp,size,&ctr);CHKERRQ(ierr);
144   ierr = PetscMemzero(outdat,size*sizeof(PetscInt*));CHKERRQ(ierr);
145   ierr = PetscMemzero(ptr,size*sizeof(PetscInt*));CHKERRQ(ierr);
146   {
147     PetscInt *iptr = tmp,ict  = 0;
148     for (i=0; i<nrqs; i++) {
149       j         = pa[i];
150       iptr     +=  ict;
151       outdat[j] = iptr;
152       ict       = w1[j];
153     }
154   }
155 
156   /* Form the outgoing messages */
157   /*plug in the headers*/
158   for (i=0; i<nrqs; i++) {
159     j            = pa[i];
160     outdat[j][0] = 0;
161     ierr         = PetscMemzero(outdat[j]+1,2*w3[j]*sizeof(PetscInt));CHKERRQ(ierr);
162     ptr[j]       = outdat[j] + 2*w3[j] + 1;
163   }
164 
165   /* Memory for doing local proc's work*/
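  /*
     Sizing note: table[i] is a bit table with one bit per block row (hence
     (Mbs/PETSC_BITS_PER_BYTE+1) bytes each), and data[i] can hold at most Mbs block-row
     indices, so the single PetscCalloc5() below provides the storage for all imax index sets.
  */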
166   {
167     ierr = PetscCalloc5(imax,&table, imax,&data, imax,&isz, Mbs*imax,&d_p, (Mbs/PETSC_BITS_PER_BYTE+1)*imax,&t_p);CHKERRQ(ierr);
168 
169     for (i=0; i<imax; i++) {
170       table[i] = t_p + (Mbs/PETSC_BITS_PER_BYTE+1)*i;
171       data[i]  = d_p + (Mbs)*i;
172     }
173   }
174 
175   /* Parse the IS and update local tables and the outgoing buf with the data*/
176   {
177     PetscInt n_i,*data_i,isz_i,*outdat_j,ctr_j;
178     PetscBT  table_i;
179 
180     for (i=0; i<imax; i++) {
181       ierr    = PetscMemzero(ctr,size*sizeof(PetscInt));CHKERRQ(ierr);
182       n_i     = n[i];
183       table_i = table[i];
184       idx_i   = idx[i];
185       data_i  = data[i];
186       isz_i   = isz[i];
187       for (j=0; j<n_i; j++) {   /* parse the indices of each IS */
188         row  = idx_i[j];
189         ierr = PetscLayoutFindOwner(C->rmap,row*C->rmap->bs,&proc);CHKERRQ(ierr);
190         if (proc != rank) { /* copy to the outgoing buffer */
191           ctr[proc]++;
192           *ptr[proc] = row;
193           ptr[proc]++;
194         } else { /* Update the local table */
195           if (!PetscBTLookupSet(table_i,row)) data_i[isz_i++] = row;
196         }
197       }
198       /* Update the headers for the current IS */
199       for (j=0; j<size; j++) { /* Can Optimise this loop by using pa[] */
200         if ((ctr_j = ctr[j])) {
201           outdat_j        = outdat[j];
202           k               = ++outdat_j[0];
203           outdat_j[2*k]   = ctr_j;
204           outdat_j[2*k-1] = i;
205         }
206       }
207       isz[i] = isz_i;
208     }
209   }
210 
211   /*  Now  post the sends */
212   ierr = PetscMalloc1((nrqs+1),&s_waits1);CHKERRQ(ierr);
213   for (i=0; i<nrqs; ++i) {
214     j    = pa[i];
215     ierr = MPI_Isend(outdat[j],w1[j],MPIU_INT,j,tag1,comm,s_waits1+i);CHKERRQ(ierr);
216   }
217 
218   /* No longer need the original indices*/
219   for (i=0; i<imax; ++i) {
220     ierr = ISRestoreIndices(is[i],idx+i);CHKERRQ(ierr);
221   }
222   ierr = PetscFree2(idx,n);CHKERRQ(ierr);
223 
224   for (i=0; i<imax; ++i) {
225     ierr = ISDestroy(&is[i]);CHKERRQ(ierr);
226   }
227 
228   /* Do Local work*/
229   ierr = MatIncreaseOverlap_MPIBAIJ_Local(C,imax,table,isz,data);CHKERRQ(ierr);
230 
231   /* Receive messages*/
232   ierr = PetscMalloc1((nrqr+1),&recv_status);CHKERRQ(ierr);
233   if (nrqr) {ierr = MPI_Waitall(nrqr,r_waits1,recv_status);CHKERRQ(ierr);}
234 
235   ierr = PetscMalloc1((nrqs+1),&s_status);CHKERRQ(ierr);
236   if (nrqs) {ierr = MPI_Waitall(nrqs,s_waits1,s_status);CHKERRQ(ierr);}
237 
238   /* Phase 1 sends are complete - deallocate buffers */
239   ierr = PetscFree4(outdat,ptr,tmp,ctr);CHKERRQ(ierr);
240   ierr = PetscFree4(w1,w2,w3,w4);CHKERRQ(ierr);
241 
242   ierr = PetscMalloc1((nrqr+1),&xdata);CHKERRQ(ierr);
243   ierr = PetscMalloc1((nrqr+1),&isz1);CHKERRQ(ierr);
244   ierr = MatIncreaseOverlap_MPIBAIJ_Receive(C,nrqr,rbuf,xdata,isz1);CHKERRQ(ierr);
245   ierr = PetscFree(rbuf[0]);CHKERRQ(ierr);
246   ierr = PetscFree(rbuf);CHKERRQ(ierr);
247 
248   /* Send the data back */
249   /* Do a global reduction to determine the buffer space required for incoming messages */
250   {
251     PetscMPIInt *rw1;
252 
253     ierr = PetscMalloc1(size,&rw1);CHKERRQ(ierr);
254     ierr = PetscMemzero(rw1,size*sizeof(PetscInt));CHKERRQ(ierr);
255 
256     for (i=0; i<nrqr; ++i) {
257       proc = recv_status[i].MPI_SOURCE;
258       if (proc != onodes1[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MPI_SOURCE mismatch");
259       rw1[proc] = isz1[i];
260     }
261 
262     ierr = PetscFree(onodes1);CHKERRQ(ierr);
263     ierr = PetscFree(olengths1);CHKERRQ(ierr);
264 
265     /* Determine the number of messages to expect, their lengths, and the from-ids */
266     ierr = PetscGatherMessageLengths(comm,nrqr,nrqs,rw1,&onodes2,&olengths2);CHKERRQ(ierr);
267     ierr = PetscFree(rw1);CHKERRQ(ierr);
268   }
269   /* Now post the Irecvs corresponding to these messages */
270   ierr = PetscPostIrecvInt(comm,tag2,nrqs,onodes2,olengths2,&rbuf2,&r_waits2);CHKERRQ(ierr);
271 
272   /*  Now  post the sends */
273   ierr = PetscMalloc1((nrqr+1),&s_waits2);CHKERRQ(ierr);
274   for (i=0; i<nrqr; ++i) {
275     j    = recv_status[i].MPI_SOURCE;
276     ierr = MPI_Isend(xdata[i],isz1[i],MPIU_INT,j,tag2,comm,s_waits2+i);CHKERRQ(ierr);
277   }
278 
279   /* receive work done on other processors*/
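  /*
     Each reply reuses the request layout: rbuf2[idex][0] is the number of index sets in
     the message, followed by (is_no, count) pairs, followed by the block-row indices
     themselves starting at position 2*rbuf2[idex][0]+1.  The rows are merged into the
     local tables exactly as in the local phase above.
  */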
280   {
281     PetscMPIInt idex;
282     PetscInt    is_no,ct1,max,*rbuf2_i,isz_i,*data_i,jmax;
283     PetscBT     table_i;
284     MPI_Status  *status2;
285 
286     ierr = PetscMalloc((PetscMax(nrqr,nrqs)+1)*sizeof(MPI_Status),&status2);CHKERRQ(ierr);
287     for (i=0; i<nrqs; ++i) {
288       ierr = MPI_Waitany(nrqs,r_waits2,&idex,status2+i);CHKERRQ(ierr);
289       /* Process the message*/
290       rbuf2_i = rbuf2[idex];
291       ct1     = 2*rbuf2_i[0]+1;
292       jmax    = rbuf2[idex][0];
293       for (j=1; j<=jmax; j++) {
294         max     = rbuf2_i[2*j];
295         is_no   = rbuf2_i[2*j-1];
296         isz_i   = isz[is_no];
297         data_i  = data[is_no];
298         table_i = table[is_no];
299         for (k=0; k<max; k++,ct1++) {
300           row = rbuf2_i[ct1];
301           if (!PetscBTLookupSet(table_i,row)) data_i[isz_i++] = row;
302         }
303         isz[is_no] = isz_i;
304       }
305     }
306     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits2,status2);CHKERRQ(ierr);}
307     ierr = PetscFree(status2);CHKERRQ(ierr);
308   }
309 
310   for (i=0; i<imax; ++i) {
311     ierr = ISCreateGeneral(PETSC_COMM_SELF,isz[i],data[i],PETSC_COPY_VALUES,is+i);CHKERRQ(ierr);
312   }
313 
314 
315   ierr = PetscFree(onodes2);CHKERRQ(ierr);
316   ierr = PetscFree(olengths2);CHKERRQ(ierr);
317 
318   ierr = PetscFree(pa);CHKERRQ(ierr);
319   ierr = PetscFree(rbuf2[0]);CHKERRQ(ierr);
320   ierr = PetscFree(rbuf2);CHKERRQ(ierr);
321   ierr = PetscFree(s_waits1);CHKERRQ(ierr);
322   ierr = PetscFree(r_waits1);CHKERRQ(ierr);
323   ierr = PetscFree(s_waits2);CHKERRQ(ierr);
324   ierr = PetscFree(r_waits2);CHKERRQ(ierr);
325   ierr = PetscFree5(table,data,isz,d_p,t_p);CHKERRQ(ierr);
326   ierr = PetscFree(s_status);CHKERRQ(ierr);
327   ierr = PetscFree(recv_status);CHKERRQ(ierr);
328   ierr = PetscFree(xdata[0]);CHKERRQ(ierr);
329   ierr = PetscFree(xdata);CHKERRQ(ierr);
330   ierr = PetscFree(isz1);CHKERRQ(ierr);
331   PetscFunctionReturn(0);
332 }
333 
334 #undef __FUNCT__
335 #define __FUNCT__ "MatIncreaseOverlap_MPIBAIJ_Local"
336 /*
337    MatIncreaseOverlap_MPIBAIJ_Local - Called by MatIncreaseOverlap, to do
338        the work on the local processor.
339 
340      Inputs:
341       C      - the matrix (MATMPIBAIJ);
342       imax   - total number of index sets processed at a time;
343       table  - an array of bit tables (PetscBT), one per index set, each of size Mbs bits.
344 
345      Output:
346       isz    - array containing the count of the solution elements corresponding
347                to each index set;
348       data   - pointer to the solutions
349 */
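/*
   For each index set i the routine walks the block rows already collected in data[i],
   looks up each row in the diagonal part A and the off-diagonal part B, and appends every
   new global block column (translated through garray for B) to data[i], using the bit
   table table[i] to avoid duplicates.  On return isz[i] is the new length of data[i].
*/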
350 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Local(Mat C,PetscInt imax,PetscBT *table,PetscInt *isz,PetscInt **data)
351 {
352   Mat_MPIBAIJ *c = (Mat_MPIBAIJ*)C->data;
353   Mat         A  = c->A,B = c->B;
354   Mat_SeqBAIJ *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)B->data;
355   PetscInt    start,end,val,max,rstart,cstart,*ai,*aj;
356   PetscInt    *bi,*bj,*garray,i,j,k,row,*data_i,isz_i;
357   PetscBT     table_i;
358 
359   PetscFunctionBegin;
360   rstart = c->rstartbs;
361   cstart = c->cstartbs;
362   ai     = a->i;
363   aj     = a->j;
364   bi     = b->i;
365   bj     = b->j;
366   garray = c->garray;
367 
368 
369   for (i=0; i<imax; i++) {
370     data_i  = data[i];
371     table_i = table[i];
372     isz_i   = isz[i];
373     for (j=0,max=isz[i]; j<max; j++) {
374       row   = data_i[j] - rstart;
375       start = ai[row];
376       end   = ai[row+1];
377       for (k=start; k<end; k++) { /* Amat */
378         val = aj[k] + cstart;
379         if (!PetscBTLookupSet(table_i,val)) data_i[isz_i++] = val;
380       }
381       start = bi[row];
382       end   = bi[row+1];
383       for (k=start; k<end; k++) { /* Bmat */
384         val = garray[bj[k]];
385         if (!PetscBTLookupSet(table_i,val)) data_i[isz_i++] = val;
386       }
387     }
388     isz[i] = isz_i;
389   }
390   PetscFunctionReturn(0);
391 }
392 #undef __FUNCT__
393 #define __FUNCT__ "MatIncreaseOverlap_MPIBAIJ_Receive"
394 /*
395       MatIncreaseOverlap_MPIBAIJ_Receive - Process the received messages,
396          and return the output
397 
398          Input:
399            C    - the matrix
400            nrqr - number of messages being processed.
401            rbuf - an array of pointers to the received requests
402 
403          Output:
404            xdata - array of messages to be sent back
405            isz1  - size of each message
406 
407   For better efficiency perhaps we should malloc each xdata[i] separately;
408 then if a reallocation is required we would need to copy only the data for
409 that one message rather than for all preceding messages, as happens now where
410 a single large chunk of memory is used.
411 
412 */
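/*
   The replies are packed back-to-back into one buffer: xdata[0] is the only allocation,
   xdata[i+1] points just past the end of xdata[i], and each xdata[i] uses the same layout
   as the incoming request (count of index sets, (is,count) pairs, then the block-row
   indices).  isz1[i] is the total length of reply i in PetscInts.
*/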
413 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Receive(Mat C,PetscInt nrqr,PetscInt **rbuf,PetscInt **xdata,PetscInt * isz1)
414 {
415   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
416   Mat            A  = c->A,B = c->B;
417   Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)B->data;
418   PetscErrorCode ierr;
419   PetscInt       rstart,cstart,*ai,*aj,*bi,*bj,*garray,i,j,k;
420   PetscInt       row,total_sz,ct,ct1,ct2,ct3,mem_estimate,oct2,l,start,end;
421   PetscInt       val,max1,max2,Mbs,no_malloc =0,*tmp,new_estimate,ctr;
422   PetscInt       *rbuf_i,kmax,rbuf_0;
423   PetscBT        xtable;
424 
425   PetscFunctionBegin;
426   Mbs    = c->Mbs;
427   rstart = c->rstartbs;
428   cstart = c->cstartbs;
429   ai     = a->i;
430   aj     = a->j;
431   bi     = b->i;
432   bj     = b->j;
433   garray = c->garray;
434 
435 
436   for (i=0,ct=0,total_sz=0; i<nrqr; ++i) {
437     rbuf_i =  rbuf[i];
438     rbuf_0 =  rbuf_i[0];
439     ct    += rbuf_0;
440     for (j=1; j<=rbuf_0; j++) total_sz += rbuf_i[2*j];
441   }
442 
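  /* Initial size guess for the single reply buffer xdata[0]: the larger of the total
     number of requested block rows and a rough nonzeros-per-row based estimate, times 3
     plus slack.  When the guess is exceeded, the buffer is grown by a factor of 1.5 below
     and all xdata[] pointers are rebased. */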
443   if (c->Mbs) max1 = ct*(a->nz +b->nz)/c->Mbs;
444   else        max1 = 1;
445   mem_estimate = 3*((total_sz > max1 ? total_sz : max1)+1);
446   ierr         = PetscMalloc1(mem_estimate,&xdata[0]);CHKERRQ(ierr);
447   ++no_malloc;
448   ierr = PetscBTCreate(Mbs,&xtable);CHKERRQ(ierr);
449   ierr = PetscMemzero(isz1,nrqr*sizeof(PetscInt));CHKERRQ(ierr);
450 
451   ct3 = 0;
452   for (i=0; i<nrqr; i++) { /* for each mesg from proc i */
453     rbuf_i =  rbuf[i];
454     rbuf_0 =  rbuf_i[0];
455     ct1    =  2*rbuf_0+1;
456     ct2    =  ct1;
457     ct3   += ct1;
458     for (j=1; j<=rbuf_0; j++) { /* for each IS from proc i*/
459       ierr = PetscBTMemzero(Mbs,xtable);CHKERRQ(ierr);
460       oct2 = ct2;
461       kmax = rbuf_i[2*j];
462       for (k=0; k<kmax; k++,ct1++) {
463         row = rbuf_i[ct1];
464         if (!PetscBTLookupSet(xtable,row)) {
465           if (!(ct3 < mem_estimate)) {
466             new_estimate = (PetscInt)(1.5*mem_estimate)+1;
467             ierr         = PetscMalloc1(new_estimate,&tmp);CHKERRQ(ierr);
468             ierr         = PetscMemcpy(tmp,xdata[0],mem_estimate*sizeof(PetscInt));CHKERRQ(ierr);
469             ierr         = PetscFree(xdata[0]);CHKERRQ(ierr);
470             xdata[0]     = tmp;
471             mem_estimate = new_estimate; ++no_malloc;
472             for (ctr=1; ctr<=i; ctr++) xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];
473           }
474           xdata[i][ct2++] = row;
475           ct3++;
476         }
477       }
478       for (k=oct2,max2=ct2; k<max2; k++)  {
479         row   = xdata[i][k] - rstart;
480         start = ai[row];
481         end   = ai[row+1];
482         for (l=start; l<end; l++) {
483           val = aj[l] + cstart;
484           if (!PetscBTLookupSet(xtable,val)) {
485             if (!(ct3 < mem_estimate)) {
486               new_estimate = (PetscInt)(1.5*mem_estimate)+1;
487               ierr         = PetscMalloc1(new_estimate,&tmp);CHKERRQ(ierr);
488               ierr         = PetscMemcpy(tmp,xdata[0],mem_estimate*sizeof(PetscInt));CHKERRQ(ierr);
489               ierr         = PetscFree(xdata[0]);CHKERRQ(ierr);
490               xdata[0]     = tmp;
491               mem_estimate = new_estimate; ++no_malloc;
492               for (ctr=1; ctr<=i; ctr++) xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];
493             }
494             xdata[i][ct2++] = val;
495             ct3++;
496           }
497         }
498         start = bi[row];
499         end   = bi[row+1];
500         for (l=start; l<end; l++) {
501           val = garray[bj[l]];
502           if (!PetscBTLookupSet(xtable,val)) {
503             if (!(ct3 < mem_estimate)) {
504               new_estimate = (PetscInt)(1.5*mem_estimate)+1;
505               ierr         = PetscMalloc1(new_estimate,&tmp);CHKERRQ(ierr);
506               ierr         = PetscMemcpy(tmp,xdata[0],mem_estimate*sizeof(PetscInt));CHKERRQ(ierr);
507               ierr         = PetscFree(xdata[0]);CHKERRQ(ierr);
508               xdata[0]     = tmp;
509               mem_estimate = new_estimate; ++no_malloc;
510               for (ctr =1; ctr <=i; ctr++) xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];
511             }
512             xdata[i][ct2++] = val;
513             ct3++;
514           }
515         }
516       }
517       /* Update the header*/
518       xdata[i][2*j]   = ct2 - oct2; /* Undo the vector isz1 and use only a var*/
519       xdata[i][2*j-1] = rbuf_i[2*j-1];
520     }
521     xdata[i][0] = rbuf_0;
522     xdata[i+1]  = xdata[i] + ct2;
523     isz1[i]     = ct2; /* size of each message */
524   }
525   ierr = PetscBTDestroy(&xtable);CHKERRQ(ierr);
526   ierr = PetscInfo3(C,"Allocated %D bytes, required %D, no of mallocs = %D\n",mem_estimate,ct3,no_malloc);CHKERRQ(ierr);
527   PetscFunctionReturn(0);
528 }
529 
530 #undef __FUNCT__
531 #define __FUNCT__ "MatGetSubMatrices_MPIBAIJ"
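/*
   A minimal usage sketch (illustration only): this routine is reached through the public
   MatGetSubMatrices() interface.  Assuming an assembled MATMPIBAIJ matrix C and sorted
   index sets isrow[0] and iscol[0] of point indices, one might write

       Mat *submat;

       ierr = MatGetSubMatrices(C,1,isrow,iscol,MAT_INITIAL_MATRIX,&submat);CHKERRQ(ierr);
       ...
       ierr = MatGetSubMatrices(C,1,isrow,iscol,MAT_REUSE_MATRIX,&submat);CHKERRQ(ierr);

   Each submat[i] is a sequential matrix on PETSC_COMM_SELF; the column index sets must be
   sorted (checked below), and with MAT_REUSE_MATRIX the nonzero pattern must not change.
*/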
532 PetscErrorCode MatGetSubMatrices_MPIBAIJ(Mat C,PetscInt ismax,const IS isrow[],const IS iscol[],MatReuse scall,Mat *submat[])
533 {
534   IS             *isrow_new,*iscol_new;
535   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
536   PetscErrorCode ierr;
537   PetscInt       nmax,nstages_local,nstages,i,pos,max_no,ncol,nrow,N=C->cmap->N,bs=C->rmap->bs;
538   PetscBool      colflag,*allcolumns,*allrows;
539 
540   PetscFunctionBegin;
541   /* Currently, unsorted column indices will result in inverted column indices in the resulting submatrices. */
542   for (i = 0; i < ismax; ++i) {
543     PetscBool sorted;
544     ierr = ISSorted(iscol[i], &sorted);CHKERRQ(ierr);
545     if (!sorted) SETERRQ1(((PetscObject)iscol[i])->comm, PETSC_ERR_SUP, "Column index set %D not sorted", i);
546   }
547   /* The compression and expansion should be avoided. It does not point
548      out errors and might change the indices, hence it is buggy */
549   ierr = PetscMalloc2(ismax+1,&isrow_new,ismax+1,&iscol_new);CHKERRQ(ierr);
550   ierr = ISCompressIndicesGeneral(N,C->rmap->n,bs,ismax,isrow,isrow_new);CHKERRQ(ierr);
551   ierr = ISCompressIndicesGeneral(N,C->cmap->n,bs,ismax,iscol,iscol_new);CHKERRQ(ierr);
552 
553   /* Check for special case: each processor gets entire matrix columns */
554   ierr = PetscMalloc2(ismax+1,&allcolumns,ismax+1,&allrows);CHKERRQ(ierr);
555   for (i=0; i<ismax; i++) {
556     ierr = ISIdentity(iscol[i],&colflag);CHKERRQ(ierr);
557     ierr = ISGetLocalSize(iscol[i],&ncol);CHKERRQ(ierr);
558     if (colflag && ncol == C->cmap->N) allcolumns[i] = PETSC_TRUE;
559     else allcolumns[i] = PETSC_FALSE;
560 
561     ierr = ISIdentity(isrow[i],&colflag);CHKERRQ(ierr);
562     ierr = ISGetLocalSize(isrow[i],&nrow);CHKERRQ(ierr);
563     if (colflag && nrow == C->rmap->N) allrows[i] = PETSC_TRUE;
564     else allrows[i] = PETSC_FALSE;
565   }
566 
567   /* Allocate memory to hold all the submatrices */
568   if (scall != MAT_REUSE_MATRIX) {
569     ierr = PetscMalloc1((ismax+1),submat);CHKERRQ(ierr);
570   }
571   /* Determine the number of stages through which submatrices are done */
572   nmax = 20*1000000 / (c->Nbs * sizeof(PetscInt));
573   if (!nmax) nmax = 1;
574   nstages_local = ismax/nmax + ((ismax % nmax) ? 1 : 0);
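  /* Staging example (illustration only): the heuristic above caps the per-stage workspace
     at roughly 20 MB.  With, say, Nbs = 500000 block columns and 4-byte PetscInt this gives
     nmax = 20000000/(500000*4) = 10 submatrices per stage, so a request for 25 submatrices
     is processed in nstages_local = 3 stages of sizes 10, 10 and 5. */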
575 
576   /* Make sure every processor loops through the nstages */
577   ierr = MPI_Allreduce(&nstages_local,&nstages,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)C));CHKERRQ(ierr);
578   for (i=0,pos=0; i<nstages; i++) {
579     if (pos+nmax <= ismax) max_no = nmax;
580     else if (pos == ismax) max_no = 0;
581     else                   max_no = ismax-pos;
582     ierr = MatGetSubMatrices_MPIBAIJ_local(C,max_no,isrow_new+pos,iscol_new+pos,scall,allrows+pos,allcolumns+pos,*submat+pos);CHKERRQ(ierr);
583     pos += max_no;
584   }
585 
586   for (i=0; i<ismax; i++) {
587     ierr = ISDestroy(&isrow_new[i]);CHKERRQ(ierr);
588     ierr = ISDestroy(&iscol_new[i]);CHKERRQ(ierr);
589   }
590   ierr = PetscFree2(isrow_new,iscol_new);CHKERRQ(ierr);
591   ierr = PetscFree2(allcolumns,allrows);CHKERRQ(ierr);
592   PetscFunctionReturn(0);
593 }
594 
595 #if defined(PETSC_USE_CTABLE)
596 #undef __FUNCT__
597 #define __FUNCT__ "PetscGetProc"
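/*
   PetscGetProc - Guess which process owns global block row 'row' by linear interpolation
   into the ownership ranges proc_gnode[] (proc_gnode[p] is the first block row of process
   p, proc_gnode[size] the total number of block rows), then correct the guess by walking
   up or down until proc_gnode[fproc] <= row < proc_gnode[fproc+1].
*/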
598 PetscErrorCode PetscGetProc(const PetscInt row, const PetscMPIInt size, const PetscInt proc_gnode[], PetscMPIInt *rank)
599 {
600   PetscInt       nGlobalNd = proc_gnode[size];
601   PetscMPIInt    fproc;
602   PetscErrorCode ierr;
603 
604   PetscFunctionBegin;
605   ierr = PetscMPIIntCast((PetscInt)(((float)row * (float)size / (float)nGlobalNd + 0.5)),&fproc);CHKERRQ(ierr);
606   if (fproc > size) fproc = size;
607   while (row < proc_gnode[fproc] || row >= proc_gnode[fproc+1]) {
608     if (row < proc_gnode[fproc]) fproc--;
609     else                         fproc++;
610   }
611   *rank = fproc;
612   PetscFunctionReturn(0);
613 }
614 #endif
615 
616 /* -------------------------------------------------------------------------*/
617 /* This code is used for BAIJ and SBAIJ matrices (unfortunate dependency) */
618 #undef __FUNCT__
619 #define __FUNCT__ "MatGetSubMatrices_MPIBAIJ_local"
620 PetscErrorCode MatGetSubMatrices_MPIBAIJ_local(Mat C,PetscInt ismax,const IS isrow[],const IS iscol[],MatReuse scall,PetscBool *allrows,PetscBool *allcolumns,Mat *submats)
621 {
622   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
623   Mat            A  = c->A;
624   Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)c->B->data,*mat;
625   const PetscInt **irow,**icol,*irow_i;
626   PetscInt       *nrow,*ncol,*w3,*w4,start;
627   PetscErrorCode ierr;
628   PetscMPIInt    size,tag0,tag1,tag2,tag3,*w1,*w2,nrqr,idex,end,proc;
629   PetscInt       **sbuf1,**sbuf2,rank,i,j,k,l,ct1,ct2,**rbuf1,row;
630   PetscInt       nrqs,msz,**ptr,*req_size,*ctr,*pa,*tmp,tcol;
631   PetscInt       **rbuf3,*req_source,**sbuf_aj,**rbuf2,max1,max2;
632   PetscInt       **lens,is_no,ncols,*cols,mat_i,*mat_j,tmp2,jmax;
633   PetscInt       ctr_j,*sbuf1_j,*sbuf_aj_i,*rbuf1_i,kmax,*lens_i;
634   PetscInt       bs     =C->rmap->bs,bs2=c->bs2,*a_j=a->j,*b_j=b->j,*cworkA,*cworkB;
635   PetscInt       cstart = c->cstartbs,nzA,nzB,*a_i=a->i,*b_i=b->i,imark;
636   PetscInt       *bmap  = c->garray,ctmp,rstart=c->rstartbs;
637   MPI_Request    *s_waits1,*r_waits1,*s_waits2,*r_waits2,*r_waits3,*s_waits3;
638   MPI_Status     *r_status1,*r_status2,*s_status1,*s_status3,*s_status2,*r_status3;
639   MPI_Comm       comm;
640   PetscBool      flag;
641   PetscMPIInt    *onodes1,*olengths1;
642   PetscBool      ijonly=c->ijonly; /* private flag indicates only matrix data structures are requested */
643 
644   /* variables below are used for the matrix numerical values - case of !ijonly */
645   MPI_Request *r_waits4,*s_waits4;
646   MPI_Status  *r_status4,*s_status4;
647   MatScalar   **rbuf4,**sbuf_aa,*vals,*mat_a = NULL,*sbuf_aa_i,*vworkA = NULL,*vworkB = NULL;
648   MatScalar   *a_a=a->a,*b_a=b->a;
649 
650 #if defined(PETSC_USE_CTABLE)
651   PetscInt   tt;
652   PetscTable *rmap,*cmap,rmap_i,cmap_i=NULL;
653 #else
654   PetscInt **cmap,*cmap_i=NULL,*rtable,*rmap_i,**rmap, Mbs = c->Mbs;
655 #endif
656 
657   PetscFunctionBegin;
658   ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
659   tag0 = ((PetscObject)C)->tag;
660   size = c->size;
661   rank = c->rank;
662 
663   /* Get some new tags to keep the communication clean */
664   ierr = PetscObjectGetNewTag((PetscObject)C,&tag1);CHKERRQ(ierr);
665   ierr = PetscObjectGetNewTag((PetscObject)C,&tag2);CHKERRQ(ierr);
666   ierr = PetscObjectGetNewTag((PetscObject)C,&tag3);CHKERRQ(ierr);
667 
668 #if defined(PETSC_USE_CTABLE)
669   ierr = PetscMalloc4(ismax,&irow,ismax,&icol,ismax,&nrow,ismax,&ncol);CHKERRQ(ierr);
670 #else
671   ierr = PetscMalloc5(ismax,&irow,ismax,&icol,ismax,&nrow,ismax,&ncol,Mbs+1,&rtable);CHKERRQ(ierr);
672   /* Create a lookup table for the mapping: block row -> owning process */
673   for (i=0,j=0; i<size; i++) {
674     jmax = C->rmap->range[i+1]/bs;
675     for (; j<jmax; j++) rtable[j] = i;
676   }
677 #endif
678 
679   for (i=0; i<ismax; i++) {
680     if (allrows[i]) {
681       irow[i] = NULL;
682       nrow[i] = C->rmap->N/bs;
683     } else {
684       ierr = ISGetIndices(isrow[i],&irow[i]);CHKERRQ(ierr);
685       ierr = ISGetLocalSize(isrow[i],&nrow[i]);CHKERRQ(ierr);
686     }
687 
688     if (allcolumns[i]) {
689       icol[i] = NULL;
690       ncol[i] = C->cmap->N/bs;
691     } else {
692       ierr = ISGetIndices(iscol[i],&icol[i]);CHKERRQ(ierr);
693       ierr = ISGetLocalSize(iscol[i],&ncol[i]);CHKERRQ(ierr);
694     }
695   }
696 
697   /* Evaluate communication - to whom messages are sent, their lengths, and the buffer
698      space required. Based on this, buffers are allocated and data is copied into them */
699   ierr = PetscMalloc4(size,&w1,size,&w2,size,&w3,size,&w4);CHKERRQ(ierr);
700   ierr = PetscMemzero(w1,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
701   ierr = PetscMemzero(w2,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
702   ierr = PetscMemzero(w3,size*sizeof(PetscInt));CHKERRQ(ierr);
703   for (i=0; i<ismax; i++) {
704     ierr   = PetscMemzero(w4,size*sizeof(PetscInt));CHKERRQ(ierr); /* initialise work vector*/
705     jmax   = nrow[i];
706     irow_i = irow[i];
707     for (j=0; j<jmax; j++) {
708       if (allrows[i]) row = j;
709       else row = irow_i[j];
710 
711 #if defined(PETSC_USE_CTABLE)
712       ierr = PetscGetProc(row,size,c->rangebs,&proc);CHKERRQ(ierr);
713 #else
714       proc = rtable[row];
715 #endif
716       w4[proc]++;
717     }
718     for (j=0; j<size; j++) {
719       if (w4[j]) { w1[j] += w4[j];  w3[j]++;}
720     }
721   }
722 
723   nrqs     = 0;              /* no of outgoing messages */
724   msz      = 0;              /* total mesg length (for all procs) */
725   w1[rank] = 0;              /* no mesg sent to itself */
726   w3[rank] = 0;
727   for (i=0; i<size; i++) {
728     if (w1[i])  { w2[i] = 1; nrqs++;} /* there exists a message to proc i */
729   }
730   ierr = PetscMalloc1((nrqs+1),&pa);CHKERRQ(ierr); /*(proc -array)*/
731   for (i=0,j=0; i<size; i++) {
732     if (w1[i]) { pa[j] = i; j++; }
733   }
734 
735   /* Each message would have a header = 1 + 2*(no of IS) + data */
736   for (i=0; i<nrqs; i++) {
737     j     = pa[i];
738     w1[j] += w2[j] + 2* w3[j];
739     msz   += w1[j];
740   }
741 
742   /* Determine the number of messages to expect, their lengths, and the from-ids */
743   ierr = PetscGatherNumberOfMessages(comm,w2,w1,&nrqr);CHKERRQ(ierr);
744   ierr = PetscGatherMessageLengths(comm,nrqs,nrqr,w1,&onodes1,&olengths1);CHKERRQ(ierr);
745 
746   /* Now post the Irecvs corresponding to these messages */
747   ierr = PetscPostIrecvInt(comm,tag0,nrqr,onodes1,olengths1,&rbuf1,&r_waits1);CHKERRQ(ierr);
748 
749   ierr = PetscFree(onodes1);CHKERRQ(ierr);
750   ierr = PetscFree(olengths1);CHKERRQ(ierr);
751 
752   /* Allocate Memory for outgoing messages */
753   ierr = PetscMalloc4(size,&sbuf1,size,&ptr,2*msz,&tmp,size,&ctr);CHKERRQ(ierr);
754   ierr = PetscMemzero(sbuf1,size*sizeof(PetscInt*));CHKERRQ(ierr);
755   ierr = PetscMemzero(ptr,size*sizeof(PetscInt*));CHKERRQ(ierr);
756   {
757     PetscInt *iptr = tmp,ict = 0;
758     for (i=0; i<nrqs; i++) {
759       j        = pa[i];
760       iptr    += ict;
761       sbuf1[j] = iptr;
762       ict      = w1[j];
763     }
764   }
765 
766   /* Form the outgoing messages */
767   /* Initialise the header space */
768   for (i=0; i<nrqs; i++) {
769     j           = pa[i];
770     sbuf1[j][0] = 0;
771     ierr        = PetscMemzero(sbuf1[j]+1,2*w3[j]*sizeof(PetscInt));CHKERRQ(ierr);
772     ptr[j]      = sbuf1[j] + 2*w3[j] + 1;
773   }
774 
775   /* Parse the isrow and copy data into outbuf */
776   for (i=0; i<ismax; i++) {
777     ierr   = PetscMemzero(ctr,size*sizeof(PetscInt));CHKERRQ(ierr);
778     irow_i = irow[i];
779     jmax   = nrow[i];
780     for (j=0; j<jmax; j++) {  /* parse the indices of each IS */
781       if (allrows[i]) row = j;
782       else row = irow_i[j];
783 
784 #if defined(PETSC_USE_CTABLE)
785       ierr = PetscGetProc(row,size,c->rangebs,&proc);CHKERRQ(ierr);
786 #else
787       proc = rtable[row];
788 #endif
789       if (proc != rank) { /* copy to the outgoing buf*/
790         ctr[proc]++;
791         *ptr[proc] = row;
792         ptr[proc]++;
793       }
794     }
795     /* Update the headers for the current IS */
796     for (j=0; j<size; j++) { /* Can Optimise this loop too */
797       if ((ctr_j = ctr[j])) {
798         sbuf1_j        = sbuf1[j];
799         k              = ++sbuf1_j[0];
800         sbuf1_j[2*k]   = ctr_j;
801         sbuf1_j[2*k-1] = i;
802       }
803     }
804   }
805 
806   /*  Now  post the sends */
807   ierr = PetscMalloc1((nrqs+1),&s_waits1);CHKERRQ(ierr);
808   for (i=0; i<nrqs; ++i) {
809     j    = pa[i];
810     ierr = MPI_Isend(sbuf1[j],w1[j],MPIU_INT,j,tag0,comm,s_waits1+i);CHKERRQ(ierr);
811   }
812 
813   /* Post receives to capture the buffer sizes */
814   ierr     = PetscMalloc1((nrqs+1),&r_waits2);CHKERRQ(ierr);
815   ierr     = PetscMalloc1((nrqs+1),&rbuf2);CHKERRQ(ierr);
816   rbuf2[0] = tmp + msz;
817   for (i=1; i<nrqs; ++i) {
818     j        = pa[i];
819     rbuf2[i] = rbuf2[i-1]+w1[pa[i-1]];
820   }
821   for (i=0; i<nrqs; ++i) {
822     j    = pa[i];
823     ierr = MPI_Irecv(rbuf2[i],w1[j],MPIU_INT,j,tag1,comm,r_waits2+i);CHKERRQ(ierr);
824   }
825 
826   /* Send to other procs the buf size they should allocate */
827 
828   /* Receive messages*/
829   ierr = PetscMalloc1((nrqr+1),&s_waits2);CHKERRQ(ierr);
830   ierr = PetscMalloc1((nrqr+1),&r_status1);CHKERRQ(ierr);
831   ierr = PetscMalloc3(nrqr+1,&sbuf2,nrqr,&req_size,nrqr,&req_source);CHKERRQ(ierr);
832   {
833     Mat_SeqBAIJ *sA  = (Mat_SeqBAIJ*)c->A->data,*sB = (Mat_SeqBAIJ*)c->B->data;
834     PetscInt    *sAi = sA->i,*sBi = sB->i,id,*sbuf2_i;
835 
836     for (i=0; i<nrqr; ++i) {
837       ierr = MPI_Waitany(nrqr,r_waits1,&idex,r_status1+i);CHKERRQ(ierr);
838 
839       req_size[idex] = 0;
840       rbuf1_i        = rbuf1[idex];
841       start          = 2*rbuf1_i[0] + 1;
842       ierr           = MPI_Get_count(r_status1+i,MPIU_INT,&end);CHKERRQ(ierr);
843       ierr           = PetscMalloc1(end,&sbuf2[idex]);CHKERRQ(ierr);
844       sbuf2_i        = sbuf2[idex];
845       for (j=start; j<end; j++) {
846         id              = rbuf1_i[j] - rstart;
847         ncols           = sAi[id+1] - sAi[id] + sBi[id+1] - sBi[id];
848         sbuf2_i[j]      = ncols;
849         req_size[idex] += ncols;
850       }
851       req_source[idex] = r_status1[i].MPI_SOURCE;
852       /* form the header */
853       sbuf2_i[0] = req_size[idex];
854       for (j=1; j<start; j++) sbuf2_i[j] = rbuf1_i[j];
855       ierr = MPI_Isend(sbuf2_i,end,MPIU_INT,req_source[idex],tag1,comm,s_waits2+i);CHKERRQ(ierr);
856     }
857   }
858   ierr = PetscFree(r_status1);CHKERRQ(ierr);
859   ierr = PetscFree(r_waits1);CHKERRQ(ierr);
860 
861   /*  recv buffer sizes */
862   /* Receive messages*/
863   ierr = PetscMalloc1((nrqs+1),&rbuf3);CHKERRQ(ierr);
864   ierr = PetscMalloc1((nrqs+1),&r_waits3);CHKERRQ(ierr);
865   ierr = PetscMalloc1((nrqs+1),&r_status2);CHKERRQ(ierr);
866   if (!ijonly) {
867     ierr = PetscMalloc1((nrqs+1),&rbuf4);CHKERRQ(ierr);
868     ierr = PetscMalloc1((nrqs+1),&r_waits4);CHKERRQ(ierr);
869   }
870 
871   for (i=0; i<nrqs; ++i) {
872     ierr = MPI_Waitany(nrqs,r_waits2,&idex,r_status2+i);CHKERRQ(ierr);
873     ierr = PetscMalloc1(rbuf2[idex][0],&rbuf3[idex]);CHKERRQ(ierr);
874     ierr = MPI_Irecv(rbuf3[idex],rbuf2[idex][0],MPIU_INT,r_status2[i].MPI_SOURCE,tag2,comm,r_waits3+idex);CHKERRQ(ierr);
875     if (!ijonly) {
876       ierr = PetscMalloc1(rbuf2[idex][0]*bs2,&rbuf4[idex]);CHKERRQ(ierr);
877       ierr = MPI_Irecv(rbuf4[idex],rbuf2[idex][0]*bs2,MPIU_MATSCALAR,r_status2[i].MPI_SOURCE,tag3,comm,r_waits4+idex);CHKERRQ(ierr);
878     }
879   }
880   ierr = PetscFree(r_status2);CHKERRQ(ierr);
881   ierr = PetscFree(r_waits2);CHKERRQ(ierr);
882 
883   /* Wait on sends1 and sends2 */
884   ierr = PetscMalloc1((nrqs+1),&s_status1);CHKERRQ(ierr);
885   ierr = PetscMalloc1((nrqr+1),&s_status2);CHKERRQ(ierr);
886 
887   if (nrqs) {ierr = MPI_Waitall(nrqs,s_waits1,s_status1);CHKERRQ(ierr);}
888   if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits2,s_status2);CHKERRQ(ierr);}
889   ierr = PetscFree(s_status1);CHKERRQ(ierr);
890   ierr = PetscFree(s_status2);CHKERRQ(ierr);
891   ierr = PetscFree(s_waits1);CHKERRQ(ierr);
892   ierr = PetscFree(s_waits2);CHKERRQ(ierr);
893 
894   /* Now allocate buffers for a->j, and send them off */
895   ierr = PetscMalloc1((nrqr+1),&sbuf_aj);CHKERRQ(ierr);
896   for (i=0,j=0; i<nrqr; i++) j += req_size[i];
897   ierr = PetscMalloc1((j+1),&sbuf_aj[0]);CHKERRQ(ierr);
898   for (i=1; i<nrqr; i++) sbuf_aj[i] = sbuf_aj[i-1] + req_size[i-1];
899 
900   ierr = PetscMalloc1((nrqr+1),&s_waits3);CHKERRQ(ierr);
901   {
902     for (i=0; i<nrqr; i++) {
903       rbuf1_i   = rbuf1[i];
904       sbuf_aj_i = sbuf_aj[i];
905       ct1       = 2*rbuf1_i[0] + 1;
906       ct2       = 0;
907       for (j=1,max1=rbuf1_i[0]; j<=max1; j++) {
908         kmax = rbuf1[i][2*j];
909         for (k=0; k<kmax; k++,ct1++) {
910           row    = rbuf1_i[ct1] - rstart;
911           nzA    = a_i[row+1] - a_i[row];     nzB = b_i[row+1] - b_i[row];
912           ncols  = nzA + nzB;
913           cworkA = a_j + a_i[row]; cworkB = b_j + b_i[row];
914 
915           /* load the column indices for this row into cols*/
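          /* garray (bmap) is sorted, so the off-diagonal (B) columns that lie globally
             before the diagonal block come first; the loops below copy those, then the
             diagonal-block (A) columns shifted by cstart, then the remaining B columns,
             producing a row with globally increasing block-column indices. */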
916           cols = sbuf_aj_i + ct2;
917           for (l=0; l<nzB; l++) {
918             if ((ctmp = bmap[cworkB[l]]) < cstart) cols[l] = ctmp;
919             else break;
920           }
921           imark = l;
922           for (l=0; l<nzA; l++)   cols[imark+l] = cstart + cworkA[l];
923           for (l=imark; l<nzB; l++) cols[nzA+l] = bmap[cworkB[l]];
924           ct2 += ncols;
925         }
926       }
927       ierr = MPI_Isend(sbuf_aj_i,req_size[i],MPIU_INT,req_source[i],tag2,comm,s_waits3+i);CHKERRQ(ierr);
928     }
929   }
930   ierr = PetscMalloc1((nrqs+1),&r_status3);CHKERRQ(ierr);
931   ierr = PetscMalloc1((nrqr+1),&s_status3);CHKERRQ(ierr);
932 
933   /* Allocate buffers for a->a, and send them off */
934   if (!ijonly) {
935     ierr = PetscMalloc1((nrqr+1),&sbuf_aa);CHKERRQ(ierr);
936     for (i=0,j=0; i<nrqr; i++) j += req_size[i];
937     ierr = PetscMalloc1((j+1)*bs2,&sbuf_aa[0]);CHKERRQ(ierr);
938     for (i=1; i<nrqr; i++) sbuf_aa[i] = sbuf_aa[i-1] + req_size[i-1]*bs2;
939 
940     ierr = PetscMalloc1((nrqr+1),&s_waits4);CHKERRQ(ierr);
941     {
942       for (i=0; i<nrqr; i++) {
943         rbuf1_i   = rbuf1[i];
944         sbuf_aa_i = sbuf_aa[i];
945         ct1       = 2*rbuf1_i[0]+1;
946         ct2       = 0;
947         for (j=1,max1=rbuf1_i[0]; j<=max1; j++) {
948           kmax = rbuf1_i[2*j];
949           for (k=0; k<kmax; k++,ct1++) {
950             row    = rbuf1_i[ct1] - rstart;
951             nzA    = a_i[row+1] - a_i[row];     nzB = b_i[row+1] - b_i[row];
952             ncols  = nzA + nzB;
953             cworkA = a_j + a_i[row];     cworkB = b_j + b_i[row];
954             vworkA = a_a + a_i[row]*bs2; vworkB = b_a + b_i[row]*bs2;
955 
956             /* load the column values for this row into vals*/
957             vals = sbuf_aa_i+ct2*bs2;
958             for (l=0; l<nzB; l++) {
959               if ((bmap[cworkB[l]]) < cstart) {
960                 ierr = PetscMemcpy(vals+l*bs2,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
961               } else break;
962             }
963             imark = l;
964             for (l=0; l<nzA; l++) {
965               ierr = PetscMemcpy(vals+(imark+l)*bs2,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
966             }
967             for (l=imark; l<nzB; l++) {
968               ierr = PetscMemcpy(vals+(nzA+l)*bs2,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
969             }
970             ct2 += ncols;
971           }
972         }
973         ierr = MPI_Isend(sbuf_aa_i,req_size[i]*bs2,MPIU_MATSCALAR,req_source[i],tag3,comm,s_waits4+i);CHKERRQ(ierr);
974       }
975     }
976     ierr = PetscMalloc1((nrqs+1),&r_status4);CHKERRQ(ierr);
977     ierr = PetscMalloc1((nrqr+1),&s_status4);CHKERRQ(ierr);
978   }
979   ierr = PetscFree(rbuf1[0]);CHKERRQ(ierr);
980   ierr = PetscFree(rbuf1);CHKERRQ(ierr);
981 
982   /* Form the matrix */
983   /* create col map: global col of C -> local col of submatrices */
984   {
985     const PetscInt *icol_i;
986 #if defined(PETSC_USE_CTABLE)
987     ierr = PetscMalloc1((1+ismax),&cmap);CHKERRQ(ierr);
988     for (i=0; i<ismax; i++) {
989       if (!allcolumns[i]) {
990         ierr   = PetscTableCreate(ncol[i]+1,c->Nbs+1,&cmap[i]);CHKERRQ(ierr);
991         jmax   = ncol[i];
992         icol_i = icol[i];
993         cmap_i = cmap[i];
994         for (j=0; j<jmax; j++) {
995           ierr = PetscTableAdd(cmap_i,icol_i[j]+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
996         }
997       } else {
998         cmap[i] = NULL;
999       }
1000     }
1001 #else
1002     ierr = PetscMalloc1(ismax,&cmap);CHKERRQ(ierr);
1003     for (i=0; i<ismax; i++) {
1004       if (!allcolumns[i]) {
1005         ierr   = PetscMalloc1(c->Nbs,&cmap[i]);CHKERRQ(ierr);
1006         ierr   = PetscMemzero(cmap[i],c->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
1007         jmax   = ncol[i];
1008         icol_i = icol[i];
1009         cmap_i = cmap[i];
1010         for (j=0; j<jmax; j++) {
1011           cmap_i[icol_i[j]] = j+1;
1012         }
1013       } else { /* allcolumns[i] */
1014         cmap[i] = NULL;
1015       }
1016     }
1017 #endif
1018   }
1019 
1020   /* Create lens which is required for MatCreate... */
1021   for (i=0,j=0; i<ismax; i++) j += nrow[i];
1022   ierr    = PetscMalloc1((1+ismax)*sizeof(PetscInt*)+ j,&lens);CHKERRQ(ierr);
1023   lens[0] = (PetscInt*)(lens + ismax);
1024   ierr    = PetscMemzero(lens[0],j*sizeof(PetscInt));CHKERRQ(ierr);
1025   for (i=1; i<ismax; i++) lens[i] = lens[i-1] + nrow[i-1];
1026 
1027   /* Update lens from local data */
1028   for (i=0; i<ismax; i++) {
1029     jmax = nrow[i];
1030     if (!allcolumns[i]) cmap_i = cmap[i];
1031     irow_i = irow[i];
1032     lens_i = lens[i];
1033     for (j=0; j<jmax; j++) {
1034       if (allrows[i]) row = j;
1035       else row = irow_i[j];
1036 
1037 #if defined(PETSC_USE_CTABLE)
1038       ierr = PetscGetProc(row,size,c->rangebs,&proc);CHKERRQ(ierr);
1039 #else
1040       proc = rtable[row];
1041 #endif
1042       if (proc == rank) {
1043         /* Get indices from matA and then from matB */
1044         row    = row - rstart;
1045         nzA    = a_i[row+1] - a_i[row];     nzB = b_i[row+1] - b_i[row];
1046         cworkA =  a_j + a_i[row]; cworkB = b_j + b_i[row];
1047         if (!allcolumns[i]) {
1048 #if defined(PETSC_USE_CTABLE)
1049           for (k=0; k<nzA; k++) {
1050             ierr = PetscTableFind(cmap_i,cstart+cworkA[k]+1,&tt);CHKERRQ(ierr);
1051             if (tt) lens_i[j]++;
1052           }
1053           for (k=0; k<nzB; k++) {
1054             ierr = PetscTableFind(cmap_i,bmap[cworkB[k]]+1,&tt);CHKERRQ(ierr);
1055             if (tt) lens_i[j]++;
1056           }
1057 
1058 #else
1059           for (k=0; k<nzA; k++) {
1060             if (cmap_i[cstart + cworkA[k]]) lens_i[j]++;
1061           }
1062           for (k=0; k<nzB; k++) {
1063             if (cmap_i[bmap[cworkB[k]]]) lens_i[j]++;
1064           }
1065 #endif
1066         } else { /* allcolumns */
1067           lens_i[j] = nzA + nzB;
1068         }
1069       }
1070     }
1071   }
1072 #if defined(PETSC_USE_CTABLE)
1073   /* Create row map*/
1074   ierr = PetscMalloc1((1+ismax),&rmap);CHKERRQ(ierr);
1075   for (i=0; i<ismax; i++) {
1076     ierr = PetscTableCreate(nrow[i]+1,c->Mbs+1,&rmap[i]);CHKERRQ(ierr);
1077   }
1078 #else
1079   /* Create row map*/
1080   ierr    = PetscMalloc1((1+ismax)*sizeof(PetscInt*)+ ismax*Mbs,&rmap);CHKERRQ(ierr);
1081   rmap[0] = (PetscInt*)(rmap + ismax);
1082   ierr    = PetscMemzero(rmap[0],ismax*Mbs*sizeof(PetscInt));CHKERRQ(ierr);
1083   for (i=1; i<ismax; i++) rmap[i] = rmap[i-1] + Mbs;
1084 #endif
1085   for (i=0; i<ismax; i++) {
1086     irow_i = irow[i];
1087     jmax   = nrow[i];
1088 #if defined(PETSC_USE_CTABLE)
1089     rmap_i = rmap[i];
1090     for (j=0; j<jmax; j++) {
1091       if (allrows[i]) {
1092         ierr = PetscTableAdd(rmap_i,j+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
1093       } else {
1094         ierr = PetscTableAdd(rmap_i,irow_i[j]+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
1095       }
1096     }
1097 #else
1098     rmap_i = rmap[i];
1099     for (j=0; j<jmax; j++) {
1100       if (allrows[i]) rmap_i[j] = j;
1101       else rmap_i[irow_i[j]] = j;
1102     }
1103 #endif
1104   }
1105 
1106   /* Update lens from off-process data */
1107   {
1108     PetscInt    *rbuf2_i,*rbuf3_i,*sbuf1_i;
1109     PetscMPIInt ii;
1110 
1111     for (tmp2=0; tmp2<nrqs; tmp2++) {
1112       ierr    = MPI_Waitany(nrqs,r_waits3,&ii,r_status3+tmp2);CHKERRQ(ierr);
1113       idex    = pa[ii];
1114       sbuf1_i = sbuf1[idex];
1115       jmax    = sbuf1_i[0];
1116       ct1     = 2*jmax+1;
1117       ct2     = 0;
1118       rbuf2_i = rbuf2[ii];
1119       rbuf3_i = rbuf3[ii];
1120       for (j=1; j<=jmax; j++) {
1121         is_no  = sbuf1_i[2*j-1];
1122         max1   = sbuf1_i[2*j];
1123         lens_i = lens[is_no];
1124         if (!allcolumns[is_no]) cmap_i = cmap[is_no];
1125         rmap_i = rmap[is_no];
1126         for (k=0; k<max1; k++,ct1++) {
1127 #if defined(PETSC_USE_CTABLE)
1128           ierr = PetscTableFind(rmap_i,sbuf1_i[ct1]+1,&row);CHKERRQ(ierr);
1129           row--;
1130           if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
1131 #else
1132           row = rmap_i[sbuf1_i[ct1]];  /* the val in the new matrix to be */
1133 #endif
1134           max2 = rbuf2_i[ct1];
1135           for (l=0; l<max2; l++,ct2++) {
1136             if (!allcolumns[is_no]) {
1137 #if defined(PETSC_USE_CTABLE)
1138               ierr = PetscTableFind(cmap_i,rbuf3_i[ct2]+1,&tt);CHKERRQ(ierr);
1139               if (tt) lens_i[row]++;
1140 #else
1141               if (cmap_i[rbuf3_i[ct2]]) lens_i[row]++;
1142 #endif
1143             } else { /* allcolumns */
1144               lens_i[row]++;
1145             }
1146           }
1147         }
1148       }
1149     }
1150   }
1151   ierr = PetscFree(r_status3);CHKERRQ(ierr);
1152   ierr = PetscFree(r_waits3);CHKERRQ(ierr);
1153   if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits3,s_status3);CHKERRQ(ierr);}
1154   ierr = PetscFree(s_status3);CHKERRQ(ierr);
1155   ierr = PetscFree(s_waits3);CHKERRQ(ierr);
1156 
1157   /* Create the submatrices */
1158   if (scall == MAT_REUSE_MATRIX) {
1159     if (ijonly) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_REUSE_MATRIX and ijonly is not supported yet");
1160     /*
1161         Assumes the new rows have the same lengths as the old rows, which need not hold, hence a potential bug!
1162     */
1163     for (i=0; i<ismax; i++) {
1164       mat = (Mat_SeqBAIJ*)(submats[i]->data);
1165       if ((mat->mbs != nrow[i]) || (mat->nbs != ncol[i] || C->rmap->bs != bs)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. wrong size");
1166       ierr = PetscMemcmp(mat->ilen,lens[i],mat->mbs *sizeof(PetscInt),&flag);CHKERRQ(ierr);
1167       if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Cannot reuse matrix. wrong no of nonzeros");
1168       /* Initial matrix as if empty */
1169       ierr = PetscMemzero(mat->ilen,mat->mbs*sizeof(PetscInt));CHKERRQ(ierr);
1170 
1171       submats[i]->factortype = C->factortype;
1172     }
1173   } else {
1174     PetscInt bs_tmp;
1175     if (ijonly) bs_tmp = 1;
1176     else        bs_tmp = bs;
1177     for (i=0; i<ismax; i++) {
1178       ierr = MatCreate(PETSC_COMM_SELF,submats+i);CHKERRQ(ierr);
1179       ierr = MatSetSizes(submats[i],nrow[i]*bs_tmp,ncol[i]*bs_tmp,nrow[i]*bs_tmp,ncol[i]*bs_tmp);CHKERRQ(ierr);
1180       ierr = MatSetType(submats[i],((PetscObject)A)->type_name);CHKERRQ(ierr);
1181       ierr = MatSeqBAIJSetPreallocation(submats[i],bs_tmp,0,lens[i]);CHKERRQ(ierr);
1182       ierr = MatSeqSBAIJSetPreallocation(submats[i],bs_tmp,0,lens[i]);CHKERRQ(ierr); /* this subroutine is used by SBAIJ routines */
1183     }
1184   }
1185 
1186   /* Assemble the matrices */
1187   /* First assemble the local rows */
1188   {
1189     PetscInt  ilen_row,*imat_ilen,*imat_j,*imat_i;
1190     MatScalar *imat_a = NULL;
1191 
1192     for (i=0; i<ismax; i++) {
1193       mat       = (Mat_SeqBAIJ*)submats[i]->data;
1194       imat_ilen = mat->ilen;
1195       imat_j    = mat->j;
1196       imat_i    = mat->i;
1197       if (!ijonly) imat_a = mat->a;
1198       if (!allcolumns[i]) cmap_i = cmap[i];
1199       rmap_i = rmap[i];
1200       irow_i = irow[i];
1201       jmax   = nrow[i];
1202       for (j=0; j<jmax; j++) {
1203         if (allrows[i]) row = j;
1204         else row = irow_i[j];
1205 
1206 #if defined(PETSC_USE_CTABLE)
1207         ierr = PetscGetProc(row,size,c->rangebs,&proc);CHKERRQ(ierr);
1208 #else
1209         proc = rtable[row];
1210 #endif
1211         if (proc == rank) {
1212           row    = row - rstart;
1213           nzA    = a_i[row+1] - a_i[row];
1214           nzB    = b_i[row+1] - b_i[row];
1215           cworkA = a_j + a_i[row];
1216           cworkB = b_j + b_i[row];
1217           if (!ijonly) {
1218             vworkA = a_a + a_i[row]*bs2;
1219             vworkB = b_a + b_i[row]*bs2;
1220           }
1221 #if defined(PETSC_USE_CTABLE)
1222           ierr = PetscTableFind(rmap_i,row+rstart+1,&row);CHKERRQ(ierr);
1223           row--;
1224           if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
1225 #else
1226           row = rmap_i[row + rstart];
1227 #endif
1228           mat_i = imat_i[row];
1229           if (!ijonly) mat_a = imat_a + mat_i*bs2;
1230           mat_j    = imat_j + mat_i;
1231           ilen_row = imat_ilen[row];
1232 
1233           /* load the column indices for this row into cols*/
1234           if (!allcolumns[i]) {
1235             for (l=0; l<nzB; l++) {
1236               if ((ctmp = bmap[cworkB[l]]) < cstart) {
1237 #if defined(PETSC_USE_CTABLE)
1238                 ierr = PetscTableFind(cmap_i,ctmp+1,&tcol);CHKERRQ(ierr);
1239                 if (tcol) {
1240 #else
1241                 if ((tcol = cmap_i[ctmp])) {
1242 #endif
1243                   *mat_j++ = tcol - 1;
1244                   ierr     = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1245                   mat_a   += bs2;
1246                   ilen_row++;
1247                 }
1248               } else break;
1249             }
1250             imark = l;
1251             for (l=0; l<nzA; l++) {
1252 #if defined(PETSC_USE_CTABLE)
1253               ierr = PetscTableFind(cmap_i,cstart+cworkA[l]+1,&tcol);CHKERRQ(ierr);
1254               if (tcol) {
1255 #else
1256               if ((tcol = cmap_i[cstart + cworkA[l]])) {
1257 #endif
1258                 *mat_j++ = tcol - 1;
1259                 if (!ijonly) {
1260                   ierr   = PetscMemcpy(mat_a,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1261                   mat_a += bs2;
1262                 }
1263                 ilen_row++;
1264               }
1265             }
1266             for (l=imark; l<nzB; l++) {
1267 #if defined(PETSC_USE_CTABLE)
1268               ierr = PetscTableFind(cmap_i,bmap[cworkB[l]]+1,&tcol);CHKERRQ(ierr);
1269               if (tcol) {
1270 #else
1271               if ((tcol = cmap_i[bmap[cworkB[l]]])) {
1272 #endif
1273                 *mat_j++ = tcol - 1;
1274                 if (!ijonly) {
1275                   ierr   = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1276                   mat_a += bs2;
1277                 }
1278                 ilen_row++;
1279               }
1280             }
1281           } else { /* allcolumns */
1282             for (l=0; l<nzB; l++) {
1283               if ((ctmp = bmap[cworkB[l]]) < cstart) {
1284                 *mat_j++ = ctmp;
1285                 ierr     = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1286                 mat_a   += bs2;
1287                 ilen_row++;
1288               } else break;
1289             }
1290             imark = l;
1291             for (l=0; l<nzA; l++) {
1292               *mat_j++ = cstart+cworkA[l];
1293               if (!ijonly) {
1294                 ierr   = PetscMemcpy(mat_a,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1295                 mat_a += bs2;
1296               }
1297               ilen_row++;
1298             }
1299             for (l=imark; l<nzB; l++) {
1300               *mat_j++ = bmap[cworkB[l]];
1301               if (!ijonly) {
1302                 ierr   = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1303                 mat_a += bs2;
1304               }
1305               ilen_row++;
1306             }
1307           }
1308           imat_ilen[row] = ilen_row;
1309         }
1310       }
1311     }
1312   }
1313 
1314   /*   Now assemble the off-process rows */
1315   {
1316     PetscInt    *sbuf1_i,*rbuf2_i,*rbuf3_i,*imat_ilen,ilen;
1317     PetscInt    *imat_j,*imat_i;
1318     MatScalar   *imat_a = NULL,*rbuf4_i = NULL;
1319     PetscMPIInt ii;
1320 
1321     for (tmp2=0; tmp2<nrqs; tmp2++) {
1322       if (ijonly) ii = tmp2;
1323       else {
1324         ierr = MPI_Waitany(nrqs,r_waits4,&ii,r_status4+tmp2);CHKERRQ(ierr);
1325       }
1326       idex    = pa[ii];
1327       sbuf1_i = sbuf1[idex];
1328       jmax    = sbuf1_i[0];
1329       ct1     = 2*jmax + 1;
1330       ct2     = 0;
1331       rbuf2_i = rbuf2[ii];
1332       rbuf3_i = rbuf3[ii];
1333       if (!ijonly) rbuf4_i = rbuf4[ii];
1334       for (j=1; j<=jmax; j++) {
1335         is_no = sbuf1_i[2*j-1];
1336         if (!allcolumns[is_no]) cmap_i = cmap[is_no];
1337         rmap_i    = rmap[is_no];
1338         mat       = (Mat_SeqBAIJ*)submats[is_no]->data;
1339         imat_ilen = mat->ilen;
1340         imat_j    = mat->j;
1341         imat_i    = mat->i;
1342         if (!ijonly) imat_a = mat->a;
1343         max1 = sbuf1_i[2*j];
1344         for (k=0; k<max1; k++,ct1++) {
1345           row = sbuf1_i[ct1];
1346 #if defined(PETSC_USE_CTABLE)
1347           ierr = PetscTableFind(rmap_i,row+1,&row);CHKERRQ(ierr);
1348           row--;
1349           if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
1350 #else
1351           row = rmap_i[row];
1352 #endif
1353           ilen  = imat_ilen[row];
1354           mat_i = imat_i[row];
1355           if (!ijonly) mat_a = imat_a + mat_i*bs2;
1356           mat_j = imat_j + mat_i;
1357           max2  = rbuf2_i[ct1];
1358 
1359           if (!allcolumns[is_no]) {
1360             for (l=0; l<max2; l++,ct2++) {
1361 #if defined(PETSC_USE_CTABLE)
1362               ierr = PetscTableFind(cmap_i,rbuf3_i[ct2]+1,&tcol);CHKERRQ(ierr);
1363               if (tcol) {
1364 #else
1365               if ((tcol = cmap_i[rbuf3_i[ct2]])) {
1366 #endif
1367                 *mat_j++ = tcol - 1;
1368                 if (!ijonly) {
1369                   ierr   = PetscMemcpy(mat_a,rbuf4_i+ct2*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1370                   mat_a += bs2;
1371                 }
1372                 ilen++;
1373               }
1374             }
1375           } else { /* allcolumns */
1376             for (l=0; l<max2; l++,ct2++) {
1377               *mat_j++ = rbuf3_i[ct2];
1378               if (!ijonly) {
1379                 ierr   = PetscMemcpy(mat_a,rbuf4_i+ct2*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1380                 mat_a += bs2;
1381               }
1382               ilen++;
1383             }
1384           }
1385           imat_ilen[row] = ilen;
1386         }
1387       }
1388     }
1389   }
1390   if (!ijonly) {
1391     ierr = PetscFree(r_status4);CHKERRQ(ierr);
1392     ierr = PetscFree(r_waits4);CHKERRQ(ierr);
1393     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits4,s_status4);CHKERRQ(ierr);}
1394     ierr = PetscFree(s_waits4);CHKERRQ(ierr);
1395     ierr = PetscFree(s_status4);CHKERRQ(ierr);
1396   }
1397 
1398   /* Restore the indices */
1399   for (i=0; i<ismax; i++) {
1400     if (!allrows[i]) {
1401       ierr = ISRestoreIndices(isrow[i],irow+i);CHKERRQ(ierr);
1402     }
1403     if (!allcolumns[i]) {
1404       ierr = ISRestoreIndices(iscol[i],icol+i);CHKERRQ(ierr);
1405     }
1406   }
1407 
1408   /* Destroy allocated memory */
1409 #if defined(PETSC_USE_CTABLE)
1410   ierr = PetscFree4(irow,icol,nrow,ncol);CHKERRQ(ierr);
1411 #else
1412   ierr = PetscFree5(irow,icol,nrow,ncol,rtable);CHKERRQ(ierr);
1413 #endif
1414   ierr = PetscFree4(w1,w2,w3,w4);CHKERRQ(ierr);
1415   ierr = PetscFree(pa);CHKERRQ(ierr);
1416 
1417   ierr = PetscFree4(sbuf1,ptr,tmp,ctr);CHKERRQ(ierr);
1418   ierr = PetscFree(sbuf1);CHKERRQ(ierr);
1419   ierr = PetscFree(rbuf2);CHKERRQ(ierr);
1420   for (i=0; i<nrqr; ++i) {
1421     ierr = PetscFree(sbuf2[i]);CHKERRQ(ierr);
1422   }
1423   for (i=0; i<nrqs; ++i) {
1424     ierr = PetscFree(rbuf3[i]);CHKERRQ(ierr);
1425   }
1426   ierr = PetscFree3(sbuf2,req_size,req_source);CHKERRQ(ierr);
1427   ierr = PetscFree(rbuf3);CHKERRQ(ierr);
1428   ierr = PetscFree(sbuf_aj[0]);CHKERRQ(ierr);
1429   ierr = PetscFree(sbuf_aj);CHKERRQ(ierr);
1430   if (!ijonly) {
1431     for (i=0; i<nrqs; ++i) {ierr = PetscFree(rbuf4[i]);CHKERRQ(ierr);}
1432     ierr = PetscFree(rbuf4);CHKERRQ(ierr);
1433     ierr = PetscFree(sbuf_aa[0]);CHKERRQ(ierr);
1434     ierr = PetscFree(sbuf_aa);CHKERRQ(ierr);
1435   }
1436 
1437 #if defined(PETSC_USE_CTABLE)
1438   for (i=0; i<ismax; i++) {
1439     ierr = PetscTableDestroy((PetscTable*)&rmap[i]);CHKERRQ(ierr);
1440   }
1441 #endif
1442   ierr = PetscFree(rmap);CHKERRQ(ierr);
1443 
1444   for (i=0; i<ismax; i++) {
1445     if (!allcolumns[i]) {
1446 #if defined(PETSC_USE_CTABLE)
1447       ierr = PetscTableDestroy((PetscTable*)&cmap[i]);CHKERRQ(ierr);
1448 #else
1449       ierr = PetscFree(cmap[i]);CHKERRQ(ierr);
1450 #endif
1451     }
1452   }
1453   ierr = PetscFree(cmap);CHKERRQ(ierr);
1454   ierr = PetscFree(lens);CHKERRQ(ierr);
1455 
1456   for (i=0; i<ismax; i++) {
1457     ierr = MatAssemblyBegin(submats[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1458     ierr = MatAssemblyEnd(submats[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1459   }
1460 
1461   c->ijonly = PETSC_FALSE; /* set back to the default */
1462   PetscFunctionReturn(0);
1463 }
1464 
1465