1 
2 /*
3    Routines to compute overlapping regions of a parallel MPI matrix
4    and to find submatrices that are shared across processors.
5 */
6 #include <../src/mat/impls/baij/mpi/mpibaij.h>
7 #include <petscbt.h>
8 
9 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Local(Mat,PetscInt,char**,PetscInt*,PetscInt**);
10 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Receive(Mat,PetscInt,PetscInt**,PetscInt**,PetscInt*);
11 extern PetscErrorCode MatGetRow_MPIBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**);
12 extern PetscErrorCode MatRestoreRow_MPIBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**);
13 
14 #undef __FUNCT__
15 #define __FUNCT__ "MatIncreaseOverlap_MPIBAIJ"
16 PetscErrorCode MatIncreaseOverlap_MPIBAIJ(Mat C,PetscInt imax,IS is[],PetscInt ov)
17 {
18   PetscErrorCode ierr;
19   PetscInt       i,N=C->cmap->N, bs=C->rmap->bs;
20   IS             *is_new;
21 
22   PetscFunctionBegin;
23   ierr = PetscMalloc1(imax,&is_new);CHKERRQ(ierr);
24   /* Convert the indices into block format */
25   ierr = ISCompressIndicesGeneral(N,C->rmap->n,bs,imax,is,is_new);CHKERRQ(ierr);
26   if (ov < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap specified\n");
27   for (i=0; i<ov; ++i) {
28     ierr = MatIncreaseOverlap_MPIBAIJ_Once(C,imax,is_new);CHKERRQ(ierr);
29   }
30   for (i=0; i<imax; i++) {ierr = ISDestroy(&is[i]);CHKERRQ(ierr);}
31   ierr = ISExpandIndicesGeneral(N,N,bs,imax,is_new,is);CHKERRQ(ierr);
32   for (i=0; i<imax; i++) {ierr = ISDestroy(&is_new[i]);CHKERRQ(ierr);}
33   ierr = PetscFree(is_new);CHKERRQ(ierr);
34   PetscFunctionReturn(0);
35 }
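
/*
   A minimal calling sketch (illustrative, not part of this file): the routine above is
   normally reached through the generic MatIncreaseOverlap() interface, for example when
   building overlapping subdomains for additive Schwarz. Assuming C is an assembled
   MATMPIBAIJ matrix and nlocal/rows[] are placeholder local row indices (error checking
   omitted):

      IS is[1];
      ISCreateGeneral(PETSC_COMM_SELF,nlocal,rows,PETSC_COPY_VALUES,&is[0]);
      MatIncreaseOverlap(C,1,is,2);     increase the overlap by two levels
      ...                               is[0] now also contains all coupled rows
      ISDestroy(&is[0]);
*/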
36 
37 /*
38   Sample message format:
39   If processor A wants processor B to process some elements corresponding
40   to index sets is[1] and is[5], the message is laid out as
41   mesg [0] = 2   (number of index sets in the message)
42   -----------
43   mesg [1] = 1   => is[1]
44   mesg [2] = size of is[1]
45   -----------
46   mesg [3] = 5   => is[5]
47   mesg [4] = size of is[5]
48   -----------
49   mesg [5]
50   mesg [n]  data(is[1])
51   -----------
52   mesg [n+1]
53   mesg [m]  data(is[5])
54   -----------
55 
56   Notes:
57   nrqs - number of requests sent (or to be sent out)
58   nrqr - number of requests received (which have to be, or already have been, processed)
59 */
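
/*
  A concrete instance of the format above (illustrative numbers only): if is[1] = {3,7,9}
  and is[5] = {12,20} are destined for processor B, the outgoing buffer reads

     mesg = { 2,  1, 3,  5, 2,  3, 7, 9,  12, 20 }

  i.e. a header of length 1 + 2*(number of index sets) = 5 followed by the block-row
  indices of each set in order.
*/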
60 #undef __FUNCT__
61 #define __FUNCT__ "MatIncreaseOverlap_MPIBAIJ_Once"
62 PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Once(Mat C,PetscInt imax,IS is[])
63 {
64   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
65   const PetscInt **idx,*idx_i;
66   PetscInt       *n,*w3,*w4,**data,len;
67   PetscErrorCode ierr;
68   PetscMPIInt    size,rank,tag1,tag2,*w2,*w1,nrqr;
69   PetscInt       Mbs,i,j,k,**rbuf,row,proc=-1,nrqs,msz,**outdat,**ptr;
70   PetscInt       *ctr,*pa,*tmp,*isz,*isz1,**xdata,**rbuf2,*d_p;
71   PetscMPIInt    *onodes1,*olengths1,*onodes2,*olengths2;
72   PetscBT        *table;
73   MPI_Comm       comm;
74   MPI_Request    *s_waits1,*r_waits1,*s_waits2,*r_waits2;
75   MPI_Status     *s_status,*recv_status;
76   char           *t_p;
77 
78   PetscFunctionBegin;
79   ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
80   size = c->size;
81   rank = c->rank;
82   Mbs  = c->Mbs;
83 
84   ierr = PetscObjectGetNewTag((PetscObject)C,&tag1);CHKERRQ(ierr);
85   ierr = PetscObjectGetNewTag((PetscObject)C,&tag2);CHKERRQ(ierr);
86 
87   ierr = PetscMalloc2(imax+1,&idx,imax,&n);CHKERRQ(ierr);
88 
89   for (i=0; i<imax; i++) {
90     ierr = ISGetIndices(is[i],&idx[i]);CHKERRQ(ierr);
91     ierr = ISGetLocalSize(is[i],&n[i]);CHKERRQ(ierr);
92   }
93 
94   /* Evaluate the communication pattern - to whom to send, the length of each message,
95      and the buffer space required. Based on this, buffers are allocated and data copied into them */
96   ierr = PetscCalloc4(size,&w1,size,&w2,size,&w3,size,&w4);CHKERRQ(ierr);
97   for (i=0; i<imax; i++) {
98     ierr  = PetscMemzero(w4,size*sizeof(PetscInt));CHKERRQ(ierr); /* initialise work vector*/
99     idx_i = idx[i];
100     len   = n[i];
101     for (j=0; j<len; j++) {
102       row = idx_i[j];
103       if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index set cannot have negative entries");
104       ierr = PetscLayoutFindOwner(C->rmap,row*C->rmap->bs,&proc);CHKERRQ(ierr);
105       w4[proc]++;
106     }
107     for (j=0; j<size; j++) {
108       if (w4[j]) { w1[j] += w4[j]; w3[j]++;}
109     }
110   }
111 
112   nrqs     = 0;              /* no of outgoing messages */
113   msz      = 0;              /* total mesg length (for all procs) */
114   w1[rank] = 0;              /* no mesg sent to itself */
115   w3[rank] = 0;
116   for (i=0; i<size; i++) {
117     if (w1[i])  {w2[i] = 1; nrqs++;} /* there exists a message to proc i */
118   }
119   /* pa - is list of processors to communicate with */
120   ierr = PetscMalloc1(nrqs+1,&pa);CHKERRQ(ierr);
121   for (i=0,j=0; i<size; i++) {
122     if (w1[i]) {pa[j] = i; j++;}
123   }
124 
125   /* Each message would have a header = 1 + 2*(no of IS) + data */
126   for (i=0; i<nrqs; i++) {
127     j      = pa[i];
128     w1[j] += w2[j] + 2*w3[j];
129     msz   += w1[j];
130   }
131 
132   /* Determine the number of messages to expect, their lengths, and the from-ids */
133   ierr = PetscGatherNumberOfMessages(comm,w2,w1,&nrqr);CHKERRQ(ierr);
134   ierr = PetscGatherMessageLengths(comm,nrqs,nrqr,w1,&onodes1,&olengths1);CHKERRQ(ierr);
135 
136   /* Now post the Irecvs corresponding to these messages */
137   ierr = PetscPostIrecvInt(comm,tag1,nrqr,onodes1,olengths1,&rbuf,&r_waits1);CHKERRQ(ierr);
138 
139   /* Allocate Memory for outgoing messages */
140   ierr = PetscMalloc4(size,&outdat,size,&ptr,msz,&tmp,size,&ctr);CHKERRQ(ierr);
141   ierr = PetscMemzero(outdat,size*sizeof(PetscInt*));CHKERRQ(ierr);
142   ierr = PetscMemzero(ptr,size*sizeof(PetscInt*));CHKERRQ(ierr);
143   {
144     PetscInt *iptr = tmp,ict  = 0;
145     for (i=0; i<nrqs; i++) {
146       j         = pa[i];
147       iptr     +=  ict;
148       outdat[j] = iptr;
149       ict       = w1[j];
150     }
151   }
152 
153   /* Form the outgoing messages */
154   /*plug in the headers*/
155   for (i=0; i<nrqs; i++) {
156     j            = pa[i];
157     outdat[j][0] = 0;
158     ierr         = PetscMemzero(outdat[j]+1,2*w3[j]*sizeof(PetscInt));CHKERRQ(ierr);
159     ptr[j]       = outdat[j] + 2*w3[j] + 1;
160   }
161 
162   /* Memory for doing local proc's work*/
163   {
164     ierr = PetscCalloc5(imax,&table, imax,&data, imax,&isz, Mbs*imax,&d_p, (Mbs/PETSC_BITS_PER_BYTE+1)*imax,&t_p);CHKERRQ(ierr);
165 
166     for (i=0; i<imax; i++) {
167       table[i] = t_p + (Mbs/PETSC_BITS_PER_BYTE+1)*i;
168       data[i]  = d_p + (Mbs)*i;
169     }
170   }
171 
172   /* Parse the IS and update local tables and the outgoing buf with the data*/
173   {
174     PetscInt n_i,*data_i,isz_i,*outdat_j,ctr_j;
175     PetscBT  table_i;
176 
177     for (i=0; i<imax; i++) {
178       ierr    = PetscMemzero(ctr,size*sizeof(PetscInt));CHKERRQ(ierr);
179       n_i     = n[i];
180       table_i = table[i];
181       idx_i   = idx[i];
182       data_i  = data[i];
183       isz_i   = isz[i];
184       for (j=0; j<n_i; j++) {   /* parse the indices of each IS */
185         row  = idx_i[j];
186         ierr = PetscLayoutFindOwner(C->rmap,row*C->rmap->bs,&proc);CHKERRQ(ierr);
187         if (proc != rank) { /* copy to the outgoing buffer */
188           ctr[proc]++;
189           *ptr[proc] = row;
190           ptr[proc]++;
191         } else { /* Update the local table */
192           if (!PetscBTLookupSet(table_i,row)) data_i[isz_i++] = row;
193         }
194       }
195       /* Update the headers for the current IS */
196       for (j=0; j<size; j++) { /* Can Optimise this loop by using pa[] */
197         if ((ctr_j = ctr[j])) {
198           outdat_j        = outdat[j];
199           k               = ++outdat_j[0];
200           outdat_j[2*k]   = ctr_j;
201           outdat_j[2*k-1] = i;
202         }
203       }
204       isz[i] = isz_i;
205     }
206   }
207 
208   /*  Now  post the sends */
209   ierr = PetscMalloc1(nrqs+1,&s_waits1);CHKERRQ(ierr);
210   for (i=0; i<nrqs; ++i) {
211     j    = pa[i];
212     ierr = MPI_Isend(outdat[j],w1[j],MPIU_INT,j,tag1,comm,s_waits1+i);CHKERRQ(ierr);
213   }
214 
215   /* No longer need the original indices*/
216   for (i=0; i<imax; ++i) {
217     ierr = ISRestoreIndices(is[i],idx+i);CHKERRQ(ierr);
218   }
219   ierr = PetscFree2(idx,n);CHKERRQ(ierr);
220 
221   for (i=0; i<imax; ++i) {
222     ierr = ISDestroy(&is[i]);CHKERRQ(ierr);
223   }
224 
225   /* Do Local work*/
226   ierr = MatIncreaseOverlap_MPIBAIJ_Local(C,imax,table,isz,data);CHKERRQ(ierr);
227 
228   /* Receive messages*/
229   ierr = PetscMalloc1(nrqr+1,&recv_status);CHKERRQ(ierr);
230   if (nrqr) {ierr = MPI_Waitall(nrqr,r_waits1,recv_status);CHKERRQ(ierr);}
231 
232   ierr = PetscMalloc1(nrqs+1,&s_status);CHKERRQ(ierr);
233   if (nrqs) {ierr = MPI_Waitall(nrqs,s_waits1,s_status);CHKERRQ(ierr);}
234 
235   /* Phase 1 sends are complete - deallocate buffers */
236   ierr = PetscFree4(outdat,ptr,tmp,ctr);CHKERRQ(ierr);
237   ierr = PetscFree4(w1,w2,w3,w4);CHKERRQ(ierr);
238 
239   ierr = PetscMalloc1(nrqr+1,&xdata);CHKERRQ(ierr);
240   ierr = PetscMalloc1(nrqr+1,&isz1);CHKERRQ(ierr);
241   ierr = MatIncreaseOverlap_MPIBAIJ_Receive(C,nrqr,rbuf,xdata,isz1);CHKERRQ(ierr);
242   ierr = PetscFree(rbuf[0]);CHKERRQ(ierr);
243   ierr = PetscFree(rbuf);CHKERRQ(ierr);
244 
245   /* Send the data back*/
246   /* Do a global reduction to know the buffer space req for incoming messages*/
247   {
248     PetscMPIInt *rw1;
249 
250     ierr = PetscCalloc1(size,&rw1);CHKERRQ(ierr);
251 
252     for (i=0; i<nrqr; ++i) {
253       proc = recv_status[i].MPI_SOURCE;
254       if (proc != onodes1[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MPI_SOURCE mismatch");
255       rw1[proc] = isz1[i];
256     }
257 
258     ierr = PetscFree(onodes1);CHKERRQ(ierr);
259     ierr = PetscFree(olengths1);CHKERRQ(ierr);
260 
261     /* Determine the number of messages to expect, their lengths, and the from-ids */
262     ierr = PetscGatherMessageLengths(comm,nrqr,nrqs,rw1,&onodes2,&olengths2);CHKERRQ(ierr);
263     ierr = PetscFree(rw1);CHKERRQ(ierr);
264   }
265   /* Now post the Irecvs corresponding to these messages */
266   ierr = PetscPostIrecvInt(comm,tag2,nrqs,onodes2,olengths2,&rbuf2,&r_waits2);CHKERRQ(ierr);
267 
268   /*  Now  post the sends */
269   ierr = PetscMalloc1(nrqr+1,&s_waits2);CHKERRQ(ierr);
270   for (i=0; i<nrqr; ++i) {
271     j    = recv_status[i].MPI_SOURCE;
272     ierr = MPI_Isend(xdata[i],isz1[i],MPIU_INT,j,tag2,comm,s_waits2+i);CHKERRQ(ierr);
273   }
274 
275   /* receive work done on other processors*/
276   {
277     PetscMPIInt idex;
278     PetscInt    is_no,ct1,max,*rbuf2_i,isz_i,*data_i,jmax;
279     PetscBT     table_i;
280     MPI_Status  *status2;
281 
282     ierr = PetscMalloc1(PetscMax(nrqr,nrqs)+1,&status2);CHKERRQ(ierr);
283     for (i=0; i<nrqs; ++i) {
284       ierr = MPI_Waitany(nrqs,r_waits2,&idex,status2+i);CHKERRQ(ierr);
285       /* Process the message*/
286       rbuf2_i = rbuf2[idex];
287       ct1     = 2*rbuf2_i[0]+1;
288       jmax    = rbuf2[idex][0];
289       for (j=1; j<=jmax; j++) {
290         max     = rbuf2_i[2*j];
291         is_no   = rbuf2_i[2*j-1];
292         isz_i   = isz[is_no];
293         data_i  = data[is_no];
294         table_i = table[is_no];
295         for (k=0; k<max; k++,ct1++) {
296           row = rbuf2_i[ct1];
297           if (!PetscBTLookupSet(table_i,row)) data_i[isz_i++] = row;
298         }
299         isz[is_no] = isz_i;
300       }
301     }
302     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits2,status2);CHKERRQ(ierr);}
303     ierr = PetscFree(status2);CHKERRQ(ierr);
304   }
305 
306   for (i=0; i<imax; ++i) {
307     ierr = ISCreateGeneral(PETSC_COMM_SELF,isz[i],data[i],PETSC_COPY_VALUES,is+i);CHKERRQ(ierr);
308   }
309 
310 
311   ierr = PetscFree(onodes2);CHKERRQ(ierr);
312   ierr = PetscFree(olengths2);CHKERRQ(ierr);
313 
314   ierr = PetscFree(pa);CHKERRQ(ierr);
315   ierr = PetscFree(rbuf2[0]);CHKERRQ(ierr);
316   ierr = PetscFree(rbuf2);CHKERRQ(ierr);
317   ierr = PetscFree(s_waits1);CHKERRQ(ierr);
318   ierr = PetscFree(r_waits1);CHKERRQ(ierr);
319   ierr = PetscFree(s_waits2);CHKERRQ(ierr);
320   ierr = PetscFree(r_waits2);CHKERRQ(ierr);
321   ierr = PetscFree5(table,data,isz,d_p,t_p);CHKERRQ(ierr);
322   ierr = PetscFree(s_status);CHKERRQ(ierr);
323   ierr = PetscFree(recv_status);CHKERRQ(ierr);
324   ierr = PetscFree(xdata[0]);CHKERRQ(ierr);
325   ierr = PetscFree(xdata);CHKERRQ(ierr);
326   ierr = PetscFree(isz1);CHKERRQ(ierr);
327   PetscFunctionReturn(0);
328 }
329 
330 #undef __FUNCT__
331 #define __FUNCT__ "MatIncreaseOverlap_MPIBAIJ_Local"
332 /*
333    MatIncreaseOverlap_MPIBAIJ_Local - Called by MatIncreaseOverlap_MPIBAIJ_Once to do
334        the work on the local processor.
335 
336      Inputs:
337       C      - the matrix (of type MATMPIBAIJ)
338       imax   - total number of index sets processed at a time
339       table  - an array of PetscBT bitmaps, one per index set, each Mbs bits wide
340 
341      Output:
342       isz    - array containing the count of the solution elements corresponding
343                to each index set
344       data   - pointer to the solutions (the accumulated block-row indices)
345 */
346 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Local(Mat C,PetscInt imax,PetscBT *table,PetscInt *isz,PetscInt **data)
347 {
348   Mat_MPIBAIJ *c = (Mat_MPIBAIJ*)C->data;
349   Mat         A  = c->A,B = c->B;
350   Mat_SeqBAIJ *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)B->data;
351   PetscInt    start,end,val,max,rstart,cstart,*ai,*aj;
352   PetscInt    *bi,*bj,*garray,i,j,k,row,*data_i,isz_i;
353   PetscBT     table_i;
354 
355   PetscFunctionBegin;
356   rstart = c->rstartbs;
357   cstart = c->cstartbs;
358   ai     = a->i;
359   aj     = a->j;
360   bi     = b->i;
361   bj     = b->j;
362   garray = c->garray;
363 
364 
365   for (i=0; i<imax; i++) {
366     data_i  = data[i];
367     table_i = table[i];
368     isz_i   = isz[i];
369     for (j=0,max=isz[i]; j<max; j++) {
370       row   = data_i[j] - rstart;
371       start = ai[row];
372       end   = ai[row+1];
373       for (k=start; k<end; k++) { /* Amat */
374         val = aj[k] + cstart;
375         if (!PetscBTLookupSet(table_i,val)) data_i[isz_i++] = val;
376       }
377       start = bi[row];
378       end   = bi[row+1];
379       for (k=start; k<end; k++) { /* Bmat */
380         val = garray[bj[k]];
381         if (!PetscBTLookupSet(table_i,val)) data_i[isz_i++] = val;
382       }
383     }
384     isz[i] = isz_i;
385   }
386   PetscFunctionReturn(0);
387 }
388 #undef __FUNCT__
389 #define __FUNCT__ "MatIncreaseOverlap_MPIBAIJ_Receive"
390 /*
391       MatIncreaseOverlap_MPIBAIJ_Receive - Process the received messages
392          and return the output
393 
394          Input:
395            C    - the matrix
396            nrqr - number of messages being processed
397            rbuf - an array of pointers to the received requests
398 
399          Output:
400            xdata - array of messages to be sent back
401            isz1  - size of each message
402 
403   For better efficiency we should perhaps malloc each xdata[i] separately;
404 then, if a realloc is required, only the data for that one message would need
405 to be copied rather than all previous messages, as happens now with a single
406 large chunk of memory.
407 
408 */
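/*
  Reply layout (as produced by the code below): xdata[i][0] holds the number of index
  sets, entry 2*j-1 repeats the index-set number from request i, entry 2*j holds the
  number of block rows returned for that set, and the row data follows the header;
  isz1[i] is the total length of reply i.
*/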
409 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Receive(Mat C,PetscInt nrqr,PetscInt **rbuf,PetscInt **xdata,PetscInt * isz1)
410 {
411   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
412   Mat            A  = c->A,B = c->B;
413   Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)B->data;
414   PetscErrorCode ierr;
415   PetscInt       rstart,cstart,*ai,*aj,*bi,*bj,*garray,i,j,k;
416   PetscInt       row,total_sz,ct,ct1,ct2,ct3,mem_estimate,oct2,l,start,end;
417   PetscInt       val,max1,max2,Mbs,no_malloc =0,*tmp,new_estimate,ctr;
418   PetscInt       *rbuf_i,kmax,rbuf_0;
419   PetscBT        xtable;
420 
421   PetscFunctionBegin;
422   Mbs    = c->Mbs;
423   rstart = c->rstartbs;
424   cstart = c->cstartbs;
425   ai     = a->i;
426   aj     = a->j;
427   bi     = b->i;
428   bj     = b->j;
429   garray = c->garray;
430 
431 
432   for (i=0,ct=0,total_sz=0; i<nrqr; ++i) {
433     rbuf_i =  rbuf[i];
434     rbuf_0 =  rbuf_i[0];
435     ct    += rbuf_0;
436     for (j=1; j<=rbuf_0; j++) total_sz += rbuf_i[2*j];
437   }
438 
439   if (c->Mbs) max1 = ct*(a->nz +b->nz)/c->Mbs;
440   else        max1 = 1;
441   mem_estimate = 3*((total_sz > max1 ? total_sz : max1)+1);
442   ierr         = PetscMalloc1(mem_estimate,&xdata[0]);CHKERRQ(ierr);
443   ++no_malloc;
444   ierr = PetscBTCreate(Mbs,&xtable);CHKERRQ(ierr);
445   ierr = PetscMemzero(isz1,nrqr*sizeof(PetscInt));CHKERRQ(ierr);
446 
447   ct3 = 0;
448   for (i=0; i<nrqr; i++) { /* for each mesg from proc i */
449     rbuf_i =  rbuf[i];
450     rbuf_0 =  rbuf_i[0];
451     ct1    =  2*rbuf_0+1;
452     ct2    =  ct1;
453     ct3   += ct1;
454     for (j=1; j<=rbuf_0; j++) { /* for each IS from proc i*/
455       ierr = PetscBTMemzero(Mbs,xtable);CHKERRQ(ierr);
456       oct2 = ct2;
457       kmax = rbuf_i[2*j];
458       for (k=0; k<kmax; k++,ct1++) {
459         row = rbuf_i[ct1];
460         if (!PetscBTLookupSet(xtable,row)) {
461           if (!(ct3 < mem_estimate)) {
462             new_estimate = (PetscInt)(1.5*mem_estimate)+1;
463             ierr         = PetscMalloc1(new_estimate,&tmp);CHKERRQ(ierr);
464             ierr         = PetscMemcpy(tmp,xdata[0],mem_estimate*sizeof(PetscInt));CHKERRQ(ierr);
465             ierr         = PetscFree(xdata[0]);CHKERRQ(ierr);
466             xdata[0]     = tmp;
467             mem_estimate = new_estimate; ++no_malloc;
468             for (ctr=1; ctr<=i; ctr++) xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];
469           }
470           xdata[i][ct2++] = row;
471           ct3++;
472         }
473       }
474       for (k=oct2,max2=ct2; k<max2; k++)  {
475         row   = xdata[i][k] - rstart;
476         start = ai[row];
477         end   = ai[row+1];
478         for (l=start; l<end; l++) {
479           val = aj[l] + cstart;
480           if (!PetscBTLookupSet(xtable,val)) {
481             if (!(ct3 < mem_estimate)) {
482               new_estimate = (PetscInt)(1.5*mem_estimate)+1;
483               ierr         = PetscMalloc1(new_estimate,&tmp);CHKERRQ(ierr);
484               ierr         = PetscMemcpy(tmp,xdata[0],mem_estimate*sizeof(PetscInt));CHKERRQ(ierr);
485               ierr         = PetscFree(xdata[0]);CHKERRQ(ierr);
486               xdata[0]     = tmp;
487               mem_estimate = new_estimate; ++no_malloc;
488               for (ctr=1; ctr<=i; ctr++) xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];
489             }
490             xdata[i][ct2++] = val;
491             ct3++;
492           }
493         }
494         start = bi[row];
495         end   = bi[row+1];
496         for (l=start; l<end; l++) {
497           val = garray[bj[l]];
498           if (!PetscBTLookupSet(xtable,val)) {
499             if (!(ct3 < mem_estimate)) {
500               new_estimate = (PetscInt)(1.5*mem_estimate)+1;
501               ierr         = PetscMalloc1(new_estimate,&tmp);CHKERRQ(ierr);
502               ierr         = PetscMemcpy(tmp,xdata[0],mem_estimate*sizeof(PetscInt));CHKERRQ(ierr);
503               ierr         = PetscFree(xdata[0]);CHKERRQ(ierr);
504               xdata[0]     = tmp;
505               mem_estimate = new_estimate; ++no_malloc;
506               for (ctr =1; ctr <=i; ctr++) xdata[ctr] = xdata[ctr-1] + isz1[ctr-1];
507             }
508             xdata[i][ct2++] = val;
509             ct3++;
510           }
511         }
512       }
513       /* Update the header*/
514       xdata[i][2*j]   = ct2 - oct2; /* Undo the vector isz1 and use only a var*/
515       xdata[i][2*j-1] = rbuf_i[2*j-1];
516     }
517     xdata[i][0] = rbuf_0;
518     xdata[i+1]  = xdata[i] + ct2;
519     isz1[i]     = ct2; /* size of each message */
520   }
521   ierr = PetscBTDestroy(&xtable);CHKERRQ(ierr);
522   ierr = PetscInfo3(C,"Allocated %D PetscInt entries, required %D, number of mallocs = %D\n",mem_estimate,ct3,no_malloc);CHKERRQ(ierr);
523   PetscFunctionReturn(0);
524 }
525 
526 #undef __FUNCT__
527 #define __FUNCT__ "MatGetSubMatrices_MPIBAIJ"
528 PetscErrorCode MatGetSubMatrices_MPIBAIJ(Mat C,PetscInt ismax,const IS isrow[],const IS iscol[],MatReuse scall,Mat *submat[])
529 {
530   IS             *isrow_new,*iscol_new;
531   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
532   PetscErrorCode ierr;
533   PetscInt       nmax,nstages_local,nstages,i,pos,max_no,ncol,nrow,N=C->cmap->N,bs=C->rmap->bs;
534   PetscBool      colflag,*allcolumns,*allrows;
535 
536   PetscFunctionBegin;
537   /* The compression and expansion should be avoided. It does not flag
538      errors and it might change the indices, hence it is potentially buggy */
539   ierr = PetscMalloc2(ismax+1,&isrow_new,ismax+1,&iscol_new);CHKERRQ(ierr);
540   ierr = ISCompressIndicesGeneral(N,C->rmap->n,bs,ismax,isrow,isrow_new);CHKERRQ(ierr);
541   ierr = ISCompressIndicesGeneral(N,C->cmap->n,bs,ismax,iscol,iscol_new);CHKERRQ(ierr);
542 
543   /* Check for special case: each processor gets entire matrix columns */
544   ierr = PetscMalloc2(ismax+1,&allcolumns,ismax+1,&allrows);CHKERRQ(ierr);
545   for (i=0; i<ismax; i++) {
546     ierr = ISIdentity(iscol[i],&colflag);CHKERRQ(ierr);
547     ierr = ISGetLocalSize(iscol[i],&ncol);CHKERRQ(ierr);
548     if (colflag && ncol == C->cmap->N) allcolumns[i] = PETSC_TRUE;
549     else allcolumns[i] = PETSC_FALSE;
550 
551     ierr = ISIdentity(isrow[i],&colflag);CHKERRQ(ierr);
552     ierr = ISGetLocalSize(isrow[i],&nrow);CHKERRQ(ierr);
553     if (colflag && nrow == C->rmap->N) allrows[i] = PETSC_TRUE;
554     else allrows[i] = PETSC_FALSE;
555   }
556 
557   /* Allocate memory to hold all the submatrices */
558   if (scall != MAT_REUSE_MATRIX) {
559     ierr = PetscMalloc1(ismax+1,submat);CHKERRQ(ierr);
560   }
561   /* Determine the number of stages through which submatrices are done */
562   nmax = 20*1000000 / (c->Nbs * sizeof(PetscInt));
563   if (!nmax) nmax = 1;
564   nstages_local = ismax/nmax + ((ismax % nmax) ? 1 : 0);
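  /* For example (illustrative numbers only): with Nbs = 1,000,000 block columns and a
     4-byte PetscInt, nmax = 20,000,000/4,000,000 = 5 submatrices per stage, so a request
     for ismax = 12 submatrices is processed in nstages_local = 3 stages */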
565 
566   /* Make sure every processor loops through the nstages */
567   ierr = MPI_Allreduce(&nstages_local,&nstages,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)C));CHKERRQ(ierr);
568   for (i=0,pos=0; i<nstages; i++) {
569     if (pos+nmax <= ismax) max_no = nmax;
570     else if (pos == ismax) max_no = 0;
571     else                   max_no = ismax-pos;
572     ierr = MatGetSubMatrices_MPIBAIJ_local(C,max_no,isrow_new+pos,iscol_new+pos,scall,allrows+pos,allcolumns+pos,*submat+pos);CHKERRQ(ierr);
573     pos += max_no;
574   }
575 
576   for (i=0; i<ismax; i++) {
577     ierr = ISDestroy(&isrow_new[i]);CHKERRQ(ierr);
578     ierr = ISDestroy(&iscol_new[i]);CHKERRQ(ierr);
579   }
580   ierr = PetscFree2(isrow_new,iscol_new);CHKERRQ(ierr);
581   ierr = PetscFree2(allcolumns,allrows);CHKERRQ(ierr);
582   PetscFunctionReturn(0);
583 }
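
/*
   A minimal calling sketch (illustrative, not part of this file): the routine above
   implements the generic MatGetSubMatrices() interface for MATMPIBAIJ. With one
   block-aligned row IS and one column IS per requested submatrix, and isrow/iscol as
   placeholder index sets (error checking omitted):

      Mat *submat;
      MatGetSubMatrices(C,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&submat);
      ...                                  use the sequential submatrix submat[0]
      MatDestroyMatrices(1,&submat);
*/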
584 
585 #if defined(PETSC_USE_CTABLE)
586 #undef __FUNCT__
587 #define __FUNCT__ "PetscGetProc"
588 PetscErrorCode PetscGetProc(const PetscInt row, const PetscMPIInt size, const PetscInt proc_gnode[], PetscMPIInt *rank)
589 {
590   PetscInt       nGlobalNd = proc_gnode[size];
591   PetscMPIInt    fproc;
592   PetscErrorCode ierr;
593 
594   PetscFunctionBegin;
595   ierr = PetscMPIIntCast((PetscInt)(((float)row * (float)size / (float)nGlobalNd + 0.5)),&fproc);CHKERRQ(ierr);
596   if (fproc > size) fproc = size;
597   while (row < proc_gnode[fproc] || row >= proc_gnode[fproc+1]) {
598     if (row < proc_gnode[fproc]) fproc--;
599     else                         fproc++;
600   }
601   *rank = fproc;
602   PetscFunctionReturn(0);
603 }
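
/*
   Worked example (illustrative numbers only): with size = 3 and proc_gnode = {0,4,8,12},
   a query for row = 9 gives the initial guess fproc = (int)(9*3/12.0 + 0.5) = 2; since
   proc_gnode[2] = 8 <= 9 < proc_gnode[3] = 12 the guess already brackets the row and
   *rank = 2 is returned without entering the correction loop.
*/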
604 #endif
605 
606 /* -------------------------------------------------------------------------*/
607 /* This code is used for BAIJ and SBAIJ matrices (unfortunate dependency) */
608 #undef __FUNCT__
609 #define __FUNCT__ "MatGetSubMatrices_MPIBAIJ_local"
610 PetscErrorCode MatGetSubMatrices_MPIBAIJ_local(Mat C,PetscInt ismax,const IS isrow[],const IS iscol[],MatReuse scall,PetscBool *allrows,PetscBool *allcolumns,Mat *submats)
611 {
612   Mat_MPIBAIJ    *c = (Mat_MPIBAIJ*)C->data;
613   Mat            A  = c->A;
614   Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)A->data,*b = (Mat_SeqBAIJ*)c->B->data,*mat;
615   const PetscInt **irow,**icol,*irow_i;
616   PetscInt       *nrow,*ncol,*w3,*w4,start;
617   PetscErrorCode ierr;
618   PetscMPIInt    size,tag0,tag1,tag2,tag3,*w1,*w2,nrqr,idex,end,proc;
619   PetscInt       **sbuf1,**sbuf2,rank,i,j,k,l,ct1,ct2,**rbuf1,row;
620   PetscInt       nrqs,msz,**ptr,*req_size,*ctr,*pa,*tmp,tcol;
621   PetscInt       **rbuf3,*req_source,**sbuf_aj,**rbuf2,max1,max2;
622   PetscInt       **lens,is_no,ncols,*cols,mat_i,*mat_j,tmp2,jmax;
623   PetscInt       ctr_j,*sbuf1_j,*sbuf_aj_i,*rbuf1_i,kmax,*lens_i;
624   PetscInt       bs     =C->rmap->bs,bs2=c->bs2,*a_j=a->j,*b_j=b->j,*cworkA,*cworkB;
625   PetscInt       cstart = c->cstartbs,nzA,nzB,*a_i=a->i,*b_i=b->i,imark;
626   PetscInt       *bmap  = c->garray,ctmp,rstart=c->rstartbs;
627   MPI_Request    *s_waits1,*r_waits1,*s_waits2,*r_waits2,*r_waits3,*s_waits3;
628   MPI_Status     *r_status1,*r_status2,*s_status1,*s_status3,*s_status2,*r_status3;
629   MPI_Comm       comm;
630   PetscBool      flag;
631   PetscMPIInt    *onodes1,*olengths1;
632   PetscBool      ijonly=c->ijonly; /* private flag indicates only matrix data structures are requested */
633 
634   /* variables below are used for the matrix numerical values - case of !ijonly */
635   MPI_Request *r_waits4,*s_waits4;
636   MPI_Status  *r_status4,*s_status4;
637   MatScalar   **rbuf4,**sbuf_aa,*vals,*mat_a = NULL,*sbuf_aa_i,*vworkA = NULL,*vworkB = NULL;
638   MatScalar   *a_a=a->a,*b_a=b->a;
639 
640 #if defined(PETSC_USE_CTABLE)
641   PetscInt   tt;
642   PetscTable *rmap,*cmap,rmap_i,cmap_i=NULL;
643 #else
644   PetscInt **cmap,*cmap_i=NULL,*rtable,*rmap_i,**rmap, Mbs = c->Mbs;
645 #endif
646 
647   PetscFunctionBegin;
648   ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
649   tag0 = ((PetscObject)C)->tag;
650   size = c->size;
651   rank = c->rank;
652 
653   /* Get some new tags to keep the communication clean */
654   ierr = PetscObjectGetNewTag((PetscObject)C,&tag1);CHKERRQ(ierr);
655   ierr = PetscObjectGetNewTag((PetscObject)C,&tag2);CHKERRQ(ierr);
656   ierr = PetscObjectGetNewTag((PetscObject)C,&tag3);CHKERRQ(ierr);
657 
658 #if defined(PETSC_USE_CTABLE)
659   ierr = PetscMalloc4(ismax,&irow,ismax,&icol,ismax,&nrow,ismax,&ncol);CHKERRQ(ierr);
660 #else
661   ierr = PetscMalloc5(ismax,&irow,ismax,&icol,ismax,&nrow,ismax,&ncol,Mbs+1,&rtable);CHKERRQ(ierr);
662   /* Create a lookup table for the mapping: block row -> owning process */
663   for (i=0,j=0; i<size; i++) {
664     jmax = C->rmap->range[i+1]/bs;
665     for (; j<jmax; j++) rtable[j] = i;
666   }
667 #endif
668 
669   for (i=0; i<ismax; i++) {
670     if (allrows[i]) {
671       irow[i] = NULL;
672       nrow[i] = C->rmap->N/bs;
673     } else {
674       ierr = ISGetIndices(isrow[i],&irow[i]);CHKERRQ(ierr);
675       ierr = ISGetLocalSize(isrow[i],&nrow[i]);CHKERRQ(ierr);
676     }
677 
678     if (allcolumns[i]) {
679       icol[i] = NULL;
680       ncol[i] = C->cmap->N/bs;
681     } else {
682       ierr = ISGetIndices(iscol[i],&icol[i]);CHKERRQ(ierr);
683       ierr = ISGetLocalSize(iscol[i],&ncol[i]);CHKERRQ(ierr);
684     }
685   }
686 
687   /* Evaluate the communication pattern - to whom to send, the length of each message,
688      and the buffer space required. Based on this, buffers are allocated and data copied into them */
689   ierr = PetscCalloc4(size,&w1,size,&w2,size,&w3,size,&w4);CHKERRQ(ierr);
690   for (i=0; i<ismax; i++) {
691     ierr   = PetscMemzero(w4,size*sizeof(PetscInt));CHKERRQ(ierr); /* initialise work vector*/
692     jmax   = nrow[i];
693     irow_i = irow[i];
694     for (j=0; j<jmax; j++) {
695       if (allrows[i]) row = j;
696       else row = irow_i[j];
697 
698 #if defined(PETSC_USE_CTABLE)
699       ierr = PetscGetProc(row,size,c->rangebs,&proc);CHKERRQ(ierr);
700 #else
701       proc = rtable[row];
702 #endif
703       w4[proc]++;
704     }
705     for (j=0; j<size; j++) {
706       if (w4[j]) { w1[j] += w4[j];  w3[j]++;}
707     }
708   }
709 
710   nrqs     = 0;              /* no of outgoing messages */
711   msz      = 0;              /* total mesg length for all procs */
712   w1[rank] = 0;              /* no mesg sent to itself */
713   w3[rank] = 0;
714   for (i=0; i<size; i++) {
715     if (w1[i])  { w2[i] = 1; nrqs++;} /* there exists a message to proc i */
716   }
717   ierr = PetscMalloc1(nrqs+1,&pa);CHKERRQ(ierr); /*(proc -array)*/
718   for (i=0,j=0; i<size; i++) {
719     if (w1[i]) { pa[j] = i; j++; }
720   }
721 
722   /* Each message would have a header = 1 + 2*(no of IS) + data */
723   for (i=0; i<nrqs; i++) {
724     j     = pa[i];
725     w1[j] += w2[j] + 2* w3[j];
726     msz   += w1[j];
727   }
728 
729   /* Determine the number of messages to expect, their lengths, and the from-ids */
730   ierr = PetscGatherNumberOfMessages(comm,w2,w1,&nrqr);CHKERRQ(ierr);
731   ierr = PetscGatherMessageLengths(comm,nrqs,nrqr,w1,&onodes1,&olengths1);CHKERRQ(ierr);
732 
733   /* Now post the Irecvs corresponding to these messages */
734   ierr = PetscPostIrecvInt(comm,tag0,nrqr,onodes1,olengths1,&rbuf1,&r_waits1);CHKERRQ(ierr);
735 
736   ierr = PetscFree(onodes1);CHKERRQ(ierr);
737   ierr = PetscFree(olengths1);CHKERRQ(ierr);
738 
739   /* Allocate Memory for outgoing messages */
740   ierr = PetscMalloc4(size,&sbuf1,size,&ptr,2*msz,&tmp,size,&ctr);CHKERRQ(ierr);
741   ierr = PetscMemzero(sbuf1,size*sizeof(PetscInt*));CHKERRQ(ierr);
742   ierr = PetscMemzero(ptr,size*sizeof(PetscInt*));CHKERRQ(ierr);
743   {
744     PetscInt *iptr = tmp,ict = 0;
745     for (i=0; i<nrqs; i++) {
746       j        = pa[i];
747       iptr    += ict;
748       sbuf1[j] = iptr;
749       ict      = w1[j];
750     }
751   }
752 
753   /* Form the outgoing messages */
754   /* Initialise the header space */
755   for (i=0; i<nrqs; i++) {
756     j           = pa[i];
757     sbuf1[j][0] = 0;
758     ierr        = PetscMemzero(sbuf1[j]+1,2*w3[j]*sizeof(PetscInt));CHKERRQ(ierr);
759     ptr[j]      = sbuf1[j] + 2*w3[j] + 1;
760   }
761 
762   /* Parse the isrow and copy data into outbuf */
763   for (i=0; i<ismax; i++) {
764     ierr   = PetscMemzero(ctr,size*sizeof(PetscInt));CHKERRQ(ierr);
765     irow_i = irow[i];
766     jmax   = nrow[i];
767     for (j=0; j<jmax; j++) {  /* parse the indices of each IS */
768       if (allrows[i]) row = j;
769       else row = irow_i[j];
770 
771 #if defined(PETSC_USE_CTABLE)
772       ierr = PetscGetProc(row,size,c->rangebs,&proc);CHKERRQ(ierr);
773 #else
774       proc = rtable[row];
775 #endif
776       if (proc != rank) { /* copy to the outgoing buf*/
777         ctr[proc]++;
778         *ptr[proc] = row;
779         ptr[proc]++;
780       }
781     }
782     /* Update the headers for the current IS */
783     for (j=0; j<size; j++) { /* Can Optimise this loop too */
784       if ((ctr_j = ctr[j])) {
785         sbuf1_j        = sbuf1[j];
786         k              = ++sbuf1_j[0];
787         sbuf1_j[2*k]   = ctr_j;
788         sbuf1_j[2*k-1] = i;
789       }
790     }
791   }
792 
793   /*  Now  post the sends */
794   ierr = PetscMalloc1(nrqs+1,&s_waits1);CHKERRQ(ierr);
795   for (i=0; i<nrqs; ++i) {
796     j    = pa[i];
797     ierr = MPI_Isend(sbuf1[j],w1[j],MPIU_INT,j,tag0,comm,s_waits1+i);CHKERRQ(ierr);
798   }
799 
800   /* Post Receives to capture the buffer size */
801   ierr     = PetscMalloc1(nrqs+1,&r_waits2);CHKERRQ(ierr);
802   ierr     = PetscMalloc1(nrqs+1,&rbuf2);CHKERRQ(ierr);
803   rbuf2[0] = tmp + msz;
804   for (i=1; i<nrqs; ++i) {
805     j        = pa[i];
806     rbuf2[i] = rbuf2[i-1]+w1[pa[i-1]];
807   }
808   for (i=0; i<nrqs; ++i) {
809     j    = pa[i];
810     ierr = MPI_Irecv(rbuf2[i],w1[j],MPIU_INT,j,tag1,comm,r_waits2+i);CHKERRQ(ierr);
811   }
812 
813   /* Send to other procs the buf size they should allocate */
814 
815   /* Receive messages*/
816   ierr = PetscMalloc1(nrqr+1,&s_waits2);CHKERRQ(ierr);
817   ierr = PetscMalloc1(nrqr+1,&r_status1);CHKERRQ(ierr);
818   ierr = PetscMalloc3(nrqr+1,&sbuf2,nrqr,&req_size,nrqr,&req_source);CHKERRQ(ierr);
819   {
820     Mat_SeqBAIJ *sA  = (Mat_SeqBAIJ*)c->A->data,*sB = (Mat_SeqBAIJ*)c->B->data;
821     PetscInt    *sAi = sA->i,*sBi = sB->i,id,*sbuf2_i;
822 
823     for (i=0; i<nrqr; ++i) {
824       ierr = MPI_Waitany(nrqr,r_waits1,&idex,r_status1+i);CHKERRQ(ierr);
825 
826       req_size[idex] = 0;
827       rbuf1_i        = rbuf1[idex];
828       start          = 2*rbuf1_i[0] + 1;
829       ierr           = MPI_Get_count(r_status1+i,MPIU_INT,&end);CHKERRQ(ierr);
830       ierr           = PetscMalloc1(end,&sbuf2[idex]);CHKERRQ(ierr);
831       sbuf2_i        = sbuf2[idex];
832       for (j=start; j<end; j++) {
833         id              = rbuf1_i[j] - rstart;
834         ncols           = sAi[id+1] - sAi[id] + sBi[id+1] - sBi[id];
835         sbuf2_i[j]      = ncols;
836         req_size[idex] += ncols;
837       }
838       req_source[idex] = r_status1[i].MPI_SOURCE;
839       /* form the header */
840       sbuf2_i[0] = req_size[idex];
841       for (j=1; j<start; j++) sbuf2_i[j] = rbuf1_i[j];
842       ierr = MPI_Isend(sbuf2_i,end,MPIU_INT,req_source[idex],tag1,comm,s_waits2+i);CHKERRQ(ierr);
843     }
844   }
845   ierr = PetscFree(r_status1);CHKERRQ(ierr);
846   ierr = PetscFree(r_waits1);CHKERRQ(ierr);
847 
848   /*  recv buffer sizes */
849   /* Receive messages*/
850   ierr = PetscMalloc1(nrqs+1,&rbuf3);CHKERRQ(ierr);
851   ierr = PetscMalloc1(nrqs+1,&r_waits3);CHKERRQ(ierr);
852   ierr = PetscMalloc1(nrqs+1,&r_status2);CHKERRQ(ierr);
853   if (!ijonly) {
854     ierr = PetscMalloc1(nrqs+1,&rbuf4);CHKERRQ(ierr);
855     ierr = PetscMalloc1(nrqs+1,&r_waits4);CHKERRQ(ierr);
856   }
857 
858   for (i=0; i<nrqs; ++i) {
859     ierr = MPI_Waitany(nrqs,r_waits2,&idex,r_status2+i);CHKERRQ(ierr);
860     ierr = PetscMalloc1(rbuf2[idex][0],&rbuf3[idex]);CHKERRQ(ierr);
861     ierr = MPI_Irecv(rbuf3[idex],rbuf2[idex][0],MPIU_INT,r_status2[i].MPI_SOURCE,tag2,comm,r_waits3+idex);CHKERRQ(ierr);
862     if (!ijonly) {
863       ierr = PetscMalloc1(rbuf2[idex][0]*bs2,&rbuf4[idex]);CHKERRQ(ierr);
864       ierr = MPI_Irecv(rbuf4[idex],rbuf2[idex][0]*bs2,MPIU_MATSCALAR,r_status2[i].MPI_SOURCE,tag3,comm,r_waits4+idex);CHKERRQ(ierr);
865     }
866   }
867   ierr = PetscFree(r_status2);CHKERRQ(ierr);
868   ierr = PetscFree(r_waits2);CHKERRQ(ierr);
869 
870   /* Wait on sends1 and sends2 */
871   ierr = PetscMalloc1(nrqs+1,&s_status1);CHKERRQ(ierr);
872   ierr = PetscMalloc1(nrqr+1,&s_status2);CHKERRQ(ierr);
873 
874   if (nrqs) {ierr = MPI_Waitall(nrqs,s_waits1,s_status1);CHKERRQ(ierr);}
875   if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits2,s_status2);CHKERRQ(ierr);}
876   ierr = PetscFree(s_status1);CHKERRQ(ierr);
877   ierr = PetscFree(s_status2);CHKERRQ(ierr);
878   ierr = PetscFree(s_waits1);CHKERRQ(ierr);
879   ierr = PetscFree(s_waits2);CHKERRQ(ierr);
880 
881   /* Now allocate buffers for a->j, and send them off */
882   ierr = PetscMalloc1(nrqr+1,&sbuf_aj);CHKERRQ(ierr);
883   for (i=0,j=0; i<nrqr; i++) j += req_size[i];
884   ierr = PetscMalloc1(j+1,&sbuf_aj[0]);CHKERRQ(ierr);
885   for (i=1; i<nrqr; i++) sbuf_aj[i] = sbuf_aj[i-1] + req_size[i-1];
886 
887   ierr = PetscMalloc1(nrqr+1,&s_waits3);CHKERRQ(ierr);
888   {
889     for (i=0; i<nrqr; i++) {
890       rbuf1_i   = rbuf1[i];
891       sbuf_aj_i = sbuf_aj[i];
892       ct1       = 2*rbuf1_i[0] + 1;
893       ct2       = 0;
894       for (j=1,max1=rbuf1_i[0]; j<=max1; j++) {
895         kmax = rbuf1[i][2*j];
896         for (k=0; k<kmax; k++,ct1++) {
897           row    = rbuf1_i[ct1] - rstart;
898           nzA    = a_i[row+1] - a_i[row];     nzB = b_i[row+1] - b_i[row];
899           ncols  = nzA + nzB;
900           cworkA = a_j + a_i[row]; cworkB = b_j + b_i[row];
901 
902           /* load the column indices for this row into cols*/
903           cols = sbuf_aj_i + ct2;
904           for (l=0; l<nzB; l++) {
905             if ((ctmp = bmap[cworkB[l]]) < cstart) cols[l] = ctmp;
906             else break;
907           }
908           imark = l;
909           for (l=0; l<nzA; l++)   cols[imark+l] = cstart + cworkA[l];
910           for (l=imark; l<nzB; l++) cols[nzA+l] = bmap[cworkB[l]];
911           ct2 += ncols;
912         }
913       }
914       ierr = MPI_Isend(sbuf_aj_i,req_size[i],MPIU_INT,req_source[i],tag2,comm,s_waits3+i);CHKERRQ(ierr);
915     }
916   }
917   ierr = PetscMalloc1(nrqs+1,&r_status3);CHKERRQ(ierr);
918   ierr = PetscMalloc1(nrqr+1,&s_status3);CHKERRQ(ierr);
919 
920   /* Allocate buffers for a->a, and send them off */
921   if (!ijonly) {
922     ierr = PetscMalloc1(nrqr+1,&sbuf_aa);CHKERRQ(ierr);
923     for (i=0,j=0; i<nrqr; i++) j += req_size[i];
924     ierr = PetscMalloc1((j+1)*bs2,&sbuf_aa[0]);CHKERRQ(ierr);
925     for (i=1; i<nrqr; i++) sbuf_aa[i] = sbuf_aa[i-1] + req_size[i-1]*bs2;
926 
927     ierr = PetscMalloc1(nrqr+1,&s_waits4);CHKERRQ(ierr);
928     {
929       for (i=0; i<nrqr; i++) {
930         rbuf1_i   = rbuf1[i];
931         sbuf_aa_i = sbuf_aa[i];
932         ct1       = 2*rbuf1_i[0]+1;
933         ct2       = 0;
934         for (j=1,max1=rbuf1_i[0]; j<=max1; j++) {
935           kmax = rbuf1_i[2*j];
936           for (k=0; k<kmax; k++,ct1++) {
937             row    = rbuf1_i[ct1] - rstart;
938             nzA    = a_i[row+1] - a_i[row];     nzB = b_i[row+1] - b_i[row];
939             ncols  = nzA + nzB;
940             cworkA = a_j + a_i[row];     cworkB = b_j + b_i[row];
941             vworkA = a_a + a_i[row]*bs2; vworkB = b_a + b_i[row]*bs2;
942 
943             /* load the column values for this row into vals*/
944             vals = sbuf_aa_i+ct2*bs2;
945             for (l=0; l<nzB; l++) {
946               if ((bmap[cworkB[l]]) < cstart) {
947                 ierr = PetscMemcpy(vals+l*bs2,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
948               } else break;
949             }
950             imark = l;
951             for (l=0; l<nzA; l++) {
952               ierr = PetscMemcpy(vals+(imark+l)*bs2,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
953             }
954             for (l=imark; l<nzB; l++) {
955               ierr = PetscMemcpy(vals+(nzA+l)*bs2,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
956             }
957             ct2 += ncols;
958           }
959         }
960         ierr = MPI_Isend(sbuf_aa_i,req_size[i]*bs2,MPIU_MATSCALAR,req_source[i],tag3,comm,s_waits4+i);CHKERRQ(ierr);
961       }
962     }
963     ierr = PetscMalloc1(nrqs+1,&r_status4);CHKERRQ(ierr);
964     ierr = PetscMalloc1(nrqr+1,&s_status4);CHKERRQ(ierr);
965   }
966   ierr = PetscFree(rbuf1[0]);CHKERRQ(ierr);
967   ierr = PetscFree(rbuf1);CHKERRQ(ierr);
968 
969   /* Form the matrix */
970   /* create col map: global col of C -> local col of submatrices */
971   {
972     const PetscInt *icol_i;
973 #if defined(PETSC_USE_CTABLE)
974     ierr = PetscMalloc1(1+ismax,&cmap);CHKERRQ(ierr);
975     for (i=0; i<ismax; i++) {
976       if (!allcolumns[i]) {
977         ierr   = PetscTableCreate(ncol[i]+1,c->Nbs+1,&cmap[i]);CHKERRQ(ierr);
978         jmax   = ncol[i];
979         icol_i = icol[i];
980         cmap_i = cmap[i];
981         for (j=0; j<jmax; j++) {
982           ierr = PetscTableAdd(cmap_i,icol_i[j]+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
983         }
984       } else {
985         cmap[i] = NULL;
986       }
987     }
988 #else
989     ierr = PetscMalloc1(ismax,&cmap);CHKERRQ(ierr);
990     for (i=0; i<ismax; i++) {
991       if (!allcolumns[i]) {
992         ierr   = PetscCalloc1(c->Nbs,&cmap[i]);CHKERRQ(ierr);
993         jmax   = ncol[i];
994         icol_i = icol[i];
995         cmap_i = cmap[i];
996         for (j=0; j<jmax; j++) {
997           cmap_i[icol_i[j]] = j+1;
998         }
999       } else { /* allcolumns[i] */
1000         cmap[i] = NULL;
1001       }
1002     }
1003 #endif
1004   }
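  /* Note: cmap stores local column j as j+1 (the PetscTable variant likewise maps
     global+1 -> local+1), so a lookup result of 0 means "column not in this IS";
     the assembly code below subtracts 1 before using the value */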
1005 
1006   /* Create lens which is required for MatCreate... */
1007   for (i=0,j=0; i<ismax; i++) j += nrow[i];
1008   ierr    = PetscMalloc((1+ismax)*sizeof(PetscInt*)+ j*sizeof(PetscInt),&lens);CHKERRQ(ierr);
1009   lens[0] = (PetscInt*)(lens + ismax);
1010   ierr    = PetscMemzero(lens[0],j*sizeof(PetscInt));CHKERRQ(ierr);
1011   for (i=1; i<ismax; i++) lens[i] = lens[i-1] + nrow[i-1];
1012 
1013   /* Update lens from local data */
1014   for (i=0; i<ismax; i++) {
1015     jmax = nrow[i];
1016     if (!allcolumns[i]) cmap_i = cmap[i];
1017     irow_i = irow[i];
1018     lens_i = lens[i];
1019     for (j=0; j<jmax; j++) {
1020       if (allrows[i]) row = j;
1021       else row = irow_i[j];
1022 
1023 #if defined(PETSC_USE_CTABLE)
1024       ierr = PetscGetProc(row,size,c->rangebs,&proc);CHKERRQ(ierr);
1025 #else
1026       proc = rtable[row];
1027 #endif
1028       if (proc == rank) {
1029         /* Get indices from matA and then from matB */
1030         row    = row - rstart;
1031         nzA    = a_i[row+1] - a_i[row];     nzB = b_i[row+1] - b_i[row];
1032         cworkA =  a_j + a_i[row]; cworkB = b_j + b_i[row];
1033         if (!allcolumns[i]) {
1034 #if defined(PETSC_USE_CTABLE)
1035           for (k=0; k<nzA; k++) {
1036             ierr = PetscTableFind(cmap_i,cstart+cworkA[k]+1,&tt);CHKERRQ(ierr);
1037             if (tt) lens_i[j]++;
1038           }
1039           for (k=0; k<nzB; k++) {
1040             ierr = PetscTableFind(cmap_i,bmap[cworkB[k]]+1,&tt);CHKERRQ(ierr);
1041             if (tt) lens_i[j]++;
1042           }
1043 
1044 #else
1045           for (k=0; k<nzA; k++) {
1046             if (cmap_i[cstart + cworkA[k]]) lens_i[j]++;
1047           }
1048           for (k=0; k<nzB; k++) {
1049             if (cmap_i[bmap[cworkB[k]]]) lens_i[j]++;
1050           }
1051 #endif
1052         } else { /* allcolumns */
1053           lens_i[j] = nzA + nzB;
1054         }
1055       }
1056     }
1057   }
1058 #if defined(PETSC_USE_CTABLE)
1059   /* Create row map*/
1060   ierr = PetscMalloc1(1+ismax,&rmap);CHKERRQ(ierr);
1061   for (i=0; i<ismax; i++) {
1062     ierr = PetscTableCreate(nrow[i]+1,c->Mbs+1,&rmap[i]);CHKERRQ(ierr);
1063   }
1064 #else
1065   /* Create row map*/
1066   ierr    = PetscMalloc((1+ismax)*sizeof(PetscInt*)+ ismax*Mbs*sizeof(PetscInt),&rmap);CHKERRQ(ierr);
1067   rmap[0] = (PetscInt*)(rmap + ismax);
1068   ierr    = PetscMemzero(rmap[0],ismax*Mbs*sizeof(PetscInt));CHKERRQ(ierr);
1069   for (i=1; i<ismax; i++) rmap[i] = rmap[i-1] + Mbs;
1070 #endif
1071   for (i=0; i<ismax; i++) {
1072     irow_i = irow[i];
1073     jmax   = nrow[i];
1074 #if defined(PETSC_USE_CTABLE)
1075     rmap_i = rmap[i];
1076     for (j=0; j<jmax; j++) {
1077       if (allrows[i]) {
1078         ierr = PetscTableAdd(rmap_i,j+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
1079       } else {
1080         ierr = PetscTableAdd(rmap_i,irow_i[j]+1,j+1,INSERT_VALUES);CHKERRQ(ierr);
1081       }
1082     }
1083 #else
1084     rmap_i = rmap[i];
1085     for (j=0; j<jmax; j++) {
1086       if (allrows[i]) rmap_i[j] = j;
1087       else rmap_i[irow_i[j]] = j;
1088     }
1089 #endif
1090   }
1091 
1092   /* Update lens from offproc data */
1093   {
1094     PetscInt    *rbuf2_i,*rbuf3_i,*sbuf1_i;
1095     PetscMPIInt ii;
1096 
1097     for (tmp2=0; tmp2<nrqs; tmp2++) {
1098       ierr    = MPI_Waitany(nrqs,r_waits3,&ii,r_status3+tmp2);CHKERRQ(ierr);
1099       idex    = pa[ii];
1100       sbuf1_i = sbuf1[idex];
1101       jmax    = sbuf1_i[0];
1102       ct1     = 2*jmax+1;
1103       ct2     = 0;
1104       rbuf2_i = rbuf2[ii];
1105       rbuf3_i = rbuf3[ii];
1106       for (j=1; j<=jmax; j++) {
1107         is_no  = sbuf1_i[2*j-1];
1108         max1   = sbuf1_i[2*j];
1109         lens_i = lens[is_no];
1110         if (!allcolumns[is_no]) cmap_i = cmap[is_no];
1111         rmap_i = rmap[is_no];
1112         for (k=0; k<max1; k++,ct1++) {
1113 #if defined(PETSC_USE_CTABLE)
1114           ierr = PetscTableFind(rmap_i,sbuf1_i[ct1]+1,&row);CHKERRQ(ierr);
1115           row--;
1116           if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
1117 #else
1118           row = rmap_i[sbuf1_i[ct1]];  /* the val in the new matrix to be */
1119 #endif
1120           max2 = rbuf2_i[ct1];
1121           for (l=0; l<max2; l++,ct2++) {
1122             if (!allcolumns[is_no]) {
1123 #if defined(PETSC_USE_CTABLE)
1124               ierr = PetscTableFind(cmap_i,rbuf3_i[ct2]+1,&tt);CHKERRQ(ierr);
1125               if (tt) lens_i[row]++;
1126 #else
1127               if (cmap_i[rbuf3_i[ct2]]) lens_i[row]++;
1128 #endif
1129             } else { /* allcolumns */
1130               lens_i[row]++;
1131             }
1132           }
1133         }
1134       }
1135     }
1136   }
1137   ierr = PetscFree(r_status3);CHKERRQ(ierr);
1138   ierr = PetscFree(r_waits3);CHKERRQ(ierr);
1139   if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits3,s_status3);CHKERRQ(ierr);}
1140   ierr = PetscFree(s_status3);CHKERRQ(ierr);
1141   ierr = PetscFree(s_waits3);CHKERRQ(ierr);
1142 
1143   /* Create the submatrices */
1144   if (scall == MAT_REUSE_MATRIX) {
1145     if (ijonly) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_REUSE_MATRIX and ijonly is not supported yet");
1146     /*
1147         Assumes the new rows are the same length as the old rows, hence potentially a bug!
1148     */
1149     for (i=0; i<ismax; i++) {
1150       mat = (Mat_SeqBAIJ*)(submats[i]->data);
1151       if ((mat->mbs != nrow[i]) || (mat->nbs != ncol[i] || C->rmap->bs != bs)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. wrong size");
1152       ierr = PetscMemcmp(mat->ilen,lens[i],mat->mbs *sizeof(PetscInt),&flag);CHKERRQ(ierr);
1153       if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Cannot reuse matrix. wrong no of nonzeros");
1154       /* Initialize the matrix as if empty */
1155       ierr = PetscMemzero(mat->ilen,mat->mbs*sizeof(PetscInt));CHKERRQ(ierr);
1156 
1157       submats[i]->factortype = C->factortype;
1158     }
1159   } else {
1160     PetscInt bs_tmp;
1161     if (ijonly) bs_tmp = 1;
1162     else        bs_tmp = bs;
1163     for (i=0; i<ismax; i++) {
1164       ierr = MatCreate(PETSC_COMM_SELF,submats+i);CHKERRQ(ierr);
1165       ierr = MatSetSizes(submats[i],nrow[i]*bs_tmp,ncol[i]*bs_tmp,nrow[i]*bs_tmp,ncol[i]*bs_tmp);CHKERRQ(ierr);
1166       ierr = MatSetType(submats[i],((PetscObject)A)->type_name);CHKERRQ(ierr);
1167       ierr = MatSeqBAIJSetPreallocation(submats[i],bs_tmp,0,lens[i]);CHKERRQ(ierr);
1168       ierr = MatSeqSBAIJSetPreallocation(submats[i],bs_tmp,0,lens[i]);CHKERRQ(ierr); /* this subroutine is used by SBAIJ routines */
1169     }
1170   }
1171 
1172   /* Assemble the matrices */
1173   /* First assemble the local rows */
1174   {
1175     PetscInt  ilen_row,*imat_ilen,*imat_j,*imat_i;
1176     MatScalar *imat_a = NULL;
1177 
1178     for (i=0; i<ismax; i++) {
1179       mat       = (Mat_SeqBAIJ*)submats[i]->data;
1180       imat_ilen = mat->ilen;
1181       imat_j    = mat->j;
1182       imat_i    = mat->i;
1183       if (!ijonly) imat_a = mat->a;
1184       if (!allcolumns[i]) cmap_i = cmap[i];
1185       rmap_i = rmap[i];
1186       irow_i = irow[i];
1187       jmax   = nrow[i];
1188       for (j=0; j<jmax; j++) {
1189         if (allrows[i]) row = j;
1190         else row = irow_i[j];
1191 
1192 #if defined(PETSC_USE_CTABLE)
1193         ierr = PetscGetProc(row,size,c->rangebs,&proc);CHKERRQ(ierr);
1194 #else
1195         proc = rtable[row];
1196 #endif
1197         if (proc == rank) {
1198           row    = row - rstart;
1199           nzA    = a_i[row+1] - a_i[row];
1200           nzB    = b_i[row+1] - b_i[row];
1201           cworkA = a_j + a_i[row];
1202           cworkB = b_j + b_i[row];
1203           if (!ijonly) {
1204             vworkA = a_a + a_i[row]*bs2;
1205             vworkB = b_a + b_i[row]*bs2;
1206           }
1207 #if defined(PETSC_USE_CTABLE)
1208           ierr = PetscTableFind(rmap_i,row+rstart+1,&row);CHKERRQ(ierr);
1209           row--;
1210           if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
1211 #else
1212           row = rmap_i[row + rstart];
1213 #endif
1214           mat_i = imat_i[row];
1215           if (!ijonly) mat_a = imat_a + mat_i*bs2;
1216           mat_j    = imat_j + mat_i;
1217           ilen_row = imat_ilen[row];
1218 
1219           /* load the column indices for this row into cols*/
1220           if (!allcolumns[i]) {
1221             for (l=0; l<nzB; l++) {
1222               if ((ctmp = bmap[cworkB[l]]) < cstart) {
1223 #if defined(PETSC_USE_CTABLE)
1224                 ierr = PetscTableFind(cmap_i,ctmp+1,&tcol);CHKERRQ(ierr);
1225                 if (tcol) {
1226 #else
1227                 if ((tcol = cmap_i[ctmp])) {
1228 #endif
1229                   *mat_j++ = tcol - 1;
1230                   ierr     = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1231                   mat_a   += bs2;
1232                   ilen_row++;
1233                 }
1234               } else break;
1235             }
1236             imark = l;
1237             for (l=0; l<nzA; l++) {
1238 #if defined(PETSC_USE_CTABLE)
1239               ierr = PetscTableFind(cmap_i,cstart+cworkA[l]+1,&tcol);CHKERRQ(ierr);
1240               if (tcol) {
1241 #else
1242               if ((tcol = cmap_i[cstart + cworkA[l]])) {
1243 #endif
1244                 *mat_j++ = tcol - 1;
1245                 if (!ijonly) {
1246                   ierr   = PetscMemcpy(mat_a,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1247                   mat_a += bs2;
1248                 }
1249                 ilen_row++;
1250               }
1251             }
1252             for (l=imark; l<nzB; l++) {
1253 #if defined(PETSC_USE_CTABLE)
1254               ierr = PetscTableFind(cmap_i,bmap[cworkB[l]]+1,&tcol);CHKERRQ(ierr);
1255               if (tcol) {
1256 #else
1257               if ((tcol = cmap_i[bmap[cworkB[l]]])) {
1258 #endif
1259                 *mat_j++ = tcol - 1;
1260                 if (!ijonly) {
1261                   ierr   = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1262                   mat_a += bs2;
1263                 }
1264                 ilen_row++;
1265               }
1266             }
1267           } else { /* allcolumns */
1268             for (l=0; l<nzB; l++) {
1269               if ((ctmp = bmap[cworkB[l]]) < cstart) {
1270                 *mat_j++ = ctmp;
1271                 ierr     = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1272                 mat_a   += bs2;
1273                 ilen_row++;
1274               } else break;
1275             }
1276             imark = l;
1277             for (l=0; l<nzA; l++) {
1278               *mat_j++ = cstart+cworkA[l];
1279               if (!ijonly) {
1280                 ierr   = PetscMemcpy(mat_a,vworkA+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1281                 mat_a += bs2;
1282               }
1283               ilen_row++;
1284             }
1285             for (l=imark; l<nzB; l++) {
1286               *mat_j++ = bmap[cworkB[l]];
1287               if (!ijonly) {
1288                 ierr   = PetscMemcpy(mat_a,vworkB+l*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1289                 mat_a += bs2;
1290               }
1291               ilen_row++;
1292             }
1293           }
1294           imat_ilen[row] = ilen_row;
1295         }
1296       }
1297     }
1298   }
1299 
1300   /*   Now assemble the off proc rows*/
1301   {
1302     PetscInt    *sbuf1_i,*rbuf2_i,*rbuf3_i,*imat_ilen,ilen;
1303     PetscInt    *imat_j,*imat_i;
1304     MatScalar   *imat_a = NULL,*rbuf4_i = NULL;
1305     PetscMPIInt ii;
1306 
1307     for (tmp2=0; tmp2<nrqs; tmp2++) {
1308       if (ijonly) ii = tmp2;
1309       else {
1310         ierr = MPI_Waitany(nrqs,r_waits4,&ii,r_status4+tmp2);CHKERRQ(ierr);
1311       }
1312       idex    = pa[ii];
1313       sbuf1_i = sbuf1[idex];
1314       jmax    = sbuf1_i[0];
1315       ct1     = 2*jmax + 1;
1316       ct2     = 0;
1317       rbuf2_i = rbuf2[ii];
1318       rbuf3_i = rbuf3[ii];
1319       if (!ijonly) rbuf4_i = rbuf4[ii];
1320       for (j=1; j<=jmax; j++) {
1321         is_no = sbuf1_i[2*j-1];
1322         if (!allcolumns[is_no]) cmap_i = cmap[is_no];
1323         rmap_i    = rmap[is_no];
1324         mat       = (Mat_SeqBAIJ*)submats[is_no]->data;
1325         imat_ilen = mat->ilen;
1326         imat_j    = mat->j;
1327         imat_i    = mat->i;
1328         if (!ijonly) imat_a = mat->a;
1329         max1 = sbuf1_i[2*j];
1330         for (k=0; k<max1; k++,ct1++) {
1331           row = sbuf1_i[ct1];
1332 #if defined(PETSC_USE_CTABLE)
1333           ierr = PetscTableFind(rmap_i,row+1,&row);CHKERRQ(ierr);
1334           row--;
1335           if (row < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"row not found in table");
1336 #else
1337           row = rmap_i[row];
1338 #endif
1339           ilen  = imat_ilen[row];
1340           mat_i = imat_i[row];
1341           if (!ijonly) mat_a = imat_a + mat_i*bs2;
1342           mat_j = imat_j + mat_i;
1343           max2  = rbuf2_i[ct1];
1344 
1345           if (!allcolumns[is_no]) {
1346             for (l=0; l<max2; l++,ct2++) {
1347 #if defined(PETSC_USE_CTABLE)
1348               ierr = PetscTableFind(cmap_i,rbuf3_i[ct2]+1,&tcol);CHKERRQ(ierr);
1349               if (tcol) {
1350 #else
1351               if ((tcol = cmap_i[rbuf3_i[ct2]])) {
1352 #endif
1353                 *mat_j++ = tcol - 1;
1354                 if (!ijonly) {
1355                   ierr   = PetscMemcpy(mat_a,rbuf4_i+ct2*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1356                   mat_a += bs2;
1357                 }
1358                 ilen++;
1359               }
1360             }
1361           } else { /* allcolumns */
1362             for (l=0; l<max2; l++,ct2++) {
1363               *mat_j++ = rbuf3_i[ct2];
1364               if (!ijonly) {
1365                 ierr   = PetscMemcpy(mat_a,rbuf4_i+ct2*bs2,bs2*sizeof(MatScalar));CHKERRQ(ierr);
1366                 mat_a += bs2;
1367               }
1368               ilen++;
1369             }
1370           }
1371           imat_ilen[row] = ilen;
1372         }
1373       }
1374     }
1375   }
1376   /* sort the rows */
1377   {
1378     PetscInt  ilen_row,*imat_ilen,*imat_j,*imat_i;
1379     MatScalar *imat_a = NULL;
1380     MatScalar *work;
1381 
1382     ierr = PetscMalloc1(bs2,&work);CHKERRQ(ierr);
1383     for (i=0; i<ismax; i++) {
1384       mat       = (Mat_SeqBAIJ*)submats[i]->data;
1385       imat_ilen = mat->ilen;
1386       imat_j    = mat->j;
1387       imat_i    = mat->i;
1388       if (!ijonly) imat_a = mat->a;
1389       if (allcolumns[i]) continue;
1390       jmax   = nrow[i];
1391       for (j=0; j<jmax; j++) {
1392         mat_i = imat_i[j];
1393         if (!ijonly) mat_a = imat_a + mat_i*bs2;
1394         mat_j    = imat_j + mat_i;
1395         ilen_row = imat_ilen[j];
1396         if (!ijonly) {ierr = PetscSortIntWithDataArray(ilen_row,mat_j,mat_a,bs2*sizeof(MatScalar),work);CHKERRQ(ierr);}
1397         else {ierr = PetscSortInt(ilen_row,mat_j);CHKERRQ(ierr);}
1398       }
1399     }
1400     ierr = PetscFree(work);CHKERRQ(ierr);
1401   }
1402   if (!ijonly) {
1403     ierr = PetscFree(r_status4);CHKERRQ(ierr);
1404     ierr = PetscFree(r_waits4);CHKERRQ(ierr);
1405     if (nrqr) {ierr = MPI_Waitall(nrqr,s_waits4,s_status4);CHKERRQ(ierr);}
1406     ierr = PetscFree(s_waits4);CHKERRQ(ierr);
1407     ierr = PetscFree(s_status4);CHKERRQ(ierr);
1408   }
1409 
1410   /* Restore the indices */
1411   for (i=0; i<ismax; i++) {
1412     if (!allrows[i]) {
1413       ierr = ISRestoreIndices(isrow[i],irow+i);CHKERRQ(ierr);
1414     }
1415     if (!allcolumns[i]) {
1416       ierr = ISRestoreIndices(iscol[i],icol+i);CHKERRQ(ierr);
1417     }
1418   }
1419 
1420   /* Destroy allocated memory */
1421 #if defined(PETSC_USE_CTABLE)
1422   ierr = PetscFree4(irow,icol,nrow,ncol);CHKERRQ(ierr);
1423 #else
1424   ierr = PetscFree5(irow,icol,nrow,ncol,rtable);CHKERRQ(ierr);
1425 #endif
1426   ierr = PetscFree4(w1,w2,w3,w4);CHKERRQ(ierr);
1427   ierr = PetscFree(pa);CHKERRQ(ierr);
1428 
1429   ierr = PetscFree4(sbuf1,ptr,tmp,ctr);CHKERRQ(ierr);
1430   ierr = PetscFree(sbuf1);CHKERRQ(ierr);
1431   ierr = PetscFree(rbuf2);CHKERRQ(ierr);
1432   for (i=0; i<nrqr; ++i) {
1433     ierr = PetscFree(sbuf2[i]);CHKERRQ(ierr);
1434   }
1435   for (i=0; i<nrqs; ++i) {
1436     ierr = PetscFree(rbuf3[i]);CHKERRQ(ierr);
1437   }
1438   ierr = PetscFree3(sbuf2,req_size,req_source);CHKERRQ(ierr);
1439   ierr = PetscFree(rbuf3);CHKERRQ(ierr);
1440   ierr = PetscFree(sbuf_aj[0]);CHKERRQ(ierr);
1441   ierr = PetscFree(sbuf_aj);CHKERRQ(ierr);
1442   if (!ijonly) {
1443     for (i=0; i<nrqs; ++i) {ierr = PetscFree(rbuf4[i]);CHKERRQ(ierr);}
1444     ierr = PetscFree(rbuf4);CHKERRQ(ierr);
1445     ierr = PetscFree(sbuf_aa[0]);CHKERRQ(ierr);
1446     ierr = PetscFree(sbuf_aa);CHKERRQ(ierr);
1447   }
1448 
1449 #if defined(PETSC_USE_CTABLE)
1450   for (i=0; i<ismax; i++) {
1451     ierr = PetscTableDestroy((PetscTable*)&rmap[i]);CHKERRQ(ierr);
1452   }
1453 #endif
1454   ierr = PetscFree(rmap);CHKERRQ(ierr);
1455 
1456   for (i=0; i<ismax; i++) {
1457     if (!allcolumns[i]) {
1458 #if defined(PETSC_USE_CTABLE)
1459       ierr = PetscTableDestroy((PetscTable*)&cmap[i]);CHKERRQ(ierr);
1460 #else
1461       ierr = PetscFree(cmap[i]);CHKERRQ(ierr);
1462 #endif
1463     }
1464   }
1465   ierr = PetscFree(cmap);CHKERRQ(ierr);
1466   ierr = PetscFree(lens);CHKERRQ(ierr);
1467 
1468   for (i=0; i<ismax; i++) {
1469     ierr = MatAssemblyBegin(submats[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1470     ierr = MatAssemblyEnd(submats[i],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1471   }
1472 
1473   c->ijonly = PETSC_FALSE; /* set back to the default */
1474   PetscFunctionReturn(0);
1475 }
1476 
1477