xref: /petsc/src/mat/impls/baij/mpi/baijov.c (revision e0f5bfbec699682fa3e8b8532b1176849ea4e12a)
1 
2 /*
3    Routines to compute overlapping regions of a parallel MPI matrix
4   and to find submatrices that are shared across processors.
5 */
6 #include <../src/mat/impls/baij/mpi/mpibaij.h>
7 #include <petscbt.h>
8 
9 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Local(Mat, PetscInt, char **, PetscInt *, PetscInt **);
10 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Receive(Mat, PetscInt, PetscInt **, PetscInt **, PetscInt *);
11 extern PetscErrorCode MatGetRow_MPIBAIJ(Mat, PetscInt, PetscInt *, PetscInt **, PetscScalar **);
12 extern PetscErrorCode MatRestoreRow_MPIBAIJ(Mat, PetscInt, PetscInt *, PetscInt **, PetscScalar **);
13 
14 PetscErrorCode MatIncreaseOverlap_MPIBAIJ(Mat C, PetscInt imax, IS is[], PetscInt ov) {
15   PetscInt i, N = C->cmap->N, bs = C->rmap->bs;
16   IS      *is_new;
17 
18   PetscFunctionBegin;
19   PetscCall(PetscMalloc1(imax, &is_new));
20   /* Convert the indices into block format */
21   PetscCall(ISCompressIndicesGeneral(N, C->rmap->n, bs, imax, is, is_new));
22   PetscCheck(ov >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap specified");
23   for (i = 0; i < ov; ++i) PetscCall(MatIncreaseOverlap_MPIBAIJ_Once(C, imax, is_new));
24   for (i = 0; i < imax; i++) PetscCall(ISDestroy(&is[i]));
25   PetscCall(ISExpandIndicesGeneral(N, N, bs, imax, is_new, is));
26   for (i = 0; i < imax; i++) PetscCall(ISDestroy(&is_new[i]));
27   PetscCall(PetscFree(is_new));
28   PetscFunctionReturn(0);
29 }
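/*
  Illustrative usage sketch (not part of the library source): this routine is reached
  through the public MatIncreaseOverlap() interface when the matrix type is MATMPIBAIJ.
  The index sets hold point (unblocked) row indices; they are compressed to block
  indices above, grown ov times, and expanded back to point indices.  The variable
  names C, m and rstart are assumptions for the example only.

     IS is;
     PetscCall(ISCreateStride(PETSC_COMM_SELF, m, rstart, 1, &is));  // this process's rows
     PetscCall(MatIncreaseOverlap(C, 1, &is, 2));                    // grow the set by 2 levels
     // ... use the enlarged index set, e.g. with MatCreateSubMatrices() ...
     PetscCall(ISDestroy(&is));
*/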
30 
31 /*
32   Sample message format:
33   If a processor A wants processor B to process some elements corresponding
34   to index sets is[1], is[5]
35   mesg [0] = 2   (no of index sets in the mesg)
36   -----------
37   mesg [1] = 1 => is[1]
38   mesg [2] = sizeof(is[1]);
39   -----------
40   mesg [3] = 5  => is[5]
41   mesg [4] = sizeof(is[5]);
42   -----------
43   mesg [5]
44   mesg [n]  data(is[1])
45   -----------
46   mesg[n+1]
47   mesg[m]  data(is[5])
48   -----------
49 
50   Notes:
51   nrqs - no of requests sent (or to be sent out)
52   nrqr - no of requests received (which have to be or which have been processed)
53 */
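/*
  Concrete instance of the format above (illustrative only): a message carrying block
  rows {3,7} for is[1] and block rows {2,4,9} for is[5] is laid out as

     mesg = [2,  1,2,  5,3,  3,7,2,4,9]

  i.e. mesg[0] = 2 index sets, mesg[1..2] = header of is[1] (index 1, size 2),
  mesg[3..4] = header of is[5] (index 5, size 3), and the remaining entries are the
  data for is[1] followed by the data for is[5].
*/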
54 PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Once(Mat C, PetscInt imax, IS is[]) {
55   Mat_MPIBAIJ     *c = (Mat_MPIBAIJ *)C->data;
56   const PetscInt **idx, *idx_i;
57   PetscInt        *n, *w3, *w4, **data, len;
58   PetscMPIInt      size, rank, tag1, tag2, *w2, *w1, nrqr;
59   PetscInt         Mbs, i, j, k, **rbuf, row, nrqs, msz, **outdat, **ptr;
60   PetscInt        *ctr, *pa, *tmp, *isz, *isz1, **xdata, **rbuf2, *d_p;
61   PetscMPIInt     *onodes1, *olengths1, *onodes2, *olengths2, proc = -1;
62   PetscBT         *table;
63   MPI_Comm         comm, *iscomms;
64   MPI_Request     *s_waits1, *r_waits1, *s_waits2, *r_waits2;
65   char            *t_p;
66 
67   PetscFunctionBegin;
68   PetscCall(PetscObjectGetComm((PetscObject)C, &comm));
69   size = c->size;
70   rank = c->rank;
71   Mbs  = c->Mbs;
72 
73   PetscCall(PetscObjectGetNewTag((PetscObject)C, &tag1));
74   PetscCall(PetscObjectGetNewTag((PetscObject)C, &tag2));
75 
76   PetscCall(PetscMalloc2(imax + 1, (PetscInt ***)&idx, imax, &n));
77 
78   for (i = 0; i < imax; i++) {
79     PetscCall(ISGetIndices(is[i], &idx[i]));
80     PetscCall(ISGetLocalSize(is[i], &n[i]));
81   }
82 
83   /* evaluate communication - mesg to whom, length of mesg, and buffer space
84      required. Based on this, buffers are allocated, and data copied into them */
85   PetscCall(PetscCalloc4(size, &w1, size, &w2, size, &w3, size, &w4));
86   for (i = 0; i < imax; i++) {
87     PetscCall(PetscArrayzero(w4, size)); /* initialise work vector*/
88     idx_i = idx[i];
89     len   = n[i];
90     for (j = 0; j < len; j++) {
91       row = idx_i[j];
92       PetscCheck(row >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Index set cannot have negative entries");
93       PetscCall(PetscLayoutFindOwner(C->rmap, row * C->rmap->bs, &proc));
94       w4[proc]++;
95     }
96     for (j = 0; j < size; j++) {
97       if (w4[j]) {
98         w1[j] += w4[j];
99         w3[j]++;
100       }
101     }
102   }
103 
104   nrqs     = 0; /* no of outgoing messages */
105   msz      = 0; /* total mesg length (for all procs) */
106   w1[rank] = 0; /* no mesg sent to itself */
107   w3[rank] = 0;
108   for (i = 0; i < size; i++) {
109     if (w1[i]) {
110       w2[i] = 1;
111       nrqs++;
112     } /* there exists a message to proc i */
113   }
114   /* pa - is list of processors to communicate with */
115   PetscCall(PetscMalloc1(nrqs, &pa));
116   for (i = 0, j = 0; i < size; i++) {
117     if (w1[i]) {
118       pa[j] = i;
119       j++;
120     }
121   }
122 
123   /* Each message would have a header = 1 + 2*(no of IS) + data */
124   for (i = 0; i < nrqs; i++) {
125     j = pa[i];
126     w1[j] += w2[j] + 2 * w3[j];
127     msz += w1[j];
128   }
129 
130   /* Determine the number of messages to expect, their lengths, and from-ids */
131   PetscCall(PetscGatherNumberOfMessages(comm, w2, w1, &nrqr));
132   PetscCall(PetscGatherMessageLengths(comm, nrqs, nrqr, w1, &onodes1, &olengths1));
133 
134   /* Now post the Irecvs corresponding to these messages */
135   PetscCall(PetscPostIrecvInt(comm, tag1, nrqr, onodes1, olengths1, &rbuf, &r_waits1));
136 
137   /* Allocate Memory for outgoing messages */
138   PetscCall(PetscMalloc4(size, &outdat, size, &ptr, msz, &tmp, size, &ctr));
139   PetscCall(PetscArrayzero(outdat, size));
140   PetscCall(PetscArrayzero(ptr, size));
141   {
142     PetscInt *iptr = tmp, ict = 0;
143     for (i = 0; i < nrqs; i++) {
144       j = pa[i];
145       iptr += ict;
146       outdat[j] = iptr;
147       ict       = w1[j];
148     }
149   }
150 
151   /* Form the outgoing messages */
152   /*plug in the headers*/
153   for (i = 0; i < nrqs; i++) {
154     j            = pa[i];
155     outdat[j][0] = 0;
156     PetscCall(PetscArrayzero(outdat[j] + 1, 2 * w3[j]));
157     ptr[j] = outdat[j] + 2 * w3[j] + 1;
158   }
159 
160   /* Memory for doing local proc's work*/
161   {
162     PetscCall(PetscCalloc5(imax, &table, imax, &data, imax, &isz, Mbs * imax, &d_p, (Mbs / PETSC_BITS_PER_BYTE + 1) * imax, &t_p));
163 
164     for (i = 0; i < imax; i++) {
165       table[i] = t_p + (Mbs / PETSC_BITS_PER_BYTE + 1) * i;
166       data[i]  = d_p + (Mbs)*i;
167     }
168   }
169 
170   /* Parse the IS and update local tables and the outgoing buf with the data*/
171   {
172     PetscInt n_i, *data_i, isz_i, *outdat_j, ctr_j;
173     PetscBT  table_i;
174 
175     for (i = 0; i < imax; i++) {
176       PetscCall(PetscArrayzero(ctr, size));
177       n_i     = n[i];
178       table_i = table[i];
179       idx_i   = idx[i];
180       data_i  = data[i];
181       isz_i   = isz[i];
182       for (j = 0; j < n_i; j++) { /* parse the indices of each IS */
183         row = idx_i[j];
184         PetscCall(PetscLayoutFindOwner(C->rmap, row * C->rmap->bs, &proc));
185         if (proc != rank) { /* copy to the outgoing buffer */
186           ctr[proc]++;
187           *ptr[proc] = row;
188           ptr[proc]++;
189         } else { /* Update the local table */
190           if (!PetscBTLookupSet(table_i, row)) data_i[isz_i++] = row;
191         }
192       }
193       /* Update the headers for the current IS */
194       for (j = 0; j < size; j++) { /* Can Optimise this loop by using pa[] */
195         if ((ctr_j = ctr[j])) {
196           outdat_j            = outdat[j];
197           k                   = ++outdat_j[0];
198           outdat_j[2 * k]     = ctr_j;
199           outdat_j[2 * k - 1] = i;
200         }
201       }
202       isz[i] = isz_i;
203     }
204   }
205 
206   /*  Now  post the sends */
207   PetscCall(PetscMalloc1(nrqs, &s_waits1));
208   for (i = 0; i < nrqs; ++i) {
209     j = pa[i];
210     PetscCallMPI(MPI_Isend(outdat[j], w1[j], MPIU_INT, j, tag1, comm, s_waits1 + i));
211   }
212 
213   /* No longer need the original indices*/
214   for (i = 0; i < imax; ++i) PetscCall(ISRestoreIndices(is[i], idx + i));
215   PetscCall(PetscFree2(*(PetscInt ***)&idx, n));
216 
217   PetscCall(PetscMalloc1(imax, &iscomms));
218   for (i = 0; i < imax; ++i) {
219     PetscCall(PetscCommDuplicate(PetscObjectComm((PetscObject)is[i]), &iscomms[i], NULL));
220     PetscCall(ISDestroy(&is[i]));
221   }
222 
223   /* Do Local work*/
224   PetscCall(MatIncreaseOverlap_MPIBAIJ_Local(C, imax, table, isz, data));
225 
226   /* Receive messages*/
227   PetscCallMPI(MPI_Waitall(nrqr, r_waits1, MPI_STATUSES_IGNORE));
228   PetscCallMPI(MPI_Waitall(nrqs, s_waits1, MPI_STATUSES_IGNORE));
229 
230   /* Phase 1 sends are complete - deallocate buffers */
231   PetscCall(PetscFree4(outdat, ptr, tmp, ctr));
232   PetscCall(PetscFree4(w1, w2, w3, w4));
233 
234   PetscCall(PetscMalloc1(nrqr, &xdata));
235   PetscCall(PetscMalloc1(nrqr, &isz1));
236   PetscCall(MatIncreaseOverlap_MPIBAIJ_Receive(C, nrqr, rbuf, xdata, isz1));
237   if (rbuf) {
238     PetscCall(PetscFree(rbuf[0]));
239     PetscCall(PetscFree(rbuf));
240   }
241 
242   /* Send the data back*/
243   /* Do a global reduction to know the buffer space req for incoming messages*/
244   {
245     PetscMPIInt *rw1;
246 
247     PetscCall(PetscCalloc1(size, &rw1));
248 
249     for (i = 0; i < nrqr; ++i) {
250       proc      = onodes1[i];
251       rw1[proc] = isz1[i];
252     }
253 
254     /* Determine the number of messages to expect, their lengths, and from-ids */
255     PetscCall(PetscGatherMessageLengths(comm, nrqr, nrqs, rw1, &onodes2, &olengths2));
256     PetscCall(PetscFree(rw1));
257   }
258   /* Now post the Irecvs corresponding to these messages */
259   PetscCall(PetscPostIrecvInt(comm, tag2, nrqs, onodes2, olengths2, &rbuf2, &r_waits2));
260 
261   /*  Now  post the sends */
262   PetscCall(PetscMalloc1(nrqr, &s_waits2));
263   for (i = 0; i < nrqr; ++i) {
264     j = onodes1[i];
265     PetscCallMPI(MPI_Isend(xdata[i], isz1[i], MPIU_INT, j, tag2, comm, s_waits2 + i));
266   }
267 
268   PetscCall(PetscFree(onodes1));
269   PetscCall(PetscFree(olengths1));
270 
271   /* receive work done on other processors*/
272   {
273     PetscMPIInt idex;
274     PetscInt    is_no, ct1, max, *rbuf2_i, isz_i, *data_i, jmax;
275     PetscBT     table_i;
276 
277     for (i = 0; i < nrqs; ++i) {
278       PetscCallMPI(MPI_Waitany(nrqs, r_waits2, &idex, MPI_STATUS_IGNORE));
279       /* Process the message*/
280       rbuf2_i = rbuf2[idex];
281       ct1     = 2 * rbuf2_i[0] + 1;
282       jmax    = rbuf2[idex][0];
283       for (j = 1; j <= jmax; j++) {
284         max     = rbuf2_i[2 * j];
285         is_no   = rbuf2_i[2 * j - 1];
286         isz_i   = isz[is_no];
287         data_i  = data[is_no];
288         table_i = table[is_no];
289         for (k = 0; k < max; k++, ct1++) {
290           row = rbuf2_i[ct1];
291           if (!PetscBTLookupSet(table_i, row)) data_i[isz_i++] = row;
292         }
293         isz[is_no] = isz_i;
294       }
295     }
296     PetscCallMPI(MPI_Waitall(nrqr, s_waits2, MPI_STATUSES_IGNORE));
297   }
298 
299   for (i = 0; i < imax; ++i) {
300     PetscCall(ISCreateGeneral(iscomms[i], isz[i], data[i], PETSC_COPY_VALUES, is + i));
301     PetscCall(PetscCommDestroy(&iscomms[i]));
302   }
303 
304   PetscCall(PetscFree(iscomms));
305   PetscCall(PetscFree(onodes2));
306   PetscCall(PetscFree(olengths2));
307 
308   PetscCall(PetscFree(pa));
309   if (rbuf2) {
310     PetscCall(PetscFree(rbuf2[0]));
311     PetscCall(PetscFree(rbuf2));
312   }
313   PetscCall(PetscFree(s_waits1));
314   PetscCall(PetscFree(r_waits1));
315   PetscCall(PetscFree(s_waits2));
316   PetscCall(PetscFree(r_waits2));
317   PetscCall(PetscFree5(table, data, isz, d_p, t_p));
318   if (xdata) {
319     PetscCall(PetscFree(xdata[0]));
320     PetscCall(PetscFree(xdata));
321   }
322   PetscCall(PetscFree(isz1));
323   PetscFunctionReturn(0);
324 }
325 
326 /*
327    MatIncreaseOverlap_MPIBAIJ_Local - Called by MatIncreaseOverlap to do
328        the work on the local processor.
329 
330      Inputs:
331       C      - a MATMPIBAIJ matrix;
332       imax   - total no of index sets processed at a time;
333       table  - an array of PetscBT bit tables, each of size Mbs bits.
334 
335      Output:
336       isz    - array containing the count of the solution elements corresponding
337                to each index set;
338       data   - pointer to the solutions
339 */
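/*
   Worked example (illustrative only, not from the library): suppose data[i] = {r} and
   isz[i] = 1 on entry, where r is a block row owned by this process whose A-part has
   block columns {0,2} and whose B-part columns map through garray to the global block
   column {17}.  After one pass the routine appends cstart+0, cstart+2 and 17 (skipping
   any value already marked in table[i]), so data[i] = {r, cstart+0, cstart+2, 17} and
   isz[i] = 4.
*/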
340 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Local(Mat C, PetscInt imax, PetscBT *table, PetscInt *isz, PetscInt **data) {
341   Mat_MPIBAIJ *c = (Mat_MPIBAIJ *)C->data;
342   Mat          A = c->A, B = c->B;
343   Mat_SeqBAIJ *a = (Mat_SeqBAIJ *)A->data, *b = (Mat_SeqBAIJ *)B->data;
344   PetscInt     start, end, val, max, rstart, cstart, *ai, *aj;
345   PetscInt    *bi, *bj, *garray, i, j, k, row, *data_i, isz_i;
346   PetscBT      table_i;
347 
348   PetscFunctionBegin;
349   rstart = c->rstartbs;
350   cstart = c->cstartbs;
351   ai     = a->i;
352   aj     = a->j;
353   bi     = b->i;
354   bj     = b->j;
355   garray = c->garray;
356 
357   for (i = 0; i < imax; i++) {
358     data_i  = data[i];
359     table_i = table[i];
360     isz_i   = isz[i];
361     for (j = 0, max = isz[i]; j < max; j++) {
362       row   = data_i[j] - rstart;
363       start = ai[row];
364       end   = ai[row + 1];
365       for (k = start; k < end; k++) { /* Amat */
366         val = aj[k] + cstart;
367         if (!PetscBTLookupSet(table_i, val)) data_i[isz_i++] = val;
368       }
369       start = bi[row];
370       end   = bi[row + 1];
371       for (k = start; k < end; k++) { /* Bmat */
372         val = garray[bj[k]];
373         if (!PetscBTLookupSet(table_i, val)) data_i[isz_i++] = val;
374       }
375     }
376     isz[i] = isz_i;
377   }
378   PetscFunctionReturn(0);
379 }
380 /*
381       MatIncreaseOverlap_MPIBAIJ_Receive - Process the received messages,
382          and return the output
383 
384          Input:
385            C    - the matrix
386            nrqr - no of messages being processed.
387            rbuf - an array of pointers to the received requests
388 
389          Output:
390            xdata - array of messages to be sent back
391            isz1  - size of each message
392 
393   For better efficiency perhaps we should malloc each xdata[i] separately;
394 then, if a realloc is required, we would only need to copy the data for that one
395 message rather than for all previous ones, as happens now with a single large
396 chunk of memory.
397 
398 */
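/*
  Reply layout produced below (descriptive note): xdata[i] mirrors the header of the
  request rbuf[i],
     xdata[i][0]     = number of index sets in the message,
     xdata[i][2*j-1] = index of the j-th IS (copied from the request),
     xdata[i][2*j]   = number of block indices returned for that IS,
  followed, starting at offset 2*xdata[i][0]+1, by the indices themselves: each
  requested block row plus the block columns of its local A and B parts, deduplicated
  per IS with the bit table xtable.  isz1[i] records the total length of xdata[i].
*/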
399 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Receive(Mat C, PetscInt nrqr, PetscInt **rbuf, PetscInt **xdata, PetscInt *isz1) {
400   Mat_MPIBAIJ *c = (Mat_MPIBAIJ *)C->data;
401   Mat          A = c->A, B = c->B;
402   Mat_SeqBAIJ *a = (Mat_SeqBAIJ *)A->data, *b = (Mat_SeqBAIJ *)B->data;
403   PetscInt     rstart, cstart, *ai, *aj, *bi, *bj, *garray, i, j, k;
404   PetscInt     row, total_sz, ct, ct1, ct2, ct3, mem_estimate, oct2, l, start, end;
405   PetscInt     val, max1, max2, Mbs, no_malloc = 0, *tmp, new_estimate, ctr;
406   PetscInt    *rbuf_i, kmax, rbuf_0;
407   PetscBT      xtable;
408 
409   PetscFunctionBegin;
410   Mbs    = c->Mbs;
411   rstart = c->rstartbs;
412   cstart = c->cstartbs;
413   ai     = a->i;
414   aj     = a->j;
415   bi     = b->i;
416   bj     = b->j;
417   garray = c->garray;
418 
419   for (i = 0, ct = 0, total_sz = 0; i < nrqr; ++i) {
420     rbuf_i = rbuf[i];
421     rbuf_0 = rbuf_i[0];
422     ct += rbuf_0;
423     for (j = 1; j <= rbuf_0; j++) total_sz += rbuf_i[2 * j];
424   }
425 
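  /* Heuristic size for the single reply buffer xdata[0] (descriptive note): three times
     the larger of (a) the total number of block rows requested and (b) the number of IS
     requests times the average number of block nonzeros per local block row.  If the
     estimate is exceeded while filling the buffer, it is grown by a factor of 1.5 below. */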
426   if (c->Mbs) max1 = ct * (a->nz + b->nz) / c->Mbs;
427   else max1 = 1;
428   mem_estimate = 3 * ((total_sz > max1 ? total_sz : max1) + 1);
429   if (nrqr) {
430     PetscCall(PetscMalloc1(mem_estimate, &xdata[0]));
431     ++no_malloc;
432   }
433   PetscCall(PetscBTCreate(Mbs, &xtable));
434   PetscCall(PetscArrayzero(isz1, nrqr));
435 
436   ct3 = 0;
437   for (i = 0; i < nrqr; i++) { /* for each mesg from proc i */
438     rbuf_i = rbuf[i];
439     rbuf_0 = rbuf_i[0];
440     ct1    = 2 * rbuf_0 + 1;
441     ct2    = ct1;
442     ct3 += ct1;
443     for (j = 1; j <= rbuf_0; j++) { /* for each IS from proc i*/
444       PetscCall(PetscBTMemzero(Mbs, xtable));
445       oct2 = ct2;
446       kmax = rbuf_i[2 * j];
447       for (k = 0; k < kmax; k++, ct1++) {
448         row = rbuf_i[ct1];
449         if (!PetscBTLookupSet(xtable, row)) {
450           if (!(ct3 < mem_estimate)) {
451             new_estimate = (PetscInt)(1.5 * mem_estimate) + 1;
452             PetscCall(PetscMalloc1(new_estimate, &tmp));
453             PetscCall(PetscArraycpy(tmp, xdata[0], mem_estimate));
454             PetscCall(PetscFree(xdata[0]));
455             xdata[0]     = tmp;
456             mem_estimate = new_estimate;
457             ++no_malloc;
458             for (ctr = 1; ctr <= i; ctr++) xdata[ctr] = xdata[ctr - 1] + isz1[ctr - 1];
459           }
460           xdata[i][ct2++] = row;
461           ct3++;
462         }
463       }
464       for (k = oct2, max2 = ct2; k < max2; k++) {
465         row   = xdata[i][k] - rstart;
466         start = ai[row];
467         end   = ai[row + 1];
468         for (l = start; l < end; l++) {
469           val = aj[l] + cstart;
470           if (!PetscBTLookupSet(xtable, val)) {
471             if (!(ct3 < mem_estimate)) {
472               new_estimate = (PetscInt)(1.5 * mem_estimate) + 1;
473               PetscCall(PetscMalloc1(new_estimate, &tmp));
474               PetscCall(PetscArraycpy(tmp, xdata[0], mem_estimate));
475               PetscCall(PetscFree(xdata[0]));
476               xdata[0]     = tmp;
477               mem_estimate = new_estimate;
478               ++no_malloc;
479               for (ctr = 1; ctr <= i; ctr++) xdata[ctr] = xdata[ctr - 1] + isz1[ctr - 1];
480             }
481             xdata[i][ct2++] = val;
482             ct3++;
483           }
484         }
485         start = bi[row];
486         end   = bi[row + 1];
487         for (l = start; l < end; l++) {
488           val = garray[bj[l]];
489           if (!PetscBTLookupSet(xtable, val)) {
490             if (!(ct3 < mem_estimate)) {
491               new_estimate = (PetscInt)(1.5 * mem_estimate) + 1;
492               PetscCall(PetscMalloc1(new_estimate, &tmp));
493               PetscCall(PetscArraycpy(tmp, xdata[0], mem_estimate));
494               PetscCall(PetscFree(xdata[0]));
495               xdata[0]     = tmp;
496               mem_estimate = new_estimate;
497               ++no_malloc;
498               for (ctr = 1; ctr <= i; ctr++) xdata[ctr] = xdata[ctr - 1] + isz1[ctr - 1];
499             }
500             xdata[i][ct2++] = val;
501             ct3++;
502           }
503         }
504       }
505       /* Update the header*/
506       xdata[i][2 * j]     = ct2 - oct2; /* Undo the vector isz1 and use only a var*/
507       xdata[i][2 * j - 1] = rbuf_i[2 * j - 1];
508     }
509     xdata[i][0] = rbuf_0;
510     if (i + 1 < nrqr) xdata[i + 1] = xdata[i] + ct2;
511     isz1[i] = ct2; /* size of each message */
512   }
513   PetscCall(PetscBTDestroy(&xtable));
514   PetscCall(PetscInfo(C, "Allocated %" PetscInt_FMT " bytes, required %" PetscInt_FMT ", no of mallocs = %" PetscInt_FMT "\n", mem_estimate, ct3, no_malloc));
515   PetscFunctionReturn(0);
516 }
517 
518 PetscErrorCode MatCreateSubMatrices_MPIBAIJ(Mat C, PetscInt ismax, const IS isrow[], const IS iscol[], MatReuse scall, Mat *submat[]) {
519   IS          *isrow_block, *iscol_block;
520   Mat_MPIBAIJ *c = (Mat_MPIBAIJ *)C->data;
521   PetscInt     nmax, nstages_local, nstages, i, pos, max_no, N = C->cmap->N, bs = C->rmap->bs;
522   Mat_SeqBAIJ *subc;
523   Mat_SubSppt *smat;
524 
525   PetscFunctionBegin;
526   /* The compression and expansion should be avoided. It does not flag
527      errors and might change the indices, hence buggy */
528   PetscCall(PetscMalloc2(ismax + 1, &isrow_block, ismax + 1, &iscol_block));
529   PetscCall(ISCompressIndicesGeneral(N, C->rmap->n, bs, ismax, isrow, isrow_block));
530   PetscCall(ISCompressIndicesGeneral(N, C->cmap->n, bs, ismax, iscol, iscol_block));
531 
532   /* Determine the number of stages through which submatrices are done */
533   if (!C->cmap->N) nmax = 20 * 1000000 / sizeof(PetscInt);
534   else nmax = 20 * 1000000 / (c->Nbs * sizeof(PetscInt));
535   if (!nmax) nmax = 1;
536 
537   if (scall == MAT_INITIAL_MATRIX) {
538     nstages_local = ismax / nmax + ((ismax % nmax) ? 1 : 0); /* local nstages */
539 
540     /* Make sure every processor loops through the nstages */
541     PetscCall(MPIU_Allreduce(&nstages_local, &nstages, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)C)));
542 
543     /* Allocate memory to hold all the submatrices and dummy submatrices */
544     PetscCall(PetscCalloc1(ismax + nstages, submat));
545   } else { /* MAT_REUSE_MATRIX */
546     if (ismax) {
547       subc = (Mat_SeqBAIJ *)((*submat)[0]->data);
548       smat = subc->submatis1;
549     } else { /* (*submat)[0] is a dummy matrix */
550       smat = (Mat_SubSppt *)(*submat)[0]->data;
551     }
552     PetscCheck(smat, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "MatCreateSubMatrices(...,MAT_REUSE_MATRIX,...) requires submat");
553     nstages = smat->nstages;
554   }
555 
556   for (i = 0, pos = 0; i < nstages; i++) {
557     if (pos + nmax <= ismax) max_no = nmax;
558     else if (pos >= ismax) max_no = 0;
559     else max_no = ismax - pos;
560 
561     PetscCall(MatCreateSubMatrices_MPIBAIJ_local(C, max_no, isrow_block + pos, iscol_block + pos, scall, *submat + pos));
562     if (!max_no) {
563       if (scall == MAT_INITIAL_MATRIX) { /* submat[pos] is a dummy matrix */
564         smat          = (Mat_SubSppt *)(*submat)[pos]->data;
565         smat->nstages = nstages;
566       }
567       pos++; /* advance to next dummy matrix if any */
568     } else pos += max_no;
569   }
570 
571   if (scall == MAT_INITIAL_MATRIX && ismax) {
572     /* save nstages for reuse */
573     subc          = (Mat_SeqBAIJ *)((*submat)[0]->data);
574     smat          = subc->submatis1;
575     smat->nstages = nstages;
576   }
577 
578   for (i = 0; i < ismax; i++) {
579     PetscCall(ISDestroy(&isrow_block[i]));
580     PetscCall(ISDestroy(&iscol_block[i]));
581   }
582   PetscCall(PetscFree2(isrow_block, iscol_block));
583   PetscFunctionReturn(0);
584 }
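/*
  Illustrative usage sketch (not part of the library source): MatCreateSubMatrices()
  dispatches to the routine above for MATMPIBAIJ.  Each process extracts its requested
  submatrices as sequential matrices.  The variables C, rowis and colis are assumptions
  for the example only.

     Mat *submats;
     PetscCall(MatCreateSubMatrices(C, 1, &rowis, &colis, MAT_INITIAL_MATRIX, &submats));
     // ... use submats[0], e.g. as a local preconditioner block ...
     PetscCall(MatDestroySubMatrices(1, &submats));
*/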
585 
586 #if defined(PETSC_USE_CTABLE)
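/*
  PetscGetProc - given a global (block) row index and the ownership ranges proc_gnode[]
  (proc_gnode[p] is the first row owned by rank p, proc_gnode[size] the global count),
  return the rank that owns the row.  A linear estimate row*size/nGlobalNd seeds the
  search, which then walks up or down until proc_gnode[rank] <= row < proc_gnode[rank+1].

  Worked example (illustrative only): size = 4, proc_gnode = {0,10,20,30,40}, row = 25
  gives the initial guess fproc = (PetscInt)(25*4/40.0 + 0.5) = 3; since 25 < proc_gnode[3] = 30
  the loop steps down once and returns rank 2.
*/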
587 PetscErrorCode PetscGetProc(const PetscInt row, const PetscMPIInt size, const PetscInt proc_gnode[], PetscMPIInt *rank) {
588   PetscInt    nGlobalNd = proc_gnode[size];
589   PetscMPIInt fproc;
590 
591   PetscFunctionBegin;
592   PetscCall(PetscMPIIntCast((PetscInt)(((float)row * (float)size / (float)nGlobalNd + 0.5)), &fproc));
593   if (fproc > size) fproc = size;
594   while (row < proc_gnode[fproc] || row >= proc_gnode[fproc + 1]) {
595     if (row < proc_gnode[fproc]) fproc--;
596     else fproc++;
597   }
598   *rank = fproc;
599   PetscFunctionReturn(0);
600 }
601 #endif
602 
603 /* -------------------------------------------------------------------------*/
604 /* This code is used for BAIJ and SBAIJ matrices (unfortunate dependency) */
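/*
  Communication phases below (descriptive note): the requesting side sends the block
  rows it needs (tag0); the owning side replies first with the number of nonzeros per
  requested row (tag2), then with the block column indices (tag3) and, unless
  c->ijonly is set, with the numerical values (tag4).  With MAT_REUSE_MATRIX the
  buffers and request metadata are recovered from the Mat_SubSppt struct attached to
  the submatrices.
*/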
605 PetscErrorCode MatCreateSubMatrices_MPIBAIJ_local(Mat C, PetscInt ismax, const IS isrow[], const IS iscol[], MatReuse scall, Mat *submats) {
606   Mat_MPIBAIJ     *c = (Mat_MPIBAIJ *)C->data;
607   Mat              A = c->A;
608   Mat_SeqBAIJ     *a = (Mat_SeqBAIJ *)A->data, *b = (Mat_SeqBAIJ *)c->B->data, *subc;
609   const PetscInt **icol, **irow;
610   PetscInt        *nrow, *ncol, start;
611   PetscMPIInt      rank, size, tag0, tag2, tag3, tag4, *w1, *w2, *w3, *w4, nrqr;
612   PetscInt       **sbuf1, **sbuf2, *sbuf2_i, i, j, k, l, ct1, ct2, **rbuf1, row, proc = -1;
613   PetscInt         nrqs = 0, msz, **ptr = NULL, *req_size = NULL, *ctr = NULL, *pa, *tmp = NULL, tcol;
614   PetscInt       **rbuf3 = NULL, *req_source1 = NULL, *req_source2, **sbuf_aj, **rbuf2 = NULL, max1, max2;
615   PetscInt       **lens, is_no, ncols, *cols, mat_i, *mat_j, tmp2, jmax;
616 #if defined(PETSC_USE_CTABLE)
617   PetscTable *cmap, cmap_i = NULL, *rmap, rmap_i;
618 #else
619   PetscInt **cmap, *cmap_i = NULL, **rmap, *rmap_i;
620 #endif
621   const PetscInt *irow_i, *icol_i;
622   PetscInt        ctr_j, *sbuf1_j, *sbuf_aj_i, *rbuf1_i, kmax, *lens_i;
623   MPI_Request    *s_waits1, *r_waits1, *s_waits2, *r_waits2, *r_waits3;
624   MPI_Request    *r_waits4, *s_waits3, *s_waits4;
625   MPI_Comm        comm;
626   PetscScalar   **rbuf4, *rbuf4_i = NULL, **sbuf_aa, *vals, *mat_a = NULL, *imat_a = NULL, *sbuf_aa_i;
627   PetscMPIInt    *onodes1, *olengths1, end;
628   PetscInt      **row2proc, *row2proc_i, *imat_ilen, *imat_j, *imat_i;
629   Mat_SubSppt    *smat_i;
630   PetscBool      *issorted, colflag, iscsorted = PETSC_TRUE;
631   PetscInt       *sbuf1_i, *rbuf2_i, *rbuf3_i, ilen;
632   PetscInt        bs = C->rmap->bs, bs2 = c->bs2, rstart = c->rstartbs;
633   PetscBool       ijonly = c->ijonly; /* private flag indicates only matrix data structures are requested */
634   PetscInt        nzA, nzB, *a_i = a->i, *b_i = b->i, *a_j = a->j, *b_j = b->j, ctmp, imark, *cworkA, *cworkB;
635   PetscScalar    *vworkA = NULL, *vworkB = NULL, *a_a = a->a, *b_a = b->a;
636   PetscInt        cstart = c->cstartbs, *bmap = c->garray;
637   PetscBool      *allrows, *allcolumns;
638 
639   PetscFunctionBegin;
640   PetscCall(PetscObjectGetComm((PetscObject)C, &comm));
641   size = c->size;
642   rank = c->rank;
643 
644   PetscCall(PetscMalloc5(ismax, &row2proc, ismax, &cmap, ismax, &rmap, ismax + 1, &allcolumns, ismax, &allrows));
645   PetscCall(PetscMalloc5(ismax, (PetscInt ***)&irow, ismax, (PetscInt ***)&icol, ismax, &nrow, ismax, &ncol, ismax, &issorted));
646 
647   for (i = 0; i < ismax; i++) {
648     PetscCall(ISSorted(iscol[i], &issorted[i]));
649     if (!issorted[i]) iscsorted = issorted[i]; /* columns are not sorted! */
650     PetscCall(ISSorted(isrow[i], &issorted[i]));
651 
652     /* Check for special case: allcolumns */
653     PetscCall(ISIdentity(iscol[i], &colflag));
654     PetscCall(ISGetLocalSize(iscol[i], &ncol[i]));
655 
656     if (colflag && ncol[i] == c->Nbs) {
657       allcolumns[i] = PETSC_TRUE;
658       icol[i]       = NULL;
659     } else {
660       allcolumns[i] = PETSC_FALSE;
661       PetscCall(ISGetIndices(iscol[i], &icol[i]));
662     }
663 
664     /* Check for special case: allrows */
665     PetscCall(ISIdentity(isrow[i], &colflag));
666     PetscCall(ISGetLocalSize(isrow[i], &nrow[i]));
667     if (colflag && nrow[i] == c->Mbs) {
668       allrows[i] = PETSC_TRUE;
669       irow[i]    = NULL;
670     } else {
671       allrows[i] = PETSC_FALSE;
672       PetscCall(ISGetIndices(isrow[i], &irow[i]));
673     }
674   }
675 
676   if (scall == MAT_REUSE_MATRIX) {
677     /* Assumes new rows are same length as the old rows */
678     for (i = 0; i < ismax; i++) {
679       subc = (Mat_SeqBAIJ *)(submats[i]->data);
680       PetscCheck(subc->mbs == nrow[i] && subc->nbs == ncol[i], PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot reuse matrix. wrong size");
681 
682       /* Initial matrix as if empty */
683       PetscCall(PetscArrayzero(subc->ilen, subc->mbs));
684 
685       /* Carry over the factorization type of the original matrix */
686       submats[i]->factortype = C->factortype;
687 
688       smat_i = subc->submatis1;
689 
690       nrqs        = smat_i->nrqs;
691       nrqr        = smat_i->nrqr;
692       rbuf1       = smat_i->rbuf1;
693       rbuf2       = smat_i->rbuf2;
694       rbuf3       = smat_i->rbuf3;
695       req_source2 = smat_i->req_source2;
696 
697       sbuf1 = smat_i->sbuf1;
698       sbuf2 = smat_i->sbuf2;
699       ptr   = smat_i->ptr;
700       tmp   = smat_i->tmp;
701       ctr   = smat_i->ctr;
702 
703       pa          = smat_i->pa;
704       req_size    = smat_i->req_size;
705       req_source1 = smat_i->req_source1;
706 
707       allcolumns[i] = smat_i->allcolumns;
708       allrows[i]    = smat_i->allrows;
709       row2proc[i]   = smat_i->row2proc;
710       rmap[i]       = smat_i->rmap;
711       cmap[i]       = smat_i->cmap;
712     }
713 
714     if (!ismax) { /* Get dummy submatrices and retrieve struct submatis1 */
715       PetscCheck(submats[0], PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "submats are null, cannot reuse");
716       smat_i = (Mat_SubSppt *)submats[0]->data;
717 
718       nrqs        = smat_i->nrqs;
719       nrqr        = smat_i->nrqr;
720       rbuf1       = smat_i->rbuf1;
721       rbuf2       = smat_i->rbuf2;
722       rbuf3       = smat_i->rbuf3;
723       req_source2 = smat_i->req_source2;
724 
725       sbuf1 = smat_i->sbuf1;
726       sbuf2 = smat_i->sbuf2;
727       ptr   = smat_i->ptr;
728       tmp   = smat_i->tmp;
729       ctr   = smat_i->ctr;
730 
731       pa          = smat_i->pa;
732       req_size    = smat_i->req_size;
733       req_source1 = smat_i->req_source1;
734 
735       allcolumns[0] = PETSC_FALSE;
736     }
737   } else { /* scall == MAT_INITIAL_MATRIX */
738     /* Get some new tags to keep the communication clean */
739     PetscCall(PetscObjectGetNewTag((PetscObject)C, &tag2));
740     PetscCall(PetscObjectGetNewTag((PetscObject)C, &tag3));
741 
742     /* evaluate communication - mesg to whom, length of mesg, and buffer space
743      required. Based on this, buffers are allocated, and data copied into them */
744     PetscCall(PetscCalloc4(size, &w1, size, &w2, size, &w3, size, &w4)); /* mesg size, initialize work vectors */
745 
746     for (i = 0; i < ismax; i++) {
747       jmax   = nrow[i];
748       irow_i = irow[i];
749 
750       PetscCall(PetscMalloc1(jmax, &row2proc_i));
751       row2proc[i] = row2proc_i;
752 
753       if (issorted[i]) proc = 0;
754       for (j = 0; j < jmax; j++) {
755         if (!issorted[i]) proc = 0;
756         if (allrows[i]) row = j;
757         else row = irow_i[j];
758 
759         while (row >= c->rangebs[proc + 1]) proc++;
760         w4[proc]++;
761         row2proc_i[j] = proc; /* map row index to proc */
762       }
763       for (j = 0; j < size; j++) {
764         if (w4[j]) {
765           w1[j] += w4[j];
766           w3[j]++;
767           w4[j] = 0;
768         }
769       }
770     }
771 
772     nrqs     = 0; /* no of outgoing messages */
773     msz      = 0; /* total mesg length (for all procs) */
774     w1[rank] = 0; /* no mesg sent to self */
775     w3[rank] = 0;
776     for (i = 0; i < size; i++) {
777       if (w1[i]) {
778         w2[i] = 1;
779         nrqs++;
780       } /* there exists a message to proc i */
781     }
782     PetscCall(PetscMalloc1(nrqs, &pa)); /*(proc -array)*/
783     for (i = 0, j = 0; i < size; i++) {
784       if (w1[i]) {
785         pa[j] = i;
786         j++;
787       }
788     }
789 
790     /* Each message would have a header = 1 + 2*(no of IS) + data */
791     for (i = 0; i < nrqs; i++) {
792       j = pa[i];
793       w1[j] += w2[j] + 2 * w3[j];
794       msz += w1[j];
795     }
796     PetscCall(PetscInfo(0, "Number of outgoing messages %" PetscInt_FMT " Total message length %" PetscInt_FMT "\n", nrqs, msz));
797 
798     /* Determine the number of messages to expect, their lengths, and from-ids */
799     PetscCall(PetscGatherNumberOfMessages(comm, w2, w1, &nrqr));
800     PetscCall(PetscGatherMessageLengths(comm, nrqs, nrqr, w1, &onodes1, &olengths1));
801 
802     /* Now post the Irecvs corresponding to these messages */
803     PetscCall(PetscObjectGetNewTag((PetscObject)C, &tag0));
804     PetscCall(PetscPostIrecvInt(comm, tag0, nrqr, onodes1, olengths1, &rbuf1, &r_waits1));
805 
806     /* Allocate Memory for outgoing messages */
807     PetscCall(PetscMalloc4(size, &sbuf1, size, &ptr, 2 * msz, &tmp, size, &ctr));
808     PetscCall(PetscArrayzero(sbuf1, size));
809     PetscCall(PetscArrayzero(ptr, size));
810 
811     {
812       PetscInt *iptr = tmp;
813       k              = 0;
814       for (i = 0; i < nrqs; i++) {
815         j = pa[i];
816         iptr += k;
817         sbuf1[j] = iptr;
818         k        = w1[j];
819       }
820     }
821 
822     /* Form the outgoing messages. Initialize the header space */
823     for (i = 0; i < nrqs; i++) {
824       j           = pa[i];
825       sbuf1[j][0] = 0;
826       PetscCall(PetscArrayzero(sbuf1[j] + 1, 2 * w3[j]));
827       ptr[j] = sbuf1[j] + 2 * w3[j] + 1;
828     }
829 
830     /* Parse the isrow and copy data into outbuf */
831     for (i = 0; i < ismax; i++) {
832       row2proc_i = row2proc[i];
833       PetscCall(PetscArrayzero(ctr, size));
834       irow_i = irow[i];
835       jmax   = nrow[i];
836       for (j = 0; j < jmax; j++) { /* parse the indices of each IS */
837         proc = row2proc_i[j];
838         if (allrows[i]) row = j;
839         else row = irow_i[j];
840 
841         if (proc != rank) { /* copy to the outgoing buf*/
842           ctr[proc]++;
843           *ptr[proc] = row;
844           ptr[proc]++;
845         }
846       }
847       /* Update the headers for the current IS */
848       for (j = 0; j < size; j++) { /* Can Optimise this loop too */
849         if ((ctr_j = ctr[j])) {
850           sbuf1_j            = sbuf1[j];
851           k                  = ++sbuf1_j[0];
852           sbuf1_j[2 * k]     = ctr_j;
853           sbuf1_j[2 * k - 1] = i;
854         }
855       }
856     }
857 
858     /*  Now  post the sends */
859     PetscCall(PetscMalloc1(nrqs, &s_waits1));
860     for (i = 0; i < nrqs; ++i) {
861       j = pa[i];
862       PetscCallMPI(MPI_Isend(sbuf1[j], w1[j], MPIU_INT, j, tag0, comm, s_waits1 + i));
863     }
864 
865     /* Post Receives to capture the buffer size */
866     PetscCall(PetscMalloc1(nrqs, &r_waits2));
867     PetscCall(PetscMalloc3(nrqs, &req_source2, nrqs, &rbuf2, nrqs, &rbuf3));
868     if (nrqs) rbuf2[0] = tmp + msz;
869     for (i = 1; i < nrqs; ++i) rbuf2[i] = rbuf2[i - 1] + w1[pa[i - 1]];
870     for (i = 0; i < nrqs; ++i) {
871       j = pa[i];
872       PetscCallMPI(MPI_Irecv(rbuf2[i], w1[j], MPIU_INT, j, tag2, comm, r_waits2 + i));
873     }
874 
875     /* Send to other procs the buf size they should allocate */
876     /* Receive messages*/
877     PetscCall(PetscMalloc1(nrqr, &s_waits2));
878     PetscCall(PetscMalloc3(nrqr, &sbuf2, nrqr, &req_size, nrqr, &req_source1));
879 
880     PetscCallMPI(MPI_Waitall(nrqr, r_waits1, MPI_STATUSES_IGNORE));
881     for (i = 0; i < nrqr; ++i) {
882       req_size[i] = 0;
883       rbuf1_i     = rbuf1[i];
884       start       = 2 * rbuf1_i[0] + 1;
885       end         = olengths1[i];
886       PetscCall(PetscMalloc1(end, &sbuf2[i]));
887       sbuf2_i = sbuf2[i];
888       for (j = start; j < end; j++) {
889         row        = rbuf1_i[j] - rstart;
890         ncols      = a_i[row + 1] - a_i[row] + b_i[row + 1] - b_i[row];
891         sbuf2_i[j] = ncols;
892         req_size[i] += ncols;
893       }
894       req_source1[i] = onodes1[i];
895       /* form the header */
896       sbuf2_i[0]     = req_size[i];
897       for (j = 1; j < start; j++) sbuf2_i[j] = rbuf1_i[j];
898 
899       PetscCallMPI(MPI_Isend(sbuf2_i, end, MPIU_INT, req_source1[i], tag2, comm, s_waits2 + i));
900     }
901 
902     PetscCall(PetscFree(onodes1));
903     PetscCall(PetscFree(olengths1));
904 
905     PetscCall(PetscFree(r_waits1));
906     PetscCall(PetscFree4(w1, w2, w3, w4));
907 
908     /* Receive messages*/
909     PetscCall(PetscMalloc1(nrqs, &r_waits3));
910 
911     PetscCallMPI(MPI_Waitall(nrqs, r_waits2, MPI_STATUSES_IGNORE));
912     for (i = 0; i < nrqs; ++i) {
913       PetscCall(PetscMalloc1(rbuf2[i][0], &rbuf3[i]));
914       req_source2[i] = pa[i];
915       PetscCallMPI(MPI_Irecv(rbuf3[i], rbuf2[i][0], MPIU_INT, req_source2[i], tag3, comm, r_waits3 + i));
916     }
917     PetscCall(PetscFree(r_waits2));
918 
919     /* Wait on sends1 and sends2 */
920     PetscCallMPI(MPI_Waitall(nrqs, s_waits1, MPI_STATUSES_IGNORE));
921     PetscCallMPI(MPI_Waitall(nrqr, s_waits2, MPI_STATUSES_IGNORE));
922     PetscCall(PetscFree(s_waits1));
923     PetscCall(PetscFree(s_waits2));
924 
925     /* Now allocate sending buffers for a->j, and send them off */
926     PetscCall(PetscMalloc1(nrqr, &sbuf_aj));
927     for (i = 0, j = 0; i < nrqr; i++) j += req_size[i];
928     if (nrqr) PetscCall(PetscMalloc1(j, &sbuf_aj[0]));
929     for (i = 1; i < nrqr; i++) sbuf_aj[i] = sbuf_aj[i - 1] + req_size[i - 1];
930 
931     PetscCall(PetscMalloc1(nrqr, &s_waits3));
932     {
933       for (i = 0; i < nrqr; i++) {
934         rbuf1_i   = rbuf1[i];
935         sbuf_aj_i = sbuf_aj[i];
936         ct1       = 2 * rbuf1_i[0] + 1;
937         ct2       = 0;
938         for (j = 1, max1 = rbuf1_i[0]; j <= max1; j++) {
939           kmax = rbuf1[i][2 * j];
940           for (k = 0; k < kmax; k++, ct1++) {
941             row    = rbuf1_i[ct1] - rstart;
942             nzA    = a_i[row + 1] - a_i[row];
943             nzB    = b_i[row + 1] - b_i[row];
944             ncols  = nzA + nzB;
945             cworkA = a_j + a_i[row];
946             cworkB = b_j + b_i[row];
947 
948             /* load the column indices for this row into cols */
949             cols = sbuf_aj_i + ct2;
950             for (l = 0; l < nzB; l++) {
951               if ((ctmp = bmap[cworkB[l]]) < cstart) cols[l] = ctmp;
952               else break;
953             }
954             imark = l;
955             for (l = 0; l < nzA; l++) cols[imark + l] = cstart + cworkA[l];
956             for (l = imark; l < nzB; l++) cols[nzA + l] = bmap[cworkB[l]];
957             ct2 += ncols;
958           }
959         }
960         PetscCallMPI(MPI_Isend(sbuf_aj_i, req_size[i], MPIU_INT, req_source1[i], tag3, comm, s_waits3 + i));
961       }
962     }
963 
964     /* create col map: global col of C -> local col of submatrices */
965 #if defined(PETSC_USE_CTABLE)
966     for (i = 0; i < ismax; i++) {
967       if (!allcolumns[i]) {
968         PetscCall(PetscTableCreate(ncol[i], c->Nbs, &cmap[i]));
969 
970         jmax   = ncol[i];
971         icol_i = icol[i];
972         cmap_i = cmap[i];
973         for (j = 0; j < jmax; j++) PetscCall(PetscTableAdd(cmap[i], icol_i[j] + 1, j + 1, INSERT_VALUES));
974       } else cmap[i] = NULL;
975     }
976 #else
977     for (i = 0; i < ismax; i++) {
978       if (!allcolumns[i]) {
979         PetscCall(PetscCalloc1(c->Nbs, &cmap[i]));
980         jmax   = ncol[i];
981         icol_i = icol[i];
982         cmap_i = cmap[i];
983         for (j = 0; j < jmax; j++) cmap_i[icol_i[j]] = j + 1;
984       } else cmap[i] = NULL;
985     }
986 #endif
987 
988     /* Create lens which is required for MatCreate... */
989     for (i = 0, j = 0; i < ismax; i++) j += nrow[i];
990     PetscCall(PetscMalloc1(ismax, &lens));
991 
992     if (ismax) PetscCall(PetscCalloc1(j, &lens[0]));
993     for (i = 1; i < ismax; i++) lens[i] = lens[i - 1] + nrow[i - 1];
994 
995     /* Update lens from local data */
996     for (i = 0; i < ismax; i++) {
997       row2proc_i = row2proc[i];
998       jmax       = nrow[i];
999       if (!allcolumns[i]) cmap_i = cmap[i];
1000       irow_i = irow[i];
1001       lens_i = lens[i];
1002       for (j = 0; j < jmax; j++) {
1003         if (allrows[i]) row = j;
1004         else row = irow_i[j]; /* global blocked row of C */
1005 
1006         proc = row2proc_i[j];
1007         if (proc == rank) {
1008           /* Get indices from matA and then from matB */
1009 #if defined(PETSC_USE_CTABLE)
1010           PetscInt tt;
1011 #endif
1012           row    = row - rstart;
1013           nzA    = a_i[row + 1] - a_i[row];
1014           nzB    = b_i[row + 1] - b_i[row];
1015           cworkA = a_j + a_i[row];
1016           cworkB = b_j + b_i[row];
1017 
1018           if (!allcolumns[i]) {
1019 #if defined(PETSC_USE_CTABLE)
1020             for (k = 0; k < nzA; k++) {
1021               PetscCall(PetscTableFind(cmap_i, cstart + cworkA[k] + 1, &tt));
1022               if (tt) lens_i[j]++;
1023             }
1024             for (k = 0; k < nzB; k++) {
1025               PetscCall(PetscTableFind(cmap_i, bmap[cworkB[k]] + 1, &tt));
1026               if (tt) lens_i[j]++;
1027             }
1028 
1029 #else
1030             for (k = 0; k < nzA; k++) {
1031               if (cmap_i[cstart + cworkA[k]]) lens_i[j]++;
1032             }
1033             for (k = 0; k < nzB; k++) {
1034               if (cmap_i[bmap[cworkB[k]]]) lens_i[j]++;
1035             }
1036 #endif
1037           } else { /* allcolumns */
1038             lens_i[j] = nzA + nzB;
1039           }
1040         }
1041       }
1042     }
1043 
1044     /* Create row map: global row of C -> local row of submatrices */
1045     for (i = 0; i < ismax; i++) {
1046       if (!allrows[i]) {
1047 #if defined(PETSC_USE_CTABLE)
1048         PetscCall(PetscTableCreate(nrow[i], c->Mbs, &rmap[i]));
1049         irow_i = irow[i];
1050         jmax   = nrow[i];
1051         for (j = 0; j < jmax; j++) {
1052           if (allrows[i]) {
1053             PetscCall(PetscTableAdd(rmap[i], j + 1, j + 1, INSERT_VALUES));
1054           } else {
1055             PetscCall(PetscTableAdd(rmap[i], irow_i[j] + 1, j + 1, INSERT_VALUES));
1056           }
1057         }
1058 #else
1059         PetscCall(PetscCalloc1(c->Mbs, &rmap[i]));
1060         rmap_i = rmap[i];
1061         irow_i = irow[i];
1062         jmax   = nrow[i];
1063         for (j = 0; j < jmax; j++) {
1064           if (allrows[i]) rmap_i[j] = j;
1065           else rmap_i[irow_i[j]] = j;
1066         }
1067 #endif
1068       } else rmap[i] = NULL;
1069     }
1070 
1071     /* Update lens from offproc data */
1072     {
1073       PetscInt *rbuf2_i, *rbuf3_i, *sbuf1_i;
1074 
1075       PetscCallMPI(MPI_Waitall(nrqs, r_waits3, MPI_STATUSES_IGNORE));
1076       for (tmp2 = 0; tmp2 < nrqs; tmp2++) {
1077         sbuf1_i = sbuf1[pa[tmp2]];
1078         jmax    = sbuf1_i[0];
1079         ct1     = 2 * jmax + 1;
1080         ct2     = 0;
1081         rbuf2_i = rbuf2[tmp2];
1082         rbuf3_i = rbuf3[tmp2];
1083         for (j = 1; j <= jmax; j++) {
1084           is_no  = sbuf1_i[2 * j - 1];
1085           max1   = sbuf1_i[2 * j];
1086           lens_i = lens[is_no];
1087           if (!allcolumns[is_no]) cmap_i = cmap[is_no];
1088           rmap_i = rmap[is_no];
1089           for (k = 0; k < max1; k++, ct1++) {
1090             if (allrows[is_no]) {
1091               row = sbuf1_i[ct1];
1092             } else {
1093 #if defined(PETSC_USE_CTABLE)
1094               PetscCall(PetscTableFind(rmap_i, sbuf1_i[ct1] + 1, &row));
1095               row--;
1096               PetscCheck(row >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "row not found in table");
1097 #else
1098               row = rmap_i[sbuf1_i[ct1]]; /* the row in the submatrix */
1099 #endif
1100             }
1101             max2 = rbuf2_i[ct1];
1102             for (l = 0; l < max2; l++, ct2++) {
1103               if (!allcolumns[is_no]) {
1104 #if defined(PETSC_USE_CTABLE)
1105                 PetscCall(PetscTableFind(cmap_i, rbuf3_i[ct2] + 1, &tcol));
1106 #else
1107                 tcol = cmap_i[rbuf3_i[ct2]];
1108 #endif
1109                 if (tcol) lens_i[row]++;
1110               } else {         /* allcolumns */
1111                 lens_i[row]++; /* lens_i[row] += max2 ? */
1112               }
1113             }
1114           }
1115         }
1116       }
1117     }
1118     PetscCall(PetscFree(r_waits3));
1119     PetscCallMPI(MPI_Waitall(nrqr, s_waits3, MPI_STATUSES_IGNORE));
1120     PetscCall(PetscFree(s_waits3));
1121 
1122     /* Create the submatrices */
1123     for (i = 0; i < ismax; i++) {
1124       PetscInt bs_tmp;
1125       if (ijonly) bs_tmp = 1;
1126       else bs_tmp = bs;
1127 
1128       PetscCall(MatCreate(PETSC_COMM_SELF, submats + i));
1129       PetscCall(MatSetSizes(submats[i], nrow[i] * bs_tmp, ncol[i] * bs_tmp, PETSC_DETERMINE, PETSC_DETERMINE));
1130 
1131       PetscCall(MatSetType(submats[i], ((PetscObject)A)->type_name));
1132       PetscCall(MatSeqBAIJSetPreallocation(submats[i], bs_tmp, 0, lens[i]));
1133       PetscCall(MatSeqSBAIJSetPreallocation(submats[i], bs_tmp, 0, lens[i])); /* this subroutine is used by SBAIJ routines */
1134 
1135       /* create struct Mat_SubSppt and attach it to submat */
1136       PetscCall(PetscNew(&smat_i));
1137       subc            = (Mat_SeqBAIJ *)submats[i]->data;
1138       subc->submatis1 = smat_i;
1139 
1140       smat_i->destroy          = submats[i]->ops->destroy;
1141       submats[i]->ops->destroy = MatDestroySubMatrix_SeqBAIJ;
1142       submats[i]->factortype   = C->factortype;
1143 
1144       smat_i->id          = i;
1145       smat_i->nrqs        = nrqs;
1146       smat_i->nrqr        = nrqr;
1147       smat_i->rbuf1       = rbuf1;
1148       smat_i->rbuf2       = rbuf2;
1149       smat_i->rbuf3       = rbuf3;
1150       smat_i->sbuf2       = sbuf2;
1151       smat_i->req_source2 = req_source2;
1152 
1153       smat_i->sbuf1 = sbuf1;
1154       smat_i->ptr   = ptr;
1155       smat_i->tmp   = tmp;
1156       smat_i->ctr   = ctr;
1157 
1158       smat_i->pa          = pa;
1159       smat_i->req_size    = req_size;
1160       smat_i->req_source1 = req_source1;
1161 
1162       smat_i->allcolumns = allcolumns[i];
1163       smat_i->allrows    = allrows[i];
1164       smat_i->singleis   = PETSC_FALSE;
1165       smat_i->row2proc   = row2proc[i];
1166       smat_i->rmap       = rmap[i];
1167       smat_i->cmap       = cmap[i];
1168     }
1169 
1170     if (!ismax) { /* Create dummy submats[0] for reuse struct subc */
1171       PetscCall(MatCreate(PETSC_COMM_SELF, &submats[0]));
1172       PetscCall(MatSetSizes(submats[0], 0, 0, PETSC_DETERMINE, PETSC_DETERMINE));
1173       PetscCall(MatSetType(submats[0], MATDUMMY));
1174 
1175       /* create struct Mat_SubSppt and attach it to submat */
1176       PetscCall(PetscNew(&smat_i));
1177       submats[0]->data = (void *)smat_i;
1178 
1179       smat_i->destroy          = submats[0]->ops->destroy;
1180       submats[0]->ops->destroy = MatDestroySubMatrix_Dummy;
1181       submats[0]->factortype   = C->factortype;
1182 
1183       smat_i->id          = 0;
1184       smat_i->nrqs        = nrqs;
1185       smat_i->nrqr        = nrqr;
1186       smat_i->rbuf1       = rbuf1;
1187       smat_i->rbuf2       = rbuf2;
1188       smat_i->rbuf3       = rbuf3;
1189       smat_i->sbuf2       = sbuf2;
1190       smat_i->req_source2 = req_source2;
1191 
1192       smat_i->sbuf1 = sbuf1;
1193       smat_i->ptr   = ptr;
1194       smat_i->tmp   = tmp;
1195       smat_i->ctr   = ctr;
1196 
1197       smat_i->pa          = pa;
1198       smat_i->req_size    = req_size;
1199       smat_i->req_source1 = req_source1;
1200 
1201       smat_i->allcolumns = PETSC_FALSE;
1202       smat_i->singleis   = PETSC_FALSE;
1203       smat_i->row2proc   = NULL;
1204       smat_i->rmap       = NULL;
1205       smat_i->cmap       = NULL;
1206     }
1207 
1208     if (ismax) PetscCall(PetscFree(lens[0]));
1209     PetscCall(PetscFree(lens));
1210     if (sbuf_aj) {
1211       PetscCall(PetscFree(sbuf_aj[0]));
1212       PetscCall(PetscFree(sbuf_aj));
1213     }
1214 
1215   } /* end of scall == MAT_INITIAL_MATRIX */
1216 
1217   /* Post recv matrix values */
1218   if (!ijonly) {
1219     PetscCall(PetscObjectGetNewTag((PetscObject)C, &tag4));
1220     PetscCall(PetscMalloc1(nrqs, &rbuf4));
1221     PetscCall(PetscMalloc1(nrqs, &r_waits4));
1222     for (i = 0; i < nrqs; ++i) {
1223       PetscCall(PetscMalloc1(rbuf2[i][0] * bs2, &rbuf4[i]));
1224       PetscCallMPI(MPI_Irecv(rbuf4[i], rbuf2[i][0] * bs2, MPIU_SCALAR, req_source2[i], tag4, comm, r_waits4 + i));
1225     }
1226 
1227     /* Allocate sending buffers for a->a, and send them off */
1228     PetscCall(PetscMalloc1(nrqr, &sbuf_aa));
1229     for (i = 0, j = 0; i < nrqr; i++) j += req_size[i];
1230 
1231     if (nrqr) PetscCall(PetscMalloc1(j * bs2, &sbuf_aa[0]));
1232     for (i = 1; i < nrqr; i++) sbuf_aa[i] = sbuf_aa[i - 1] + req_size[i - 1] * bs2;
1233 
1234     PetscCall(PetscMalloc1(nrqr, &s_waits4));
1235 
1236     for (i = 0; i < nrqr; i++) {
1237       rbuf1_i   = rbuf1[i];
1238       sbuf_aa_i = sbuf_aa[i];
1239       ct1       = 2 * rbuf1_i[0] + 1;
1240       ct2       = 0;
1241       for (j = 1, max1 = rbuf1_i[0]; j <= max1; j++) {
1242         kmax = rbuf1_i[2 * j];
1243         for (k = 0; k < kmax; k++, ct1++) {
1244           row    = rbuf1_i[ct1] - rstart;
1245           nzA    = a_i[row + 1] - a_i[row];
1246           nzB    = b_i[row + 1] - b_i[row];
1247           ncols  = nzA + nzB;
1248           cworkB = b_j + b_i[row];
1249           vworkA = a_a + a_i[row] * bs2;
1250           vworkB = b_a + b_i[row] * bs2;
1251 
1252           /* load the column values for this row into vals*/
1253           vals = sbuf_aa_i + ct2 * bs2;
1254           for (l = 0; l < nzB; l++) {
1255             if ((bmap[cworkB[l]]) < cstart) {
1256               PetscCall(PetscArraycpy(vals + l * bs2, vworkB + l * bs2, bs2));
1257             } else break;
1258           }
1259           imark = l;
1260           for (l = 0; l < nzA; l++) PetscCall(PetscArraycpy(vals + (imark + l) * bs2, vworkA + l * bs2, bs2));
1261           for (l = imark; l < nzB; l++) PetscCall(PetscArraycpy(vals + (nzA + l) * bs2, vworkB + l * bs2, bs2));
1262 
1263           ct2 += ncols;
1264         }
1265       }
1266       PetscCallMPI(MPI_Isend(sbuf_aa_i, req_size[i] * bs2, MPIU_SCALAR, req_source1[i], tag4, comm, s_waits4 + i));
1267     }
1268   }
1269 
1270   /* Assemble the matrices */
1271   /* First assemble the local rows */
1272   for (i = 0; i < ismax; i++) {
1273     row2proc_i = row2proc[i];
1274     subc       = (Mat_SeqBAIJ *)submats[i]->data;
1275     imat_ilen  = subc->ilen;
1276     imat_j     = subc->j;
1277     imat_i     = subc->i;
1278     imat_a     = subc->a;
1279 
1280     if (!allcolumns[i]) cmap_i = cmap[i];
1281     rmap_i = rmap[i];
1282     irow_i = irow[i];
1283     jmax   = nrow[i];
1284     for (j = 0; j < jmax; j++) {
1285       if (allrows[i]) row = j;
1286       else row = irow_i[j];
1287       proc = row2proc_i[j];
1288 
1289       if (proc == rank) {
1290         row    = row - rstart;
1291         nzA    = a_i[row + 1] - a_i[row];
1292         nzB    = b_i[row + 1] - b_i[row];
1293         cworkA = a_j + a_i[row];
1294         cworkB = b_j + b_i[row];
1295         if (!ijonly) {
1296           vworkA = a_a + a_i[row] * bs2;
1297           vworkB = b_a + b_i[row] * bs2;
1298         }
1299 
1300         if (allrows[i]) {
1301           row = row + rstart;
1302         } else {
1303 #if defined(PETSC_USE_CTABLE)
1304           PetscCall(PetscTableFind(rmap_i, row + rstart + 1, &row));
1305           row--;
1306 
1307           PetscCheck(row >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "row not found in table");
1308 #else
1309           row = rmap_i[row + rstart];
1310 #endif
1311         }
1312         mat_i = imat_i[row];
1313         if (!ijonly) mat_a = imat_a + mat_i * bs2;
1314         mat_j = imat_j + mat_i;
1315         ilen  = imat_ilen[row];
1316 
1317         /* load the column indices for this row into cols*/
1318         if (!allcolumns[i]) {
1319           for (l = 0; l < nzB; l++) {
1320             if ((ctmp = bmap[cworkB[l]]) < cstart) {
1321 #if defined(PETSC_USE_CTABLE)
1322               PetscCall(PetscTableFind(cmap_i, ctmp + 1, &tcol));
1323               if (tcol) {
1324 #else
1325               if ((tcol = cmap_i[ctmp])) {
1326 #endif
1327                 *mat_j++ = tcol - 1;
1328                 PetscCall(PetscArraycpy(mat_a, vworkB + l * bs2, bs2));
1329                 mat_a += bs2;
1330                 ilen++;
1331               }
1332             } else break;
1333           }
1334           imark = l;
1335           for (l = 0; l < nzA; l++) {
1336 #if defined(PETSC_USE_CTABLE)
1337             PetscCall(PetscTableFind(cmap_i, cstart + cworkA[l] + 1, &tcol));
1338             if (tcol) {
1339 #else
1340             if ((tcol = cmap_i[cstart + cworkA[l]])) {
1341 #endif
1342               *mat_j++ = tcol - 1;
1343               if (!ijonly) {
1344                 PetscCall(PetscArraycpy(mat_a, vworkA + l * bs2, bs2));
1345                 mat_a += bs2;
1346               }
1347               ilen++;
1348             }
1349           }
1350           for (l = imark; l < nzB; l++) {
1351 #if defined(PETSC_USE_CTABLE)
1352             PetscCall(PetscTableFind(cmap_i, bmap[cworkB[l]] + 1, &tcol));
1353             if (tcol) {
1354 #else
1355             if ((tcol = cmap_i[bmap[cworkB[l]]])) {
1356 #endif
1357               *mat_j++ = tcol - 1;
1358               if (!ijonly) {
1359                 PetscCall(PetscArraycpy(mat_a, vworkB + l * bs2, bs2));
1360                 mat_a += bs2;
1361               }
1362               ilen++;
1363             }
1364           }
1365         } else { /* allcolumns */
1366           for (l = 0; l < nzB; l++) {
1367             if ((ctmp = bmap[cworkB[l]]) < cstart) {
1368               *mat_j++ = ctmp;
1369               PetscCall(PetscArraycpy(mat_a, vworkB + l * bs2, bs2));
1370               mat_a += bs2;
1371               ilen++;
1372             } else break;
1373           }
1374           imark = l;
1375           for (l = 0; l < nzA; l++) {
1376             *mat_j++ = cstart + cworkA[l];
1377             if (!ijonly) {
1378               PetscCall(PetscArraycpy(mat_a, vworkA + l * bs2, bs2));
1379               mat_a += bs2;
1380             }
1381             ilen++;
1382           }
1383           for (l = imark; l < nzB; l++) {
1384             *mat_j++ = bmap[cworkB[l]];
1385             if (!ijonly) {
1386               PetscCall(PetscArraycpy(mat_a, vworkB + l * bs2, bs2));
1387               mat_a += bs2;
1388             }
1389             ilen++;
1390           }
1391         }
1392         imat_ilen[row] = ilen;
1393       }
1394     }
1395   }
1396 
1397   /* Now assemble the off proc rows */
1398   if (!ijonly) PetscCallMPI(MPI_Waitall(nrqs, r_waits4, MPI_STATUSES_IGNORE));
1399   for (tmp2 = 0; tmp2 < nrqs; tmp2++) {
1400     sbuf1_i = sbuf1[pa[tmp2]];
1401     jmax    = sbuf1_i[0];
1402     ct1     = 2 * jmax + 1;
1403     ct2     = 0;
1404     rbuf2_i = rbuf2[tmp2];
1405     rbuf3_i = rbuf3[tmp2];
1406     if (!ijonly) rbuf4_i = rbuf4[tmp2];
1407     for (j = 1; j <= jmax; j++) {
1408       is_no  = sbuf1_i[2 * j - 1];
1409       rmap_i = rmap[is_no];
1410       if (!allcolumns[is_no]) cmap_i = cmap[is_no];
1411       subc      = (Mat_SeqBAIJ *)submats[is_no]->data;
1412       imat_ilen = subc->ilen;
1413       imat_j    = subc->j;
1414       imat_i    = subc->i;
1415       if (!ijonly) imat_a = subc->a;
1416       max1 = sbuf1_i[2 * j];
1417       for (k = 0; k < max1; k++, ct1++) { /* for each recved block row */
1418         row = sbuf1_i[ct1];
1419 
1420         if (allrows[is_no]) {
1421           row = sbuf1_i[ct1];
1422         } else {
1423 #if defined(PETSC_USE_CTABLE)
1424           PetscCall(PetscTableFind(rmap_i, row + 1, &row));
1425           row--;
1426           PetscCheck(row >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "row not found in table");
1427 #else
1428           row = rmap_i[row];
1429 #endif
1430         }
1431         ilen  = imat_ilen[row];
1432         mat_i = imat_i[row];
1433         if (!ijonly) mat_a = imat_a + mat_i * bs2;
1434         mat_j = imat_j + mat_i;
1435         max2  = rbuf2_i[ct1];
1436         if (!allcolumns[is_no]) {
1437           for (l = 0; l < max2; l++, ct2++) {
1438 #if defined(PETSC_USE_CTABLE)
1439             PetscCall(PetscTableFind(cmap_i, rbuf3_i[ct2] + 1, &tcol));
1440 #else
1441             tcol = cmap_i[rbuf3_i[ct2]];
1442 #endif
1443             if (tcol) {
1444               *mat_j++ = tcol - 1;
1445               if (!ijonly) {
1446                 PetscCall(PetscArraycpy(mat_a, rbuf4_i + ct2 * bs2, bs2));
1447                 mat_a += bs2;
1448               }
1449               ilen++;
1450             }
1451           }
1452         } else { /* allcolumns */
1453           for (l = 0; l < max2; l++, ct2++) {
1454             *mat_j++ = rbuf3_i[ct2]; /* same global column index of C */
1455             if (!ijonly) {
1456               PetscCall(PetscArraycpy(mat_a, rbuf4_i + ct2 * bs2, bs2));
1457               mat_a += bs2;
1458             }
1459             ilen++;
1460           }
1461         }
1462         imat_ilen[row] = ilen;
1463       }
1464     }
1465   }
1466 
1467   if (!iscsorted) { /* sort column indices of the rows */
1468     MatScalar *work;
1469 
1470     PetscCall(PetscMalloc1(bs2, &work));
1471     for (i = 0; i < ismax; i++) {
1472       subc      = (Mat_SeqBAIJ *)submats[i]->data;
1473       imat_ilen = subc->ilen;
1474       imat_j    = subc->j;
1475       imat_i    = subc->i;
1476       if (!ijonly) imat_a = subc->a;
1477       if (allcolumns[i]) continue;
1478 
1479       jmax = nrow[i];
1480       for (j = 0; j < jmax; j++) {
1481         mat_i = imat_i[j];
1482         mat_j = imat_j + mat_i;
1483         ilen  = imat_ilen[j];
1484         if (ijonly) {
1485           PetscCall(PetscSortInt(ilen, mat_j));
1486         } else {
1487           mat_a = imat_a + mat_i * bs2;
1488           PetscCall(PetscSortIntWithDataArray(ilen, mat_j, mat_a, bs2 * sizeof(MatScalar), work));
1489         }
1490       }
1491     }
1492     PetscCall(PetscFree(work));
1493   }
1494 
1495   if (!ijonly) {
1496     PetscCall(PetscFree(r_waits4));
1497     PetscCallMPI(MPI_Waitall(nrqr, s_waits4, MPI_STATUSES_IGNORE));
1498     PetscCall(PetscFree(s_waits4));
1499   }
1500 
1501   /* Restore the indices */
1502   for (i = 0; i < ismax; i++) {
1503     if (!allrows[i]) PetscCall(ISRestoreIndices(isrow[i], irow + i));
1504     if (!allcolumns[i]) PetscCall(ISRestoreIndices(iscol[i], icol + i));
1505   }
1506 
1507   for (i = 0; i < ismax; i++) {
1508     PetscCall(MatAssemblyBegin(submats[i], MAT_FINAL_ASSEMBLY));
1509     PetscCall(MatAssemblyEnd(submats[i], MAT_FINAL_ASSEMBLY));
1510   }
1511 
1512   PetscCall(PetscFree5(*(PetscInt ***)&irow, *(PetscInt ***)&icol, nrow, ncol, issorted));
1513   PetscCall(PetscFree5(row2proc, cmap, rmap, allcolumns, allrows));
1514 
1515   if (!ijonly) {
1516     if (sbuf_aa) {
1517       PetscCall(PetscFree(sbuf_aa[0]));
1518       PetscCall(PetscFree(sbuf_aa));
1519     }
1520 
1521     for (i = 0; i < nrqs; ++i) PetscCall(PetscFree(rbuf4[i]));
1522     PetscCall(PetscFree(rbuf4));
1523   }
1524   c->ijonly = PETSC_FALSE; /* set back to the default */
1525   PetscFunctionReturn(0);
1526 }
1527