xref: /petsc/src/mat/impls/baij/mpi/baijov.c (revision e37d522bb0b6b2e333f7f902c83cd8ffbf2c1c9a)
1 
2 /*
3    Routines to compute the overlapping regions of a parallel MPI matrix
4    and to extract the submatrices that are shared across processors.
5 */
6 #include <../src/mat/impls/baij/mpi/mpibaij.h>
7 #include <petscbt.h>
8 
9 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Local(Mat, PetscInt, char **, PetscInt *, PetscInt **);
10 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Receive(Mat, PetscInt, PetscInt **, PetscInt **, PetscInt *);
11 extern PetscErrorCode MatGetRow_MPIBAIJ(Mat, PetscInt, PetscInt *, PetscInt **, PetscScalar **);
12 extern PetscErrorCode MatRestoreRow_MPIBAIJ(Mat, PetscInt, PetscInt *, PetscInt **, PetscScalar **);
13 
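/*
  A minimal usage sketch (illustrative only, not part of this file): user code reaches the
  routine below through the generic MatIncreaseOverlap(). It assumes A is an assembled
  MATMPIBAIJ matrix and that nlocal and rstart describe this rank's owned (point) rows.

     IS is;
     PetscCall(ISCreateStride(PETSC_COMM_SELF, nlocal, rstart, 1, &is));
     PetscCall(MatIncreaseOverlap(A, 1, &is, 2));
     ... is now also contains the rows of the level-2 overlap ...
     PetscCall(ISDestroy(&is));
*/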
14 PetscErrorCode MatIncreaseOverlap_MPIBAIJ(Mat C, PetscInt imax, IS is[], PetscInt ov)
15 {
16   PetscInt i, N = C->cmap->N, bs = C->rmap->bs;
17   IS      *is_new;
18 
19   PetscFunctionBegin;
20   PetscCall(PetscMalloc1(imax, &is_new));
21   /* Convert the indices into block format */
22   PetscCall(ISCompressIndicesGeneral(N, C->rmap->n, bs, imax, is, is_new));
23   PetscCheck(ov >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative overlap specified");
24   for (i = 0; i < ov; ++i) PetscCall(MatIncreaseOverlap_MPIBAIJ_Once(C, imax, is_new));
25   for (i = 0; i < imax; i++) PetscCall(ISDestroy(&is[i]));
26   PetscCall(ISExpandIndicesGeneral(N, N, bs, imax, is_new, is));
27   for (i = 0; i < imax; i++) PetscCall(ISDestroy(&is_new[i]));
28   PetscCall(PetscFree(is_new));
29   PetscFunctionReturn(PETSC_SUCCESS);
30 }
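/*
  The compress/expand pair above works in block-index space: a point index p belongs to
  block p / bs, and after the overlap has been computed on blocks each block b is expanded
  back to its bs point indices. A small sketch of that mapping (illustrative only):

     PetscInt bs = 3, p = 7;
     PetscInt blk = p / bs;                                   block 2 covers points 6, 7, 8
     for (PetscInt q = blk * bs; q < (blk + 1) * bs; q++) {
       ... q runs over the expanded point indices of blk ...
     }
*/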
31 
32 /*
33   Sample message format:
34   If a processor A wants processor B to process some elements corresponding
35   to index sets is[1], is[5]
36   mesg [0] = 2   (no of index sets in the mesg)
37   -----------
38   mesg [1] = 1 => is[1]
39   mesg [2] = size of is[1] (number of indices)
40   -----------
41   mesg [3] = 5  => is[5]
42   mesg [4] = size of is[5] (number of indices)
43   -----------
44   mesg [5]
45   mesg [n]  data(is[1])
46   -----------
47   mesg[n+1]
48   mesg[m]  data(is[5])
49   -----------
50 
51   Notes:
52   nrqs - no of requests sent (or to be sent out)
53   nrqr - no of requests received (which have to be or which have been processed)
54 */
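/*
  A sketch (illustrative only) of how a receiver walks one message laid out as above; mesg
  is the received PetscInt buffer and the data follows the header of 1 + 2*(no of IS) entries:

     PetscInt nis = mesg[0];
     PetscInt ct  = 2 * nis + 1;                  first data entry
     for (PetscInt j = 1; j <= nis; j++) {
       PetscInt is_no = mesg[2 * j - 1];          which index set this chunk belongs to
       PetscInt len   = mesg[2 * j];              how many indices follow for it
       for (PetscInt k = 0; k < len; k++, ct++) {
         PetscInt row = mesg[ct];                 a requested block row for is[is_no]
       }
     }
*/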
55 PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Once(Mat C, PetscInt imax, IS is[])
56 {
57   Mat_MPIBAIJ     *c = (Mat_MPIBAIJ *)C->data;
58   const PetscInt **idx, *idx_i;
59   PetscInt        *n, *w3, *w4, **data, len;
60   PetscMPIInt      size, rank, tag1, tag2, *w2, *w1, nrqr;
61   PetscInt         Mbs, i, j, k, **rbuf, row, nrqs, msz, **outdat, **ptr;
62   PetscInt        *ctr, *pa, *tmp, *isz, *isz1, **xdata, **rbuf2, *d_p;
63   PetscMPIInt     *onodes1, *olengths1, *onodes2, *olengths2, proc = -1;
64   PetscBT         *table;
65   MPI_Comm         comm, *iscomms;
66   MPI_Request     *s_waits1, *r_waits1, *s_waits2, *r_waits2;
67   char            *t_p;
68 
69   PetscFunctionBegin;
70   PetscCall(PetscObjectGetComm((PetscObject)C, &comm));
71   size = c->size;
72   rank = c->rank;
73   Mbs  = c->Mbs;
74 
75   PetscCall(PetscObjectGetNewTag((PetscObject)C, &tag1));
76   PetscCall(PetscObjectGetNewTag((PetscObject)C, &tag2));
77 
78   PetscCall(PetscMalloc2(imax, (PetscInt ***)&idx, imax, &n));
79 
80   for (i = 0; i < imax; i++) {
81     PetscCall(ISGetIndices(is[i], &idx[i]));
82     PetscCall(ISGetLocalSize(is[i], &n[i]));
83   }
84 
85   /* evaluate communication - mesg to whom, length of mesg, and buffer space
86      required. Based on this, buffers are allocated, and data is copied into them */
87   PetscCall(PetscCalloc4(size, &w1, size, &w2, size, &w3, size, &w4));
88   for (i = 0; i < imax; i++) {
89     PetscCall(PetscArrayzero(w4, size)); /* initialise work vector*/
90     idx_i = idx[i];
91     len   = n[i];
92     for (j = 0; j < len; j++) {
93       row = idx_i[j];
94       PetscCheck(row >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Index set cannot have negative entries");
95       PetscCall(PetscLayoutFindOwner(C->rmap, row * C->rmap->bs, &proc));
96       w4[proc]++;
97     }
98     for (j = 0; j < size; j++) {
99       if (w4[j]) {
100         w1[j] += w4[j];
101         w3[j]++;
102       }
103     }
104   }
105 
106   nrqs     = 0; /* no of outgoing messages */
107   msz      = 0; /* total mesg length (for all procs) */
108   w1[rank] = 0; /* no mesg sent to itself */
109   w3[rank] = 0;
110   for (i = 0; i < size; i++) {
111     if (w1[i]) {
112       w2[i] = 1;
113       nrqs++;
114     } /* there exists a message to proc i */
115   }
116   /* pa - is list of processors to communicate with */
117   PetscCall(PetscMalloc1(nrqs, &pa));
118   for (i = 0, j = 0; i < size; i++) {
119     if (w1[i]) {
120       pa[j] = i;
121       j++;
122     }
123   }
124 
125   /* Each message would have a header = 1 + 2*(no of IS) + data */
126   for (i = 0; i < nrqs; i++) {
127     j = pa[i];
128     w1[j] += w2[j] + 2 * w3[j];
129     msz += w1[j];
130   }
131 
132   /* Determine the number of messages to expect, their lengths, and the from-ids */
133   PetscCall(PetscGatherNumberOfMessages(comm, w2, w1, &nrqr));
134   PetscCall(PetscGatherMessageLengths(comm, nrqs, nrqr, w1, &onodes1, &olengths1));
135 
136   /* Now post the Irecvs corresponding to these messages */
137   PetscCall(PetscPostIrecvInt(comm, tag1, nrqr, onodes1, olengths1, &rbuf, &r_waits1));
138 
139   /* Allocate Memory for outgoing messages */
140   PetscCall(PetscMalloc4(size, &outdat, size, &ptr, msz, &tmp, size, &ctr));
141   PetscCall(PetscArrayzero(outdat, size));
142   PetscCall(PetscArrayzero(ptr, size));
143   {
144     PetscInt *iptr = tmp, ict = 0;
145     for (i = 0; i < nrqs; i++) {
146       j = pa[i];
147       iptr += ict;
148       outdat[j] = iptr;
149       ict       = w1[j];
150     }
151   }
152 
153   /* Form the outgoing messages */
154   /*plug in the headers*/
155   for (i = 0; i < nrqs; i++) {
156     j            = pa[i];
157     outdat[j][0] = 0;
158     PetscCall(PetscArrayzero(outdat[j] + 1, 2 * w3[j]));
159     ptr[j] = outdat[j] + 2 * w3[j] + 1;
160   }
161 
162   /* Memory for doing local proc's work*/
163   {
164     PetscCall(PetscCalloc5(imax, &table, imax, &data, imax, &isz, Mbs * imax, &d_p, (Mbs / PETSC_BITS_PER_BYTE + 1) * imax, &t_p));
165 
166     for (i = 0; i < imax; i++) {
167       table[i] = t_p + (Mbs / PETSC_BITS_PER_BYTE + 1) * i;
168       data[i]  = d_p + (Mbs)*i;
169     }
170   }
171 
172   /* Parse the IS and update local tables and the outgoing buf with the data*/
173   {
174     PetscInt n_i, *data_i, isz_i, *outdat_j, ctr_j;
175     PetscBT  table_i;
176 
177     for (i = 0; i < imax; i++) {
178       PetscCall(PetscArrayzero(ctr, size));
179       n_i     = n[i];
180       table_i = table[i];
181       idx_i   = idx[i];
182       data_i  = data[i];
183       isz_i   = isz[i];
184       for (j = 0; j < n_i; j++) { /* parse the indices of each IS */
185         row = idx_i[j];
186         PetscCall(PetscLayoutFindOwner(C->rmap, row * C->rmap->bs, &proc));
187         if (proc != rank) { /* copy to the outgoing buffer */
188           ctr[proc]++;
189           *ptr[proc] = row;
190           ptr[proc]++;
191         } else { /* Update the local table */
192           if (!PetscBTLookupSet(table_i, row)) data_i[isz_i++] = row;
193         }
194       }
195       /* Update the headers for the current IS */
196       for (j = 0; j < size; j++) { /* Can Optimise this loop by using pa[] */
197         if ((ctr_j = ctr[j])) {
198           outdat_j            = outdat[j];
199           k                   = ++outdat_j[0];
200           outdat_j[2 * k]     = ctr_j;
201           outdat_j[2 * k - 1] = i;
202         }
203       }
204       isz[i] = isz_i;
205     }
206   }
207 
208   /*  Now  post the sends */
209   PetscCall(PetscMalloc1(nrqs, &s_waits1));
210   for (i = 0; i < nrqs; ++i) {
211     j = pa[i];
212     PetscCallMPI(MPI_Isend(outdat[j], w1[j], MPIU_INT, j, tag1, comm, s_waits1 + i));
213   }
214 
215   /* No longer need the original indices*/
216   for (i = 0; i < imax; ++i) PetscCall(ISRestoreIndices(is[i], idx + i));
217   PetscCall(PetscFree2(*(PetscInt ***)&idx, n));
218 
219   PetscCall(PetscMalloc1(imax, &iscomms));
220   for (i = 0; i < imax; ++i) {
221     PetscCall(PetscCommDuplicate(PetscObjectComm((PetscObject)is[i]), &iscomms[i], NULL));
222     PetscCall(ISDestroy(&is[i]));
223   }
224 
225   /* Do Local work*/
226   PetscCall(MatIncreaseOverlap_MPIBAIJ_Local(C, imax, table, isz, data));
227 
228   /* Receive messages*/
229   PetscCallMPI(MPI_Waitall(nrqr, r_waits1, MPI_STATUSES_IGNORE));
230   PetscCallMPI(MPI_Waitall(nrqs, s_waits1, MPI_STATUSES_IGNORE));
231 
232   /* Phase 1 sends are complete - deallocate buffers */
233   PetscCall(PetscFree4(outdat, ptr, tmp, ctr));
234   PetscCall(PetscFree4(w1, w2, w3, w4));
235 
236   PetscCall(PetscMalloc1(nrqr, &xdata));
237   PetscCall(PetscMalloc1(nrqr, &isz1));
238   PetscCall(MatIncreaseOverlap_MPIBAIJ_Receive(C, nrqr, rbuf, xdata, isz1));
239   if (rbuf) {
240     PetscCall(PetscFree(rbuf[0]));
241     PetscCall(PetscFree(rbuf));
242   }
243 
244   /* Send the data back*/
245   /* Do a global reduction to know the buffer space req for incoming messages*/
246   {
247     PetscMPIInt *rw1;
248 
249     PetscCall(PetscCalloc1(size, &rw1));
250 
251     for (i = 0; i < nrqr; ++i) {
252       proc      = onodes1[i];
253       rw1[proc] = isz1[i];
254     }
255 
256     /* Determine the number of messages to expect, their lengths, and the from-ids */
257     PetscCall(PetscGatherMessageLengths(comm, nrqr, nrqs, rw1, &onodes2, &olengths2));
258     PetscCall(PetscFree(rw1));
259   }
260   /* Now post the Irecvs corresponding to these messages */
261   PetscCall(PetscPostIrecvInt(comm, tag2, nrqs, onodes2, olengths2, &rbuf2, &r_waits2));
262 
263   /*  Now  post the sends */
264   PetscCall(PetscMalloc1(nrqr, &s_waits2));
265   for (i = 0; i < nrqr; ++i) {
266     j = onodes1[i];
267     PetscCallMPI(MPI_Isend(xdata[i], isz1[i], MPIU_INT, j, tag2, comm, s_waits2 + i));
268   }
269 
270   PetscCall(PetscFree(onodes1));
271   PetscCall(PetscFree(olengths1));
272 
273   /* receive work done on other processors*/
274   {
275     PetscMPIInt idex;
276     PetscInt    is_no, ct1, max, *rbuf2_i, isz_i, *data_i, jmax;
277     PetscBT     table_i;
278 
279     for (i = 0; i < nrqs; ++i) {
280       PetscCallMPI(MPI_Waitany(nrqs, r_waits2, &idex, MPI_STATUS_IGNORE));
281       /* Process the message*/
282       rbuf2_i = rbuf2[idex];
283       ct1     = 2 * rbuf2_i[0] + 1;
284       jmax    = rbuf2[idex][0];
285       for (j = 1; j <= jmax; j++) {
286         max     = rbuf2_i[2 * j];
287         is_no   = rbuf2_i[2 * j - 1];
288         isz_i   = isz[is_no];
289         data_i  = data[is_no];
290         table_i = table[is_no];
291         for (k = 0; k < max; k++, ct1++) {
292           row = rbuf2_i[ct1];
293           if (!PetscBTLookupSet(table_i, row)) data_i[isz_i++] = row;
294         }
295         isz[is_no] = isz_i;
296       }
297     }
298     PetscCallMPI(MPI_Waitall(nrqr, s_waits2, MPI_STATUSES_IGNORE));
299   }
300 
301   for (i = 0; i < imax; ++i) {
302     PetscCall(ISCreateGeneral(iscomms[i], isz[i], data[i], PETSC_COPY_VALUES, is + i));
303     PetscCall(PetscCommDestroy(&iscomms[i]));
304   }
305 
306   PetscCall(PetscFree(iscomms));
307   PetscCall(PetscFree(onodes2));
308   PetscCall(PetscFree(olengths2));
309 
310   PetscCall(PetscFree(pa));
311   if (rbuf2) {
312     PetscCall(PetscFree(rbuf2[0]));
313     PetscCall(PetscFree(rbuf2));
314   }
315   PetscCall(PetscFree(s_waits1));
316   PetscCall(PetscFree(r_waits1));
317   PetscCall(PetscFree(s_waits2));
318   PetscCall(PetscFree(r_waits2));
319   PetscCall(PetscFree5(table, data, isz, d_p, t_p));
320   if (xdata) {
321     PetscCall(PetscFree(xdata[0]));
322     PetscCall(PetscFree(xdata));
323   }
324   PetscCall(PetscFree(isz1));
325   PetscFunctionReturn(PETSC_SUCCESS);
326 }
327 
328 /*
329    MatIncreaseOverlap_MPIBAIJ_Local - Called by MatIncreaseOverlap, to do
330        the work on the local processor.
331 
332      Inputs:
333       C      - the MATMPIBAIJ matrix;
334       imax - total no of index sets processed at a time;
335       table  - an array of char - size = Mbs bits.
336 
337      Output:
338       isz    - array containing the count of the solution elements corresponding
339                to each index set;
340       data   - pointer to the solutions
341 */
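/*
  The table/data pair implements a simple "insert if not yet seen" set per index set, built
  on PetscBT. A sketch of the idiom used below (illustrative only); members[] is assumed to
  have room for Mbs entries:

     PetscBT  bt;
     PetscInt nmembers = 0;
     PetscCall(PetscBTCreate(Mbs, &bt));
     if (!PetscBTLookupSet(bt, row)) members[nmembers++] = row;
     PetscCall(PetscBTDestroy(&bt));
*/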
342 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Local(Mat C, PetscInt imax, PetscBT *table, PetscInt *isz, PetscInt **data)
343 {
344   Mat_MPIBAIJ *c = (Mat_MPIBAIJ *)C->data;
345   Mat          A = c->A, B = c->B;
346   Mat_SeqBAIJ *a = (Mat_SeqBAIJ *)A->data, *b = (Mat_SeqBAIJ *)B->data;
347   PetscInt     start, end, val, max, rstart, cstart, *ai, *aj;
348   PetscInt    *bi, *bj, *garray, i, j, k, row, *data_i, isz_i;
349   PetscBT      table_i;
350 
351   PetscFunctionBegin;
352   rstart = c->rstartbs;
353   cstart = c->cstartbs;
354   ai     = a->i;
355   aj     = a->j;
356   bi     = b->i;
357   bj     = b->j;
358   garray = c->garray;
359 
360   for (i = 0; i < imax; i++) {
361     data_i  = data[i];
362     table_i = table[i];
363     isz_i   = isz[i];
364     for (j = 0, max = isz[i]; j < max; j++) {
365       row   = data_i[j] - rstart;
366       start = ai[row];
367       end   = ai[row + 1];
368       for (k = start; k < end; k++) { /* Amat */
369         val = aj[k] + cstart;
370         if (!PetscBTLookupSet(table_i, val)) data_i[isz_i++] = val;
371       }
372       start = bi[row];
373       end   = bi[row + 1];
374       for (k = start; k < end; k++) { /* Bmat */
375         val = garray[bj[k]];
376         if (!PetscBTLookupSet(table_i, val)) data_i[isz_i++] = val;
377       }
378     }
379     isz[i] = isz_i;
380   }
381   PetscFunctionReturn(PETSC_SUCCESS);
382 }
383 /*
384       MatIncreaseOverlap_MPIBAIJ_Receive - Process the received messages,
385          and return the output
386 
387          Input:
388            C    - the matrix
389            nrqr - no of messages being processed.
390            rbuf - an array of pointers to the received requests
391 
392          Output:
393            xdata - array of messages to be sent back
394            isz1  - size of each message
395 
396   For better efficiency perhaps we should malloc each xdata[i] separately;
397 then, if a realloc is required, we would only need to copy the data for that
398 one row rather than for all previous rows, as happens now where a single
399 large chunk of memory is used.
400 
401 */
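/*
  All replies built below share a single allocation: xdata[0] owns the memory, each later
  pointer is an offset into it, and the caller frees only xdata[0] plus the pointer array.
  A sketch of the offset relationship (illustrative only):

     for (PetscInt i = 0; i + 1 < nrqr; i++) xdata[i + 1] = xdata[i] + isz1[i];
*/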
402 static PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Receive(Mat C, PetscInt nrqr, PetscInt **rbuf, PetscInt **xdata, PetscInt *isz1)
403 {
404   Mat_MPIBAIJ *c = (Mat_MPIBAIJ *)C->data;
405   Mat          A = c->A, B = c->B;
406   Mat_SeqBAIJ *a = (Mat_SeqBAIJ *)A->data, *b = (Mat_SeqBAIJ *)B->data;
407   PetscInt     rstart, cstart, *ai, *aj, *bi, *bj, *garray, i, j, k;
408   PetscInt     row, total_sz, ct, ct1, ct2, ct3, mem_estimate, oct2, l, start, end;
409   PetscInt     val, max1, max2, Mbs, no_malloc = 0, *tmp, new_estimate, ctr;
410   PetscInt    *rbuf_i, kmax, rbuf_0;
411   PetscBT      xtable;
412 
413   PetscFunctionBegin;
414   Mbs    = c->Mbs;
415   rstart = c->rstartbs;
416   cstart = c->cstartbs;
417   ai     = a->i;
418   aj     = a->j;
419   bi     = b->i;
420   bj     = b->j;
421   garray = c->garray;
422 
423   for (i = 0, ct = 0, total_sz = 0; i < nrqr; ++i) {
424     rbuf_i = rbuf[i];
425     rbuf_0 = rbuf_i[0];
426     ct += rbuf_0;
427     for (j = 1; j <= rbuf_0; j++) total_sz += rbuf_i[2 * j];
428   }
429 
430   if (c->Mbs) max1 = ct * (a->nz + b->nz) / c->Mbs;
431   else max1 = 1;
432   mem_estimate = 3 * ((total_sz > max1 ? total_sz : max1) + 1);
433   if (nrqr) {
434     PetscCall(PetscMalloc1(mem_estimate, &xdata[0]));
435     ++no_malloc;
436   }
437   PetscCall(PetscBTCreate(Mbs, &xtable));
438   PetscCall(PetscArrayzero(isz1, nrqr));
439 
440   ct3 = 0;
441   for (i = 0; i < nrqr; i++) { /* for each mesg from proc i */
442     rbuf_i = rbuf[i];
443     rbuf_0 = rbuf_i[0];
444     ct1    = 2 * rbuf_0 + 1;
445     ct2    = ct1;
446     ct3 += ct1;
447     for (j = 1; j <= rbuf_0; j++) { /* for each IS from proc i*/
448       PetscCall(PetscBTMemzero(Mbs, xtable));
449       oct2 = ct2;
450       kmax = rbuf_i[2 * j];
451       for (k = 0; k < kmax; k++, ct1++) {
452         row = rbuf_i[ct1];
453         if (!PetscBTLookupSet(xtable, row)) {
454           if (!(ct3 < mem_estimate)) {
455             new_estimate = (PetscInt)(1.5 * mem_estimate) + 1;
456             PetscCall(PetscMalloc1(new_estimate, &tmp));
457             PetscCall(PetscArraycpy(tmp, xdata[0], mem_estimate));
458             PetscCall(PetscFree(xdata[0]));
459             xdata[0]     = tmp;
460             mem_estimate = new_estimate;
461             ++no_malloc;
462             for (ctr = 1; ctr <= i; ctr++) xdata[ctr] = xdata[ctr - 1] + isz1[ctr - 1];
463           }
464           xdata[i][ct2++] = row;
465           ct3++;
466         }
467       }
468       for (k = oct2, max2 = ct2; k < max2; k++) {
469         row   = xdata[i][k] - rstart;
470         start = ai[row];
471         end   = ai[row + 1];
472         for (l = start; l < end; l++) {
473           val = aj[l] + cstart;
474           if (!PetscBTLookupSet(xtable, val)) {
475             if (!(ct3 < mem_estimate)) {
476               new_estimate = (PetscInt)(1.5 * mem_estimate) + 1;
477               PetscCall(PetscMalloc1(new_estimate, &tmp));
478               PetscCall(PetscArraycpy(tmp, xdata[0], mem_estimate));
479               PetscCall(PetscFree(xdata[0]));
480               xdata[0]     = tmp;
481               mem_estimate = new_estimate;
482               ++no_malloc;
483               for (ctr = 1; ctr <= i; ctr++) xdata[ctr] = xdata[ctr - 1] + isz1[ctr - 1];
484             }
485             xdata[i][ct2++] = val;
486             ct3++;
487           }
488         }
489         start = bi[row];
490         end   = bi[row + 1];
491         for (l = start; l < end; l++) {
492           val = garray[bj[l]];
493           if (!PetscBTLookupSet(xtable, val)) {
494             if (!(ct3 < mem_estimate)) {
495               new_estimate = (PetscInt)(1.5 * mem_estimate) + 1;
496               PetscCall(PetscMalloc1(new_estimate, &tmp));
497               PetscCall(PetscArraycpy(tmp, xdata[0], mem_estimate));
498               PetscCall(PetscFree(xdata[0]));
499               xdata[0]     = tmp;
500               mem_estimate = new_estimate;
501               ++no_malloc;
502               for (ctr = 1; ctr <= i; ctr++) xdata[ctr] = xdata[ctr - 1] + isz1[ctr - 1];
503             }
504             xdata[i][ct2++] = val;
505             ct3++;
506           }
507         }
508       }
509       /* Update the header*/
510       xdata[i][2 * j]     = ct2 - oct2; /* Undo the vector isz1 and use only a var*/
511       xdata[i][2 * j - 1] = rbuf_i[2 * j - 1];
512     }
513     xdata[i][0] = rbuf_0;
514     if (i + 1 < nrqr) xdata[i + 1] = xdata[i] + ct2;
515     isz1[i] = ct2; /* size of each message */
516   }
517   PetscCall(PetscBTDestroy(&xtable));
518   PetscCall(PetscInfo(C, "Allocated %" PetscInt_FMT " bytes, required %" PetscInt_FMT ", no of mallocs = %" PetscInt_FMT "\n", mem_estimate, ct3, no_malloc));
519   PetscFunctionReturn(PETSC_SUCCESS);
520 }
521 
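/*
  A minimal usage sketch (illustrative only, not part of this file): user code reaches the
  routine below through the generic MatCreateSubMatrices(). It assumes C is an assembled
  MATMPIBAIJ matrix and isrow/iscol are n index sets of (point) rows and columns.

     Mat *submat;
     PetscCall(MatCreateSubMatrices(C, n, isrow, iscol, MAT_INITIAL_MATRIX, &submat));
     ... later, with the same index sets and sparsity pattern ...
     PetscCall(MatCreateSubMatrices(C, n, isrow, iscol, MAT_REUSE_MATRIX, &submat));
     PetscCall(MatDestroySubMatrices(n, &submat));
*/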
522 PetscErrorCode MatCreateSubMatrices_MPIBAIJ(Mat C, PetscInt ismax, const IS isrow[], const IS iscol[], MatReuse scall, Mat *submat[])
523 {
524   IS          *isrow_block, *iscol_block;
525   Mat_MPIBAIJ *c = (Mat_MPIBAIJ *)C->data;
526   PetscInt     nmax, nstages_local, nstages, i, pos, max_no, N = C->cmap->N, bs = C->rmap->bs;
527   Mat_SeqBAIJ *subc;
528   Mat_SubSppt *smat;
529 
530   PetscFunctionBegin;
531   /* The compression and expansion should be avoided. It does not point
532      out errors and might change the indices, hence it is buggy */
533   PetscCall(PetscMalloc2(ismax, &isrow_block, ismax, &iscol_block));
534   PetscCall(ISCompressIndicesGeneral(C->rmap->N, C->rmap->n, bs, ismax, isrow, isrow_block));
535   if (isrow == iscol) {
536     for (i = 0; i < ismax; i++) {
537       iscol_block[i] = isrow_block[i];
538       PetscCall(PetscObjectReference((PetscObject)iscol_block[i]));
539     }
540   } else PetscCall(ISCompressIndicesGeneral(N, C->cmap->n, bs, ismax, iscol, iscol_block));
541 
542   /* Determine the number of stages through which submatrices are done */
543   if (!C->cmap->N) nmax = 20 * 1000000 / sizeof(PetscInt);
544   else nmax = 20 * 1000000 / (c->Nbs * sizeof(PetscInt));
545   if (!nmax) nmax = 1;
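  /*
     A worked example of the staging arithmetic (illustrative only): with 8-byte PetscInt and
     c->Nbs = 500000, nmax = 20000000 / (500000 * 8) = 5, so ismax = 12 index sets would be
     processed in nstages_local = 12 / 5 + 1 = 3 stages of at most nmax submatrices each.
  */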
546 
547   if (scall == MAT_INITIAL_MATRIX) {
548     nstages_local = ismax / nmax + ((ismax % nmax) ? 1 : 0); /* local nstages */
549 
550     /* Make sure every processor loops through the nstages */
551     PetscCall(MPIU_Allreduce(&nstages_local, &nstages, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)C)));
552 
553     /* Allocate memory to hold all the submatrices and dummy submatrices */
554     PetscCall(PetscCalloc1(ismax + nstages, submat));
555   } else { /* MAT_REUSE_MATRIX */
556     if (ismax) {
557       subc = (Mat_SeqBAIJ *)((*submat)[0]->data);
558       smat = subc->submatis1;
559     } else { /* (*submat)[0] is a dummy matrix */
560       smat = (Mat_SubSppt *)(*submat)[0]->data;
561     }
562     PetscCheck(smat, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "MatCreateSubMatrices(...,MAT_REUSE_MATRIX,...) requires submat");
563     nstages = smat->nstages;
564   }
565 
566   for (i = 0, pos = 0; i < nstages; i++) {
567     if (pos + nmax <= ismax) max_no = nmax;
568     else if (pos >= ismax) max_no = 0;
569     else max_no = ismax - pos;
570 
571     PetscCall(MatCreateSubMatrices_MPIBAIJ_local(C, max_no, isrow_block + pos, iscol_block + pos, scall, *submat + pos));
572     if (!max_no) {
573       if (scall == MAT_INITIAL_MATRIX) { /* submat[pos] is a dummy matrix */
574         smat          = (Mat_SubSppt *)(*submat)[pos]->data;
575         smat->nstages = nstages;
576       }
577       pos++; /* advance to next dummy matrix if any */
578     } else pos += max_no;
579   }
580 
581   if (scall == MAT_INITIAL_MATRIX && ismax) {
582     /* save nstages for reuse */
583     subc          = (Mat_SeqBAIJ *)((*submat)[0]->data);
584     smat          = subc->submatis1;
585     smat->nstages = nstages;
586   }
587 
588   for (i = 0; i < ismax; i++) {
589     PetscCall(ISDestroy(&isrow_block[i]));
590     PetscCall(ISDestroy(&iscol_block[i]));
591   }
592   PetscCall(PetscFree2(isrow_block, iscol_block));
593   PetscFunctionReturn(PETSC_SUCCESS);
594 }
595 
596 #if defined(PETSC_USE_CTABLE)
597 PetscErrorCode PetscGetProc(const PetscInt row, const PetscMPIInt size, const PetscInt proc_gnode[], PetscMPIInt *rank)
598 {
599   PetscInt    nGlobalNd = proc_gnode[size];
600   PetscMPIInt fproc;
601 
602   PetscFunctionBegin;
603   PetscCall(PetscMPIIntCast((PetscInt)(((float)row * (float)size / (float)nGlobalNd + 0.5)), &fproc));
604   if (fproc > size) fproc = size;
605   while (row < proc_gnode[fproc] || row >= proc_gnode[fproc + 1]) {
606     if (row < proc_gnode[fproc]) fproc--;
607     else fproc++;
608   }
609   *rank = fproc;
610   PetscFunctionReturn(PETSC_SUCCESS);
611 }
612 #endif
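/*
  A usage sketch for PetscGetProc() (illustrative only): proc_gnode is the cumulative
  ownership array with size + 1 entries, so rank r owns rows [proc_gnode[r], proc_gnode[r+1])
  and proc_gnode[size] is the global number of rows.

     PetscMPIInt owner;
     PetscCall(PetscGetProc(row, size, proc_gnode, &owner));
*/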
613 
614 /* This code is used for BAIJ and SBAIJ matrices (unfortunate dependency) */
615 PetscErrorCode MatCreateSubMatrices_MPIBAIJ_local(Mat C, PetscInt ismax, const IS isrow[], const IS iscol[], MatReuse scall, Mat *submats)
616 {
617   Mat_MPIBAIJ     *c = (Mat_MPIBAIJ *)C->data;
618   Mat              A = c->A;
619   Mat_SeqBAIJ     *a = (Mat_SeqBAIJ *)A->data, *b = (Mat_SeqBAIJ *)c->B->data, *subc;
620   const PetscInt **icol, **irow;
621   PetscInt        *nrow, *ncol, start;
622   PetscMPIInt      rank, size, tag0, tag2, tag3, tag4, *w1, *w2, *w3, *w4, nrqr;
623   PetscInt       **sbuf1, **sbuf2, *sbuf2_i, i, j, k, l, ct1, ct2, **rbuf1, row, proc = -1;
624   PetscInt         nrqs = 0, msz, **ptr = NULL, *req_size = NULL, *ctr = NULL, *pa, *tmp = NULL, tcol;
625   PetscInt       **rbuf3 = NULL, *req_source1 = NULL, *req_source2, **sbuf_aj, **rbuf2 = NULL, max1, max2;
626   PetscInt       **lens, is_no, ncols, *cols, mat_i, *mat_j, tmp2, jmax;
627 #if defined(PETSC_USE_CTABLE)
628   PetscHMapI *cmap, cmap_i = NULL, *rmap, rmap_i;
629 #else
630   PetscInt **cmap, *cmap_i = NULL, **rmap, *rmap_i;
631 #endif
632   const PetscInt *irow_i, *icol_i;
633   PetscInt        ctr_j, *sbuf1_j, *sbuf_aj_i, *rbuf1_i, kmax, *lens_i;
634   MPI_Request    *s_waits1, *r_waits1, *s_waits2, *r_waits2, *r_waits3;
635   MPI_Request    *r_waits4, *s_waits3, *s_waits4;
636   MPI_Comm        comm;
637   PetscScalar   **rbuf4, *rbuf4_i = NULL, **sbuf_aa, *vals, *mat_a = NULL, *imat_a = NULL, *sbuf_aa_i;
638   PetscMPIInt    *onodes1, *olengths1, end;
639   PetscInt      **row2proc, *row2proc_i, *imat_ilen, *imat_j, *imat_i;
640   Mat_SubSppt    *smat_i;
641   PetscBool      *issorted, colflag, iscsorted = PETSC_TRUE;
642   PetscInt       *sbuf1_i, *rbuf2_i, *rbuf3_i, ilen;
643   PetscInt        bs = C->rmap->bs, bs2 = c->bs2, rstart = c->rstartbs;
644   PetscBool       ijonly = c->ijonly; /* private flag indicates only matrix data structures are requested */
645   PetscInt        nzA, nzB, *a_i = a->i, *b_i = b->i, *a_j = a->j, *b_j = b->j, ctmp, imark, *cworkA, *cworkB;
646   PetscScalar    *vworkA = NULL, *vworkB = NULL, *a_a = a->a, *b_a = b->a;
647   PetscInt        cstart = c->cstartbs, *bmap = c->garray;
648   PetscBool      *allrows, *allcolumns;
649 
650   PetscFunctionBegin;
651   PetscCall(PetscObjectGetComm((PetscObject)C, &comm));
652   size = c->size;
653   rank = c->rank;
654 
655   PetscCall(PetscMalloc5(ismax, &row2proc, ismax, &cmap, ismax, &rmap, ismax + 1, &allcolumns, ismax, &allrows));
656   PetscCall(PetscMalloc5(ismax, (PetscInt ***)&irow, ismax, (PetscInt ***)&icol, ismax, &nrow, ismax, &ncol, ismax, &issorted));
657 
658   for (i = 0; i < ismax; i++) {
659     PetscCall(ISSorted(iscol[i], &issorted[i]));
660     if (!issorted[i]) iscsorted = issorted[i]; /* columns are not sorted! */
661     PetscCall(ISSorted(isrow[i], &issorted[i]));
662 
663     /* Check for special case: allcolumns */
664     PetscCall(ISIdentity(iscol[i], &colflag));
665     PetscCall(ISGetLocalSize(iscol[i], &ncol[i]));
666 
667     if (colflag && ncol[i] == c->Nbs) {
668       allcolumns[i] = PETSC_TRUE;
669       icol[i]       = NULL;
670     } else {
671       allcolumns[i] = PETSC_FALSE;
672       PetscCall(ISGetIndices(iscol[i], &icol[i]));
673     }
674 
675     /* Check for special case: allrows */
676     PetscCall(ISIdentity(isrow[i], &colflag));
677     PetscCall(ISGetLocalSize(isrow[i], &nrow[i]));
678     if (colflag && nrow[i] == c->Mbs) {
679       allrows[i] = PETSC_TRUE;
680       irow[i]    = NULL;
681     } else {
682       allrows[i] = PETSC_FALSE;
683       PetscCall(ISGetIndices(isrow[i], &irow[i]));
684     }
685   }
686 
687   if (scall == MAT_REUSE_MATRIX) {
688     /* Assumes new rows are same length as the old rows */
689     for (i = 0; i < ismax; i++) {
690       subc = (Mat_SeqBAIJ *)(submats[i]->data);
691       PetscCheck(subc->mbs == nrow[i] && subc->nbs == ncol[i], PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot reuse matrix. wrong size");
692 
693       /* Initialize the matrix as if empty */
694       PetscCall(PetscArrayzero(subc->ilen, subc->mbs));
695 
696       /* Inherit the factor type from the original matrix */
697       submats[i]->factortype = C->factortype;
698 
699       smat_i = subc->submatis1;
700 
701       nrqs        = smat_i->nrqs;
702       nrqr        = smat_i->nrqr;
703       rbuf1       = smat_i->rbuf1;
704       rbuf2       = smat_i->rbuf2;
705       rbuf3       = smat_i->rbuf3;
706       req_source2 = smat_i->req_source2;
707 
708       sbuf1 = smat_i->sbuf1;
709       sbuf2 = smat_i->sbuf2;
710       ptr   = smat_i->ptr;
711       tmp   = smat_i->tmp;
712       ctr   = smat_i->ctr;
713 
714       pa          = smat_i->pa;
715       req_size    = smat_i->req_size;
716       req_source1 = smat_i->req_source1;
717 
718       allcolumns[i] = smat_i->allcolumns;
719       allrows[i]    = smat_i->allrows;
720       row2proc[i]   = smat_i->row2proc;
721       rmap[i]       = smat_i->rmap;
722       cmap[i]       = smat_i->cmap;
723     }
724 
725     if (!ismax) { /* Get dummy submatrices and retrieve struct submatis1 */
726       PetscCheck(submats[0], PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "submats are null, cannot reuse");
727       smat_i = (Mat_SubSppt *)submats[0]->data;
728 
729       nrqs        = smat_i->nrqs;
730       nrqr        = smat_i->nrqr;
731       rbuf1       = smat_i->rbuf1;
732       rbuf2       = smat_i->rbuf2;
733       rbuf3       = smat_i->rbuf3;
734       req_source2 = smat_i->req_source2;
735 
736       sbuf1 = smat_i->sbuf1;
737       sbuf2 = smat_i->sbuf2;
738       ptr   = smat_i->ptr;
739       tmp   = smat_i->tmp;
740       ctr   = smat_i->ctr;
741 
742       pa          = smat_i->pa;
743       req_size    = smat_i->req_size;
744       req_source1 = smat_i->req_source1;
745 
746       allcolumns[0] = PETSC_FALSE;
747     }
748   } else { /* scall == MAT_INITIAL_MATRIX */
749     /* Get some new tags to keep the communication clean */
750     PetscCall(PetscObjectGetNewTag((PetscObject)C, &tag2));
751     PetscCall(PetscObjectGetNewTag((PetscObject)C, &tag3));
752 
753     /* evaluate communication - mesg to whom, length of mesg, and buffer space
754      required. Based on this, buffers are allocated, and data is copied into them */
755     PetscCall(PetscCalloc4(size, &w1, size, &w2, size, &w3, size, &w4)); /* mesg size, initialize work vectors */
756 
757     for (i = 0; i < ismax; i++) {
758       jmax   = nrow[i];
759       irow_i = irow[i];
760 
761       PetscCall(PetscMalloc1(jmax, &row2proc_i));
762       row2proc[i] = row2proc_i;
763 
764       if (issorted[i]) proc = 0;
765       for (j = 0; j < jmax; j++) {
766         if (!issorted[i]) proc = 0;
767         if (allrows[i]) row = j;
768         else row = irow_i[j];
769 
770         while (row >= c->rangebs[proc + 1]) proc++;
771         w4[proc]++;
772         row2proc_i[j] = proc; /* map row index to proc */
773       }
774       for (j = 0; j < size; j++) {
775         if (w4[j]) {
776           w1[j] += w4[j];
777           w3[j]++;
778           w4[j] = 0;
779         }
780       }
781     }
782 
783     nrqs     = 0; /* no of outgoing messages */
784     msz      = 0; /* total mesg length (for all procs) */
785     w1[rank] = 0; /* no mesg sent to self */
786     w3[rank] = 0;
787     for (i = 0; i < size; i++) {
788       if (w1[i]) {
789         w2[i] = 1;
790         nrqs++;
791       } /* there exists a message to proc i */
792     }
793     PetscCall(PetscMalloc1(nrqs, &pa)); /* (proc-array) */
794     for (i = 0, j = 0; i < size; i++) {
795       if (w1[i]) {
796         pa[j] = i;
797         j++;
798       }
799     }
800 
801     /* Each message would have a header = 1 + 2*(no of IS) + data */
802     for (i = 0; i < nrqs; i++) {
803       j = pa[i];
804       w1[j] += w2[j] + 2 * w3[j];
805       msz += w1[j];
806     }
807     PetscCall(PetscInfo(0, "Number of outgoing messages %" PetscInt_FMT " Total message length %" PetscInt_FMT "\n", nrqs, msz));
808 
809     /* Determine the number of messages to expect, their lengths, and the from-ids */
810     PetscCall(PetscGatherNumberOfMessages(comm, w2, w1, &nrqr));
811     PetscCall(PetscGatherMessageLengths(comm, nrqs, nrqr, w1, &onodes1, &olengths1));
812 
813     /* Now post the Irecvs corresponding to these messages */
814     PetscCall(PetscObjectGetNewTag((PetscObject)C, &tag0));
815     PetscCall(PetscPostIrecvInt(comm, tag0, nrqr, onodes1, olengths1, &rbuf1, &r_waits1));
816 
817     /* Allocate Memory for outgoing messages */
818     PetscCall(PetscMalloc4(size, &sbuf1, size, &ptr, 2 * msz, &tmp, size, &ctr));
819     PetscCall(PetscArrayzero(sbuf1, size));
820     PetscCall(PetscArrayzero(ptr, size));
821 
822     {
823       PetscInt *iptr = tmp;
824       k              = 0;
825       for (i = 0; i < nrqs; i++) {
826         j = pa[i];
827         iptr += k;
828         sbuf1[j] = iptr;
829         k        = w1[j];
830       }
831     }
832 
833     /* Form the outgoing messages. Initialize the header space */
834     for (i = 0; i < nrqs; i++) {
835       j           = pa[i];
836       sbuf1[j][0] = 0;
837       PetscCall(PetscArrayzero(sbuf1[j] + 1, 2 * w3[j]));
838       ptr[j] = sbuf1[j] + 2 * w3[j] + 1;
839     }
840 
841     /* Parse the isrow and copy data into outbuf */
842     for (i = 0; i < ismax; i++) {
843       row2proc_i = row2proc[i];
844       PetscCall(PetscArrayzero(ctr, size));
845       irow_i = irow[i];
846       jmax   = nrow[i];
847       for (j = 0; j < jmax; j++) { /* parse the indices of each IS */
848         proc = row2proc_i[j];
849         if (allrows[i]) row = j;
850         else row = irow_i[j];
851 
852         if (proc != rank) { /* copy to the outgoing buf*/
853           ctr[proc]++;
854           *ptr[proc] = row;
855           ptr[proc]++;
856         }
857       }
858       /* Update the headers for the current IS */
859       for (j = 0; j < size; j++) { /* Can Optimise this loop too */
860         if ((ctr_j = ctr[j])) {
861           sbuf1_j            = sbuf1[j];
862           k                  = ++sbuf1_j[0];
863           sbuf1_j[2 * k]     = ctr_j;
864           sbuf1_j[2 * k - 1] = i;
865         }
866       }
867     }
868 
869     /*  Now  post the sends */
870     PetscCall(PetscMalloc1(nrqs, &s_waits1));
871     for (i = 0; i < nrqs; ++i) {
872       j = pa[i];
873       PetscCallMPI(MPI_Isend(sbuf1[j], w1[j], MPIU_INT, j, tag0, comm, s_waits1 + i));
874     }
875 
876     /* Post Receives to capture the buffer size */
877     PetscCall(PetscMalloc1(nrqs, &r_waits2));
878     PetscCall(PetscMalloc3(nrqs, &req_source2, nrqs, &rbuf2, nrqs, &rbuf3));
879     if (nrqs) rbuf2[0] = tmp + msz;
880     for (i = 1; i < nrqs; ++i) rbuf2[i] = rbuf2[i - 1] + w1[pa[i - 1]];
881     for (i = 0; i < nrqs; ++i) {
882       j = pa[i];
883       PetscCallMPI(MPI_Irecv(rbuf2[i], w1[j], MPIU_INT, j, tag2, comm, r_waits2 + i));
884     }
885 
886     /* Send to other procs the buf size they should allocate */
887     /* Receive messages*/
888     PetscCall(PetscMalloc1(nrqr, &s_waits2));
889     PetscCall(PetscMalloc3(nrqr, &sbuf2, nrqr, &req_size, nrqr, &req_source1));
890 
891     PetscCallMPI(MPI_Waitall(nrqr, r_waits1, MPI_STATUSES_IGNORE));
892     for (i = 0; i < nrqr; ++i) {
893       req_size[i] = 0;
894       rbuf1_i     = rbuf1[i];
895       start       = 2 * rbuf1_i[0] + 1;
896       end         = olengths1[i];
897       PetscCall(PetscMalloc1(end, &sbuf2[i]));
898       sbuf2_i = sbuf2[i];
899       for (j = start; j < end; j++) {
900         row        = rbuf1_i[j] - rstart;
901         ncols      = a_i[row + 1] - a_i[row] + b_i[row + 1] - b_i[row];
902         sbuf2_i[j] = ncols;
903         req_size[i] += ncols;
904       }
905       req_source1[i] = onodes1[i];
906       /* form the header */
907       sbuf2_i[0] = req_size[i];
908       for (j = 1; j < start; j++) sbuf2_i[j] = rbuf1_i[j];
909 
910       PetscCallMPI(MPI_Isend(sbuf2_i, end, MPIU_INT, req_source1[i], tag2, comm, s_waits2 + i));
911     }
912 
913     PetscCall(PetscFree(onodes1));
914     PetscCall(PetscFree(olengths1));
915 
916     PetscCall(PetscFree(r_waits1));
917     PetscCall(PetscFree4(w1, w2, w3, w4));
918 
919     /* Receive messages*/
920     PetscCall(PetscMalloc1(nrqs, &r_waits3));
921 
922     PetscCallMPI(MPI_Waitall(nrqs, r_waits2, MPI_STATUSES_IGNORE));
923     for (i = 0; i < nrqs; ++i) {
924       PetscCall(PetscMalloc1(rbuf2[i][0], &rbuf3[i]));
925       req_source2[i] = pa[i];
926       PetscCallMPI(MPI_Irecv(rbuf3[i], rbuf2[i][0], MPIU_INT, req_source2[i], tag3, comm, r_waits3 + i));
927     }
928     PetscCall(PetscFree(r_waits2));
929 
930     /* Wait on sends1 and sends2 */
931     PetscCallMPI(MPI_Waitall(nrqs, s_waits1, MPI_STATUSES_IGNORE));
932     PetscCallMPI(MPI_Waitall(nrqr, s_waits2, MPI_STATUSES_IGNORE));
933     PetscCall(PetscFree(s_waits1));
934     PetscCall(PetscFree(s_waits2));
935 
936     /* Now allocate sending buffers for a->j, and send them off */
937     PetscCall(PetscMalloc1(nrqr, &sbuf_aj));
938     for (i = 0, j = 0; i < nrqr; i++) j += req_size[i];
939     if (nrqr) PetscCall(PetscMalloc1(j, &sbuf_aj[0]));
940     for (i = 1; i < nrqr; i++) sbuf_aj[i] = sbuf_aj[i - 1] + req_size[i - 1];
941 
942     PetscCall(PetscMalloc1(nrqr, &s_waits3));
943     {
944       for (i = 0; i < nrqr; i++) {
945         rbuf1_i   = rbuf1[i];
946         sbuf_aj_i = sbuf_aj[i];
947         ct1       = 2 * rbuf1_i[0] + 1;
948         ct2       = 0;
949         for (j = 1, max1 = rbuf1_i[0]; j <= max1; j++) {
950           kmax = rbuf1[i][2 * j];
951           for (k = 0; k < kmax; k++, ct1++) {
952             row    = rbuf1_i[ct1] - rstart;
953             nzA    = a_i[row + 1] - a_i[row];
954             nzB    = b_i[row + 1] - b_i[row];
955             ncols  = nzA + nzB;
956             cworkA = a_j + a_i[row];
957             cworkB = b_j + b_i[row];
958 
959             /* load the column indices for this row into cols */
960             cols = sbuf_aj_i + ct2;
961             for (l = 0; l < nzB; l++) {
962               if ((ctmp = bmap[cworkB[l]]) < cstart) cols[l] = ctmp;
963               else break;
964             }
965             imark = l;
966             for (l = 0; l < nzA; l++) cols[imark + l] = cstart + cworkA[l];
967             for (l = imark; l < nzB; l++) cols[nzA + l] = bmap[cworkB[l]];
968             ct2 += ncols;
969           }
970         }
971         PetscCallMPI(MPI_Isend(sbuf_aj_i, req_size[i], MPIU_INT, req_source1[i], tag3, comm, s_waits3 + i));
972       }
973     }
974 
975     /* create col map: global col of C -> local col of submatrices */
976 #if defined(PETSC_USE_CTABLE)
977     for (i = 0; i < ismax; i++) {
978       if (!allcolumns[i]) {
979         PetscCall(PetscHMapICreateWithSize(ncol[i], cmap + i));
980 
981         jmax   = ncol[i];
982         icol_i = icol[i];
983         cmap_i = cmap[i];
984         for (j = 0; j < jmax; j++) PetscCall(PetscHMapISet(cmap[i], icol_i[j] + 1, j + 1));
985       } else cmap[i] = NULL;
986     }
987 #else
988     for (i = 0; i < ismax; i++) {
989       if (!allcolumns[i]) {
990         PetscCall(PetscCalloc1(c->Nbs, &cmap[i]));
991         jmax   = ncol[i];
992         icol_i = icol[i];
993         cmap_i = cmap[i];
994         for (j = 0; j < jmax; j++) cmap_i[icol_i[j]] = j + 1;
995       } else cmap[i] = NULL;
996     }
997 #endif
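    /*
       The stored value j + 1 makes 0 mean "column not selected", so lookups below follow this
       sketch (illustrative only), with tcol - 1 giving the local column when tcol is nonzero:

          tcol = cmap_i[rbuf3_i[ct2]];
          if (tcol) local_col = tcol - 1;
    */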
998 
999     /* Create lens which is required for MatCreate... */
1000     for (i = 0, j = 0; i < ismax; i++) j += nrow[i];
1001     PetscCall(PetscMalloc1(ismax, &lens));
1002 
1003     if (ismax) PetscCall(PetscCalloc1(j, &lens[0]));
1004     for (i = 1; i < ismax; i++) lens[i] = lens[i - 1] + nrow[i - 1];
1005 
1006     /* Update lens from local data */
1007     for (i = 0; i < ismax; i++) {
1008       row2proc_i = row2proc[i];
1009       jmax       = nrow[i];
1010       if (!allcolumns[i]) cmap_i = cmap[i];
1011       irow_i = irow[i];
1012       lens_i = lens[i];
1013       for (j = 0; j < jmax; j++) {
1014         if (allrows[i]) row = j;
1015         else row = irow_i[j]; /* global blocked row of C */
1016 
1017         proc = row2proc_i[j];
1018         if (proc == rank) {
1019           /* Get indices from matA and then from matB */
1020 #if defined(PETSC_USE_CTABLE)
1021           PetscInt tt;
1022 #endif
1023           row    = row - rstart;
1024           nzA    = a_i[row + 1] - a_i[row];
1025           nzB    = b_i[row + 1] - b_i[row];
1026           cworkA = a_j + a_i[row];
1027           cworkB = b_j + b_i[row];
1028 
1029           if (!allcolumns[i]) {
1030 #if defined(PETSC_USE_CTABLE)
1031             for (k = 0; k < nzA; k++) {
1032               PetscCall(PetscHMapIGetWithDefault(cmap_i, cstart + cworkA[k] + 1, 0, &tt));
1033               if (tt) lens_i[j]++;
1034             }
1035             for (k = 0; k < nzB; k++) {
1036               PetscCall(PetscHMapIGetWithDefault(cmap_i, bmap[cworkB[k]] + 1, 0, &tt));
1037               if (tt) lens_i[j]++;
1038             }
1039 
1040 #else
1041             for (k = 0; k < nzA; k++) {
1042               if (cmap_i[cstart + cworkA[k]]) lens_i[j]++;
1043             }
1044             for (k = 0; k < nzB; k++) {
1045               if (cmap_i[bmap[cworkB[k]]]) lens_i[j]++;
1046             }
1047 #endif
1048           } else { /* allcolumns */
1049             lens_i[j] = nzA + nzB;
1050           }
1051         }
1052       }
1053     }
1054 
1055     /* Create row map: global row of C -> local row of submatrices */
1056     for (i = 0; i < ismax; i++) {
1057       if (!allrows[i]) {
1058 #if defined(PETSC_USE_CTABLE)
1059         PetscCall(PetscHMapICreateWithSize(nrow[i], rmap + i));
1060         irow_i = irow[i];
1061         jmax   = nrow[i];
1062         for (j = 0; j < jmax; j++) {
1063           if (allrows[i]) {
1064             PetscCall(PetscHMapISet(rmap[i], j + 1, j + 1));
1065           } else {
1066             PetscCall(PetscHMapISet(rmap[i], irow_i[j] + 1, j + 1));
1067           }
1068         }
1069 #else
1070         PetscCall(PetscCalloc1(c->Mbs, &rmap[i]));
1071         rmap_i = rmap[i];
1072         irow_i = irow[i];
1073         jmax   = nrow[i];
1074         for (j = 0; j < jmax; j++) {
1075           if (allrows[i]) rmap_i[j] = j;
1076           else rmap_i[irow_i[j]] = j;
1077         }
1078 #endif
1079       } else rmap[i] = NULL;
1080     }
1081 
1082     /* Update lens from offproc data */
1083     {
1084       PetscInt *rbuf2_i, *rbuf3_i, *sbuf1_i;
1085 
1086       PetscCallMPI(MPI_Waitall(nrqs, r_waits3, MPI_STATUSES_IGNORE));
1087       for (tmp2 = 0; tmp2 < nrqs; tmp2++) {
1088         sbuf1_i = sbuf1[pa[tmp2]];
1089         jmax    = sbuf1_i[0];
1090         ct1     = 2 * jmax + 1;
1091         ct2     = 0;
1092         rbuf2_i = rbuf2[tmp2];
1093         rbuf3_i = rbuf3[tmp2];
1094         for (j = 1; j <= jmax; j++) {
1095           is_no  = sbuf1_i[2 * j - 1];
1096           max1   = sbuf1_i[2 * j];
1097           lens_i = lens[is_no];
1098           if (!allcolumns[is_no]) cmap_i = cmap[is_no];
1099           rmap_i = rmap[is_no];
1100           for (k = 0; k < max1; k++, ct1++) {
1101             if (allrows[is_no]) {
1102               row = sbuf1_i[ct1];
1103             } else {
1104 #if defined(PETSC_USE_CTABLE)
1105               PetscCall(PetscHMapIGetWithDefault(rmap_i, sbuf1_i[ct1] + 1, 0, &row));
1106               row--;
1107               PetscCheck(row >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "row not found in table");
1108 #else
1109               row = rmap_i[sbuf1_i[ct1]]; /* the val in the new matrix to be */
1110 #endif
1111             }
1112             max2 = rbuf2_i[ct1];
1113             for (l = 0; l < max2; l++, ct2++) {
1114               if (!allcolumns[is_no]) {
1115 #if defined(PETSC_USE_CTABLE)
1116                 PetscCall(PetscHMapIGetWithDefault(cmap_i, rbuf3_i[ct2] + 1, 0, &tcol));
1117 #else
1118                 tcol = cmap_i[rbuf3_i[ct2]];
1119 #endif
1120                 if (tcol) lens_i[row]++;
1121               } else {         /* allcolumns */
1122                 lens_i[row]++; /* lens_i[row] += max2 ? */
1123               }
1124             }
1125           }
1126         }
1127       }
1128     }
1129     PetscCall(PetscFree(r_waits3));
1130     PetscCallMPI(MPI_Waitall(nrqr, s_waits3, MPI_STATUSES_IGNORE));
1131     PetscCall(PetscFree(s_waits3));
1132 
1133     /* Create the submatrices */
1134     for (i = 0; i < ismax; i++) {
1135       PetscInt bs_tmp;
1136       if (ijonly) bs_tmp = 1;
1137       else bs_tmp = bs;
1138 
1139       PetscCall(MatCreate(PETSC_COMM_SELF, submats + i));
1140       PetscCall(MatSetSizes(submats[i], nrow[i] * bs_tmp, ncol[i] * bs_tmp, PETSC_DETERMINE, PETSC_DETERMINE));
1141 
1142       PetscCall(MatSetType(submats[i], ((PetscObject)A)->type_name));
1143       PetscCall(MatSeqBAIJSetPreallocation(submats[i], bs_tmp, 0, lens[i]));
1144       PetscCall(MatSeqSBAIJSetPreallocation(submats[i], bs_tmp, 0, lens[i])); /* this subroutine is used by SBAIJ routines */
1145 
1146       /* create struct Mat_SubSppt and attach it to submat */
1147       PetscCall(PetscNew(&smat_i));
1148       subc            = (Mat_SeqBAIJ *)submats[i]->data;
1149       subc->submatis1 = smat_i;
1150 
1151       smat_i->destroy          = submats[i]->ops->destroy;
1152       submats[i]->ops->destroy = MatDestroySubMatrix_SeqBAIJ;
1153       submats[i]->factortype   = C->factortype;
1154 
1155       smat_i->id          = i;
1156       smat_i->nrqs        = nrqs;
1157       smat_i->nrqr        = nrqr;
1158       smat_i->rbuf1       = rbuf1;
1159       smat_i->rbuf2       = rbuf2;
1160       smat_i->rbuf3       = rbuf3;
1161       smat_i->sbuf2       = sbuf2;
1162       smat_i->req_source2 = req_source2;
1163 
1164       smat_i->sbuf1 = sbuf1;
1165       smat_i->ptr   = ptr;
1166       smat_i->tmp   = tmp;
1167       smat_i->ctr   = ctr;
1168 
1169       smat_i->pa          = pa;
1170       smat_i->req_size    = req_size;
1171       smat_i->req_source1 = req_source1;
1172 
1173       smat_i->allcolumns = allcolumns[i];
1174       smat_i->allrows    = allrows[i];
1175       smat_i->singleis   = PETSC_FALSE;
1176       smat_i->row2proc   = row2proc[i];
1177       smat_i->rmap       = rmap[i];
1178       smat_i->cmap       = cmap[i];
1179     }
1180 
1181     if (!ismax) { /* Create dummy submats[0] for reuse struct subc */
1182       PetscCall(MatCreate(PETSC_COMM_SELF, &submats[0]));
1183       PetscCall(MatSetSizes(submats[0], 0, 0, PETSC_DETERMINE, PETSC_DETERMINE));
1184       PetscCall(MatSetType(submats[0], MATDUMMY));
1185 
1186       /* create struct Mat_SubSppt and attach it to submat */
1187       PetscCall(PetscNew(&smat_i));
1188       submats[0]->data = (void *)smat_i;
1189 
1190       smat_i->destroy          = submats[0]->ops->destroy;
1191       submats[0]->ops->destroy = MatDestroySubMatrix_Dummy;
1192       submats[0]->factortype   = C->factortype;
1193 
1194       smat_i->id          = 0;
1195       smat_i->nrqs        = nrqs;
1196       smat_i->nrqr        = nrqr;
1197       smat_i->rbuf1       = rbuf1;
1198       smat_i->rbuf2       = rbuf2;
1199       smat_i->rbuf3       = rbuf3;
1200       smat_i->sbuf2       = sbuf2;
1201       smat_i->req_source2 = req_source2;
1202 
1203       smat_i->sbuf1 = sbuf1;
1204       smat_i->ptr   = ptr;
1205       smat_i->tmp   = tmp;
1206       smat_i->ctr   = ctr;
1207 
1208       smat_i->pa          = pa;
1209       smat_i->req_size    = req_size;
1210       smat_i->req_source1 = req_source1;
1211 
1212       smat_i->allcolumns = PETSC_FALSE;
1213       smat_i->singleis   = PETSC_FALSE;
1214       smat_i->row2proc   = NULL;
1215       smat_i->rmap       = NULL;
1216       smat_i->cmap       = NULL;
1217     }
1218 
1219     if (ismax) PetscCall(PetscFree(lens[0]));
1220     PetscCall(PetscFree(lens));
1221     if (sbuf_aj) {
1222       PetscCall(PetscFree(sbuf_aj[0]));
1223       PetscCall(PetscFree(sbuf_aj));
1224     }
1225 
1226   } /* end of scall == MAT_INITIAL_MATRIX */
1227 
1228   /* Post recv matrix values */
1229   if (!ijonly) {
1230     PetscCall(PetscObjectGetNewTag((PetscObject)C, &tag4));
1231     PetscCall(PetscMalloc1(nrqs, &rbuf4));
1232     PetscCall(PetscMalloc1(nrqs, &r_waits4));
1233     for (i = 0; i < nrqs; ++i) {
1234       PetscCall(PetscMalloc1(rbuf2[i][0] * bs2, &rbuf4[i]));
1235       PetscCallMPI(MPI_Irecv(rbuf4[i], rbuf2[i][0] * bs2, MPIU_SCALAR, req_source2[i], tag4, comm, r_waits4 + i));
1236     }
1237 
1238     /* Allocate sending buffers for a->a, and send them off */
1239     PetscCall(PetscMalloc1(nrqr, &sbuf_aa));
1240     for (i = 0, j = 0; i < nrqr; i++) j += req_size[i];
1241 
1242     if (nrqr) PetscCall(PetscMalloc1(j * bs2, &sbuf_aa[0]));
1243     for (i = 1; i < nrqr; i++) sbuf_aa[i] = sbuf_aa[i - 1] + req_size[i - 1] * bs2;
1244 
1245     PetscCall(PetscMalloc1(nrqr, &s_waits4));
1246 
1247     for (i = 0; i < nrqr; i++) {
1248       rbuf1_i   = rbuf1[i];
1249       sbuf_aa_i = sbuf_aa[i];
1250       ct1       = 2 * rbuf1_i[0] + 1;
1251       ct2       = 0;
1252       for (j = 1, max1 = rbuf1_i[0]; j <= max1; j++) {
1253         kmax = rbuf1_i[2 * j];
1254         for (k = 0; k < kmax; k++, ct1++) {
1255           row    = rbuf1_i[ct1] - rstart;
1256           nzA    = a_i[row + 1] - a_i[row];
1257           nzB    = b_i[row + 1] - b_i[row];
1258           ncols  = nzA + nzB;
1259           cworkB = b_j + b_i[row];
1260           vworkA = a_a + a_i[row] * bs2;
1261           vworkB = b_a + b_i[row] * bs2;
1262 
1263           /* load the column values for this row into vals*/
1264           vals = sbuf_aa_i + ct2 * bs2;
1265           for (l = 0; l < nzB; l++) {
1266             if ((bmap[cworkB[l]]) < cstart) {
1267               PetscCall(PetscArraycpy(vals + l * bs2, vworkB + l * bs2, bs2));
1268             } else break;
1269           }
1270           imark = l;
1271           for (l = 0; l < nzA; l++) PetscCall(PetscArraycpy(vals + (imark + l) * bs2, vworkA + l * bs2, bs2));
1272           for (l = imark; l < nzB; l++) PetscCall(PetscArraycpy(vals + (nzA + l) * bs2, vworkB + l * bs2, bs2));
1273 
1274           ct2 += ncols;
1275         }
1276       }
1277       PetscCallMPI(MPI_Isend(sbuf_aa_i, req_size[i] * bs2, MPIU_SCALAR, req_source1[i], tag4, comm, s_waits4 + i));
1278     }
1279   }
1280 
1281   /* Assemble the matrices */
1282   /* First assemble the local rows */
1283   for (i = 0; i < ismax; i++) {
1284     row2proc_i = row2proc[i];
1285     subc       = (Mat_SeqBAIJ *)submats[i]->data;
1286     imat_ilen  = subc->ilen;
1287     imat_j     = subc->j;
1288     imat_i     = subc->i;
1289     imat_a     = subc->a;
1290 
1291     if (!allcolumns[i]) cmap_i = cmap[i];
1292     rmap_i = rmap[i];
1293     irow_i = irow[i];
1294     jmax   = nrow[i];
1295     for (j = 0; j < jmax; j++) {
1296       if (allrows[i]) row = j;
1297       else row = irow_i[j];
1298       proc = row2proc_i[j];
1299 
1300       if (proc == rank) {
1301         row    = row - rstart;
1302         nzA    = a_i[row + 1] - a_i[row];
1303         nzB    = b_i[row + 1] - b_i[row];
1304         cworkA = a_j + a_i[row];
1305         cworkB = b_j + b_i[row];
1306         if (!ijonly) {
1307           vworkA = a_a + a_i[row] * bs2;
1308           vworkB = b_a + b_i[row] * bs2;
1309         }
1310 
1311         if (allrows[i]) {
1312           row = row + rstart;
1313         } else {
1314 #if defined(PETSC_USE_CTABLE)
1315           PetscCall(PetscHMapIGetWithDefault(rmap_i, row + rstart + 1, 0, &row));
1316           row--;
1317 
1318           PetscCheck(row >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "row not found in table");
1319 #else
1320           row = rmap_i[row + rstart];
1321 #endif
1322         }
1323         mat_i = imat_i[row];
1324         if (!ijonly) mat_a = imat_a + mat_i * bs2;
1325         mat_j = imat_j + mat_i;
1326         ilen  = imat_ilen[row];
1327 
1328         /* load the column indices for this row into cols*/
1329         if (!allcolumns[i]) {
1330           for (l = 0; l < nzB; l++) {
1331             if ((ctmp = bmap[cworkB[l]]) < cstart) {
1332 #if defined(PETSC_USE_CTABLE)
1333               PetscCall(PetscHMapIGetWithDefault(cmap_i, ctmp + 1, 0, &tcol));
1334               if (tcol) {
1335 #else
1336               if ((tcol = cmap_i[ctmp])) {
1337 #endif
1338                 *mat_j++ = tcol - 1;
1339                 PetscCall(PetscArraycpy(mat_a, vworkB + l * bs2, bs2));
1340                 mat_a += bs2;
1341                 ilen++;
1342               }
1343             } else break;
1344           }
1345           imark = l;
1346           for (l = 0; l < nzA; l++) {
1347 #if defined(PETSC_USE_CTABLE)
1348             PetscCall(PetscHMapIGetWithDefault(cmap_i, cstart + cworkA[l] + 1, 0, &tcol));
1349             if (tcol) {
1350 #else
1351             if ((tcol = cmap_i[cstart + cworkA[l]])) {
1352 #endif
1353               *mat_j++ = tcol - 1;
1354               if (!ijonly) {
1355                 PetscCall(PetscArraycpy(mat_a, vworkA + l * bs2, bs2));
1356                 mat_a += bs2;
1357               }
1358               ilen++;
1359             }
1360           }
1361           for (l = imark; l < nzB; l++) {
1362 #if defined(PETSC_USE_CTABLE)
1363             PetscCall(PetscHMapIGetWithDefault(cmap_i, bmap[cworkB[l]] + 1, 0, &tcol));
1364             if (tcol) {
1365 #else
1366             if ((tcol = cmap_i[bmap[cworkB[l]]])) {
1367 #endif
1368               *mat_j++ = tcol - 1;
1369               if (!ijonly) {
1370                 PetscCall(PetscArraycpy(mat_a, vworkB + l * bs2, bs2));
1371                 mat_a += bs2;
1372               }
1373               ilen++;
1374             }
1375           }
1376         } else { /* allcolumns */
1377           for (l = 0; l < nzB; l++) {
1378             if ((ctmp = bmap[cworkB[l]]) < cstart) {
1379               *mat_j++ = ctmp;
1380               PetscCall(PetscArraycpy(mat_a, vworkB + l * bs2, bs2));
1381               mat_a += bs2;
1382               ilen++;
1383             } else break;
1384           }
1385           imark = l;
1386           for (l = 0; l < nzA; l++) {
1387             *mat_j++ = cstart + cworkA[l];
1388             if (!ijonly) {
1389               PetscCall(PetscArraycpy(mat_a, vworkA + l * bs2, bs2));
1390               mat_a += bs2;
1391             }
1392             ilen++;
1393           }
1394           for (l = imark; l < nzB; l++) {
1395             *mat_j++ = bmap[cworkB[l]];
1396             if (!ijonly) {
1397               PetscCall(PetscArraycpy(mat_a, vworkB + l * bs2, bs2));
1398               mat_a += bs2;
1399             }
1400             ilen++;
1401           }
1402         }
1403         imat_ilen[row] = ilen;
1404       }
1405     }
1406   }
1407 
1408   /* Now assemble the off proc rows */
1409   if (!ijonly) PetscCallMPI(MPI_Waitall(nrqs, r_waits4, MPI_STATUSES_IGNORE));
1410   for (tmp2 = 0; tmp2 < nrqs; tmp2++) {
1411     sbuf1_i = sbuf1[pa[tmp2]];
1412     jmax    = sbuf1_i[0];
1413     ct1     = 2 * jmax + 1;
1414     ct2     = 0;
1415     rbuf2_i = rbuf2[tmp2];
1416     rbuf3_i = rbuf3[tmp2];
1417     if (!ijonly) rbuf4_i = rbuf4[tmp2];
1418     for (j = 1; j <= jmax; j++) {
1419       is_no  = sbuf1_i[2 * j - 1];
1420       rmap_i = rmap[is_no];
1421       if (!allcolumns[is_no]) cmap_i = cmap[is_no];
1422       subc      = (Mat_SeqBAIJ *)submats[is_no]->data;
1423       imat_ilen = subc->ilen;
1424       imat_j    = subc->j;
1425       imat_i    = subc->i;
1426       if (!ijonly) imat_a = subc->a;
1427       max1 = sbuf1_i[2 * j];
1428       for (k = 0; k < max1; k++, ct1++) { /* for each recved block row */
1429         row = sbuf1_i[ct1];
1430 
1431         if (allrows[is_no]) {
1432           row = sbuf1_i[ct1];
1433         } else {
1434 #if defined(PETSC_USE_CTABLE)
1435           PetscCall(PetscHMapIGetWithDefault(rmap_i, row + 1, 0, &row));
1436           row--;
1437           PetscCheck(row >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "row not found in table");
1438 #else
1439           row = rmap_i[row];
1440 #endif
1441         }
1442         ilen  = imat_ilen[row];
1443         mat_i = imat_i[row];
1444         if (!ijonly) mat_a = imat_a + mat_i * bs2;
1445         mat_j = imat_j + mat_i;
1446         max2  = rbuf2_i[ct1];
1447         if (!allcolumns[is_no]) {
1448           for (l = 0; l < max2; l++, ct2++) {
1449 #if defined(PETSC_USE_CTABLE)
1450             PetscCall(PetscHMapIGetWithDefault(cmap_i, rbuf3_i[ct2] + 1, 0, &tcol));
1451 #else
1452             tcol = cmap_i[rbuf3_i[ct2]];
1453 #endif
1454             if (tcol) {
1455               *mat_j++ = tcol - 1;
1456               if (!ijonly) {
1457                 PetscCall(PetscArraycpy(mat_a, rbuf4_i + ct2 * bs2, bs2));
1458                 mat_a += bs2;
1459               }
1460               ilen++;
1461             }
1462           }
1463         } else { /* allcolumns */
1464           for (l = 0; l < max2; l++, ct2++) {
1465             *mat_j++ = rbuf3_i[ct2]; /* same global column index of C */
1466             if (!ijonly) {
1467               PetscCall(PetscArraycpy(mat_a, rbuf4_i + ct2 * bs2, bs2));
1468               mat_a += bs2;
1469             }
1470             ilen++;
1471           }
1472         }
1473         imat_ilen[row] = ilen;
1474       }
1475     }
1476   }
1477 
1478   if (!iscsorted) { /* sort column indices of the rows */
1479     MatScalar *work;
1480 
1481     PetscCall(PetscMalloc1(bs2, &work));
1482     for (i = 0; i < ismax; i++) {
1483       subc      = (Mat_SeqBAIJ *)submats[i]->data;
1484       imat_ilen = subc->ilen;
1485       imat_j    = subc->j;
1486       imat_i    = subc->i;
1487       if (!ijonly) imat_a = subc->a;
1488       if (allcolumns[i]) continue;
1489 
1490       jmax = nrow[i];
1491       for (j = 0; j < jmax; j++) {
1492         mat_i = imat_i[j];
1493         mat_j = imat_j + mat_i;
1494         ilen  = imat_ilen[j];
1495         if (ijonly) {
1496           PetscCall(PetscSortInt(ilen, mat_j));
1497         } else {
1498           mat_a = imat_a + mat_i * bs2;
1499           PetscCall(PetscSortIntWithDataArray(ilen, mat_j, mat_a, bs2 * sizeof(MatScalar), work));
1500         }
1501       }
1502     }
1503     PetscCall(PetscFree(work));
1504   }
1505 
1506   if (!ijonly) {
1507     PetscCall(PetscFree(r_waits4));
1508     PetscCallMPI(MPI_Waitall(nrqr, s_waits4, MPI_STATUSES_IGNORE));
1509     PetscCall(PetscFree(s_waits4));
1510   }
1511 
1512   /* Restore the indices */
1513   for (i = 0; i < ismax; i++) {
1514     if (!allrows[i]) PetscCall(ISRestoreIndices(isrow[i], irow + i));
1515     if (!allcolumns[i]) PetscCall(ISRestoreIndices(iscol[i], icol + i));
1516   }
1517 
1518   for (i = 0; i < ismax; i++) {
1519     PetscCall(MatAssemblyBegin(submats[i], MAT_FINAL_ASSEMBLY));
1520     PetscCall(MatAssemblyEnd(submats[i], MAT_FINAL_ASSEMBLY));
1521   }
1522 
1523   PetscCall(PetscFree5(*(PetscInt ***)&irow, *(PetscInt ***)&icol, nrow, ncol, issorted));
1524   PetscCall(PetscFree5(row2proc, cmap, rmap, allcolumns, allrows));
1525 
1526   if (!ijonly) {
1527     if (sbuf_aa) {
1528       PetscCall(PetscFree(sbuf_aa[0]));
1529       PetscCall(PetscFree(sbuf_aa));
1530     }
1531 
1532     for (i = 0; i < nrqs; ++i) PetscCall(PetscFree(rbuf4[i]));
1533     PetscCall(PetscFree(rbuf4));
1534   }
1535   c->ijonly = PETSC_FALSE; /* set back to the default */
1536   PetscFunctionReturn(PETSC_SUCCESS);
1537 }
1538