/*
Module Name: xxt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL  60439              |Chicago, IL  60637              |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
*/
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT 1
#define BOTH  0

typedef struct xxt_solver_info {
  PetscInt      n, m, n_global, m_global;
  PetscInt      nnz, max_nnz, msg_buf_sz;
  PetscInt     *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt     *col_sz, *col_indices;
  PetscScalar **col_vals, *x, *solve_uu, *solve_w;
  PetscInt      nsolves;
  PetscScalar   tot_solve_time;
} xxt_info;
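
/* Storage layout of the X factor held in xxt_info: the nonzero run of
   column i lives in x[], starting at col_vals[i]; its (offset, length)
   pair is stored at col_indices[2*i] / col_indices[2*i+1], with
   col_sz[i] == length; the col_indices list is terminated by -1.
   See xxt_generate() below. */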

typedef struct matvec_info {
  PetscInt     n, m, n_global, m_global;
  PetscInt    *local2global;
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info *, PetscScalar *, PetscScalar *);
  void *grid_data;
} mv_info;
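
/* Note: mv_info::local2global is allocated with m+1 entries; set_mvi()
   stores an INT_MAX sentinel in the last slot.  n is the number of local
   rows and m the number of local columns of the row-distributed coarse
   matrix. */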

struct xxt_CDT {
  PetscInt  id;
  PetscInt  ns;
  PetscInt  level;
  xxt_info *info;
  mv_info  *mvi;
};

static PetscInt n_xxt         = 0;
static PetscInt n_xxt_handles = 0;

/* prototypes */
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xxt_ADT xxt_handle);
static PetscErrorCode det_separators(xxt_ADT xxt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscErrorCode xxt_generate(xxt_ADT xxt_handle);
static PetscErrorCode do_xxt_factor(xxt_ADT xxt_handle);
static mv_info       *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data);

xxt_ADT XXT_new(void)
{
  xxt_ADT xxt_handle;

  /* rolling count on n_xxt ... it is never reset, so a potential overflow problem here */
  n_xxt_handles++;
  xxt_handle       = (xxt_ADT)malloc(sizeof(struct xxt_CDT));
  xxt_handle->id   = ++n_xxt;
  xxt_handle->info = NULL;
  xxt_handle->mvi  = NULL;

  return (xxt_handle);
}

PetscErrorCode XXT_factor(xxt_ADT   xxt_handle,                                           /* prev. allocated xxt  handle */
                          PetscInt *local2global,                                         /* global column mapping       */
                          PetscInt  n,                                                    /* local num rows              */
                          PetscInt  m,                                                    /* local num cols              */
                          PetscErrorCode (*matvec)(void *, PetscScalar *, PetscScalar *), /* b_loc=A_local.x_loc         */
                          void *grid_data)                                                /* grid data for matvec        */
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xxt_handle));

  /* only 2^k for now and all nodes participating */
  PetscCheck((1 << (xxt_handle->level = PCTFS_i_log2_num_nodes)) == PCTFS_num_nodes, PETSC_COMM_SELF, PETSC_ERR_PLIB, "only 2^k for now and MPI_COMM_WORLD!!! %" PetscInt_FMT " != %" PetscInt_FMT, (PetscInt)(1 << PCTFS_i_log2_num_nodes), PCTFS_num_nodes);

  /* space for X info */
  xxt_handle->info = (xxt_info *)malloc(sizeof(xxt_info));

  /* set up matvec handles */
  xxt_handle->mvi = set_mvi(local2global, n, m, (PetscErrorCode(*)(mv_info *, PetscScalar *, PetscScalar *))matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xxt_handle->ns = 0;

  /* determine separators and generate firing order - NB xxt info set here */
  PetscCall(det_separators(xxt_handle));

  PetscCall(do_xxt_factor(xxt_handle));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode XXT_solve(xxt_ADT xxt_handle, PetscScalar *x, PetscScalar *b)
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xxt_handle));

  /* need to copy b into x? */
  if (b) PetscCall(PCTFS_rvec_copy(x, b, xxt_handle->mvi->n));
  PetscCall(do_xxt_solve(xxt_handle, x));
  PetscFunctionReturn(PETSC_SUCCESS);
}
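
/* A minimal usage sketch of the XXT lifecycle (illustrative only: my_matvec,
   my_grid, local2global, n, m, x, and b are hypothetical caller-side objects;
   the callback must apply the local coarse operator, b_loc = A_local . x_loc):

     xxt_ADT handle = XXT_new();
     PetscCall(XXT_factor(handle, local2global, n, m, my_matvec, my_grid));
     PetscCall(XXT_solve(handle, x, b)); // x <- A^{-1} b = X X^T b
     PetscCall(XXT_free(handle));
*/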

PetscErrorCode XXT_free(xxt_ADT xxt_handle)
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xxt_handle));
  n_xxt_handles--;

  free(xxt_handle->info->nsep);
  free(xxt_handle->info->lnsep);
  free(xxt_handle->info->fo);
  free(xxt_handle->info->stages);
  free(xxt_handle->info->solve_uu);
  free(xxt_handle->info->solve_w);
  free(xxt_handle->info->x);
  free(xxt_handle->info->col_vals);
  free(xxt_handle->info->col_sz);
  free(xxt_handle->info->col_indices);
  free(xxt_handle->info);
  free(xxt_handle->mvi->local2global);
  PetscCall(PCTFS_gs_free(xxt_handle->mvi->PCTFS_gs_handle));
  free(xxt_handle->mvi);
  free(xxt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* This function is currently unused */
PetscErrorCode XXT_stats(xxt_ADT xxt_handle)
{
  PetscInt    op[]  = {NON_UNIFORM, GL_MIN, GL_MAX, GL_ADD, GL_MIN, GL_MAX, GL_ADD, GL_MIN, GL_MAX, GL_ADD};
  PetscInt    fop[] = {NON_UNIFORM, GL_MIN, GL_MAX, GL_ADD};
  PetscInt    vals[9], work[9];
  PetscScalar fvals[3], fwork[3];

  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xxt_handle));

  /* if factorization not done there are no stats */
  if (!xxt_handle->info || !xxt_handle->mvi) {
    if (!PCTFS_my_id) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "XXT_stats() :: no stats available!\n"));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  vals[0] = vals[1] = vals[2] = xxt_handle->info->nnz;
  vals[3] = vals[4] = vals[5] = xxt_handle->mvi->n;
  vals[6] = vals[7] = vals[8] = xxt_handle->info->msg_buf_sz;
  PetscCall(PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op));

  fvals[0] = fvals[1] = fvals[2] = xxt_handle->info->tot_solve_time / xxt_handle->info->nsolves++;
  PetscCall(PCTFS_grop(fvals, fwork, PETSC_STATIC_ARRAY_LENGTH(fop) - 1, fop));

  if (!PCTFS_my_id) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: min   xxt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[0]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: max   xxt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[1]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: avg   xxt_nnz=%g\n", PCTFS_my_id, (double)(1.0 * vals[2] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: tot   xxt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[2]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: xxt   C(2d)  =%g\n", PCTFS_my_id, (double)(vals[2] / (PetscPowReal(1.0 * vals[5], 1.5)))));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: xxt   C(3d)  =%g\n", PCTFS_my_id, (double)(vals[2] / (PetscPowReal(1.0 * vals[5], 1.6667)))));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: min   xxt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[3]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: max   xxt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[4]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: avg   xxt_n  =%g\n", PCTFS_my_id, (double)(1.0 * vals[5] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: tot   xxt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[5]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: min   xxt_buf=%" PetscInt_FMT "\n", PCTFS_my_id, vals[6]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: max   xxt_buf=%" PetscInt_FMT "\n", PCTFS_my_id, vals[7]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: avg   xxt_buf=%g\n", PCTFS_my_id, (double)(1.0 * vals[8] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: min   xxt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[0])));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: max   xxt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[1])));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " :: avg   xxt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[2] / PCTFS_num_nodes)));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*

Description: get A_local, the local portion of the global coarse matrix,
which is a row-distributed nxm matrix with n<m.
   o my_ml holds address of ML struct associated w/A_local and coarse grid
   o local2global holds global number of column i (i=0,...,m-1)
   o local2global holds global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed using
   PCTFS_gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec(void *data, double *in, double *out)
*/
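
/* A sketch of a conforming matvec callback, assuming (purely for
   illustration) a diagonal local operator; my_grid_data and my_matvec are
   hypothetical names, and a real callback applies the caller's coarse
   operator through grid_data:

     typedef struct {
       PetscInt     n;    // local problem size
       PetscScalar *diag; // local diagonal of A_local
     } my_grid_data;

     static PetscErrorCode my_matvec(void *data, PetscScalar *in, PetscScalar *out)
     {
       my_grid_data *g = (my_grid_data *)data;

       PetscFunctionBegin;
       for (PetscInt i = 0; i < g->n; i++) out[i] = g->diag[i] * in[i];
       PetscFunctionReturn(PETSC_SUCCESS);
     }
*/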

static PetscErrorCode do_xxt_factor(xxt_ADT xxt_handle)
{
  PetscFunctionBegin;
  PetscCall(xxt_generate(xxt_handle));
  PetscFunctionReturn(PETSC_SUCCESS);
}

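/* xxt_generate builds the sparse factor X one column at a time.  For each
   separator dof col (taken in firing order fo[]) it fires the canonical
   vector v = e_col, A-orthogonalizes it against the columns already in X,

     v <- v - X (X^T A v),   then   v <- v / sqrt(v^T A v),

   and stores the nonzero run of v as the next column of X.  By construction
   X^T A X = I, so A^{-1} = X X^T, which is what do_xxt_solve() applies. */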
static PetscErrorCode xxt_generate(xxt_ADT xxt_handle)
{
  PetscInt      i, j, k, idex;
  PetscInt      dim, col;
  PetscScalar  *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt     *segs;
  PetscInt      op[] = {GL_ADD, 0};
  PetscInt      off, len;
  PetscScalar  *x_ptr;
  PetscInt     *iptr, flag;
  PetscInt      start = 0, end, work;
  PetscInt      op2[] = {GL_MIN, 0};
  PCTFS_gs_ADT  PCTFS_gs_handle;
  PetscInt     *nsep, *lnsep, *fo;
  PetscInt      a_n            = xxt_handle->mvi->n;
  PetscInt      a_m            = xxt_handle->mvi->m;
  PetscInt     *a_local2global = xxt_handle->mvi->local2global;
  PetscInt      level;
  PetscInt      xxt_nnz = 0, xxt_max_nnz = 0;
  PetscInt      n, m;
  PetscInt     *col_sz, *col_indices, *stages;
  PetscScalar **col_vals, *x;
  PetscInt      n_global;
  PetscBLASInt  i1  = 1, dlen;
  PetscScalar   dm1 = -1.0;

  PetscFunctionBegin;
  n               = xxt_handle->mvi->n;
  nsep            = xxt_handle->info->nsep;
  lnsep           = xxt_handle->info->lnsep;
  fo              = xxt_handle->info->fo;
  end             = lnsep[0];
  level           = xxt_handle->level;
  PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i = 0, j = 0; i <= level; i++) j += nsep[i];

  m = j - xxt_handle->ns;
  if (m != j) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "xxt_generate() :: null space exists %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", m, j, xxt_handle->ns));

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  col_sz      = (PetscInt *)malloc(m * sizeof(PetscInt));
  col_indices = (PetscInt *)malloc((2 * m + 1) * sizeof(PetscInt));
  col_vals    = (PetscScalar **)malloc(m * sizeof(PetscScalar *));
  for (i = j = 0; i < m; i++, j += 2) {
    col_indices[j] = col_indices[j + 1] = col_sz[i] = -1;
    col_vals[i]                                     = NULL;
  }
  col_indices[j] = -1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  segs   = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  PetscCall(PCTFS_ivec_zero(stages, level + 1));
  PCTFS_ivec_copy(segs, nsep, level + 1);
  for (i = 0; i < level; i++) segs[i + 1] += segs[i];
  stages[0] = segs[0];

  /* temporary vectors  */
  u  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  z  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  v  = (PetscScalar *)malloc(a_m * sizeof(PetscScalar));
  uu = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  w  = (PetscScalar *)malloc(m * sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i = 1, j = 0; i <= level; i++) j += nsep[i];

  /* storage for sparse x values */
  n_global    = xxt_handle->info->n_global;
  xxt_max_nnz = (PetscInt)(2.5 * PetscPowReal(1.0 * n_global, 1.6667) + j * n / 2) / PCTFS_num_nodes;
  x           = (PetscScalar *)malloc(xxt_max_nnz * sizeof(PetscScalar));
  xxt_nnz     = 0;

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim = i = j = 0; i < m; i++) {
    /* time to move to the next level? */
    while (i == segs[dim]) {
      PetscCheck(dim != level, PETSC_COMM_SELF, PETSC_ERR_PLIB, "dim about to exceed level");
      stages[dim++] = i;
      end += lnsep[dim];
    }
    stages[dim] = i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    col = (start < end) ? fo[start] : INT_MAX;
    PetscCall(PCTFS_giop_hc(&col, &work, 1, op2, dim));

    /* shouldn't need this */
    if (col == INT_MAX) {
      PetscCall(PetscInfo(NULL, "hey ... col==INT_MAX??\n"));
      continue;
    }

    /* do I own it? I should */
    PetscCall(PCTFS_rvec_zero(v, a_m));
    if (col == fo[start]) {
      start++;
      idex = PCTFS_ivec_linear_search(col, a_local2global, a_n);
      PetscCheck(idex != -1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "NOT FOUND!");
      v[idex] = 1.0;
      j++;
    } else {
      idex = PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idex != -1) v[idex] = 1.0;
    }

    /* perform u = A.v_l */
    PetscCall(PCTFS_rvec_zero(u, n));
    PetscCall(do_matvec(xxt_handle->mvi, v, u));

    /* uu = X^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XXT_solve call ? */
    PetscCall(PCTFS_rvec_zero(uu, m));
    x_ptr = x;
    iptr  = col_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASdot", uu[k] = BLASdot_(&dlen, u + off, &i1, x_ptr, &i1));
      x_ptr += len;
    }

    /* uu = X^T.u_l (comm portion) */
    PetscCall(PCTFS_ssgl_radd(uu, w, dim, stages));

    /* z = X.uu */
    PetscCall(PCTFS_rvec_zero(z, n));
    x_ptr = x;
    iptr  = col_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &uu[k], x_ptr, &i1, z + off, &i1));
      x_ptr += len;
    }

    /* compute v_l = v_l - z */
    PetscCall(PCTFS_rvec_zero(v + a_n, a_m - a_n));
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &dm1, z, &i1, v, &i1));

    /* compute u_l = A.v_l */
    if (a_n != a_m) PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, v, "+\0", dim));
    PetscCall(PCTFS_rvec_zero(u, n));
    PetscCall(do_matvec(xxt_handle->mvi, v, u));

    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - local portion */
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASdot", alpha = BLASdot_(&dlen, u, &i1, v, &i1));
    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - comm portion */
    PetscCall(PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim));

    alpha = (PetscScalar)PetscSqrtReal((PetscReal)alpha);

    /* check for small alpha                             */
    /* LATER use this to detect and determine null space */
    PetscCheck(PetscAbsScalar(alpha) >= 1.0e-14, PETSC_COMM_SELF, PETSC_ERR_PLIB, "bad alpha! %g", (double)PetscAbsScalar(alpha));

    /* compute v_l = v_l/sqrt(alpha) */
    PetscCall(PCTFS_rvec_scale(v, 1.0 / alpha, n));

    /* add newly generated column, v_l, to X */
    flag = 1;
    off = len = 0;
    for (k = 0; k < n; k++) {
      if (v[k] != 0.0) {
        len = k;
        if (flag) {
          off  = k;
          flag = 0;
        }
      }
    }

    /* off/len now bracket the nonzero run of v: v[off], ..., v[off+len-1] */
    len -= (off - 1);

    if (len > 0) {
      if ((xxt_nnz + len) > xxt_max_nnz) {
        /* grow the value array and re-point x_ptr at the free space in the copy */
        PetscCall(PetscInfo(NULL, "increasing space for X by 2x!\n"));
        xxt_max_nnz *= 2;
        x_ptr = (PetscScalar *)malloc(xxt_max_nnz * sizeof(PetscScalar));
        PetscCall(PCTFS_rvec_copy(x_ptr, x, xxt_nnz));
        free(x);
        x = x_ptr;
        x_ptr += xxt_nnz;
      }
      xxt_nnz += len;
      PetscCall(PCTFS_rvec_copy(x_ptr, v + off, len));

      col_indices[2 * i] = off;
      col_sz[i] = col_indices[2 * i + 1] = len;
      col_vals[i]                        = x_ptr;
    } else {
      col_indices[2 * i] = 0;
      col_sz[i] = col_indices[2 * i + 1] = 0;
      col_vals[i]                        = x_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim != level) {
    stages[dim++] = i;
    PetscCall(PetscInfo(NULL, "disconnected!!! dim(%" PetscInt_FMT ")!=level(%" PetscInt_FMT ")\n", dim, level));
  }
  stages[dim] = i;

  xxt_handle->info->n              = xxt_handle->mvi->n;
  xxt_handle->info->m              = m;
  xxt_handle->info->nnz            = xxt_nnz;
  xxt_handle->info->max_nnz        = xxt_max_nnz;
  xxt_handle->info->msg_buf_sz     = stages[level] - stages[0];
  xxt_handle->info->solve_uu       = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xxt_handle->info->solve_w        = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xxt_handle->info->x              = x;
  xxt_handle->info->col_vals       = col_vals;
  xxt_handle->info->col_sz         = col_sz;
  xxt_handle->info->col_indices    = col_indices;
  xxt_handle->info->stages         = stages;
  xxt_handle->info->nsolves        = 0;
  xxt_handle->info->tot_solve_time = 0.0;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  PetscFunctionReturn(PETSC_SUCCESS);
}
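
/* Apply the factored inverse: uc <- X X^T uc.  The first loop forms the
   local contribution to uu = X^T uc one column at a time, PCTFS_ssgl_radd()
   completes the sum across the hypercube stages, and the second loop
   accumulates uc = X uu. */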
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *uc)
{
  PetscInt     off, len, *iptr;
  PetscInt     level       = xxt_handle->level;
  PetscInt     n           = xxt_handle->info->n;
  PetscInt     m           = xxt_handle->info->m;
  PetscInt    *stages      = xxt_handle->info->stages;
  PetscInt    *col_indices = xxt_handle->info->col_indices;
  PetscScalar *x_ptr, *uu_ptr;
  PetscScalar *solve_uu = xxt_handle->info->solve_uu;
  PetscScalar *solve_w  = xxt_handle->info->solve_w;
  PetscScalar *x        = xxt_handle->info->x;
  PetscBLASInt i1       = 1, dlen;

  PetscFunctionBegin;
  uu_ptr = solve_uu;
  PetscCall(PCTFS_rvec_zero(uu_ptr, m));

  /* x  = X.X^T.b */
  /* uu = X^T.b */
  for (x_ptr = x, iptr = col_indices; *iptr != -1; x_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASdot", *uu_ptr++ = BLASdot_(&dlen, uc + off, &i1, x_ptr, &i1));
  }

  /* communication of beta */
  uu_ptr = solve_uu;
  if (level) PetscCall(PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages));

  PetscCall(PCTFS_rvec_zero(uc, n));

  /* x = X.uu */
  for (x_ptr = x, iptr = col_indices; *iptr != -1; x_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, uu_ptr++, x_ptr, &i1, uc + off, &i1));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode check_handle(xxt_ADT xxt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM, GL_MIN, GL_MAX};

  PetscFunctionBegin;
  PetscCheck(xxt_handle, PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: NULL %p", (void *)xxt_handle);

  vals[0] = vals[1] = xxt_handle->id;
  PetscCall(PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op));
  PetscCheck(vals[0] == vals[1] && xxt_handle->id > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: id mismatch min/max %" PetscInt_FMT "/%" PetscInt_FMT " %" PetscInt_FMT, vals[0], vals[1], xxt_handle->id);
  PetscFunctionReturn(PETSC_SUCCESS);
}
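
/* Determine the separator sets by recursive bisection of the processor
   hypercube, working from the top cut (edge == level) down.  At each cut the
   dofs on the interface between the two halves form the separator: they are
   marked in used[], written into the firing order fo[] (filled from the end,
   so fo[] ends up ordered from the local, level-0 dofs up to the top-level
   separator), and counted in lnsep[] (local) and nsep[] (global).  The
   "shared" branch handles the case where dofs are replicated across
   processors within a half. */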
static PetscErrorCode det_separators(xxt_ADT xxt_handle)
{
  PetscInt     i, ct, id;
  PetscInt     mask, edge, *iptr;
  PetscInt    *dir, *used;
  PetscInt     sum[4], w[4];
  PetscScalar  rsum[4], rw[4];
  PetscInt     op[] = {GL_ADD, 0};
  PetscScalar *lhs, *rhs;
  PetscInt    *nsep, *lnsep, *fo, nfo = 0;
  PCTFS_gs_ADT PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;
  PetscInt    *local2global    = xxt_handle->mvi->local2global;
  PetscInt     n               = xxt_handle->mvi->n;
  PetscInt     m               = xxt_handle->mvi->m;
  PetscInt     level           = xxt_handle->level;
  PetscInt     shared          = 0;

  PetscFunctionBegin;
  dir   = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  nsep  = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  lnsep = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  fo    = (PetscInt *)malloc(sizeof(PetscInt) * (n + 1));
  used  = (PetscInt *)malloc(sizeof(PetscInt) * n);

  PetscCall(PCTFS_ivec_zero(dir, level + 1));
  PetscCall(PCTFS_ivec_zero(nsep, level + 1));
  PetscCall(PCTFS_ivec_zero(lnsep, level + 1));
  PetscCall(PCTFS_ivec_set(fo, -1, n + 1));
  PetscCall(PCTFS_ivec_zero(used, n));

  lhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);
  rhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);

  /* determine the # of unique dof: each dof contributes 1/multiplicity, */
  /* so the global sum rsum[0] counts every dof exactly once             */
  PetscCall(PCTFS_rvec_zero(lhs, m));
  PetscCall(PCTFS_rvec_set(lhs, 1.0, n));
  PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", level));
  PetscCall(PCTFS_rvec_zero(rsum, 2));
  for (i = 0; i < n; i++) {
    if (lhs[i] != 0.0) {
      rsum[0] += 1.0 / lhs[i];
      rsum[1] += lhs[i];
    }
  }
  PetscCall(PCTFS_grop_hc(rsum, rw, 2, op, level));
  /* add 0.1 to guard the truncating integer casts below */
  rsum[0] += 0.1;
  rsum[1] += 0.1;

  /* if the two sums differ, some dof is replicated across processors */
  if (PetscAbsScalar(rsum[0] - rsum[1]) > EPS) shared = 1;

  xxt_handle->info->n_global = xxt_handle->info->m_global = (PetscInt)rsum[0];
  xxt_handle->mvi->n_global = xxt_handle->mvi->m_global = (PetscInt)rsum[0];

  /* determine separator sets top down */
  if (shared) {
    for (iptr = fo + n, id = PCTFS_my_id, mask = PCTFS_num_nodes >> 1, edge = level; edge > 0; edge--, mask >>= 1) {
      /* set rsh of hc, fire, and collect lhs responses */
      PetscCall((id < mask) ? PCTFS_rvec_zero(lhs, m) : PCTFS_rvec_set(lhs, 1.0, m));
      PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge));

      /* set lsh of hc, fire, and collect rhs responses */
      PetscCall((id < mask) ? PCTFS_rvec_set(rhs, 1.0, m) : PCTFS_rvec_zero(rhs, m));
      PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge));

      for (i = 0; i < n; i++) {
        if (id < mask) {
          if (lhs[i] != 0.0) lhs[i] = 1.0;
        }
        if (id >= mask) {
          if (rhs[i] != 0.0) rhs[i] = 1.0;
        }
      }

      if (id < mask) PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge - 1));
      else PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge - 1));

      /* count number of dofs I own that have signal and not in sep set */
      PetscCall(PCTFS_rvec_zero(rsum, 4));
      PetscCall(PCTFS_ivec_zero(sum, 4));
      for (ct = i = 0; i < n; i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if (id < mask) {
            if (lhs[i] != 0.0) {
              sum[0]++;
              rsum[0] += 1.0 / lhs[i];
            }
          }
          /* number of dofs to be marked on rhs hc */
          if (id >= mask) {
            if (rhs[i] != 0.0) {
              sum[1]++;
              rsum[1] += 1.0 / rhs[i];
            }
          }
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id < mask) ? (sum[2] = ct) : (sum[3] = ct);
      (id < mask) ? (rsum[2] = ct) : (rsum[3] = ct);
      PetscCall(PCTFS_giop_hc(sum, w, 4, op, edge));
      PetscCall(PCTFS_grop_hc(rsum, rw, 4, op, edge));
      rsum[0] += 0.1;
      rsum[1] += 0.1;
      rsum[2] += 0.1;
      rsum[3] += 0.1;

      if (id < mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct = i = 0; i < n; i++) {
          if ((!used[i]) && (lhs[i] != 0.0)) {
            ct++;
            nfo++;

            PetscCheck(nfo <= n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nfo about to exceed n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));

        lnsep[edge] = ct;
        nsep[edge]  = (PetscInt)rsum[0];
        dir[edge]   = LEFT;
      }

      if (id >= mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct = i = 0; i < n; i++) {
          if ((!used[i]) && (rhs[i] != 0.0)) {
            ct++;
            nfo++;

            PetscCheck(nfo <= n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nfo about to exceed n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));

        lnsep[edge] = ct;
        nsep[edge]  = (PetscInt)rsum[1];
        dir[edge]   = RIGHT;
      }

      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this?                */

      /* fold rhs hc into lower */
      if (id >= mask) id -= mask;
    }
  } else {
    for (iptr = fo + n, id = PCTFS_my_id, mask = PCTFS_num_nodes >> 1, edge = level; edge > 0; edge--, mask >>= 1) {
      /* set rsh of hc, fire, and collect lhs responses */
      PetscCall((id < mask) ? PCTFS_rvec_zero(lhs, m) : PCTFS_rvec_set(lhs, 1.0, m));
      PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge));

      /* set lsh of hc, fire, and collect rhs responses */
      PetscCall((id < mask) ? PCTFS_rvec_set(rhs, 1.0, m) : PCTFS_rvec_zero(rhs, m));
      PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge));

      /* count number of dofs I own that have signal and not in sep set */
      PetscCall(PCTFS_ivec_zero(sum, 4));
      for (ct = i = 0; i < n; i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if ((id < mask) && (lhs[i] != 0.0)) sum[0]++;
          /* number of dofs to be marked on rhs hc */
          if ((id >= mask) && (rhs[i] != 0.0)) sum[1]++;
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id < mask) ? (sum[2] = ct) : (sum[3] = ct);
      PetscCall(PCTFS_giop_hc(sum, w, 4, op, edge));

      /* lhs hc wins */
      if (sum[2] >= sum[3]) {
        if (id < mask) {
          /* mark dofs I own that have signal and not in sep set */
          for (ct = i = 0; i < n; i++) {
            if ((!used[i]) && (lhs[i] != 0.0)) {
              ct++;
              nfo++;
              *--iptr = local2global[i];
              used[i] = edge;
            }
          }
          if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));
          lnsep[edge] = ct;
        }
        nsep[edge] = sum[0];
        dir[edge]  = LEFT;
      } else { /* rhs hc wins */
        if (id >= mask) {
          /* mark dofs I own that have signal and not in sep set */
          for (ct = i = 0; i < n; i++) {
            if ((!used[i]) && (rhs[i] != 0.0)) {
              ct++;
              nfo++;
              *--iptr = local2global[i];
              used[i] = edge;
            }
          }
          if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));
          lnsep[edge] = ct;
        }
        nsep[edge] = sum[1];
        dir[edge]  = RIGHT;
      }
      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this?                */

      /* fold rhs hc into lower */
      if (id >= mask) id -= mask;
    }
  }

  /* level 0 is on processor case - so mark the remainder */
  for (ct = i = 0; i < n; i++) {
    if (!used[i]) {
      ct++;
      nfo++;
      *--iptr = local2global[i];
      used[i] = edge;
    }
  }
  if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));
  lnsep[edge] = ct;
  nsep[edge]  = ct;
  dir[edge]   = LEFT;

  xxt_handle->info->nsep  = nsep;
  xxt_handle->info->lnsep = lnsep;
  xxt_handle->info->fo    = fo;
  xxt_handle->info->nfo   = nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data)
{
  mv_info *mvi;

  mvi               = (mv_info *)malloc(sizeof(mv_info));
  mvi->n            = n;
  mvi->m            = m;
  mvi->n_global     = -1;
  mvi->m_global     = -1;
  mvi->local2global = (PetscInt *)malloc((m + 1) * sizeof(PetscInt));
  PCTFS_ivec_copy(mvi->local2global, local2global, m);
  mvi->local2global[m] = INT_MAX; /* sentinel */
  mvi->matvec          = matvec;
  mvi->grid_data       = grid_data;

  /* set xxt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return (mvi);
}

static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
  /* the user callback registered in XXT_factor() takes grid_data (not the
     mv_info wrapper) as its context; the cast merely satisfies the stored
     function pointer type */
  PetscCall(A->matvec((mv_info *)A->grid_data, v, u));
  PetscFunctionReturn(PETSC_SUCCESS);
}
789