
/*************************************xyt.c************************************
Module Name: xyt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL  60439              |Chicago, IL  60637              |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
**************************************xyt.c***********************************/
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT 1
#define BOTH  0

typedef struct xyt_solver_info {
  PetscInt      n, m, n_global, m_global;        /* local and global row/column dimensions */
  PetscInt      nnz, max_nnz, msg_buf_sz;        /* factor fill and message buffer size */
  PetscInt     *nsep, *lnsep, *fo, nfo, *stages; /* separator sizes, firing order, comm stages */
  PetscInt     *xcol_sz, *xcol_indices;          /* X stored sparsely by columns */
  PetscScalar **xcol_vals, *x, *solve_uu, *solve_w;
  PetscInt     *ycol_sz, *ycol_indices;          /* Y stored sparsely by columns */
  PetscScalar **ycol_vals, *y;
  PetscInt      nsolves;                         /* stats counters reported by XYT_stats() */
  PetscScalar   tot_solve_time;
} xyt_info;

typedef struct matvec_info {
  PetscInt     n, m, n_global, m_global;
  PetscInt    *local2global;
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info *, PetscScalar *, PetscScalar *);
  void *grid_data;
} mv_info;

struct xyt_CDT {
  PetscInt  id;
  PetscInt  ns;
  PetscInt  level;
  xyt_info *info;
  mv_info  *mvi;
};

static PetscInt n_xyt         = 0;
static PetscInt n_xyt_handles = 0;

/* prototypes */
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xyt_ADT xyt_handle);
static PetscErrorCode det_separators(xyt_ADT xyt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscErrorCode xyt_generate(xyt_ADT xyt_handle);
static PetscErrorCode do_xyt_factor(xyt_ADT xyt_handle);
static mv_info       *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data);

/**************************************xyt.c***********************************/
xyt_ADT XYT_new(void) {
  xyt_ADT xyt_handle;

  /* rolling count on n_xyt ... potential overflow problem here */
  n_xyt_handles++;
  xyt_handle       = (xyt_ADT)malloc(sizeof(struct xyt_CDT));
  xyt_handle->id   = ++n_xyt;
  xyt_handle->info = NULL;
  xyt_handle->mvi  = NULL;

  return (xyt_handle);
}
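
/* A minimal usage sketch (illustrative only; "my_matvec" and "my_grid_data"
   are hypothetical caller-side names):

     xyt_ADT handle = XYT_new();
     XYT_factor(handle, local2global, n, m, my_matvec, my_grid_data);
     XYT_solve(handle, x, b); // x <- approx. A^{-1}.b; pass b=NULL if the rhs is already in x
     XYT_free(handle);
*/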

/**************************************xyt.c***********************************/
PetscErrorCode XYT_factor(xyt_ADT   xyt_handle,                                           /* prev. allocated xyt  handle */
                          PetscInt *local2global,                                         /* global column mapping       */
                          PetscInt  n,                                                    /* local num rows              */
                          PetscInt  m,                                                    /* local num cols              */
                          PetscErrorCode (*matvec)(void *, PetscScalar *, PetscScalar *), /* b_loc=A_local.x_loc         */
                          void *grid_data)                                                /* grid data for matvec        */
{
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));

  /* only 2^k for now and all nodes participating */
  PetscCheck((1 << (xyt_handle->level = PCTFS_i_log2_num_nodes)) == PCTFS_num_nodes, PETSC_COMM_SELF, PETSC_ERR_PLIB, "only 2^k for now and MPI_COMM_WORLD!!! %" PetscInt_FMT " != %" PetscInt_FMT, (PetscInt)(1 << PCTFS_i_log2_num_nodes), PCTFS_num_nodes);

  /* space for X info */
  xyt_handle->info = (xyt_info *)malloc(sizeof(xyt_info));

  /* set up matvec handles */
  xyt_handle->mvi = set_mvi(local2global, n, m, (PetscErrorCode(*)(mv_info *, PetscScalar *, PetscScalar *))matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xyt_handle->ns = 0;

  /* determine separators and generate firing order - NB xyt info set here */
  PetscCall(det_separators(xyt_handle));

  return (do_xyt_factor(xyt_handle));
}

/**************************************xyt.c***********************************/
PetscErrorCode XYT_solve(xyt_ADT xyt_handle, PetscScalar *x, PetscScalar *b) {
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));

  /* if a separate rhs was given, copy b into x; the solve then works in place on x */
  if (b) PCTFS_rvec_copy(x, b, xyt_handle->mvi->n);
  return do_xyt_solve(xyt_handle, x);
}

/**************************************xyt.c***********************************/
PetscErrorCode XYT_free(xyt_ADT xyt_handle) {
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));
  n_xyt_handles--;

  free(xyt_handle->info->nsep);
  free(xyt_handle->info->lnsep);
  free(xyt_handle->info->fo);
  free(xyt_handle->info->stages);
  free(xyt_handle->info->solve_uu);
  free(xyt_handle->info->solve_w);
  free(xyt_handle->info->x);
  free(xyt_handle->info->xcol_vals);
  free(xyt_handle->info->xcol_sz);
  free(xyt_handle->info->xcol_indices);
  free(xyt_handle->info->y);
  free(xyt_handle->info->ycol_vals);
  free(xyt_handle->info->ycol_sz);
  free(xyt_handle->info->ycol_indices);
  free(xyt_handle->info);
  free(xyt_handle->mvi->local2global);
  PCTFS_gs_free(xyt_handle->mvi->PCTFS_gs_handle);
  free(xyt_handle->mvi);
  free(xyt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  return (0);
}

/**************************************xyt.c***********************************/
PetscErrorCode XYT_stats(xyt_ADT xyt_handle) {
  PetscInt    op[]  = {NON_UNIFORM, GL_MIN, GL_MAX, GL_ADD, GL_MIN, GL_MAX, GL_ADD, GL_MIN, GL_MAX, GL_ADD};
  PetscInt    fop[] = {NON_UNIFORM, GL_MIN, GL_MAX, GL_ADD};
  PetscInt    vals[9], work[9];
  PetscScalar fvals[3], fwork[3];

  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));

  /* if factorization not done there are no stats */
  if (!xyt_handle->info || !xyt_handle->mvi) {
    if (!PCTFS_my_id) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "XYT_stats() :: no stats available!\n"));
    PetscFunctionReturn(1);
  }

  vals[0] = vals[1] = vals[2] = xyt_handle->info->nnz;
  vals[3] = vals[4] = vals[5] = xyt_handle->mvi->n;
  vals[6] = vals[7] = vals[8] = xyt_handle->info->msg_buf_sz;
  PetscCall(PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op));

  /* guard against division by zero when no solves have been timed */
  fvals[0] = fvals[1] = fvals[2] = xyt_handle->info->nsolves ? xyt_handle->info->tot_solve_time / xyt_handle->info->nsolves : 0.0;
  PetscCall(PCTFS_grop(fvals, fwork, PETSC_STATIC_ARRAY_LENGTH(fop) - 1, fop));

  if (!PCTFS_my_id) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: min   xyt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[0]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: max   xyt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[1]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: avg   xyt_nnz=%g\n", PCTFS_my_id, (double)(1.0 * vals[2] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: tot   xyt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[2]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: xyt   C(2d)  =%g\n", PCTFS_my_id, (double)(vals[2] / (PetscPowReal(1.0 * vals[5], 1.5)))));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: xyt   C(3d)  =%g\n", PCTFS_my_id, (double)(vals[2] / (PetscPowReal(1.0 * vals[5], 1.6667)))));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: min   xyt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[3]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: max   xyt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[4]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: avg   xyt_n  =%g\n", PCTFS_my_id, (double)(1.0 * vals[5] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: tot   xyt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[5]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: min   xyt_buf=%" PetscInt_FMT "\n", PCTFS_my_id, vals[6]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: max   xyt_buf=%" PetscInt_FMT "\n", PCTFS_my_id, vals[7]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: avg   xyt_buf=%g\n", PCTFS_my_id, (double)(1.0 * vals[8] / PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: min   xyt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[0])));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: max   xyt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[1])));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%d :: avg   xyt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[2] / PCTFS_num_nodes)));
  }
  PetscFunctionReturn(0);
}

/*************************************xyt.c************************************

Description: get A_local, the local portion of the global coarse matrix,
a row-distributed n x m matrix with n < m.
   o my_ml holds the address of the ML struct associated with A_local and the coarse grid
   o local2global holds the global number of column i (i=0,...,m-1)
   o local2global holds the global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed using
   PCTFS_gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
**************************************xyt.c***********************************/
static PetscErrorCode do_xyt_factor(xyt_ADT xyt_handle) {
  return xyt_generate(xyt_handle);
}
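
/* A sketch of a conforming local matvec callback (hypothetical names; the real
   callback is supplied by the caller, e.g. my_ml->Amat[grid_tag].matvec->external):

     PetscErrorCode my_matvec(void *grid_data, PetscScalar *in, PetscScalar *out)
     {
       // compute out = A_local . in on the local grid described by grid_data
       return 0;
     }
*/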

/**************************************xyt.c***********************************/
static PetscErrorCode xyt_generate(xyt_ADT xyt_handle) {
  PetscInt      i, j, k, idx;
  PetscInt      dim, col;
  PetscScalar  *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt     *segs;
  PetscInt      op[] = {GL_ADD, 0};
  PetscInt      off, len;
  PetscScalar  *x_ptr, *y_ptr;
  PetscInt     *iptr, flag;
  PetscInt      start = 0, end, work;
  PetscInt      op2[] = {GL_MIN, 0};
  PCTFS_gs_ADT  PCTFS_gs_handle;
  PetscInt     *nsep, *lnsep, *fo;
  PetscInt      a_n            = xyt_handle->mvi->n;
  PetscInt      a_m            = xyt_handle->mvi->m;
  PetscInt     *a_local2global = xyt_handle->mvi->local2global;
  PetscInt      level;
  PetscInt      n, m;
  PetscInt     *xcol_sz, *xcol_indices, *stages;
  PetscScalar **xcol_vals, *x;
  PetscInt     *ycol_sz, *ycol_indices;
  PetscScalar **ycol_vals, *y;
  PetscInt      n_global;
  PetscInt      xt_nnz = 0, xt_max_nnz = 0;
  PetscInt      yt_nnz = 0, yt_max_nnz = 0;
  PetscBLASInt  i1  = 1, dlen;
  PetscScalar   dm1 = -1.0;

  n               = xyt_handle->mvi->n;
  nsep            = xyt_handle->info->nsep;
  lnsep           = xyt_handle->info->lnsep;
  fo              = xyt_handle->info->fo;
  end             = lnsep[0];
  level           = xyt_handle->level;
  PCTFS_gs_handle = xyt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i = 0, j = 0; i <= level; i++) j += nsep[i];

  m = j - xyt_handle->ns;
  if (m != j) { PetscCall(PetscPrintf(PETSC_COMM_WORLD, "xyt_generate() :: null space exists %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", m, j, xyt_handle->ns)); }

  PetscCall(PetscInfo(0, "xyt_generate() :: X(%" PetscInt_FMT ",%" PetscInt_FMT ")\n", n, m));

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  xcol_sz      = (PetscInt *)malloc(m * sizeof(PetscInt));
  xcol_indices = (PetscInt *)malloc((2 * m + 1) * sizeof(PetscInt));
  xcol_vals    = (PetscScalar **)malloc(m * sizeof(PetscScalar *));
  for (i = j = 0; i < m; i++, j += 2) {
    xcol_indices[j] = xcol_indices[j + 1] = xcol_sz[i] = -1;
    xcol_vals[i]                                       = NULL;
  }
  xcol_indices[j] = -1;

  /* get and initialize storage for y local         */
  /* note that y local is nxm and stored by columns */
  ycol_sz      = (PetscInt *)malloc(m * sizeof(PetscInt));
  ycol_indices = (PetscInt *)malloc((2 * m + 1) * sizeof(PetscInt));
  ycol_vals    = (PetscScalar **)malloc(m * sizeof(PetscScalar *));
  for (i = j = 0; i < m; i++, j += 2) {
    ycol_indices[j] = ycol_indices[j + 1] = ycol_sz[i] = -1;
    ycol_vals[i]                                       = NULL;
  }
  ycol_indices[j] = -1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  segs   = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  PCTFS_ivec_zero(stages, level + 1);
  PCTFS_ivec_copy(segs, nsep, level + 1);
  for (i = 0; i < level; i++) segs[i + 1] += segs[i];
  stages[0] = segs[0];
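
  /* illustrative example: with level=2 and nsep[]={4,2,1}, segs[] becomes the
     prefix sums {4,6,7}; in the firing loop below, reaching i==segs[dim] moves
     the elimination up to the next sub-hypercube dimension */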

  /* temporary vectors  */
  u  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  z  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  v  = (PetscScalar *)malloc(a_m * sizeof(PetscScalar));
  uu = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  w  = (PetscScalar *)malloc(m * sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i = 1, j = 0; i <= level; i++) j += nsep[i];

  /* storage for sparse x values */
  n_global   = xyt_handle->info->n_global;
  xt_max_nnz = yt_max_nnz = (PetscInt)(2.5 * PetscPowReal(1.0 * n_global, 1.6667) + j * n / 2) / PCTFS_num_nodes;
  x                       = (PetscScalar *)malloc(xt_max_nnz * sizeof(PetscScalar));
  y                       = (PetscScalar *)malloc(yt_max_nnz * sizeof(PetscScalar));
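  /* reading off the estimate above: nnz(X) and nnz(Y) are each guessed as
     2.5*n_global^(5/3) plus j*n/2 entries for dofs replicated across
     separators, divided evenly over the PCTFS_num_nodes ranks; if the guess
     is exceeded, the buffers are doubled on demand below */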

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim = i = j = 0; i < m; i++) {
    /* time to move to the next level? */
    while (i == segs[dim]) {
      PetscCheck(dim != level, PETSC_COMM_SELF, PETSC_ERR_PLIB, "dim about to exceed level");
      stages[dim++] = i;
      end += lnsep[dim];
    }
    stages[dim] = i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    col = (start < end) ? fo[start] : INT_MAX;
    PCTFS_giop_hc(&col, &work, 1, op2, dim);

    /* shouldn't need this */
    if (col == INT_MAX) {
      PetscCall(PetscInfo(0, "hey ... col==INT_MAX??\n"));
      continue;
    }

    /* do I own it? I should */
    PCTFS_rvec_zero(v, a_m);
    if (col == fo[start]) {
      start++;
      idx = PCTFS_ivec_linear_search(col, a_local2global, a_n);
      if (idx != -1) {
        v[idx] = 1.0;
        j++;
      } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "NOT FOUND!");
    } else {
      idx = PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idx != -1) v[idx] = 1.0;
    }

    /* perform u = A.v_l */
    PCTFS_rvec_zero(u, n);
    PetscCall(do_matvec(xyt_handle->mvi, v, u));

    /* uu =  X^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XYT_solve call ? */
    PCTFS_rvec_zero(uu, m);
    y_ptr = y;
    iptr  = ycol_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASdot", uu[k] = BLASdot_(&dlen, u + off, &i1, y_ptr, &i1));
      y_ptr += len;
    }

    /* uu = X^T.u_l (comm portion) */
    PetscCall(PCTFS_ssgl_radd(uu, w, dim, stages));

    /* z = X.uu */
    PCTFS_rvec_zero(z, n);
    x_ptr = x;
    iptr  = xcol_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &uu[k], x_ptr, &i1, z + off, &i1));
      x_ptr += len;
    }

    /* compute v_l = v_l - z */
    PCTFS_rvec_zero(v + a_n, a_m - a_n);
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &dm1, z, &i1, v, &i1));

    /* compute u_l = A.v_l */
    if (a_n != a_m) PCTFS_gs_gop_hc(PCTFS_gs_handle, v, "+\0", dim);
    PCTFS_rvec_zero(u, n);
    PetscCall(do_matvec(xyt_handle->mvi, v, u));

    /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - local portion */
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASdot", alpha = BLASdot_(&dlen, u, &i1, u, &i1));
    /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - comm portion */
    PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim);

    alpha = (PetscScalar)PetscSqrtReal((PetscReal)alpha);

    /* check for small alpha                             */
    /* LATER use this to detect and determine null space */
    PetscCheck(PetscAbsScalar(alpha) >= 1.0e-14, PETSC_COMM_SELF, PETSC_ERR_PLIB, "bad alpha! %g", (double)PetscAbsScalar(alpha));

    /* compute v_l = v_l/sqrt(alpha) */
    PCTFS_rvec_scale(v, 1.0 / alpha, n);
    PCTFS_rvec_scale(u, 1.0 / alpha, n);

    /* add newly generated column, v_l, to X */
    flag = 1;
    off = len = 0;
    for (k = 0; k < n; k++) {
      if (v[k] != 0.0) {
        len = k;
        if (flag) {
          off  = k;
          flag = 0;
        }
      }
    }

    len -= (off - 1);

    if (len > 0) {
      if ((xt_nnz + len) > xt_max_nnz) {
        PetscCall(PetscInfo(0, "increasing space for X by 2x!\n"));
        xt_max_nnz *= 2;
        x_ptr = (PetscScalar *)malloc(xt_max_nnz * sizeof(PetscScalar));
        PCTFS_rvec_copy(x_ptr, x, xt_nnz);
        free(x);
        x = x_ptr;
        x_ptr += xt_nnz;
      }
      xt_nnz += len;
      PCTFS_rvec_copy(x_ptr, v + off, len);

      xcol_indices[2 * i] = off;
      xcol_sz[i] = xcol_indices[2 * i + 1] = len;
      xcol_vals[i]                         = x_ptr;
    } else {
      xcol_indices[2 * i] = 0;
      xcol_sz[i] = xcol_indices[2 * i + 1] = 0;
      xcol_vals[i]                         = x_ptr;
    }

    /* add newly generated column, u_l, to Y */
    flag = 1;
    off = len = 0;
    for (k = 0; k < n; k++) {
      if (u[k] != 0.0) {
        len = k;
        if (flag) {
          off  = k;
          flag = 0;
        }
      }
    }

    len -= (off - 1);

    if (len > 0) {
      if ((yt_nnz + len) > yt_max_nnz) {
        PetscCall(PetscInfo(0, "increasing space for Y by 2x!\n"));
        yt_max_nnz *= 2;
        y_ptr = (PetscScalar *)malloc(yt_max_nnz * sizeof(PetscScalar));
        PCTFS_rvec_copy(y_ptr, y, yt_nnz);
        free(y);
        y = y_ptr;
        y_ptr += yt_nnz;
      }
      yt_nnz += len;
      PCTFS_rvec_copy(y_ptr, u + off, len);

      ycol_indices[2 * i] = off;
      ycol_sz[i] = ycol_indices[2 * i + 1] = len;
      ycol_vals[i]                         = y_ptr;
    } else {
      ycol_indices[2 * i] = 0;
      ycol_sz[i] = ycol_indices[2 * i + 1] = 0;
      ycol_vals[i]                         = y_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim != level) {
    stages[dim++] = i;
    PetscCall(PetscInfo(0, "disconnected!!! dim(%" PetscInt_FMT ")!=level(%" PetscInt_FMT ")\n", dim, level));
  }
  stages[dim] = i;

  xyt_handle->info->n            = xyt_handle->mvi->n;
  xyt_handle->info->m            = m;
  xyt_handle->info->nnz          = xt_nnz + yt_nnz;
  xyt_handle->info->max_nnz      = xt_max_nnz + yt_max_nnz;
  xyt_handle->info->msg_buf_sz   = stages[level] - stages[0];
  xyt_handle->info->solve_uu     = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xyt_handle->info->solve_w      = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xyt_handle->info->x            = x;
  xyt_handle->info->xcol_vals    = xcol_vals;
  xyt_handle->info->xcol_sz      = xcol_sz;
  xyt_handle->info->xcol_indices = xcol_indices;
  xyt_handle->info->stages       = stages;
  xyt_handle->info->y            = y;
  xyt_handle->info->ycol_vals    = ycol_vals;
  xyt_handle->info->ycol_sz      = ycol_sz;
  xyt_handle->info->ycol_indices = ycol_indices;
  /* initialize the stats counters read by XYT_stats() */
  xyt_handle->info->nsolves        = 0;
  xyt_handle->info->tot_solve_time = 0.0;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return (0);
}
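
/* Note (exact arithmetic, full-rank A): the loop above builds Y = A.X column by
   column with Y^T.Y = I, hence X.Y^T = A^{-1}.Y.Y^T = A^{-1}; do_xyt_solve()
   below applies precisely this identity as uc <- X.(Y^T.uc). */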

/**************************************xyt.c***********************************/
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *uc) {
  PetscInt     off, len, *iptr;
  PetscInt     level        = xyt_handle->level;
  PetscInt     n            = xyt_handle->info->n;
  PetscInt     m            = xyt_handle->info->m;
  PetscInt    *stages       = xyt_handle->info->stages;
  PetscInt    *xcol_indices = xyt_handle->info->xcol_indices;
  PetscInt    *ycol_indices = xyt_handle->info->ycol_indices;
  PetscScalar *x_ptr, *y_ptr, *uu_ptr;
  PetscScalar *solve_uu = xyt_handle->info->solve_uu;
  PetscScalar *solve_w  = xyt_handle->info->solve_w;
  PetscScalar *x        = xyt_handle->info->x;
  PetscScalar *y        = xyt_handle->info->y;
  PetscBLASInt i1       = 1, dlen;

  PetscFunctionBegin;
  uu_ptr = solve_uu;
  PCTFS_rvec_zero(uu_ptr, m);

  /* x  = X.Y^T.b */
  /* uu = Y^T.b */
  for (y_ptr = y, iptr = ycol_indices; *iptr != -1; y_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASdot", *uu_ptr++ = BLASdot_(&dlen, uc + off, &i1, y_ptr, &i1));
  }

  /* communication of beta */
  uu_ptr = solve_uu;
  if (level) PetscCall(PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages));
  PCTFS_rvec_zero(uc, n);

  /* x = X.uu */
  for (x_ptr = x, iptr = xcol_indices; *iptr != -1; x_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, uu_ptr++, x_ptr, &i1, uc + off, &i1));
  }
  PetscFunctionReturn(0);
}

/**************************************xyt.c***********************************/
static PetscErrorCode check_handle(xyt_ADT xyt_handle) {
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM, GL_MIN, GL_MAX};

  PetscFunctionBegin;
  PetscCheck(xyt_handle, PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: NULL %p", xyt_handle);

  vals[0] = vals[1] = xyt_handle->id;
  PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op);
  PetscCheck(vals[0] == vals[1] && xyt_handle->id > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: id mismatch min/max %" PetscInt_FMT "/%" PetscInt_FMT " %" PetscInt_FMT, vals[0], vals[1], xyt_handle->id);
  PetscFunctionReturn(0);
}

/**************************************xyt.c***********************************/
static PetscErrorCode det_separators(xyt_ADT xyt_handle) {
  PetscInt     i, ct, id;
  PetscInt     mask, edge, *iptr;
  PetscInt    *dir, *used;
  PetscInt     sum[4], w[4];
  PetscScalar  rsum[4], rw[4];
  PetscInt     op[] = {GL_ADD, 0};
  PetscScalar *lhs, *rhs;
  PetscInt    *nsep, *lnsep, *fo, nfo = 0;
  PCTFS_gs_ADT PCTFS_gs_handle = xyt_handle->mvi->PCTFS_gs_handle;
  PetscInt    *local2global    = xyt_handle->mvi->local2global;
  PetscInt     n               = xyt_handle->mvi->n;
  PetscInt     m               = xyt_handle->mvi->m;
  PetscInt     level           = xyt_handle->level;
  PetscInt     shared          = 0;

  PetscFunctionBegin;
  dir   = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  nsep  = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  lnsep = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  fo    = (PetscInt *)malloc(sizeof(PetscInt) * (n + 1));
  used  = (PetscInt *)malloc(sizeof(PetscInt) * n);

  PCTFS_ivec_zero(dir, level + 1);
  PCTFS_ivec_zero(nsep, level + 1);
  PCTFS_ivec_zero(lnsep, level + 1);
  PCTFS_ivec_set(fo, -1, n + 1);
  PCTFS_ivec_zero(used, n);

  lhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);
  rhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);

  /* determine the # of unique dof */
  PCTFS_rvec_zero(lhs, m);
  PCTFS_rvec_set(lhs, 1.0, n);
  PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", level);
  PetscCall(PetscInfo(0, "done first PCTFS_gs_gop_hc\n"));
  PCTFS_rvec_zero(rsum, 2);
  for (i = 0; i < n; i++) {
    if (lhs[i] != 0.0) {
      rsum[0] += 1.0 / lhs[i];
      rsum[1] += lhs[i];
    }
    if (lhs[i] != 1.0) shared = 1;
  }
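
  /* after the gather-scatter each lhs[i] is the multiplicity c_i of dof i (the
     number of ranks sharing it), so accumulating 1.0/c_i locally and summing
     over ranks counts every global dof exactly once; this is how n_global is
     recovered below (the +0.1 guards the float-to-int truncation) */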

  PCTFS_grop_hc(rsum, rw, 2, op, level);
  rsum[0] += 0.1;
  rsum[1] += 0.1;

  xyt_handle->info->n_global = xyt_handle->info->m_global = (PetscInt)rsum[0];
  xyt_handle->mvi->n_global = xyt_handle->mvi->m_global = (PetscInt)rsum[0];

  /* determine separator sets top down */
  if (shared) {
    /* solution is to do as in the symmetric shared case but then */
    /* pick the sub-hc with the most free dofs and do a mat-vec   */
    /* and pick up the responses on the other sub-hc from the     */
    /* initial separator set obtained from the symm. shared case  */
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "shared dof separator determination not ready ... see hmt!!!");
    /* [dead code deleted since it is unlikely to be completed] */
  } else {
    for (iptr = fo + n, id = PCTFS_my_id, mask = PCTFS_num_nodes >> 1, edge = level; edge > 0; edge--, mask >>= 1) {
      /* set rsh of hc, fire, and collect lhs responses */
      (id < mask) ? PCTFS_rvec_zero(lhs, m) : PCTFS_rvec_set(lhs, 1.0, m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id < mask) ? PCTFS_rvec_set(rhs, 1.0, m) : PCTFS_rvec_zero(rhs, m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge);

      /* count number of dofs I own that have signal and not in sep set */
      for (PCTFS_ivec_zero(sum, 4), ct = i = 0; i < n; i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if ((id < mask) && (lhs[i] != 0.0)) sum[0]++;
          /* number of dofs to be marked on rhs hc */
          if ((id >= mask) && (rhs[i] != 0.0)) sum[1]++;
        }
      }

      /* for the non-symmetric case we need separators of width 2 */
      /* so take both sides */
      (id < mask) ? (sum[2] = ct) : (sum[3] = ct);
      PCTFS_giop_hc(sum, w, 4, op, edge);

      ct = 0;
      if (id < mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (i = 0; i < n; i++) {
          if ((!used[i]) && (lhs[i] != 0.0)) {
            ct++;
            nfo++;
            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        /* LSH hc summation of ct should be sum[0] */
      } else {
        /* mark dofs I own that have signal and not in sep set */
        for (i = 0; i < n; i++) {
          if ((!used[i]) && (rhs[i] != 0.0)) {
            ct++;
            nfo++;
            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        /* RSH hc summation of ct should be sum[1] */
      }

      if (ct > 1) PCTFS_ivec_sort(iptr, ct);
      lnsep[edge] = ct;
      nsep[edge]  = sum[0] + sum[1];
      dir[edge]   = BOTH;

      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this?                */

      /* fold rhs hc into lower */
      if (id >= mask) id -= mask;
    }
  }
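
  /* illustrative walk-through: with PCTFS_num_nodes=8 (level=3) the loop above
     first splits ranks {0..3}|{4..7} (edge=3, mask=4), fires an indicator
     vector from each side, and marks the dofs that receive signal across the
     cut as that edge's separator; the rhs half then folds onto the lhs
     (id -= mask) and the bisection recurs with edge=2, then edge=1 */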

  /* level 0 is the on-processor case, so mark the remainder */
  for (ct = i = 0; i < n; i++) {
    if (!used[i]) {
      ct++;
      nfo++;
      *--iptr = local2global[i];
      used[i] = edge;
    }
  }
  if (ct > 1) PCTFS_ivec_sort(iptr, ct);
  lnsep[edge] = ct;
  nsep[edge]  = ct;
  dir[edge]   = BOTH;

  xyt_handle->info->nsep  = nsep;
  xyt_handle->info->lnsep = lnsep;
  xyt_handle->info->fo    = fo;
  xyt_handle->info->nfo   = nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(0);
}

/**************************************xyt.c***********************************/
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data) {
  mv_info *mvi;

  mvi               = (mv_info *)malloc(sizeof(mv_info));
  mvi->n            = n;
  mvi->m            = m;
  mvi->n_global     = -1;
  mvi->m_global     = -1;
  mvi->local2global = (PetscInt *)malloc((m + 1) * sizeof(PetscInt));

  PCTFS_ivec_copy(mvi->local2global, local2global, m);
  mvi->local2global[m] = INT_MAX; /* sentinel terminating the mapping */
  mvi->matvec          = matvec;
  mvi->grid_data       = grid_data;

  /* set xyt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return (mvi);
}

/**************************************xyt.c***********************************/
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u) {
  PetscFunctionBegin;
  /* the stored matvec was cast from a void*-first-argument function in
     XYT_factor(), so grid_data is what it actually expects in the first slot */
  PetscCall(A->matvec((mv_info *)A->grid_data, v, u));
  PetscFunctionReturn(0);
}