/*************************************xyt.c************************************
Module Name: xyt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL  60439              |Chicago, IL  60637              |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
**************************************xyt.c***********************************/
#include <../src/ksp/pc/impls/tfs/tfs.h>
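
/* Typical call sequence (an illustrative sketch only -- my_map, my_matvec,
   grid, n, and m are hypothetical caller-supplied objects, and error checking
   is omitted).  XYT_factor() builds sparse factors X and Y such that
   A^{-1} ~ X.Y^T for the full-rank coarse operator; XYT_solve() then applies
   them to a right-hand side:

     xyt_ADT handle = XYT_new();
     XYT_factor(handle,my_map,n,m,my_matvec,grid);
     XYT_solve(handle,x,b);                        -- x <- X.Y^T.b
     XYT_free(handle);
*/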

#define LEFT  -1
#define RIGHT  1
#define BOTH   0

typedef struct xyt_solver_info {
  PetscInt    n, m, n_global, m_global;
  PetscInt    nnz, max_nnz, msg_buf_sz;
  PetscInt    *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt    *xcol_sz, *xcol_indices;
  PetscScalar **xcol_vals, *x, *solve_uu, *solve_w;
  PetscInt    *ycol_sz, *ycol_indices;
  PetscScalar **ycol_vals, *y;
  PetscInt    nsolves;
  PetscScalar tot_solve_time;
} xyt_info;
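
/* Storage convention for the factors (as built in xyt_generate() and walked in
   do_xyt_solve()): column i of the n x m factor X keeps only its contiguous
   nonzero run -- xcol_vals[i] points at the values, xcol_indices[2*i] is the
   row offset of the first nonzero, and xcol_indices[2*i+1]==xcol_sz[i] is the
   run length; the index list carries a trailing -1 sentinel that terminates
   iteration.  The ycol_* fields hold the factor Y in the same layout. */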

typedef struct matvec_info {
  PetscInt     n, m, n_global, m_global;
  PetscInt     *local2global;
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info*,PetscScalar*,PetscScalar*);
  void *grid_data;
} mv_info;

struct xyt_CDT {
  PetscInt id;
  PetscInt ns;
  PetscInt level;
  xyt_info *info;
  mv_info  *mvi;
};

static PetscInt n_xyt        =0;
static PetscInt n_xyt_handles=0;

/* prototypes */
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xyt_ADT xyt_handle);
static PetscErrorCode det_separators(xyt_ADT xyt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscErrorCode xyt_generate(xyt_ADT xyt_handle);
static PetscErrorCode do_xyt_factor(xyt_ADT xyt_handle);
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info*,PetscScalar*,PetscScalar*), void *grid_data);

/**************************************xyt.c***********************************/
xyt_ADT XYT_new(void)
{
  xyt_ADT xyt_handle;

  /* rolling count on n_xyt ... the counter can overflow if enough handles are created */
  n_xyt_handles++;
  xyt_handle       = (xyt_ADT)malloc(sizeof(struct xyt_CDT));
  xyt_handle->id   = ++n_xyt;
  xyt_handle->info = NULL;
  xyt_handle->mvi  = NULL;

  return(xyt_handle);
}

/**************************************xyt.c***********************************/
PetscErrorCode XYT_factor(xyt_ADT xyt_handle,     /* prev. allocated xyt handle  */
                          PetscInt *local2global, /* global column mapping       */
                          PetscInt n,             /* local num rows              */
                          PetscInt m,             /* local num cols              */
                          PetscErrorCode (*matvec)(void*,PetscScalar*,PetscScalar*), /* b_loc=A_local.x_loc */
                          void *grid_data)        /* grid data for matvec        */
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));

  /* only 2^k for now and all nodes participating */
  PetscCheck((1<<(xyt_handle->level=PCTFS_i_log2_num_nodes))==PCTFS_num_nodes,PETSC_COMM_SELF,PETSC_ERR_PLIB,"only 2^k for now and MPI_COMM_WORLD!!! %d != %d",1<<PCTFS_i_log2_num_nodes,PCTFS_num_nodes);

  /* space for X info */
  xyt_handle->info = (xyt_info*)malloc(sizeof(xyt_info));

  /* set up matvec handles */
  xyt_handle->mvi = set_mvi(local2global, n, m, (PetscErrorCode (*)(mv_info*,PetscScalar*,PetscScalar*))matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xyt_handle->ns=0;

  /* determine separators and generate firing order - NB xyt info set here */
  PetscCall(det_separators(xyt_handle));

  PetscCall(do_xyt_factor(xyt_handle));
  PetscFunctionReturn(0);
}

/**************************************xyt.c***********************************/
PetscErrorCode XYT_solve(xyt_ADT xyt_handle, PetscScalar *x, PetscScalar *b)
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));

  /* need to copy b into x? */
  if (b) PCTFS_rvec_copy(x,b,xyt_handle->mvi->n);
  PetscCall(do_xyt_solve(xyt_handle,x));
  PetscFunctionReturn(0);
}

/**************************************xyt.c***********************************/
PetscErrorCode XYT_free(xyt_ADT xyt_handle)
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));
  n_xyt_handles--;

  free(xyt_handle->info->nsep);
  free(xyt_handle->info->lnsep);
  free(xyt_handle->info->fo);
  free(xyt_handle->info->stages);
  free(xyt_handle->info->solve_uu);
  free(xyt_handle->info->solve_w);
  free(xyt_handle->info->x);
  free(xyt_handle->info->xcol_vals);
  free(xyt_handle->info->xcol_sz);
  free(xyt_handle->info->xcol_indices);
  free(xyt_handle->info->y);
  free(xyt_handle->info->ycol_vals);
  free(xyt_handle->info->ycol_sz);
  free(xyt_handle->info->ycol_indices);
  free(xyt_handle->info);
  free(xyt_handle->mvi->local2global);
  PCTFS_gs_free(xyt_handle->mvi->PCTFS_gs_handle);
  free(xyt_handle->mvi);
  free(xyt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  PetscFunctionReturn(0);
}

/**************************************xyt.c***********************************/
PetscErrorCode XYT_stats(xyt_ADT xyt_handle)
{
  PetscInt    op[]  = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD};
  PetscInt    fop[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD};
  PetscInt    vals[9],  work[9];
  PetscScalar fvals[3], fwork[3];

  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xyt_handle));

  /* if factorization not done there are no stats */
  if (!xyt_handle->info||!xyt_handle->mvi) {
    if (!PCTFS_my_id) PetscCall(PetscPrintf(PETSC_COMM_WORLD,"XYT_stats() :: no stats available!\n"));
    PetscFunctionReturn(1);
  }

  vals[0]=vals[1]=vals[2]=xyt_handle->info->nnz;
  vals[3]=vals[4]=vals[5]=xyt_handle->mvi->n;
  vals[6]=vals[7]=vals[8]=xyt_handle->info->msg_buf_sz;
  PetscCall(PCTFS_giop(vals,work,PETSC_STATIC_ARRAY_LENGTH(op)-1,op));

  fvals[0]=fvals[1]=fvals[2]=xyt_handle->info->tot_solve_time/xyt_handle->info->nsolves++;
  PetscCall(PCTFS_grop(fvals,fwork,PETSC_STATIC_ARRAY_LENGTH(fop)-1,fop));

  if (!PCTFS_my_id) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%d :: min   xyt_nnz=%" PetscInt_FMT "\n",PCTFS_my_id,vals[0]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%d :: max   xyt_nnz=%" PetscInt_FMT "\n",PCTFS_my_id,vals[1]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%d :: avg   xyt_nnz=%g\n",PCTFS_my_id,(double)(1.0*vals[2]/PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%d :: tot   xyt_nnz=%" PetscInt_FMT "\n",PCTFS_my_id,vals[2]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%d :: xyt   C(2d)  =%g\n",PCTFS_my_id,(double)(vals[2]/(PetscPowReal(1.0*vals[5],1.5)))));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%d :: xyt   C(3d)  =%g\n",PCTFS_my_id,(double)(vals[2]/(PetscPowReal(1.0*vals[5],1.6667)))));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%d :: min   xyt_n  =%" PetscInt_FMT "\n",PCTFS_my_id,vals[3]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%d :: max   xyt_n  =%" PetscInt_FMT "\n",PCTFS_my_id,vals[4]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%d :: avg   xyt_n  =%g\n",PCTFS_my_id,(double)(1.0*vals[5]/PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%d :: tot   xyt_n  =%" PetscInt_FMT "\n",PCTFS_my_id,vals[5]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%d :: min   xyt_buf=%" PetscInt_FMT "\n",PCTFS_my_id,vals[6]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%d :: max   xyt_buf=%" PetscInt_FMT "\n",PCTFS_my_id,vals[7]));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%d :: avg   xyt_buf=%g\n",PCTFS_my_id,(double)(1.0*vals[8]/PCTFS_num_nodes)));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%d :: min   xyt_slv=%g\n",PCTFS_my_id,(double)PetscRealPart(fvals[0])));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%d :: max   xyt_slv=%g\n",PCTFS_my_id,(double)PetscRealPart(fvals[1])));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%d :: avg   xyt_slv=%g\n",PCTFS_my_id,(double)PetscRealPart(fvals[2]/PCTFS_num_nodes)));
  }
  PetscFunctionReturn(0);
}

/*************************************xyt.c************************************

Description: get A_local, the local portion of the global coarse matrix, a
row-distributed nxm matrix w/ n<m.
   o my_ml holds the address of the ML struct associated w/ A_local and the coarse grid
   o local2global holds the global number of column i (i=0,...,m-1)
   o local2global holds the global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed using
     PCTFS_gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
**************************************xyt.c***********************************/
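
/* A matvec callback of the shape XYT_factor() expects might look as follows
   (an illustrative sketch only -- my_grid_data and my_local_matvec are
   hypothetical names; the body must compute out = A_local.in):

     static PetscErrorCode my_local_matvec(void *data, PetscScalar *in, PetscScalar *out)
     {
       struct my_grid_data *grid = (struct my_grid_data*)data;
       ... apply the local coarse operator: out = A_local.in ...
       return 0;
     }
*/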
static PetscErrorCode do_xyt_factor(xyt_ADT xyt_handle)
{
  return xyt_generate(xyt_handle);
}

/**************************************xyt.c***********************************/
static PetscErrorCode xyt_generate(xyt_ADT xyt_handle)
{
  PetscInt       i,j,k,idx;
  PetscInt       dim, col;
  PetscScalar    *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt       *segs;
  PetscInt       op[] = {GL_ADD,0};
  PetscInt       off, len;
  PetscScalar    *x_ptr, *y_ptr;
  PetscInt       *iptr, flag;
  PetscInt       start =0, end, work;
  PetscInt       op2[] = {GL_MIN,0};
  PCTFS_gs_ADT   PCTFS_gs_handle;
  PetscInt       *nsep, *lnsep, *fo;
  PetscInt       a_n            =xyt_handle->mvi->n;
  PetscInt       a_m            =xyt_handle->mvi->m;
  PetscInt       *a_local2global=xyt_handle->mvi->local2global;
  PetscInt       level;
  PetscInt       n, m;
  PetscInt       *xcol_sz, *xcol_indices, *stages;
  PetscScalar    **xcol_vals, *x;
  PetscInt       *ycol_sz, *ycol_indices;
  PetscScalar    **ycol_vals, *y;
  PetscInt       n_global;
  PetscInt       xt_nnz       =0, xt_max_nnz=0;
  PetscInt       yt_nnz       =0, yt_max_nnz=0;
  PetscBLASInt   i1           = 1,dlen;
  PetscScalar    dm1          = -1.0;

  PetscFunctionBegin;
  n              =xyt_handle->mvi->n;
  nsep           =xyt_handle->info->nsep;
  lnsep          =xyt_handle->info->lnsep;
  fo             =xyt_handle->info->fo;
  end            =lnsep[0];
  level          =xyt_handle->level;
  PCTFS_gs_handle=xyt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i=0, j=0; i<=level; i++) j+=nsep[i];

  m = j-xyt_handle->ns;
  if (m!=j) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"xyt_generate() :: null space exists %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n",m,j,xyt_handle->ns));
  }

  PetscCall(PetscInfo(0,"xyt_generate() :: X(%" PetscInt_FMT ",%" PetscInt_FMT ")\n",n,m));

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  xcol_sz      = (PetscInt*) malloc(m*sizeof(PetscInt));
  xcol_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  xcol_vals    = (PetscScalar**) malloc(m*sizeof(PetscScalar*));
  for (i=j=0; i<m; i++, j+=2) {
    xcol_indices[j]=xcol_indices[j+1]=xcol_sz[i]=-1;
    xcol_vals[i]   = NULL;
  }
  xcol_indices[j]=-1;

  /* get and initialize storage for y local         */
  /* note that y local is nxm and stored by columns */
  ycol_sz      = (PetscInt*) malloc(m*sizeof(PetscInt));
  ycol_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  ycol_vals    = (PetscScalar**) malloc(m*sizeof(PetscScalar*));
  for (i=j=0; i<m; i++, j+=2) {
    ycol_indices[j]=ycol_indices[j+1]=ycol_sz[i]=-1;
    ycol_vals[i]   = NULL;
  }
  ycol_indices[j]=-1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  segs   = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  PCTFS_ivec_zero(stages,level+1);
  PCTFS_ivec_copy(segs,nsep,level+1);
  for (i=0; i<level; i++) segs[i+1] += segs[i];
  stages[0] = segs[0];
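  /* after the prefix sum, segs[dim] is the running count of columns completed
     through level dim (e.g. nsep={4,2,1} gives segs={4,6,7}), so the
     i==segs[dim] test in the firing loop below detects level transitions */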

  /* temporary vectors  */
  u  = (PetscScalar*) malloc(n*sizeof(PetscScalar));
  z  = (PetscScalar*) malloc(n*sizeof(PetscScalar));
  v  = (PetscScalar*) malloc(a_m*sizeof(PetscScalar));
  uu = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  w  = (PetscScalar*) malloc(m*sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i=1, j=0; i<=level; i++) j+=nsep[i];

  /* storage for sparse x values */
  n_global   = xyt_handle->info->n_global;
  xt_max_nnz = yt_max_nnz = (PetscInt)(2.5*PetscPowReal(1.0*n_global,1.6667) + j*n/2)/PCTFS_num_nodes;
  x          = (PetscScalar*) malloc(xt_max_nnz*sizeof(PetscScalar));
  y          = (PetscScalar*) malloc(yt_max_nnz*sizeof(PetscScalar));

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
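  /* Each pass fires one separator column: build the canonical vector v for the
     chosen global column, form u = A.v, subtract off the previously computed
     columns (uu = Y^T.u locally via BLASdot_, summed across the active
     sub-hypercube by PCTFS_ssgl_radd(), then v -= X.uu), re-apply A, normalize
     v and u = A.v by sqrt(u^T.u), and append their contiguous nonzero runs as
     the new columns of X and Y. */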
  for (dim=i=j=0; i<m; i++) {
    /* time to move to the next level? */
    while (i==segs[dim]) {
      PetscCheck(dim!=level,PETSC_COMM_SELF,PETSC_ERR_PLIB,"dim about to exceed level");
      stages[dim++]=i;
      end         +=lnsep[dim];
    }
    stages[dim]=i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    (start<end) ? (col=fo[start]) : (col=INT_MAX);
    PCTFS_giop_hc(&col,&work,1,op2,dim);

    /* shouldn't need this */
    if (col==INT_MAX) {
      PetscCall(PetscInfo(0,"hey ... col==INT_MAX??\n"));
      continue;
    }

    /* do I own it? I should */
    PCTFS_rvec_zero(v,a_m);
    if (col==fo[start]) {
      start++;
      idx=PCTFS_ivec_linear_search(col, a_local2global, a_n);
      if (idx!=-1) {
        v[idx] = 1.0;
        j++;
      } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"NOT FOUND!");
    } else {
      idx=PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idx!=-1) v[idx] = 1.0;
    }

    /* perform u = A.v_l */
    PCTFS_rvec_zero(u,n);
    PetscCall(do_matvec(xyt_handle->mvi,v,u));

    /* uu = Y^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XYT_solve call ? */
    PCTFS_rvec_zero(uu,m);
    y_ptr=y;
    iptr = ycol_indices;
    for (k=0; k<i; k++) {
      off   = *iptr++;
      len   = *iptr++;
      PetscCall(PetscBLASIntCast(len,&dlen));
      PetscStackCallBLAS("BLASdot",uu[k] = BLASdot_(&dlen,u+off,&i1,y_ptr,&i1));
      y_ptr+=len;
    }

    /* uu = Y^T.u_l (comm portion) */
    PetscCall(PCTFS_ssgl_radd(uu, w, dim, stages));

    /* z = X.uu */
    PCTFS_rvec_zero(z,n);
    x_ptr=x;
    iptr = xcol_indices;
    for (k=0; k<i; k++) {
      off  = *iptr++;
      len  = *iptr++;
      PetscCall(PetscBLASIntCast(len,&dlen));
      PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&dlen,&uu[k],x_ptr,&i1,z+off,&i1));
      x_ptr+=len;
    }

    /* compute v_l = v_l - z */
    PCTFS_rvec_zero(v+a_n,a_m-a_n);
    PetscCall(PetscBLASIntCast(n,&dlen));
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&dlen,&dm1,z,&i1,v,&i1));

    /* compute u_l = A.v_l */
    if (a_n!=a_m) PCTFS_gs_gop_hc(PCTFS_gs_handle,v,"+\0",dim);
    PCTFS_rvec_zero(u,n);
    PetscCall(do_matvec(xyt_handle->mvi,v,u));

    /* compute alpha = u_l^T.u_l - local portion */
    PetscCall(PetscBLASIntCast(n,&dlen));
    PetscStackCallBLAS("BLASdot",alpha = BLASdot_(&dlen,u,&i1,u,&i1));
    /* compute alpha = u_l^T.u_l - comm portion */
    PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim);

    alpha = (PetscScalar) PetscSqrtReal((PetscReal)alpha);

    /* check for small alpha                             */
    /* LATER use this to detect and determine null space */
    PetscCheck(PetscAbsScalar(alpha)>=1.0e-14,PETSC_COMM_SELF,PETSC_ERR_PLIB,"bad alpha! %g",(double)PetscAbsScalar(alpha));

    /* compute v_l = v_l/sqrt(alpha), u_l = u_l/sqrt(alpha) */
    PCTFS_rvec_scale(v,1.0/alpha,n);
    PCTFS_rvec_scale(u,1.0/alpha,n);

    /* add newly generated column, v_l, to X */
    flag = 1;
    off  =len=0;
    for (k=0; k<n; k++) {
      if (v[k]!=0.0) {
        len=k;
        if (flag) {off=k; flag=0;}
      }
    }

    len -= (off-1);

    if (len>0) {
      if ((xt_nnz+len)>xt_max_nnz) {
        PetscCall(PetscInfo(0,"increasing space for X by 2x!\n"));
        xt_max_nnz *= 2;
        x_ptr       = (PetscScalar*) malloc(xt_max_nnz*sizeof(PetscScalar));
        PCTFS_rvec_copy(x_ptr,x,xt_nnz);
        free(x);
        x     = x_ptr;
        x_ptr+=xt_nnz;
      }
      xt_nnz += len;
      PCTFS_rvec_copy(x_ptr,v+off,len);

      xcol_indices[2*i] = off;
      xcol_sz[i]        = xcol_indices[2*i+1] = len;
      xcol_vals[i]      = x_ptr;
    } else {
      xcol_indices[2*i] = 0;
      xcol_sz[i]        = xcol_indices[2*i+1] = 0;
      xcol_vals[i]      = x_ptr;
    }

    /* add newly generated column, u_l, to Y */
    flag = 1;
    off  =len=0;
    for (k=0; k<n; k++) {
      if (u[k]!=0.0) {
        len=k;
        if (flag) { off=k; flag=0; }
      }
    }

    len -= (off-1);

    if (len>0) {
      if ((yt_nnz+len)>yt_max_nnz) {
        PetscCall(PetscInfo(0,"increasing space for Y by 2x!\n"));
        yt_max_nnz *= 2;
        y_ptr       = (PetscScalar*) malloc(yt_max_nnz*sizeof(PetscScalar));
        PCTFS_rvec_copy(y_ptr,y,yt_nnz);
        free(y);
        y     = y_ptr;
        y_ptr+=yt_nnz;
      }
      yt_nnz += len;
      PCTFS_rvec_copy(y_ptr,u+off,len);

      ycol_indices[2*i] = off;
      ycol_sz[i]        = ycol_indices[2*i+1] = len;
      ycol_vals[i]      = y_ptr;
    } else {
      ycol_indices[2*i] = 0;
      ycol_sz[i]        = ycol_indices[2*i+1] = 0;
      ycol_vals[i]      = y_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim!=level) {
    stages[dim++]=i;
    PetscCall(PetscInfo(0,"disconnected!!! dim(%" PetscInt_FMT ")!=level(%" PetscInt_FMT ")\n",dim,level));
  }
  stages[dim]=i;

  xyt_handle->info->n           =xyt_handle->mvi->n;
  xyt_handle->info->m           =m;
  xyt_handle->info->nnz         =xt_nnz + yt_nnz;
  xyt_handle->info->max_nnz     =xt_max_nnz + yt_max_nnz;
  xyt_handle->info->msg_buf_sz  =stages[level]-stages[0];
  xyt_handle->info->solve_uu    = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  xyt_handle->info->solve_w     = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  xyt_handle->info->x           =x;
  xyt_handle->info->xcol_vals   =xcol_vals;
  xyt_handle->info->xcol_sz     =xcol_sz;
  xyt_handle->info->xcol_indices=xcol_indices;
  xyt_handle->info->stages      =stages;
  xyt_handle->info->y           =y;
  xyt_handle->info->ycol_vals   =ycol_vals;
  xyt_handle->info->ycol_sz     =ycol_sz;
  xyt_handle->info->ycol_indices=ycol_indices;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  PetscFunctionReturn(0);
}

/**************************************xyt.c***********************************/
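/* Apply the factored inverse in place: uc <- X.(Y^T.uc).  The local pieces of
   Y^T.uc are computed with BLASdot_ and summed across the hypercube by
   PCTFS_ssgl_radd() before the X combination is accumulated with BLASaxpy_. */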
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle,  PetscScalar *uc)
{
  PetscInt       off, len, *iptr;
  PetscInt       level        =xyt_handle->level;
  PetscInt       n            =xyt_handle->info->n;
  PetscInt       m            =xyt_handle->info->m;
  PetscInt       *stages      =xyt_handle->info->stages;
  PetscInt       *xcol_indices=xyt_handle->info->xcol_indices;
  PetscInt       *ycol_indices=xyt_handle->info->ycol_indices;
  PetscScalar    *x_ptr, *y_ptr, *uu_ptr;
  PetscScalar    *solve_uu=xyt_handle->info->solve_uu;
  PetscScalar    *solve_w =xyt_handle->info->solve_w;
  PetscScalar    *x       =xyt_handle->info->x;
  PetscScalar    *y       =xyt_handle->info->y;
  PetscBLASInt   i1       = 1,dlen;

  PetscFunctionBegin;
  uu_ptr=solve_uu;
  PCTFS_rvec_zero(uu_ptr,m);

  /* x  = X.Y^T.b */
  /* uu = Y^T.b */
  for (y_ptr=y,iptr=ycol_indices; *iptr!=-1; y_ptr+=len) {
    off       =*iptr++;
    len       =*iptr++;
    PetscCall(PetscBLASIntCast(len,&dlen));
    PetscStackCallBLAS("BLASdot",*uu_ptr++ = BLASdot_(&dlen,uc+off,&i1,y_ptr,&i1));
  }

  /* communication of beta */
  uu_ptr=solve_uu;
  if (level) PetscCall(PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages));
  PCTFS_rvec_zero(uc,n);

  /* x = X.uu */
  for (x_ptr=x,iptr=xcol_indices; *iptr!=-1; x_ptr+=len) {
    off  =*iptr++;
    len  =*iptr++;
    PetscCall(PetscBLASIntCast(len,&dlen));
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&dlen,uu_ptr++,x_ptr,&i1,uc+off,&i1));
  }
  PetscFunctionReturn(0);
}

/**************************************xyt.c***********************************/
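/* Sanity check used by every entry point: the min/max reduction over the
   handle id verifies that all ranks entered with the same xyt handle. */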
static PetscErrorCode check_handle(xyt_ADT xyt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM,GL_MIN,GL_MAX};

  PetscFunctionBegin;
  PetscCheck(xyt_handle,PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: NULL %p",(void*)xyt_handle);

  vals[0]=vals[1]=xyt_handle->id;
  PCTFS_giop(vals,work,PETSC_STATIC_ARRAY_LENGTH(op)-1,op);
  PetscCheck(!(vals[0]!=vals[1])&&!(xyt_handle->id<=0),PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: id mismatch min/max %" PetscInt_FMT "/%" PetscInt_FMT " %" PetscInt_FMT, vals[0],vals[1], xyt_handle->id);
  PetscFunctionReturn(0);
}

/**************************************xyt.c***********************************/
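/* Work down the processor hypercube one edge (dimension) at a time: each half
   fires an indicator vector through the gather-scatter handle, and the locally
   owned dofs that receive signal from the opposite half are marked as that
   level's separator (both halves contribute, giving the width-2 separators the
   nonsymmetric case needs).  Marked dofs are pushed onto the firing order fo[]
   from the end; nsep[]/lnsep[] record the global/local separator sizes per
   level, and whatever remains after the last edge forms the purely local
   level-0 block. */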
static PetscErrorCode det_separators(xyt_ADT xyt_handle)
{
  PetscInt       i, ct, id;
  PetscInt       mask, edge, *iptr;
  PetscInt       *dir, *used;
  PetscInt       sum[4], w[4];
  PetscScalar    rsum[4], rw[4];
  PetscInt       op[] = {GL_ADD,0};
  PetscScalar    *lhs, *rhs;
  PetscInt       *nsep, *lnsep, *fo, nfo=0;
  PCTFS_gs_ADT   PCTFS_gs_handle=xyt_handle->mvi->PCTFS_gs_handle;
  PetscInt       *local2global  =xyt_handle->mvi->local2global;
  PetscInt       n              =xyt_handle->mvi->n;
  PetscInt       m              =xyt_handle->mvi->m;
  PetscInt       level          =xyt_handle->level;
  PetscInt       shared         =0;

  PetscFunctionBegin;
  dir  = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  nsep = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  lnsep= (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  fo   = (PetscInt*)malloc(sizeof(PetscInt)*(n+1));
  used = (PetscInt*)malloc(sizeof(PetscInt)*n);

  PCTFS_ivec_zero(dir,level+1);
  PCTFS_ivec_zero(nsep,level+1);
  PCTFS_ivec_zero(lnsep,level+1);
  PCTFS_ivec_set (fo,-1,n+1);
  PCTFS_ivec_zero(used,n);

  lhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);
  rhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);

  /* determine the # of unique dof */
  PCTFS_rvec_zero(lhs,m);
  PCTFS_rvec_set(lhs,1.0,n);
  PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",level);
  PetscCall(PetscInfo(0,"done first PCTFS_gs_gop_hc\n"));
  PCTFS_rvec_zero(rsum,2);
  for (i=0; i<n; i++) {
    if (lhs[i]!=0.0) { rsum[0]+=1.0/lhs[i]; rsum[1]+=lhs[i]; }
    if (lhs[i]!=1.0) shared=1;
  }

  PCTFS_grop_hc(rsum,rw,2,op,level);
  rsum[0]+=0.1;
  rsum[1]+=0.1;

  xyt_handle->info->n_global=xyt_handle->info->m_global=(PetscInt) rsum[0];
  xyt_handle->mvi->n_global =xyt_handle->mvi->m_global =(PetscInt) rsum[0];
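  /* NB: summing 1/multiplicity over locally owned dofs and adding across the
     hypercube counts each shared dof exactly once, so rsum[0] is the number of
     unique global dofs; the +0.1 guards the float-to-int truncation above */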

  /* determine separator sets top down */
  if (shared) {
    /* solution is to do as in the symmetric shared case but then */
    /* pick the sub-hc with the most free dofs and do a mat-vec   */
    /* and pick up the responses on the other sub-hc from the     */
    /* initial separator set obtained from the symm. shared case  */
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"shared dof separator determination not ready ... see hmt!!!");
    /* [dead code deleted since it is unlikely to be completed] */
  } else {
    for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level;edge>0;edge--,mask>>=1) {
      /* set rsh of hc, fire, and collect lhs responses */
      (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge);

      /* count number of dofs I own that have signal and not in sep set */
      for (PCTFS_ivec_zero(sum,4),ct=i=0;i<n;i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if ((id< mask)&&(lhs[i]!=0.0)) sum[0]++;
          /* number of dofs to be marked on rhs hc */
          if ((id>=mask)&&(rhs[i]!=0.0)) sum[1]++;
        }
      }

      /* for the non-symmetric case we need separators of width 2 */
      /* so take both sides */
      (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
      PCTFS_giop_hc(sum,w,4,op,edge);

      ct=0;
      if (id<mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (i=0;i<n;i++) {
          if ((!used[i])&&(lhs[i]!=0.0)) {
            ct++; nfo++;
            *--iptr = local2global[i];
            used[i] =edge;
          }
        }
        /* LSH hc summation of ct should be sum[0] */
      } else {
        /* mark dofs I own that have signal and not in sep set */
        for (i=0; i<n; i++) {
          if ((!used[i])&&(rhs[i]!=0.0)) {
            ct++; nfo++;
            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        /* RSH hc summation of ct should be sum[1] */
      }

      if (ct>1) PCTFS_ivec_sort(iptr,ct);
      lnsep[edge]=ct;
      nsep[edge] =sum[0]+sum[1];
      dir [edge] =BOTH;

      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this?                */

      /* fold rhs hc into lower */
      if (id>=mask) id-=mask;
    }
  }

  /* level 0 is on processor case - so mark the remainder */
  for (ct=i=0;i<n;i++) {
    if (!used[i]) {
      ct++; nfo++;
      *--iptr = local2global[i];
      used[i] = edge;
    }
  }
  if (ct>1) PCTFS_ivec_sort(iptr,ct);
  lnsep[edge]=ct;
  nsep [edge]=ct;
  dir  [edge]=BOTH;

  xyt_handle->info->nsep  = nsep;
  xyt_handle->info->lnsep = lnsep;
  xyt_handle->info->fo    = fo;
  xyt_handle->info->nfo   = nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(0);
}

/**************************************xyt.c***********************************/
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info*,PetscScalar*,PetscScalar*), void *grid_data)
{
  mv_info *mvi;

  mvi              = (mv_info*)malloc(sizeof(mv_info));
  mvi->n           = n;
  mvi->m           = m;
  mvi->n_global    = -1;
  mvi->m_global    = -1;
  mvi->local2global= (PetscInt*)malloc((m+1)*sizeof(PetscInt));

  PCTFS_ivec_copy(mvi->local2global,local2global,m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec          = matvec;
  mvi->grid_data       = grid_data;

  /* set xyt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return(mvi);
}

/**************************************xyt.c***********************************/
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
  PetscCall(A->matvec((mv_info*)A->grid_data,v,u));
  PetscFunctionReturn(0);
}