xref: /petsc/src/ksp/pc/impls/tfs/xyt.c (revision baebb345d9d7e79bb5cbdc95c0e4cbdec38ce09d)
1 
2 /*************************************xyt.c************************************
3 Module Name: xyt
4 Module Info:
5 
6 author:  Henry M. Tufo III
7 e-mail:  hmt@asci.uchicago.edu
8 contact:
9 +--------------------------------+--------------------------------+
10 |MCS Division - Building 221     |Department of Computer Science  |
11 |Argonne National Laboratory     |Ryerson 152                     |
12 |9700 S. Cass Avenue             |The University of Chicago       |
13 |Argonne, IL  60439              |Chicago, IL  60637              |
14 |(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
15 +--------------------------------+--------------------------------+
16 
17 Last Modification: 3.20.01
18 **************************************xyt.c***********************************/
19 #include <../src/ksp/pc/impls/tfs/tfs.h>
20 
21 #define LEFT  -1
22 #define RIGHT  1
23 #define BOTH   0
24 
/* Private factorization state: the sparse X and Y factors plus solve-time
   workspace.  Filled in by xyt_generate(), freed by XYT_free(). */
typedef struct xyt_solver_info {
  PetscInt n, m, n_global, m_global;                /* local/global row and column counts */
  PetscInt nnz, max_nnz, msg_buf_sz;                /* fill of X+Y, allocated capacity, comm buffer size */
  PetscInt *nsep, *lnsep, *fo, nfo, *stages;        /* separator sizes per level, local sep sizes, firing order (+count), stage offsets */
  PetscInt *xcol_sz, *xcol_indices;                 /* per-column length and packed (offset,length) pairs for X; -1 terminated */
  PetscScalar **xcol_vals, *x, *solve_uu, *solve_w; /* X column pointers, packed X values, solve scratch vectors */
  PetscInt *ycol_sz, *ycol_indices;                 /* per-column length and packed (offset,length) pairs for Y; -1 terminated */
  PetscScalar **ycol_vals, *y;                      /* Y column pointers and packed Y values */
  PetscInt nsolves;                                 /* number of solves performed (used by XYT_stats) */
  PetscScalar tot_solve_time;                       /* accumulated solve time (used by XYT_stats) */
} xyt_info;
36 
37 
/* Matrix-vector context: the local-to-global column map plus the user's
   local matvec callback and its opaque grid data. */
typedef struct matvec_info {
  PetscInt n, m, n_global, m_global;   /* local and global dimensions of A_local */
  PetscInt *local2global;              /* global index of each local column; length m+1, INT_MAX sentinel (see set_mvi) */
  PCTFS_gs_ADT PCTFS_gs_handle;        /* gather/scatter handle used for the restricted matvec communication */
  PetscErrorCode (*matvec)(struct matvec_info*,PetscScalar*,PetscScalar*); /* b_loc = A_local . x_loc */
  void *grid_data;                     /* opaque user data handed back to matvec */
} mv_info;
45 
/* Opaque XYT handle: identity plus factorization and matvec state. */
struct xyt_CDT{
  PetscInt id;       /* unique handle id, drawn from the global n_xyt counter */
  PetscInt ns;       /* null-space dimension; 0 = matrix assumed full rank */
  PetscInt level;    /* log2 of the number of participating nodes */
  xyt_info *info;    /* factorization data (NULL until XYT_factor succeeds) */
  mv_info  *mvi;     /* matvec context    (NULL until XYT_factor succeeds) */
};
53 
54 static PetscInt n_xyt=0;
55 static PetscInt n_xyt_handles=0;
56 
57 /* prototypes */
58 static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *rhs);
59 static PetscErrorCode check_handle(xyt_ADT xyt_handle);
60 static PetscErrorCode det_separators(xyt_ADT xyt_handle);
61 static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
62 static PetscInt xyt_generate(xyt_ADT xyt_handle);
63 static PetscInt do_xyt_factor(xyt_ADT xyt_handle);
64 static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data);
65 
66 /**************************************xyt.c***********************************/
67 xyt_ADT XYT_new(void)
68 {
69   xyt_ADT xyt_handle;
70 
71   /* rolling count on n_xyt ... pot. problem here */
72   n_xyt_handles++;
73   xyt_handle       = (xyt_ADT)malloc(sizeof(struct xyt_CDT));
74   xyt_handle->id   = ++n_xyt;
75   xyt_handle->info = NULL;
76   xyt_handle->mvi  = NULL;
77 
78   return(xyt_handle);
79 }
80 
81 /**************************************xyt.c***********************************/
82 PetscInt XYT_factor(xyt_ADT xyt_handle, /* prev. allocated xyt  handle */
83            PetscInt *local2global,  /* global column mapping       */
84            PetscInt n,              /* local num rows              */
85            PetscInt m,              /* local num cols              */
86            void *matvec,       /* b_loc=A_local.x_loc         */
87            void *grid_data     /* grid data for matvec        */
88            )
89 {
90 
91   PCTFS_comm_init();
92   check_handle(xyt_handle);
93 
94   /* only 2^k for now and all nodes participating */
95   if ((1<<(xyt_handle->level=PCTFS_i_log2_num_nodes))!=PCTFS_num_nodes) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"only 2^k for now and MPI_COMM_WORLD!!! %D != %D\n",1<<PCTFS_i_log2_num_nodes,PCTFS_num_nodes);
96 
97   /* space for X info */
98   xyt_handle->info = (xyt_info*)malloc(sizeof(xyt_info));
99 
100   /* set up matvec handles */
101   xyt_handle->mvi  = set_mvi(local2global, n, m, matvec, grid_data);
102 
103   /* matrix is assumed to be of full rank */
104   /* LATER we can reset to indicate rank def. */
105   xyt_handle->ns=0;
106 
107   /* determine separators and generate firing order - NB xyt info set here */
108   det_separators(xyt_handle);
109 
110   return(do_xyt_factor(xyt_handle));
111 }
112 
113 /**************************************xyt.c***********************************/
114 PetscInt XYT_solve(xyt_ADT xyt_handle, PetscScalar *x, PetscScalar *b)
115 {
116   PCTFS_comm_init();
117   check_handle(xyt_handle);
118 
119   /* need to copy b into x? */
120   if (b)
121     {PCTFS_rvec_copy(x,b,xyt_handle->mvi->n);}
122   do_xyt_solve(xyt_handle,x);
123 
124   return(0);
125 }
126 
127 /**************************************xyt.c***********************************/
128 PetscInt XYT_free(xyt_ADT xyt_handle)
129 {
130   PCTFS_comm_init();
131   check_handle(xyt_handle);
132   n_xyt_handles--;
133 
134   free(xyt_handle->info->nsep);
135   free(xyt_handle->info->lnsep);
136   free(xyt_handle->info->fo);
137   free(xyt_handle->info->stages);
138   free(xyt_handle->info->solve_uu);
139   free(xyt_handle->info->solve_w);
140   free(xyt_handle->info->x);
141   free(xyt_handle->info->xcol_vals);
142   free(xyt_handle->info->xcol_sz);
143   free(xyt_handle->info->xcol_indices);
144   free(xyt_handle->info->y);
145   free(xyt_handle->info->ycol_vals);
146   free(xyt_handle->info->ycol_sz);
147   free(xyt_handle->info->ycol_indices);
148   free(xyt_handle->info);
149   free(xyt_handle->mvi->local2global);
150   PCTFS_gs_free(xyt_handle->mvi->PCTFS_gs_handle);
151   free(xyt_handle->mvi);
152   free(xyt_handle);
153 
154 
155   /* if the check fails we nuke */
156   /* if NULL pointer passed to free we nuke */
157   /* if the calls to free fail that's not my problem */
158   return(0);
159 }
160 
161 /**************************************xyt.c***********************************/
/**************************************xyt.c***********************************/
/* Gather and print (on rank 0) min/max/avg/total statistics for the
   factorization: nnz, local n, message-buffer size, and per-solve time.
   Returns 1 (no stats) if the handle was never factored, else 0. */
PetscInt XYT_stats(xyt_ADT xyt_handle)
{
  PetscInt    op[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD};
  PetscInt   fop[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD};
  PetscInt    vals[9],  work[9];
  PetscScalar fvals[3], fwork[3];

  PCTFS_comm_init();
  check_handle(xyt_handle);

  /* if factorization not done there are no stats */
  if (!xyt_handle->info||!xyt_handle->mvi) {
    if (!PCTFS_my_id) PetscPrintf(PETSC_COMM_WORLD,"XYT_stats() :: no stats available!\n");
    return 1;
  }

  /* each quantity replicated 3x so one giop computes min/max/sum at once */
  vals[0]=vals[1]=vals[2]=xyt_handle->info->nnz;
  vals[3]=vals[4]=vals[5]=xyt_handle->mvi->n;
  vals[6]=vals[7]=vals[8]=xyt_handle->info->msg_buf_sz;
  PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);

  /* average solve time; NB nsolves is incremented here as a side effect */
  fvals[0]=fvals[1]=fvals[2]
    =xyt_handle->info->tot_solve_time/xyt_handle->info->nsolves++;
  PCTFS_grop(fvals,fwork,sizeof(fop)/sizeof(fop[0])-1,fop);

  /* only rank 0 reports; vals[2]/vals[5]/vals[8] now hold global sums */
  if (!PCTFS_my_id) {
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xyt_nnz=%D\n",PCTFS_my_id,vals[0]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xyt_nnz=%D\n",PCTFS_my_id,vals[1]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xyt_nnz=%g\n",PCTFS_my_id,1.0*vals[2]/PCTFS_num_nodes);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xyt_nnz=%D\n",PCTFS_my_id,vals[2]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: xyt   C(2d)  =%g\n",PCTFS_my_id,vals[2]/(pow(1.0*vals[5],1.5)));
    PetscPrintf(PETSC_COMM_WORLD,"%D :: xyt   C(3d)  =%g\n",PCTFS_my_id,vals[2]/(pow(1.0*vals[5],1.6667)));
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xyt_n  =%D\n",PCTFS_my_id,vals[3]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xyt_n  =%D\n",PCTFS_my_id,vals[4]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xyt_n  =%g\n",PCTFS_my_id,1.0*vals[5]/PCTFS_num_nodes);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xyt_n  =%D\n",PCTFS_my_id,vals[5]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xyt_buf=%D\n",PCTFS_my_id,vals[6]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xyt_buf=%D\n",PCTFS_my_id,vals[7]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xyt_buf=%g\n",PCTFS_my_id,1.0*vals[8]/PCTFS_num_nodes);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xyt_slv=%g\n",PCTFS_my_id,fvals[0]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xyt_slv=%g\n",PCTFS_my_id,fvals[1]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xyt_slv=%g\n",PCTFS_my_id,fvals[2]/PCTFS_num_nodes);
  }

  return(0);
}
208 
209 
210 /*************************************xyt.c************************************
211 
212 Description: get A_local, local portion of global coarse matrix which
213 is a row dist. nxm matrix w/ n<m.
214    o my_ml holds address of ML struct associated w/A_local and coarse grid
215    o local2global holds global number of column i (i=0,...,m-1)
216    o local2global holds global number of row    i (i=0,...,n-1)
217    o mylocmatvec performs A_local . vec_local (note that gs is performed using
218    PCTFS_gs_init/gop).
219 
220 mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
221 mylocmatvec (void :: void *data, double *in, double *out)
222 **************************************xyt.c***********************************/
/* Factorization driver: currently reduces directly to the X/Y generation
   pass; kept as a separate hook for future pre/post-processing. */
static PetscInt do_xyt_factor(xyt_ADT xyt_handle)
{
  return xyt_generate(xyt_handle);
}
227 
228 /**************************************xyt.c***********************************/
/**************************************xyt.c***********************************/
/* Build the sparse factors X and Y one column at a time.  Separator dofs are
   "fired" in the global order fo[]; for each fired column the candidate
   vector v is A-orthogonalized against all previously accepted columns
   (via Y^T, a stage summation, then X), scaled by 1/sqrt(u^T u), and its
   nonzero run [off,off+len) is appended to the packed x/y value arrays.
   Results (X, Y, stage offsets, solve workspace) are stashed into
   xyt_handle->info.  Returns 0 on success; errors abort via SETERRQ.
   NOTE(review): malloc results in this routine are unchecked. */
static PetscInt xyt_generate(xyt_ADT xyt_handle)
{
  PetscInt i,j,k,idx;
  PetscInt dim, col;
  PetscScalar *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt *segs;
  PetscInt op[] = {GL_ADD,0};
  PetscInt off, len;
  PetscScalar *x_ptr, *y_ptr;
  PetscInt *iptr, flag;
  PetscInt start=0, end, work;
  PetscInt op2[] = {GL_MIN,0};
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscInt *nsep, *lnsep, *fo;
  PetscInt a_n=xyt_handle->mvi->n;
  PetscInt a_m=xyt_handle->mvi->m;
  PetscInt *a_local2global=xyt_handle->mvi->local2global;
  PetscInt level;
  PetscInt n, m;
  PetscInt *xcol_sz, *xcol_indices, *stages;
  PetscScalar **xcol_vals, *x;
  PetscInt *ycol_sz, *ycol_indices;
  PetscScalar **ycol_vals, *y;
  PetscInt n_global;
  PetscInt xt_nnz=0, xt_max_nnz=0;
  PetscInt yt_nnz=0, yt_max_nnz=0;
  PetscInt xt_zero_nnz  =0;
  PetscInt xt_zero_nnz_0=0;
  PetscInt yt_zero_nnz  =0;
  PetscInt yt_zero_nnz_0=0;
  PetscBLASInt i1 = 1,dlen;
  PetscScalar dm1 = -1.0;
  PetscErrorCode ierr;

  /* unpack handle state computed by det_separators() */
  n=xyt_handle->mvi->n;
  nsep=xyt_handle->info->nsep;
  lnsep=xyt_handle->info->lnsep;
  fo=xyt_handle->info->fo;
  end=lnsep[0];
  level=xyt_handle->level;
  PCTFS_gs_handle=xyt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i=0, j=0; i<=level; i++)
    {j+=nsep[i];}

  m = j-xyt_handle->ns;
  if (m!=j) {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"xyt_generate() :: null space exists %D %D %D\n",m,j,xyt_handle->ns);CHKERRQ(ierr);
  }

  ierr = PetscInfo2(0,"xyt_generate() :: X(%D,%D)\n",n,m);CHKERRQ(ierr);

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  /* xcol_indices holds (offset,length) pairs, terminated by -1 */
  xcol_sz = (PetscInt*) malloc(m*sizeof(PetscInt));
  xcol_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  xcol_vals = (PetscScalar **) malloc(m*sizeof(PetscScalar *));
  for (i=j=0; i<m; i++, j+=2)
    {
      xcol_indices[j]=xcol_indices[j+1]=xcol_sz[i]=-1;
      xcol_vals[i] = NULL;
    }
  xcol_indices[j]=-1;

  /* get and initialize storage for y local         */
  /* note that y local is nxm and stored by columns */
  ycol_sz = (PetscInt*) malloc(m*sizeof(PetscInt));
  ycol_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  ycol_vals = (PetscScalar **) malloc(m*sizeof(PetscScalar *));
  for (i=j=0; i<m; i++, j+=2)
    {
      ycol_indices[j]=ycol_indices[j+1]=ycol_sz[i]=-1;
      ycol_vals[i] = NULL;
    }
  ycol_indices[j]=-1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  segs   = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  PCTFS_ivec_zero(stages,level+1);
  PCTFS_ivec_copy(segs,nsep,level+1);
  for (i=0; i<level; i++)
    {segs[i+1] += segs[i];}          /* prefix-sum: segs[i] = first column of level i+1 */
  stages[0] = segs[0];

  /* temporary vectors  */
  u  = (PetscScalar *) malloc(n*sizeof(PetscScalar));
  z  = (PetscScalar *) malloc(n*sizeof(PetscScalar));
  v  = (PetscScalar *) malloc(a_m*sizeof(PetscScalar));
  uu = (PetscScalar *) malloc(m*sizeof(PetscScalar));
  w  = (PetscScalar *) malloc(m*sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i=1, j=0; i<=level; i++)
    {j+=nsep[i];}

  /* storage for sparse x values */
  /* heuristic initial capacity; grown 2x on demand below */
  n_global = xyt_handle->info->n_global;
  xt_max_nnz = yt_max_nnz = (PetscInt)(2.5*pow(1.0*n_global,1.6667) + j*n/2)/PCTFS_num_nodes;
  x = (PetscScalar *) malloc(xt_max_nnz*sizeof(PetscScalar));
  y = (PetscScalar *) malloc(yt_max_nnz*sizeof(PetscScalar));

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim=i=j=0;i<m;i++)
    {
      /* time to move to the next level? */
      while (i==segs[dim]){
        if (dim==level) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"dim about to exceed level\n");
        stages[dim++]=i;
        end+=lnsep[dim];
      }
      stages[dim]=i;

      /* which column are we firing? */
      /* i.e. set v_l */
      /* use new seps and do global min across hc to determine which one to fire */
      (start<end) ? (col=fo[start]) : (col=INT_MAX);
      PCTFS_giop_hc(&col,&work,1,op2,dim);

      /* shouldn't need this */
      if (col==INT_MAX)
        {
          ierr = PetscInfo(0,"hey ... col==INT_MAX??\n");CHKERRQ(ierr);
          continue;
        }

      /* do I own it? I should */
      PCTFS_rvec_zero(v ,a_m);
      if (col==fo[start])
        {
          start++;
          idx=PCTFS_ivec_linear_search(col, a_local2global, a_n);
          if (idx!=-1)
            {v[idx] = 1.0; j++;}
          else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"NOT FOUND!\n");
        }
      else
        {
          idx=PCTFS_ivec_linear_search(col, a_local2global, a_m);
          if (idx!=-1)
            {v[idx] = 1.0;}
        }

      /* perform u = A.v_l */
      PCTFS_rvec_zero(u,n);
      do_matvec(xyt_handle->mvi,v,u);

      /* uu =  X^T.u_l (local portion) */
      /* technically only need to zero out first i entries */
      /* later turn this into an XYT_solve call ? */
      PCTFS_rvec_zero(uu,m);
      y_ptr=y;
      iptr = ycol_indices;
      for (k=0; k<i; k++)
        {
          off = *iptr++;
          len = *iptr++;
          dlen = PetscBLASIntCast(len);
          uu[k] = BLASdot_(&dlen,u+off,&i1,y_ptr,&i1);
          y_ptr+=len;
        }

      /* uu = X^T.u_l (comm portion) */
      PCTFS_ssgl_radd  (uu, w, dim, stages);

      /* z = X.uu */
      PCTFS_rvec_zero(z,n);
      x_ptr=x;
      iptr = xcol_indices;
      for (k=0; k<i; k++)
        {
          off = *iptr++;
          len = *iptr++;
          dlen = PetscBLASIntCast(len);
          BLASaxpy_(&dlen,&uu[k],x_ptr,&i1,z+off,&i1);
          x_ptr+=len;
        }

      /* compute v_l = v_l - z */
      PCTFS_rvec_zero(v+a_n,a_m-a_n);
      dlen = PetscBLASIntCast(n);
      BLASaxpy_(&dlen,&dm1,z,&i1,v,&i1);

      /* compute u_l = A.v_l */
      if (a_n!=a_m)
        {PCTFS_gs_gop_hc(PCTFS_gs_handle,v,"+\0",dim);}
      PCTFS_rvec_zero(u,n);
      do_matvec(xyt_handle->mvi,v,u);

      /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - local portion */
      dlen = PetscBLASIntCast(n);
      alpha = BLASdot_(&dlen,u,&i1,u,&i1);
      /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - comm portion */
      PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim);

      alpha = (PetscScalar) PetscSqrtReal((PetscReal)alpha);

      /* check for small alpha                             */
      /* LATER use this to detect and determine null space */
      if (fabs(alpha)<1.0e-14) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"bad alpha! %g\n",alpha);

      /* compute v_l = v_l/sqrt(alpha) */
      PCTFS_rvec_scale(v,1.0/alpha,n);
      PCTFS_rvec_scale(u,1.0/alpha,n);

      /* add newly generated column, v_l, to X */
      /* find the nonzero run [off, off+len) of v over the local rows */
      flag = 1;
      off=len=0;
      for (k=0; k<n; k++)
        {
          if (v[k]!=0.0)
            {
              len=k;
              if (flag)
                {off=k; flag=0;}
            }
        }

      len -= (off-1);

      if (len>0)
        {
          if ((xt_nnz+len)>xt_max_nnz)
            {
              /* grow the packed x array; x_ptr left at the append position */
              ierr = PetscInfo(0,"increasing space for X by 2x!\n");CHKERRQ(ierr);
              xt_max_nnz *= 2;
              x_ptr = (PetscScalar *) malloc(xt_max_nnz*sizeof(PetscScalar));
              PCTFS_rvec_copy(x_ptr,x,xt_nnz);
              free(x);
              x = x_ptr;
              x_ptr+=xt_nnz;
            }
          xt_nnz += len;
          PCTFS_rvec_copy(x_ptr,v+off,len);

          /* keep track of number of zeros */
          if (dim)
            {
              for (k=0; k<len; k++)
                {
                  if (x_ptr[k]==0.0)
                    {xt_zero_nnz++;}
                }
            }
          else
            {
              for (k=0; k<len; k++)
                {
                  if (x_ptr[k]==0.0)
                    {xt_zero_nnz_0++;}
                }
            }
          xcol_indices[2*i] = off;
          xcol_sz[i] = xcol_indices[2*i+1] = len;
          xcol_vals[i] = x_ptr;
        }
      else
        {
          xcol_indices[2*i] = 0;
          xcol_sz[i] = xcol_indices[2*i+1] = 0;
          xcol_vals[i] = x_ptr;
        }


      /* add newly generated column, u_l, to Y */
      /* same run-detection and append logic as for X above */
      flag = 1;
      off=len=0;
      for (k=0; k<n; k++)
        {
          if (u[k]!=0.0)
            {
              len=k;
              if (flag)
                {off=k; flag=0;}
            }
        }

      len -= (off-1);

      if (len>0)
        {
          if ((yt_nnz+len)>yt_max_nnz)
            {
              ierr = PetscInfo(0,"increasing space for Y by 2x!\n");CHKERRQ(ierr);
              yt_max_nnz *= 2;
              y_ptr = (PetscScalar *) malloc(yt_max_nnz*sizeof(PetscScalar));
              PCTFS_rvec_copy(y_ptr,y,yt_nnz);
              free(y);
              y = y_ptr;
              y_ptr+=yt_nnz;
            }
          yt_nnz += len;
          PCTFS_rvec_copy(y_ptr,u+off,len);

          /* keep track of number of zeros */
          if (dim)
            {
              for (k=0; k<len; k++)
                {
                  if (y_ptr[k]==0.0)
                    {yt_zero_nnz++;}
                }
            }
          else
            {
              for (k=0; k<len; k++)
                {
                  if (y_ptr[k]==0.0)
                    {yt_zero_nnz_0++;}
                }
            }
          ycol_indices[2*i] = off;
          ycol_sz[i] = ycol_indices[2*i+1] = len;
          ycol_vals[i] = y_ptr;
        }
      else
        {
          ycol_indices[2*i] = 0;
          ycol_sz[i] = ycol_indices[2*i+1] = 0;
          ycol_vals[i] = y_ptr;
        }
    }

  /* close off stages for execution phase */
  while (dim!=level)
    {
      stages[dim++]=i;
      ierr = PetscInfo2(0,"disconnected!!! dim(%D)!=level(%D)\n",dim,level);CHKERRQ(ierr);
    }
  stages[dim]=i;

  /* hand everything the solve phase needs over to the handle */
  xyt_handle->info->n=xyt_handle->mvi->n;
  xyt_handle->info->m=m;
  xyt_handle->info->nnz=xt_nnz + yt_nnz;
  xyt_handle->info->max_nnz=xt_max_nnz + yt_max_nnz;
  xyt_handle->info->msg_buf_sz=stages[level]-stages[0];
  xyt_handle->info->solve_uu = (PetscScalar *) malloc(m*sizeof(PetscScalar));
  xyt_handle->info->solve_w  = (PetscScalar *) malloc(m*sizeof(PetscScalar));
  xyt_handle->info->x=x;
  xyt_handle->info->xcol_vals=xcol_vals;
  xyt_handle->info->xcol_sz=xcol_sz;
  xyt_handle->info->xcol_indices=xcol_indices;
  xyt_handle->info->stages=stages;
  xyt_handle->info->y=y;
  xyt_handle->info->ycol_vals=ycol_vals;
  xyt_handle->info->ycol_sz=ycol_sz;
  xyt_handle->info->ycol_indices=ycol_indices;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return(0);
}
590 
591 /**************************************xyt.c***********************************/
592 static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle,  PetscScalar *uc)
593 {
594   PetscInt off, len, *iptr;
595   PetscInt level       =xyt_handle->level;
596   PetscInt n           =xyt_handle->info->n;
597   PetscInt m           =xyt_handle->info->m;
598   PetscInt *stages     =xyt_handle->info->stages;
599   PetscInt *xcol_indices=xyt_handle->info->xcol_indices;
600   PetscInt *ycol_indices=xyt_handle->info->ycol_indices;
601    PetscScalar *x_ptr, *y_ptr, *uu_ptr;
602   PetscScalar *solve_uu=xyt_handle->info->solve_uu;
603   PetscScalar *solve_w =xyt_handle->info->solve_w;
604   PetscScalar *x       =xyt_handle->info->x;
605   PetscScalar *y       =xyt_handle->info->y;
606   PetscBLASInt i1 = 1,dlen;
607 
608   PetscFunctionBegin;
609   uu_ptr=solve_uu;
610   PCTFS_rvec_zero(uu_ptr,m);
611 
612   /* x  = X.Y^T.b */
613   /* uu = Y^T.b */
614   for (y_ptr=y,iptr=ycol_indices; *iptr!=-1; y_ptr+=len)
615     {
616       off=*iptr++; len=*iptr++;
617       dlen = PetscBLASIntCast(len);
618       *uu_ptr++ = BLASdot_(&dlen,uc+off,&i1,y_ptr,&i1);
619     }
620 
621   /* comunication of beta */
622   uu_ptr=solve_uu;
623   if (level) {PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages);}
624 
625   PCTFS_rvec_zero(uc,n);
626 
627   /* x = X.uu */
628   for (x_ptr=x,iptr=xcol_indices; *iptr!=-1; x_ptr+=len)
629     {
630       off=*iptr++; len=*iptr++;
631       dlen = PetscBLASIntCast(len);
632       BLASaxpy_(&dlen,uu_ptr++,x_ptr,&i1,uc+off,&i1);
633     }
634   PetscFunctionReturn(0);
635 }
636 
637 /**************************************xyt.c***********************************/
638 static PetscErrorCode check_handle(xyt_ADT xyt_handle)
639 {
640   PetscInt vals[2], work[2], op[] = {NON_UNIFORM,GL_MIN,GL_MAX};
641 
642   PetscFunctionBegin;
643   if (!xyt_handle) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: NULL %D\n",xyt_handle);
644 
645   vals[0]=vals[1]=xyt_handle->id;
646   PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);
647   if ((vals[0]!=vals[1])||(xyt_handle->id<=0)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: id mismatch min/max %D/%D %D\n", vals[0],vals[1], xyt_handle->id);
648   PetscFunctionReturn(0);
649 }
650 
651 /**************************************xyt.c***********************************/
/**************************************xyt.c***********************************/
/* Determine the separator sets top-down over the hypercube (one edge per
   level), record per-level separator sizes (nsep), local contributions
   (lnsep), and build the global firing order fo[] (filled from the end,
   sorted per level).  Also computes the number of unique global dofs.
   Results are stored into xyt_handle->info.
   NOTE(review): the shared-dof branch aborts via SETERRQ immediately, so the
   code following it in that branch is currently unreachable. */
static PetscErrorCode det_separators(xyt_ADT xyt_handle)
{
  PetscInt i, ct, id;
  PetscInt mask, edge, *iptr;
  PetscInt *dir, *used;
  PetscInt sum[4], w[4];
  PetscScalar rsum[4], rw[4];
  PetscInt op[] = {GL_ADD,0};
  PetscScalar *lhs, *rhs;
  PetscInt *nsep, *lnsep, *fo, nfo=0;
  PCTFS_gs_ADT PCTFS_gs_handle=xyt_handle->mvi->PCTFS_gs_handle;
  PetscInt *local2global=xyt_handle->mvi->local2global;
  PetscInt  n=xyt_handle->mvi->n;
  PetscInt  m=xyt_handle->mvi->m;
  PetscInt level=xyt_handle->level;
  PetscInt shared=0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* per-level bookkeeping plus the firing order and a per-dof "used" mark */
  dir  = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  nsep = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  lnsep= (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  fo   = (PetscInt*)malloc(sizeof(PetscInt)*(n+1));
  used = (PetscInt*)malloc(sizeof(PetscInt)*n);

  PCTFS_ivec_zero(dir  ,level+1);
  PCTFS_ivec_zero(nsep ,level+1);
  PCTFS_ivec_zero(lnsep,level+1);
  PCTFS_ivec_set (fo   ,-1,n+1);
  PCTFS_ivec_zero(used,n);

  lhs  = (PetscScalar*)malloc(sizeof(PetscScalar)*m);
  rhs  = (PetscScalar*)malloc(sizeof(PetscScalar)*m);

  /* determine the # of unique dof */
  /* each dof contributes 1/multiplicity, so the global sum counts uniques */
  PCTFS_rvec_zero(lhs,m);
  PCTFS_rvec_set(lhs,1.0,n);
  PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",level);
  ierr = PetscInfo(0,"done first PCTFS_gs_gop_hc\n");CHKERRQ(ierr);
  PCTFS_rvec_zero(rsum,2);
  for (ct=i=0;i<n;i++)
    {
      if (lhs[i]!=0.0)
        {rsum[0]+=1.0/lhs[i]; rsum[1]+=lhs[i];}

      /* multiplicity > 1 means this dof is shared with another node */
      if (lhs[i]!=1.0)
        {
          shared=1;
        }
    }

  PCTFS_grop_hc(rsum,rw,2,op,level);
  /* +0.1 guards the float-to-int truncation below */
  rsum[0]+=0.1;
  rsum[1]+=0.1;

  xyt_handle->info->n_global=xyt_handle->info->m_global=(PetscInt) rsum[0];
  xyt_handle->mvi->n_global =xyt_handle->mvi->m_global =(PetscInt) rsum[0];

  /* determine separator sets top down */
  if (shared)
    {
      /* solution is to do as in the symmetric shared case but then */
      /* pick the sub-hc with the most free dofs and do a mat-vec   */
      /* and pick up the responses on the other sub-hc from the     */
      /* initial separator set obtained from the symm. shared case  */
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"shared dof separator determination not ready ... see hmt!!!\n");
      for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level;edge>0;edge--,mask>>=1)
        {
          /* set rsh of hc, fire, and collect lhs responses */
          (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m);
          PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge);

          /* set lsh of hc, fire, and collect rhs responses */
          (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m);
          PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge);

          for (i=0;i<n;i++)
            {
              if (id< mask)
                {
                  if (lhs[i]!=0.0)
                    {lhs[i]=1.0;}
                }
              if (id>=mask)
                {
                  if (rhs[i]!=0.0)
                    {rhs[i]=1.0;}
                }
            }

          if (id< mask)
            {PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge-1);}
          else
            {PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge-1);}

          /* count number of dofs I own that have signal and not in sep set */
          PCTFS_rvec_zero(rsum,4);
          for (PCTFS_ivec_zero(sum,4),ct=i=0;i<n;i++)
            {
              if (!used[i])
                {
                  /* number of unmarked dofs on node */
                  ct++;
                  /* number of dofs to be marked on lhs hc */
                  if (id< mask)
                    {
                      if (lhs[i]!=0.0)
                        {sum[0]++; rsum[0]+=1.0/lhs[i];}
                    }
                  /* number of dofs to be marked on rhs hc */
                  if (id>=mask)
                    {
                      if (rhs[i]!=0.0)
                        {sum[1]++; rsum[1]+=1.0/rhs[i];}
                    }
                }
            }

          /* go for load balance - choose half with most unmarked dofs, bias LHS */
          (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
          (id<mask) ? (rsum[2]=ct) : (rsum[3]=ct);
          PCTFS_giop_hc(sum,w,4,op,edge);
          PCTFS_grop_hc(rsum,rw,4,op,edge);
          rsum[0]+=0.1; rsum[1]+=0.1; rsum[2]+=0.1; rsum[3]+=0.1;

          if (id<mask)
            {
              /* mark dofs I own that have signal and not in sep set */
              for (ct=i=0;i<n;i++)
                {
                  if ((!used[i])&&(lhs[i]!=0.0))
                    {
                      ct++; nfo++;

                      if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

                      *--iptr = local2global[i];
                      used[i]=edge;
                    }
                }
              if (ct>1) {PCTFS_ivec_sort(iptr,ct);}

              lnsep[edge]=ct;
              nsep[edge]=(PetscInt) rsum[0];
              dir [edge]=LEFT;
            }

          if (id>=mask)
            {
              /* mark dofs I own that have signal and not in sep set */
              for (ct=i=0;i<n;i++)
                {
                  if ((!used[i])&&(rhs[i]!=0.0))
                    {
                      ct++; nfo++;

                      if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

                      *--iptr = local2global[i];
                      used[i]=edge;
                    }
                }
              if (ct>1) {PCTFS_ivec_sort(iptr,ct);}

              lnsep[edge]=ct;
              nsep[edge]= (PetscInt) rsum[1];
              dir [edge]=RIGHT;
            }

          /* LATER or we can recur on these to order seps at this level */
          /* do we need full set of separators for this?                */

          /* fold rhs hc into lower */
          if (id>=mask)
            {id-=mask;}
        }
    }
  else
    {
      for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level;edge>0;edge--,mask>>=1)
        {
          /* set rsh of hc, fire, and collect lhs responses */
          (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m);
          PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge);

          /* set lsh of hc, fire, and collect rhs responses */
          (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m);
          PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge);

          /* count number of dofs I own that have signal and not in sep set */
          for (PCTFS_ivec_zero(sum,4),ct=i=0;i<n;i++)
            {
              if (!used[i])
                {
                  /* number of unmarked dofs on node */
                  ct++;
                  /* number of dofs to be marked on lhs hc */
                  if ((id< mask)&&(lhs[i]!=0.0)) {sum[0]++;}
                  /* number of dofs to be marked on rhs hc */
                  if ((id>=mask)&&(rhs[i]!=0.0)) {sum[1]++;}
                }
            }

          /* for the non-symmetric case we need separators of width 2 */
          /* so take both sides */
          (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
          PCTFS_giop_hc(sum,w,4,op,edge);

          ct=0;
          if (id<mask)
            {
              /* mark dofs I own that have signal and not in sep set */
              for (i=0;i<n;i++)
                {
                  if ((!used[i])&&(lhs[i]!=0.0))
                    {
                      ct++; nfo++;
                      *--iptr = local2global[i];
                      used[i]=edge;
                    }
                }
              /* LSH hc summation of ct should be sum[0] */
            }
          else
            {
              /* mark dofs I own that have signal and not in sep set */
              for (i=0;i<n;i++)
                {
                  if ((!used[i])&&(rhs[i]!=0.0))
                    {
                      ct++; nfo++;
                      *--iptr = local2global[i];
                      used[i]=edge;
                    }
                }
              /* RSH hc summation of ct should be sum[1] */
            }

          if (ct>1) {PCTFS_ivec_sort(iptr,ct);}
          lnsep[edge]=ct;
          nsep[edge]=sum[0]+sum[1];
          dir [edge]=BOTH;

          /* LATER or we can recur on these to order seps at this level */
          /* do we need full set of separators for this?                */

          /* fold rhs hc into lower */
          if (id>=mask)
            {id-=mask;}
        }
    }

  /* level 0 is on processor case - so mark the remainder */
  /* NB edge is 0 here (loop ran to completion) */
  for (ct=i=0;i<n;i++)
    {
      if (!used[i])
        {
          ct++; nfo++;
          *--iptr = local2global[i];
          used[i]=edge;
        }
    }
  if (ct>1) {PCTFS_ivec_sort(iptr,ct);}
  lnsep[edge]=ct;
  nsep [edge]=ct;
  dir  [edge]=BOTH;

  /* ownership of nsep/lnsep/fo transfers to the handle; freed in XYT_free */
  xyt_handle->info->nsep=nsep;
  xyt_handle->info->lnsep=lnsep;
  xyt_handle->info->fo=fo;
  xyt_handle->info->nfo=nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(0);
}
930 
931 /**************************************xyt.c***********************************/
932 static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data)
933 {
934   mv_info *mvi;
935 
936 
937   mvi = (mv_info*)malloc(sizeof(mv_info));
938   mvi->n=n;
939   mvi->m=m;
940   mvi->n_global=-1;
941   mvi->m_global=-1;
942   mvi->local2global=(PetscInt*)malloc((m+1)*sizeof(PetscInt));
943   PCTFS_ivec_copy(mvi->local2global,local2global,m);
944   mvi->local2global[m] = INT_MAX;
945   mvi->matvec=(PetscErrorCode (*)(mv_info*,PetscScalar*,PetscScalar*))matvec;
946   mvi->grid_data=grid_data;
947 
948   /* set xyt communication handle to perform restricted matvec */
949   mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);
950 
951   return(mvi);
952 }
953 
954 /**************************************xyt.c***********************************/
955 static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
956 {
957   PetscFunctionBegin;
958   A->matvec((mv_info*)A->grid_data,v,u);
959   PetscFunctionReturn(0);
960 }
961 
962 
963 
964