
/*************************************xyt.c************************************
Module Name: xyt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL  60439              |Chicago, IL  60637              |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
**************************************xyt.c***********************************/
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT  1
#define BOTH   0

typedef struct xyt_solver_info {
  PetscInt    n, m, n_global, m_global;
  PetscInt    nnz, max_nnz, msg_buf_sz;
  PetscInt    *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt    *xcol_sz, *xcol_indices;
  PetscScalar **xcol_vals, *x, *solve_uu, *solve_w;
  PetscInt    *ycol_sz, *ycol_indices;
  PetscScalar **ycol_vals, *y;
  PetscInt    nsolves;
  PetscScalar tot_solve_time;
} xyt_info;

typedef struct matvec_info {
  PetscInt     n, m, n_global, m_global;
  PetscInt     *local2global;
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info*,PetscScalar*,PetscScalar*);
  void *grid_data;
} mv_info;

struct xyt_CDT {
  PetscInt id;
  PetscInt ns;
  PetscInt level;
  xyt_info *info;
  mv_info  *mvi;
};

static PetscInt n_xyt        =0;
static PetscInt n_xyt_handles=0;

/* prototypes */
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xyt_ADT xyt_handle);
static PetscErrorCode det_separators(xyt_ADT xyt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscErrorCode xyt_generate(xyt_ADT xyt_handle);
static PetscErrorCode do_xyt_factor(xyt_ADT xyt_handle);
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info*,PetscScalar*,PetscScalar*), void *grid_data);

/**************************************xyt.c***********************************/
xyt_ADT XYT_new(void)
{
  xyt_ADT xyt_handle;

  /* rolling count on n_xyt ... pot. problem here */
  n_xyt_handles++;
  xyt_handle       = (xyt_ADT)malloc(sizeof(struct xyt_CDT));
  xyt_handle->id   = ++n_xyt;
  xyt_handle->info = NULL;
  xyt_handle->mvi  = NULL;

  return(xyt_handle);
}

/**************************************xyt.c***********************************/
PetscErrorCode XYT_factor(xyt_ADT xyt_handle,     /* prev. allocated xyt  handle */
                    PetscInt *local2global, /* global column mapping       */
                    PetscInt n,             /* local num rows              */
                    PetscInt m,             /* local num cols              */
                    PetscErrorCode (*matvec)(void*,PetscScalar*,PetscScalar*), /* b_loc=A_local.x_loc         */
                    void *grid_data)        /* grid data for matvec        */
{

  PCTFS_comm_init();
  check_handle(xyt_handle);

  /* only 2^k for now and all nodes participating */
  if ((1<<(xyt_handle->level=PCTFS_i_log2_num_nodes))!=PCTFS_num_nodes) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"only 2^k for now and MPI_COMM_WORLD!!! %D != %D\n",1<<PCTFS_i_log2_num_nodes,PCTFS_num_nodes);

  /* space for X info */
  xyt_handle->info = (xyt_info*)malloc(sizeof(xyt_info));

  /* set up matvec handles */
  xyt_handle->mvi = set_mvi(local2global, n, m, (PetscErrorCode (*)(mv_info*,PetscScalar*,PetscScalar*))matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xyt_handle->ns=0;

  /* determine separators and generate firing order - NB xyt info set here */
  det_separators(xyt_handle);

  return(do_xyt_factor(xyt_handle));
}

/**************************************xyt.c***********************************/
PetscErrorCode XYT_solve(xyt_ADT xyt_handle, PetscScalar *x, PetscScalar *b)
{
  PCTFS_comm_init();
  check_handle(xyt_handle);

  /* need to copy b into x? */
  if (b) PCTFS_rvec_copy(x,b,xyt_handle->mvi->n);
  return do_xyt_solve(xyt_handle,x);
}

/**************************************xyt.c***********************************/
PetscErrorCode XYT_free(xyt_ADT xyt_handle)
{
  PCTFS_comm_init();
  check_handle(xyt_handle);
  n_xyt_handles--;

  free(xyt_handle->info->nsep);
  free(xyt_handle->info->lnsep);
  free(xyt_handle->info->fo);
  free(xyt_handle->info->stages);
  free(xyt_handle->info->solve_uu);
  free(xyt_handle->info->solve_w);
  free(xyt_handle->info->x);
  free(xyt_handle->info->xcol_vals);
  free(xyt_handle->info->xcol_sz);
  free(xyt_handle->info->xcol_indices);
  free(xyt_handle->info->y);
  free(xyt_handle->info->ycol_vals);
  free(xyt_handle->info->ycol_sz);
  free(xyt_handle->info->ycol_indices);
  free(xyt_handle->info);
  free(xyt_handle->mvi->local2global);
  PCTFS_gs_free(xyt_handle->mvi->PCTFS_gs_handle);
  free(xyt_handle->mvi);
  free(xyt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  return(0);
}
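
/*************************************xyt.c************************************
Typical call sequence (a minimal sketch, not lifted from any PETSc example;
my_matvec and grid_data are placeholders the caller must supply):

  static PetscErrorCode my_matvec(void *grid_data, PetscScalar *in, PetscScalar *out)
  {
    [ apply the local operator: out = A_local . in ]
    return 0;
  }

  xyt_ADT handle = XYT_new();
  XYT_factor(handle,local2global,n,m,my_matvec,grid_data);
  XYT_solve(handle,x,b);    [ x = A^{-1}.b; pass b=NULL if b already sits in x ]
  XYT_free(handle);
**************************************xyt.c***********************************/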

/**************************************xyt.c***********************************/
PetscErrorCode XYT_stats(xyt_ADT xyt_handle)
{
  PetscInt    op[]  = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD};
  PetscInt    fop[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD};
  PetscInt    vals[9],  work[9];
  PetscScalar fvals[3], fwork[3];

  PCTFS_comm_init();
  check_handle(xyt_handle);

  /* if factorization not done there are no stats */
  if (!xyt_handle->info||!xyt_handle->mvi) {
    if (!PCTFS_my_id) PetscPrintf(PETSC_COMM_WORLD,"XYT_stats() :: no stats available!\n");
    return 1;
  }

  vals[0]=vals[1]=vals[2]=xyt_handle->info->nnz;
  vals[3]=vals[4]=vals[5]=xyt_handle->mvi->n;
  vals[6]=vals[7]=vals[8]=xyt_handle->info->msg_buf_sz;
  PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);

  fvals[0]=fvals[1]=fvals[2]=xyt_handle->info->tot_solve_time/xyt_handle->info->nsolves++;
  PCTFS_grop(fvals,fwork,sizeof(fop)/sizeof(fop[0])-1,fop);

  if (!PCTFS_my_id) {
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xyt_nnz=%D\n",PCTFS_my_id,vals[0]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xyt_nnz=%D\n",PCTFS_my_id,vals[1]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xyt_nnz=%g\n",PCTFS_my_id,1.0*vals[2]/PCTFS_num_nodes);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xyt_nnz=%D\n",PCTFS_my_id,vals[2]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: xyt   C(2d)  =%g\n",PCTFS_my_id,vals[2]/(PetscPowReal(1.0*vals[5],1.5)));
    PetscPrintf(PETSC_COMM_WORLD,"%D :: xyt   C(3d)  =%g\n",PCTFS_my_id,vals[2]/(PetscPowReal(1.0*vals[5],1.6667)));
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xyt_n  =%D\n",PCTFS_my_id,vals[3]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xyt_n  =%D\n",PCTFS_my_id,vals[4]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xyt_n  =%g\n",PCTFS_my_id,1.0*vals[5]/PCTFS_num_nodes);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xyt_n  =%D\n",PCTFS_my_id,vals[5]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xyt_buf=%D\n",PCTFS_my_id,vals[6]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xyt_buf=%D\n",PCTFS_my_id,vals[7]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xyt_buf=%g\n",PCTFS_my_id,1.0*vals[8]/PCTFS_num_nodes);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xyt_slv=%g\n",PCTFS_my_id,fvals[0]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xyt_slv=%g\n",PCTFS_my_id,fvals[1]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xyt_slv=%g\n",PCTFS_my_id,fvals[2]/PCTFS_num_nodes);
  }

  return(0);
}

/*************************************xyt.c************************************

Description: get A_local, local portion of global coarse matrix which
is a row dist. nxm matrix w/ n<m.
   o my_ml holds address of ML struct associated w/A_local and coarse grid
   o local2global holds global number of column i (i=0,...,m-1)
   o local2global holds global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed using
   PCTFS_gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
**************************************xyt.c***********************************/
static PetscErrorCode do_xyt_factor(xyt_ADT xyt_handle)
{
  return xyt_generate(xyt_handle);
}
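
/*************************************xyt.c************************************
Factorization outline (a reading of xyt_generate() below, not a proof): each
pass fires a unit vector, projects out its components along the previous
columns via v = v - X.(Y^T.A.v), keeps the result as column x_i of X, and
stores y_i = A.x_i, both scaled so the y_i come out orthonormal.  For the
full-rank case assumed here (xyt_handle->ns==0) this gives

  Y = A.X,   Y^T.Y = I   ==>   A.(X.Y^T) = Y.Y^T = I,

i.e. X.Y^T acts as A^{-1}, which is exactly how do_xyt_solve() applies it:
uu = Y^T.b followed by x = X.uu.
**************************************xyt.c***********************************/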

/**************************************xyt.c***********************************/
static PetscErrorCode xyt_generate(xyt_ADT xyt_handle)
{
  PetscInt       i,j,k,idx;
  PetscInt       dim, col;
  PetscScalar    *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt       *segs;
  PetscInt       op[] = {GL_ADD,0};
  PetscInt       off, len;
  PetscScalar    *x_ptr, *y_ptr;
  PetscInt       *iptr, flag;
  PetscInt       start =0, end, work;
  PetscInt       op2[] = {GL_MIN,0};
  PCTFS_gs_ADT   PCTFS_gs_handle;
  PetscInt       *nsep, *lnsep, *fo;
  PetscInt       a_n            =xyt_handle->mvi->n;
  PetscInt       a_m            =xyt_handle->mvi->m;
  PetscInt       *a_local2global=xyt_handle->mvi->local2global;
  PetscInt       level;
  PetscInt       n, m;
  PetscInt       *xcol_sz, *xcol_indices, *stages;
  PetscScalar    **xcol_vals, *x;
  PetscInt       *ycol_sz, *ycol_indices;
  PetscScalar    **ycol_vals, *y;
  PetscInt       n_global;
  PetscInt       xt_nnz       =0, xt_max_nnz=0;
  PetscInt       yt_nnz       =0, yt_max_nnz=0;
  PetscInt       xt_zero_nnz  =0;
  PetscInt       xt_zero_nnz_0=0;
  PetscInt       yt_zero_nnz  =0;
  PetscInt       yt_zero_nnz_0=0;
  PetscBLASInt   i1           = 1,dlen;
  PetscScalar    dm1          = -1.0;
  PetscErrorCode ierr;

  n              =xyt_handle->mvi->n;
  nsep           =xyt_handle->info->nsep;
  lnsep          =xyt_handle->info->lnsep;
  fo             =xyt_handle->info->fo;
  end            =lnsep[0];
  level          =xyt_handle->level;
  PCTFS_gs_handle=xyt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i=0, j=0; i<=level; i++) j+=nsep[i];

  m = j-xyt_handle->ns;
  if (m!=j) {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"xyt_generate() :: null space exists %D %D %D\n",m,j,xyt_handle->ns);CHKERRQ(ierr);
  }

  ierr = PetscInfo2(0,"xyt_generate() :: X(%D,%D)\n",n,m);CHKERRQ(ierr);

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  xcol_sz      = (PetscInt*) malloc(m*sizeof(PetscInt));
  xcol_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  xcol_vals    = (PetscScalar**) malloc(m*sizeof(PetscScalar*));
  for (i=j=0; i<m; i++, j+=2) {
    xcol_indices[j]=xcol_indices[j+1]=xcol_sz[i]=-1;
    xcol_vals[i]   = NULL;
  }
  xcol_indices[j]=-1;

  /* get and initialize storage for y local         */
  /* note that y local is nxm and stored by columns */
  ycol_sz      = (PetscInt*) malloc(m*sizeof(PetscInt));
  ycol_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  ycol_vals    = (PetscScalar**) malloc(m*sizeof(PetscScalar*));
  for (i=j=0; i<m; i++, j+=2) {
    ycol_indices[j]=ycol_indices[j+1]=ycol_sz[i]=-1;
    ycol_vals[i]   = NULL;
  }
  ycol_indices[j]=-1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  segs   = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  PCTFS_ivec_zero(stages,level+1);
  PCTFS_ivec_copy(segs,nsep,level+1);
  for (i=0; i<level; i++) segs[i+1] += segs[i];
  stages[0] = segs[0];

  /* temporary vectors  */
  u  = (PetscScalar*) malloc(n*sizeof(PetscScalar));
  z  = (PetscScalar*) malloc(n*sizeof(PetscScalar));
  v  = (PetscScalar*) malloc(a_m*sizeof(PetscScalar));
  uu = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  w  = (PetscScalar*) malloc(m*sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i=1, j=0; i<=level; i++) j+=nsep[i];

  /* storage for sparse x values */
  n_global   = xyt_handle->info->n_global;
  xt_max_nnz = yt_max_nnz = (PetscInt)(2.5*PetscPowReal(1.0*n_global,1.6667) + j*n/2)/PCTFS_num_nodes;
  x          = (PetscScalar*) malloc(xt_max_nnz*sizeof(PetscScalar));
  y          = (PetscScalar*) malloc(yt_max_nnz*sizeof(PetscScalar));

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim=i=j=0; i<m; i++) {
    /* time to move to the next level? */
    while (i==segs[dim]) {
      if (dim==level) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"dim about to exceed level\n");
      stages[dim++]=i;
      end         +=lnsep[dim];
    }
    stages[dim]=i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    (start<end) ? (col=fo[start]) : (col=INT_MAX);
    PCTFS_giop_hc(&col,&work,1,op2,dim);

    /* shouldn't need this */
    if (col==INT_MAX) {
      ierr = PetscInfo(0,"hey ... col==INT_MAX??\n");CHKERRQ(ierr);
      continue;
    }

    /* do I own it? I should */
    PCTFS_rvec_zero(v,a_m);
    if (col==fo[start]) {
      start++;
      idx=PCTFS_ivec_linear_search(col, a_local2global, a_n);
      if (idx!=-1) {
        v[idx] = 1.0;
        j++;
      } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"NOT FOUND!\n");
    } else {
      idx=PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idx!=-1) v[idx] = 1.0;
    }

    /* perform u = A.v_l */
    PCTFS_rvec_zero(u,n);
    do_matvec(xyt_handle->mvi,v,u);

    /* uu =  X^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XYT_solve call ? */
    PCTFS_rvec_zero(uu,m);
    y_ptr=y;
    iptr = ycol_indices;
    for (k=0; k<i; k++) {
      off   = *iptr++;
      len   = *iptr++;
      ierr  = PetscBLASIntCast(len,&dlen);CHKERRQ(ierr);
      PetscStackCallBLAS("BLASdot",uu[k] = BLASdot_(&dlen,u+off,&i1,y_ptr,&i1));
      y_ptr+=len;
    }

    /* uu = X^T.u_l (comm portion) */
    ierr = PCTFS_ssgl_radd  (uu, w, dim, stages);CHKERRQ(ierr);

    /* z = X.uu */
    PCTFS_rvec_zero(z,n);
    x_ptr=x;
    iptr = xcol_indices;
    for (k=0; k<i; k++) {
      off  = *iptr++;
      len  = *iptr++;
      ierr = PetscBLASIntCast(len,&dlen);CHKERRQ(ierr);
      PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&dlen,&uu[k],x_ptr,&i1,z+off,&i1));
      x_ptr+=len;
    }

    /* compute v_l = v_l - z */
    PCTFS_rvec_zero(v+a_n,a_m-a_n);
    ierr = PetscBLASIntCast(n,&dlen);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&dlen,&dm1,z,&i1,v,&i1));

    /* compute u_l = A.v_l */
    if (a_n!=a_m) PCTFS_gs_gop_hc(PCTFS_gs_handle,v,"+\0",dim);
    PCTFS_rvec_zero(u,n);
    do_matvec(xyt_handle->mvi,v,u);

    /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - local portion */
    ierr  = PetscBLASIntCast(n,&dlen);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASdot",alpha = BLASdot_(&dlen,u,&i1,u,&i1));
    /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - comm portion */
    PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim);

    alpha = (PetscScalar) PetscSqrtReal((PetscReal)alpha);

    /* check for small alpha                             */
    /* LATER use this to detect and determine null space */
    if (PetscAbsScalar(alpha)<1.0e-14) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"bad alpha! %g\n",alpha);

    /* compute v_l = v_l/sqrt(alpha) */
    PCTFS_rvec_scale(v,1.0/alpha,n);
    PCTFS_rvec_scale(u,1.0/alpha,n);

    /* add newly generated column, v_l, to X */
    flag = 1;
    off  =len=0;
    for (k=0; k<n; k++) {
      if (v[k]!=0.0) {
        len=k;
        if (flag) {off=k; flag=0;}
      }
    }

    len -= (off-1);

    if (len>0) {
      if ((xt_nnz+len)>xt_max_nnz) {
        ierr        = PetscInfo(0,"increasing space for X by 2x!\n");CHKERRQ(ierr);
        xt_max_nnz *= 2;
        x_ptr       = (PetscScalar*) malloc(xt_max_nnz*sizeof(PetscScalar));
        PCTFS_rvec_copy(x_ptr,x,xt_nnz);
        free(x);
        x     = x_ptr;
        x_ptr+=xt_nnz;
      }
      xt_nnz += len;
      PCTFS_rvec_copy(x_ptr,v+off,len);

      /* keep track of number of zeros */
      if (dim) {
        for (k=0; k<len; k++) {
          if (x_ptr[k]==0.0) xt_zero_nnz++;
        }
      } else {
        for (k=0; k<len; k++) {
          if (x_ptr[k]==0.0) xt_zero_nnz_0++;
        }
      }
      xcol_indices[2*i] = off;
      xcol_sz[i]        = xcol_indices[2*i+1] = len;
      xcol_vals[i]      = x_ptr;
    } else {
      xcol_indices[2*i] = 0;
      xcol_sz[i]        = xcol_indices[2*i+1] = 0;
      xcol_vals[i]      = x_ptr;
    }

    /* add newly generated column, u_l, to Y */
    flag = 1;
    off  =len=0;
    for (k=0; k<n; k++) {
      if (u[k]!=0.0) {
        len=k;
        if (flag) { off=k; flag=0; }
      }
    }

    len -= (off-1);

    if (len>0) {
      if ((yt_nnz+len)>yt_max_nnz) {
        ierr        = PetscInfo(0,"increasing space for Y by 2x!\n");CHKERRQ(ierr);
        yt_max_nnz *= 2;
        y_ptr       = (PetscScalar*) malloc(yt_max_nnz*sizeof(PetscScalar));
        PCTFS_rvec_copy(y_ptr,y,yt_nnz);
        free(y);
        y     = y_ptr;
        y_ptr+=yt_nnz;
      }
      yt_nnz += len;
      PCTFS_rvec_copy(y_ptr,u+off,len);

      /* keep track of number of zeros */
      if (dim) {
        for (k=0; k<len; k++) {
          if (y_ptr[k]==0.0) yt_zero_nnz++;
        }
      } else {
        for (k=0; k<len; k++) {
          if (y_ptr[k]==0.0) yt_zero_nnz_0++;
        }
      }
      ycol_indices[2*i] = off;
      ycol_sz[i]        = ycol_indices[2*i+1] = len;
      ycol_vals[i]      = y_ptr;
    } else {
      ycol_indices[2*i] = 0;
      ycol_sz[i]        = ycol_indices[2*i+1] = 0;
      ycol_vals[i]      = y_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim!=level) {
    stages[dim++]=i;
    ierr         = PetscInfo2(0,"disconnected!!! dim(%D)!=level(%D)\n",dim,level);CHKERRQ(ierr);
  }
  stages[dim]=i;

  xyt_handle->info->n           =xyt_handle->mvi->n;
  xyt_handle->info->m           =m;
  xyt_handle->info->nnz         =xt_nnz + yt_nnz;
  xyt_handle->info->max_nnz     =xt_max_nnz + yt_max_nnz;
  xyt_handle->info->msg_buf_sz  =stages[level]-stages[0];
  xyt_handle->info->solve_uu    = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  xyt_handle->info->solve_w     = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  xyt_handle->info->x           =x;
  xyt_handle->info->xcol_vals   =xcol_vals;
  xyt_handle->info->xcol_sz     =xcol_sz;
  xyt_handle->info->xcol_indices=xcol_indices;
  xyt_handle->info->stages      =stages;
  xyt_handle->info->y           =y;
  xyt_handle->info->ycol_vals   =ycol_vals;
  xyt_handle->info->ycol_sz     =ycol_sz;
  xyt_handle->info->ycol_indices=ycol_indices;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return(0);
}
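
/*************************************xyt.c************************************
Storage format produced by xyt_generate() (read off the code above, shown for
orientation only): the nonzeros of column i of X sit contiguously in the flat
array x, and xcol_indices holds (offset,length) pairs terminated by -1, so a
traversal looks like

  for (x_ptr=x,iptr=xcol_indices; *iptr!=-1; x_ptr+=len) {
    off = *iptr++;    [ first nonzero row of this column ]
    len = *iptr++;    [ number of stored entries         ]
    [ column occupies x_ptr[0..len-1], rows off..off+len-1 ]
  }

Y uses the identical layout via y, ycol_sz, ycol_indices, and ycol_vals.
**************************************xyt.c***********************************/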

/**************************************xyt.c***********************************/
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle,  PetscScalar *uc)
{
  PetscInt       off, len, *iptr;
  PetscInt       level        =xyt_handle->level;
  PetscInt       n            =xyt_handle->info->n;
  PetscInt       m            =xyt_handle->info->m;
  PetscInt       *stages      =xyt_handle->info->stages;
  PetscInt       *xcol_indices=xyt_handle->info->xcol_indices;
  PetscInt       *ycol_indices=xyt_handle->info->ycol_indices;
  PetscScalar    *x_ptr, *y_ptr, *uu_ptr;
  PetscScalar    *solve_uu=xyt_handle->info->solve_uu;
  PetscScalar    *solve_w =xyt_handle->info->solve_w;
  PetscScalar    *x       =xyt_handle->info->x;
  PetscScalar    *y       =xyt_handle->info->y;
  PetscBLASInt   i1       = 1,dlen;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  uu_ptr=solve_uu;
  PCTFS_rvec_zero(uu_ptr,m);

  /* x  = X.Y^T.b */
  /* uu = Y^T.b */
  for (y_ptr=y,iptr=ycol_indices; *iptr!=-1; y_ptr+=len) {
    off       =*iptr++;
    len       =*iptr++;
    ierr      = PetscBLASIntCast(len,&dlen);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASdot",*uu_ptr++ = BLASdot_(&dlen,uc+off,&i1,y_ptr,&i1));
  }

  /* communication of beta */
  uu_ptr=solve_uu;
  if (level) {ierr = PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages);CHKERRQ(ierr);}
  PCTFS_rvec_zero(uc,n);

  /* x = X.uu */
  for (x_ptr=x,iptr=xcol_indices; *iptr!=-1; x_ptr+=len) {
    off  =*iptr++;
    len  =*iptr++;
    ierr = PetscBLASIntCast(len,&dlen);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&dlen,uu_ptr++,x_ptr,&i1,uc+off,&i1));
  }
  PetscFunctionReturn(0);
}
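
/*************************************xyt.c************************************
Note on the solve above: the PCTFS_ssgl_radd() call is its only
interprocessor step; as I read it, it sums the separator entries of uu across
the hypercube one stage at a time using the stages[] array laid out by
xyt_generate(), while the BLASdot/BLASaxpy loops on either side of it are
purely local.
**************************************xyt.c***********************************/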

/**************************************xyt.c***********************************/
static PetscErrorCode check_handle(xyt_ADT xyt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM,GL_MIN,GL_MAX};

  PetscFunctionBegin;
  if (!xyt_handle) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: NULL %D\n",xyt_handle);

  vals[0]=vals[1]=xyt_handle->id;
  PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);
  if ((vals[0]!=vals[1])||(xyt_handle->id<=0)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: id mismatch min/max %D/%D %D\n", vals[0],vals[1], xyt_handle->id);
  PetscFunctionReturn(0);
}

/**************************************xyt.c***********************************/
static PetscErrorCode det_separators(xyt_ADT xyt_handle)
{
  PetscInt       i, ct, id;
  PetscInt       mask, edge, *iptr;
  PetscInt       *dir, *used;
  PetscInt       sum[4], w[4];
  PetscScalar    rsum[4], rw[4];
  PetscInt       op[] = {GL_ADD,0};
  PetscScalar    *lhs, *rhs;
  PetscInt       *nsep, *lnsep, *fo, nfo=0;
  PCTFS_gs_ADT   PCTFS_gs_handle=xyt_handle->mvi->PCTFS_gs_handle;
  PetscInt       *local2global  =xyt_handle->mvi->local2global;
  PetscInt       n              =xyt_handle->mvi->n;
  PetscInt       m              =xyt_handle->mvi->m;
  PetscInt       level          =xyt_handle->level;
  PetscInt       shared         =0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  dir  = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  nsep = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  lnsep= (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  fo   = (PetscInt*)malloc(sizeof(PetscInt)*(n+1));
  used = (PetscInt*)malloc(sizeof(PetscInt)*n);

  PCTFS_ivec_zero(dir,level+1);
  PCTFS_ivec_zero(nsep,level+1);
  PCTFS_ivec_zero(lnsep,level+1);
  PCTFS_ivec_set (fo,-1,n+1);
  PCTFS_ivec_zero(used,n);

  lhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);
  rhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);

  /* determine the # of unique dof */
  PCTFS_rvec_zero(lhs,m);
  PCTFS_rvec_set(lhs,1.0,n);
  PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",level);
  ierr = PetscInfo(0,"done first PCTFS_gs_gop_hc\n");CHKERRQ(ierr);
  PCTFS_rvec_zero(rsum,2);
  for (i=0; i<n; i++) {
    if (lhs[i]!=0.0) { rsum[0]+=1.0/lhs[i]; rsum[1]+=lhs[i]; }
    if (lhs[i]!=1.0) shared=1;
  }

  PCTFS_grop_hc(rsum,rw,2,op,level);
  rsum[0]+=0.1;
  rsum[1]+=0.1;

  xyt_handle->info->n_global=xyt_handle->info->m_global=(PetscInt) rsum[0];
  xyt_handle->mvi->n_global =xyt_handle->mvi->m_global =(PetscInt) rsum[0];

  /* determine separator sets top down */
  if (shared) {
    /* solution is to do as in the symmetric shared case but then */
    /* pick the sub-hc with the most free dofs and do a mat-vec   */
    /* and pick up the responses on the other sub-hc from the     */
    /* initial separator set obtained from the symm. shared case  */
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"shared dof separator determination not ready ... see hmt!!!\n");
    /* [dead code deleted since it is unlikely to be completed] */
  } else {
    for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level;edge>0;edge--,mask>>=1) {
      /* set rsh of hc, fire, and collect lhs responses */
      (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge);

      /* count number of dofs I own that have signal and not in sep set */
      for (PCTFS_ivec_zero(sum,4),ct=i=0;i<n;i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if ((id< mask)&&(lhs[i]!=0.0)) sum[0]++;
          /* number of dofs to be marked on rhs hc */
          if ((id>=mask)&&(rhs[i]!=0.0)) sum[1]++;
        }
      }

      /* for the non-symmetric case we need separators of width 2 */
      /* so take both sides */
      (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
      PCTFS_giop_hc(sum,w,4,op,edge);

      ct=0;
      if (id<mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (i=0;i<n;i++) {
          if ((!used[i])&&(lhs[i]!=0.0)) {
            ct++; nfo++;
            *--iptr = local2global[i];
            used[i] =edge;
          }
        }
        /* LSH hc summation of ct should be sum[0] */
      } else {
        /* mark dofs I own that have signal and not in sep set */
        for (i=0; i<n; i++) {
          if ((!used[i])&&(rhs[i]!=0.0)) {
            ct++; nfo++;
            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        /* RSH hc summation of ct should be sum[1] */
      }

      if (ct>1) PCTFS_ivec_sort(iptr,ct);
      lnsep[edge]=ct;
      nsep[edge] =sum[0]+sum[1];
      dir [edge] =BOTH;

      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this?                */

      /* fold rhs hc into lower */
      if (id>=mask) id-=mask;
    }
  }

  /* level 0 is on processor case - so mark the remainder */
  for (ct=i=0;i<n;i++) {
    if (!used[i]) {
      ct++; nfo++;
      *--iptr = local2global[i];
      used[i] = edge;
    }
  }
  if (ct>1) PCTFS_ivec_sort(iptr,ct);
  lnsep[edge]=ct;
  nsep [edge]=ct;
  dir  [edge]=BOTH;

  xyt_handle->info->nsep  = nsep;
  xyt_handle->info->lnsep = lnsep;
  xyt_handle->info->fo    = fo;
  xyt_handle->info->nfo   = nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(0);
}
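
/*************************************xyt.c************************************
Worked example of the separator sweep in det_separators() (illustrative
numbers only): with PCTFS_num_nodes=8 and level=3 the loop runs edge=3,2,1
with mask=4,2,1.  At edge=3, ids 0-3 form the left sub-hypercube and ids 4-7
the right; dofs visible from the opposite half are marked on both sides,
giving the width-2 separator for that edge.  After "id-=mask" folds the right
half onto the left, the same split recurs on 4 ids (mask=2), then 2 (mask=1);
edge==0 is the purely local remainder marked at the end of the function.
**************************************xyt.c***********************************/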

/**************************************xyt.c***********************************/
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info*,PetscScalar*,PetscScalar*), void *grid_data)
{
  mv_info *mvi;

  mvi              = (mv_info*)malloc(sizeof(mv_info));
  mvi->n           = n;
  mvi->m           = m;
  mvi->n_global    = -1;
  mvi->m_global    = -1;
  mvi->local2global= (PetscInt*)malloc((m+1)*sizeof(PetscInt));

  PCTFS_ivec_copy(mvi->local2global,local2global,m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec          = matvec;
  mvi->grid_data       = grid_data;

  /* set xyt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return(mvi);
}

/**************************************xyt.c***********************************/
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
  A->matvec((mv_info*)A->grid_data,v,u);
  PetscFunctionReturn(0);
}
786