
/*************************************xyt.c************************************
Module Name: xyt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL  60439              |Chicago, IL  60637              |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
**************************************xyt.c***********************************/
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT  1
#define BOTH   0

typedef struct xyt_solver_info {
  PetscInt n, m, n_global, m_global;
  PetscInt nnz, max_nnz, msg_buf_sz;
  PetscInt *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt *xcol_sz, *xcol_indices;
  PetscScalar **xcol_vals, *x, *solve_uu, *solve_w;
  PetscInt *ycol_sz, *ycol_indices;
  PetscScalar **ycol_vals, *y;
  PetscInt nsolves;
  PetscScalar tot_solve_time;
} xyt_info;

typedef struct matvec_info {
  PetscInt n, m, n_global, m_global;
  PetscInt *local2global;
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info*,PetscScalar*,PetscScalar*);
  void *grid_data;
} mv_info;

struct xyt_CDT{
  PetscInt id;
  PetscInt ns;
  PetscInt level;
  xyt_info *info;
  mv_info  *mvi;
};

static PetscInt n_xyt=0;
static PetscInt n_xyt_handles=0;

/* prototypes */
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xyt_ADT xyt_handle);
static PetscErrorCode det_separators(xyt_ADT xyt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscInt xyt_generate(xyt_ADT xyt_handle);
static PetscInt do_xyt_factor(xyt_ADT xyt_handle);
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data);

/**************************************xyt.c***********************************/
xyt_ADT XYT_new(void)
{
  xyt_ADT xyt_handle;

  /* rolling count on n_xyt ... pot. problem here */
  n_xyt_handles++;
  xyt_handle       = (xyt_ADT)malloc(sizeof(struct xyt_CDT));
  xyt_handle->id   = ++n_xyt;
  xyt_handle->info = NULL;
  xyt_handle->mvi  = NULL;

  return(xyt_handle);
}
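
/* a minimal lifecycle sketch of the public API in this file (illustrative
   only - local2global, n, m, my_matvec, and grid are caller-supplied
   placeholders, not part of this module):

     xyt_ADT handle = XYT_new();
     XYT_factor(handle, local2global, n, m, (void*)my_matvec, grid);
     XYT_solve(handle, x, b);       x <- X.Y^T.b
     XYT_free(handle);
*/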

/**************************************xyt.c***********************************/
PetscInt XYT_factor(xyt_ADT xyt_handle,     /* prev. allocated xyt handle  */
                    PetscInt *local2global, /* global column mapping       */
                    PetscInt n,             /* local num rows              */
                    PetscInt m,             /* local num cols              */
                    void *matvec,           /* b_loc=A_local.x_loc         */
                    void *grid_data)        /* grid data for matvec        */
{
  PCTFS_comm_init();
  check_handle(xyt_handle);

  /* only 2^k for now and all nodes participating */
  if ((1<<(xyt_handle->level=PCTFS_i_log2_num_nodes))!=PCTFS_num_nodes) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"only 2^k for now and MPI_COMM_WORLD!!! %D != %D\n",1<<PCTFS_i_log2_num_nodes,PCTFS_num_nodes);

  /* space for X info */
  xyt_handle->info = (xyt_info*)malloc(sizeof(xyt_info));

  /* set up matvec handles */
  xyt_handle->mvi = set_mvi(local2global, n, m, matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank deficiency */
  xyt_handle->ns=0;

  /* determine separators and generate firing order - NB xyt info set here */
  det_separators(xyt_handle);

  return(do_xyt_factor(xyt_handle));
}
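
/* sketch of a conforming matvec callback (illustrative only - MyGrid and
   my_matvec are hypothetical names); judging from do_matvec() below, the
   callback is invoked with the grid_data pointer as its first argument,
   so it sees its own data rather than the mv_info wrapper:

     PetscErrorCode my_matvec(void *grid_data, PetscScalar *in, PetscScalar *out)
     {
       MyGrid *g = (MyGrid*)grid_data;
       ... compute out = A_local . in ...
       return(0);
     }
*/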

/**************************************xyt.c***********************************/
PetscInt XYT_solve(xyt_ADT xyt_handle, PetscScalar *x, PetscScalar *b)
{
  PCTFS_comm_init();
  check_handle(xyt_handle);

  /* need to copy b into x? */
  if (b) { PCTFS_rvec_copy(x,b,xyt_handle->mvi->n); }
  do_xyt_solve(xyt_handle,x);

  return(0);
}

/**************************************xyt.c***********************************/
PetscInt XYT_free(xyt_ADT xyt_handle)
{
  PCTFS_comm_init();
  check_handle(xyt_handle);
  n_xyt_handles--;

  free(xyt_handle->info->nsep);
  free(xyt_handle->info->lnsep);
  free(xyt_handle->info->fo);
  free(xyt_handle->info->stages);
  free(xyt_handle->info->solve_uu);
  free(xyt_handle->info->solve_w);
  free(xyt_handle->info->x);
  free(xyt_handle->info->xcol_vals);
  free(xyt_handle->info->xcol_sz);
  free(xyt_handle->info->xcol_indices);
  free(xyt_handle->info->y);
  free(xyt_handle->info->ycol_vals);
  free(xyt_handle->info->ycol_sz);
  free(xyt_handle->info->ycol_indices);
  free(xyt_handle->info);
  free(xyt_handle->mvi->local2global);
  PCTFS_gs_free(xyt_handle->mvi->PCTFS_gs_handle);
  free(xyt_handle->mvi);
  free(xyt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  return(0);
}

/**************************************xyt.c***********************************/
PetscInt XYT_stats(xyt_ADT xyt_handle)
{
  PetscInt    op[]  = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD};
  PetscInt    fop[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD};
  PetscInt    vals[9],  work[9];
  PetscScalar fvals[3], fwork[3];

  PCTFS_comm_init();
  check_handle(xyt_handle);

  /* if factorization not done there are no stats */
  if (!xyt_handle->info||!xyt_handle->mvi) {
    if (!PCTFS_my_id) PetscPrintf(PETSC_COMM_WORLD,"XYT_stats() :: no stats available!\n");
    return 1;
  }

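  /* each statistic is loaded in triplicate so a single reduction returns
     min, max, and total at once: op[] appears to pair slot 3k with GL_MIN,
     3k+1 with GL_MAX, and 3k+2 with GL_ADD (NON_UNIFORM flagging per-slot
     operations) */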
  vals[0]=vals[1]=vals[2]=xyt_handle->info->nnz;
  vals[3]=vals[4]=vals[5]=xyt_handle->mvi->n;
  vals[6]=vals[7]=vals[8]=xyt_handle->info->msg_buf_sz;
  PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);

  fvals[0]=fvals[1]=fvals[2]=xyt_handle->info->tot_solve_time/xyt_handle->info->nsolves++;
  PCTFS_grop(fvals,fwork,sizeof(fop)/sizeof(fop[0])-1,fop);

  if (!PCTFS_my_id) {
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xyt_nnz=%D\n",PCTFS_my_id,vals[0]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xyt_nnz=%D\n",PCTFS_my_id,vals[1]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xyt_nnz=%g\n",PCTFS_my_id,1.0*vals[2]/PCTFS_num_nodes);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xyt_nnz=%D\n",PCTFS_my_id,vals[2]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: xyt   C(2d)  =%g\n",PCTFS_my_id,vals[2]/(pow(1.0*vals[5],1.5)));
    PetscPrintf(PETSC_COMM_WORLD,"%D :: xyt   C(3d)  =%g\n",PCTFS_my_id,vals[2]/(pow(1.0*vals[5],1.6667)));
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xyt_n  =%D\n",PCTFS_my_id,vals[3]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xyt_n  =%D\n",PCTFS_my_id,vals[4]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xyt_n  =%g\n",PCTFS_my_id,1.0*vals[5]/PCTFS_num_nodes);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xyt_n  =%D\n",PCTFS_my_id,vals[5]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xyt_buf=%D\n",PCTFS_my_id,vals[6]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xyt_buf=%D\n",PCTFS_my_id,vals[7]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xyt_buf=%g\n",PCTFS_my_id,1.0*vals[8]/PCTFS_num_nodes);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xyt_slv=%g\n",PCTFS_my_id,fvals[0]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xyt_slv=%g\n",PCTFS_my_id,fvals[1]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xyt_slv=%g\n",PCTFS_my_id,fvals[2]/PCTFS_num_nodes);
  }

  return(0);
}

/*************************************xyt.c************************************

Description: get A_local, the local portion of the global coarse matrix,
a row-distributed nxm matrix with n<m.
   o my_ml holds the address of the ML struct associated with A_local and
     the coarse grid
   o local2global holds the global number of column i (i=0,...,m-1)
   o local2global holds the global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed
     using PCTFS_gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
**************************************xyt.c***********************************/
static PetscInt do_xyt_factor(xyt_ADT xyt_handle)
{
  return xyt_generate(xyt_handle);
}

/**************************************xyt.c***********************************/
static PetscInt xyt_generate(xyt_ADT xyt_handle)
{
  PetscInt i,j,k,idx;
  PetscInt dim, col;
  PetscScalar *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt *segs;
  PetscInt op[] = {GL_ADD,0};
  PetscInt off, len;
  PetscScalar *x_ptr, *y_ptr;
  PetscInt *iptr, flag;
  PetscInt start=0, end, work;
  PetscInt op2[] = {GL_MIN,0};
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscInt *nsep, *lnsep, *fo;
  PetscInt a_n=xyt_handle->mvi->n;
  PetscInt a_m=xyt_handle->mvi->m;
  PetscInt *a_local2global=xyt_handle->mvi->local2global;
  PetscInt level;
  PetscInt n, m;
  PetscInt *xcol_sz, *xcol_indices, *stages;
  PetscScalar **xcol_vals, *x;
  PetscInt *ycol_sz, *ycol_indices;
  PetscScalar **ycol_vals, *y;
  PetscInt n_global;
  PetscInt xt_nnz=0, xt_max_nnz=0;
  PetscInt yt_nnz=0, yt_max_nnz=0;
  PetscInt xt_zero_nnz=0;
  PetscInt xt_zero_nnz_0=0;
  PetscInt yt_zero_nnz=0;
  PetscInt yt_zero_nnz_0=0;
  PetscBLASInt i1 = 1,dlen;
  PetscScalar dm1 = -1.0;
  PetscErrorCode ierr;

  n=xyt_handle->mvi->n;
  nsep=xyt_handle->info->nsep;
  lnsep=xyt_handle->info->lnsep;
  fo=xyt_handle->info->fo;
  end=lnsep[0];
  level=xyt_handle->level;
  PCTFS_gs_handle=xyt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i=0, j=0; i<=level; i++) { j+=nsep[i]; }

  m = j-xyt_handle->ns;
  if (m!=j) {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"xyt_generate() :: null space exists %D %D %D\n",m,j,xyt_handle->ns);CHKERRQ(ierr);
  }

  ierr = PetscInfo2(0,"xyt_generate() :: X(%D,%D)\n",n,m);CHKERRQ(ierr);

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  xcol_sz      = (PetscInt*) malloc(m*sizeof(PetscInt));
  xcol_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  xcol_vals    = (PetscScalar**) malloc(m*sizeof(PetscScalar*));
  for (i=j=0; i<m; i++, j+=2) {
    xcol_indices[j]=xcol_indices[j+1]=xcol_sz[i]=-1;
    xcol_vals[i] = NULL;
  }
  xcol_indices[j]=-1;

  /* get and initialize storage for y local         */
  /* note that y local is nxm and stored by columns */
  ycol_sz      = (PetscInt*) malloc(m*sizeof(PetscInt));
  ycol_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  ycol_vals    = (PetscScalar**) malloc(m*sizeof(PetscScalar*));
  for (i=j=0; i<m; i++, j+=2) {
    ycol_indices[j]=ycol_indices[j+1]=ycol_sz[i]=-1;
    ycol_vals[i] = NULL;
  }
  ycol_indices[j]=-1;
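
  /* xcol_indices/ycol_indices each store an (offset,length) pair per
     column, terminated by -1; e.g. for m=2 the layout is
         {off_0, len_0, off_1, len_1, -1}
     where column i covers entries off_i..off_i+len_i-1 of the dense local
     vector and its values sit contiguously in x (resp. y) */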

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  segs   = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  PCTFS_ivec_zero(stages,level+1);
  PCTFS_ivec_copy(segs,nsep,level+1);
  for (i=0; i<level; i++) { segs[i+1] += segs[i]; }
  stages[0] = segs[0];
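
  /* segs[] is now the running prefix sum of nsep[]; e.g. for level=2 and
     nsep={4,2,1} this gives segs={4,6,7}, so (roughly) columns 0..3 fire
     within the local dim=0 stage, 4..5 across the first hypercube edge,
     and column 6 at the top level */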

  /* temporary vectors */
  u  = (PetscScalar*) malloc(n*sizeof(PetscScalar));
  z  = (PetscScalar*) malloc(n*sizeof(PetscScalar));
  v  = (PetscScalar*) malloc(a_m*sizeof(PetscScalar));
  uu = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  w  = (PetscScalar*) malloc(m*sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i=1, j=0; i<=level; i++) { j+=nsep[i]; }

  /* storage for sparse x values */
  n_global   = xyt_handle->info->n_global;
  xt_max_nnz = yt_max_nnz = (PetscInt)(2.5*pow(1.0*n_global,1.6667) + j*n/2)/PCTFS_num_nodes;
  x = (PetscScalar*) malloc(xt_max_nnz*sizeof(PetscScalar));
  y = (PetscScalar*) malloc(yt_max_nnz*sizeof(PetscScalar));
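
  /* the initial allocation above is apparently sized from an empirical 3d
     fill estimate ~n_global^(5/3) (cf. the C(3d) statistic in XYT_stats())
     plus the separator-replication term j*n/2, divided across processors;
     the loop below doubles x/y on demand if this guess is too small */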

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim=i=j=0;i<m;i++) {
    /* time to move to the next level? */
    while (i==segs[dim]) {
      if (dim==level) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"dim about to exceed level\n");
      stages[dim++]=i;
      end+=lnsep[dim];
    }
    stages[dim]=i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    (start<end) ? (col=fo[start]) : (col=INT_MAX);
    PCTFS_giop_hc(&col,&work,1,op2,dim);

    /* shouldn't need this */
    if (col==INT_MAX) {
      ierr = PetscInfo(0,"hey ... col==INT_MAX??\n");CHKERRQ(ierr);
      continue;
    }

    /* do I own it? I should */
    PCTFS_rvec_zero(v,a_m);
    if (col==fo[start]) {
      start++;
      idx=PCTFS_ivec_linear_search(col, a_local2global, a_n);
      if (idx!=-1) { v[idx] = 1.0; j++; }
      else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"NOT FOUND!\n");
    } else {
      idx=PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idx!=-1) { v[idx] = 1.0; }
    }

    /* perform u = A.v_l */
    PCTFS_rvec_zero(u,n);
    do_matvec(xyt_handle->mvi,v,u);

    /* uu = Y^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XYT_solve call ? */
    PCTFS_rvec_zero(uu,m);
    y_ptr=y;
    iptr = ycol_indices;
    for (k=0; k<i; k++) {
      off = *iptr++;
      len = *iptr++;
      dlen = PetscBLASIntCast(len);
      uu[k] = BLASdot_(&dlen,u+off,&i1,y_ptr,&i1);
      y_ptr+=len;
    }

    /* uu = Y^T.u_l (comm portion) */
    PCTFS_ssgl_radd(uu, w, dim, stages);

    /* z = X.uu */
    PCTFS_rvec_zero(z,n);
    x_ptr=x;
    iptr = xcol_indices;
    for (k=0; k<i; k++) {
      off = *iptr++;
      len = *iptr++;
      dlen = PetscBLASIntCast(len);
      BLASaxpy_(&dlen,&uu[k],x_ptr,&i1,z+off,&i1);
      x_ptr+=len;
    }

    /* compute v_l = v_l - z */
    PCTFS_rvec_zero(v+a_n,a_m-a_n);
    dlen = PetscBLASIntCast(n);
    BLASaxpy_(&dlen,&dm1,z,&i1,v,&i1);

    /* compute u_l = A.v_l */
    if (a_n!=a_m) { PCTFS_gs_gop_hc(PCTFS_gs_handle,v,"+\0",dim); }
    PCTFS_rvec_zero(u,n);
    do_matvec(xyt_handle->mvi,v,u);

    /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - local portion */
    dlen = PetscBLASIntCast(n);
    alpha = BLASdot_(&dlen,u,&i1,u,&i1);
    /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - comm portion */
    PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim);

    alpha = (PetscScalar) PetscSqrtReal((PetscReal)alpha);

    /* check for small alpha                             */
    /* LATER use this to detect and determine null space */
    if (PetscAbsScalar(alpha)<1.0e-14) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"bad alpha! %g\n",alpha);

    /* compute v_l = v_l/sqrt(alpha) */
    PCTFS_rvec_scale(v,1.0/alpha,n);
    PCTFS_rvec_scale(u,1.0/alpha,n);

    /* add newly generated column, v_l, to X */
    flag = 1;
    off=len=0;
    for (k=0; k<n; k++) {
      if (v[k]!=0.0) {
        len=k;
        if (flag) { off=k; flag=0; }
      }
    }

    len -= (off-1);
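
    /* off/len now bracket the nonzero span of v; e.g. for n=6 and
       v = {0, 0, 3, 0, 5, 0} the scan leaves off=2 (first nonzero) and
       len=4 (last nonzero index), and len -= (off-1) converts len to the
       span length 3; the same scan is repeated for u below */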

    if (len>0) {
      if ((xt_nnz+len)>xt_max_nnz) {
        ierr = PetscInfo(0,"increasing space for X by 2x!\n");CHKERRQ(ierr);
        xt_max_nnz *= 2;
        x_ptr = (PetscScalar*) malloc(xt_max_nnz*sizeof(PetscScalar));
        PCTFS_rvec_copy(x_ptr,x,xt_nnz);
        free(x);
        x = x_ptr;
        x_ptr+=xt_nnz;
      }
      xt_nnz += len;
      PCTFS_rvec_copy(x_ptr,v+off,len);

      /* keep track of number of zeros */
      if (dim) {
        for (k=0; k<len; k++) {
          if (x_ptr[k]==0.0) { xt_zero_nnz++; }
        }
      } else {
        for (k=0; k<len; k++) {
          if (x_ptr[k]==0.0) { xt_zero_nnz_0++; }
        }
      }
      xcol_indices[2*i] = off;
      xcol_sz[i] = xcol_indices[2*i+1] = len;
      xcol_vals[i] = x_ptr;
    } else {
      xcol_indices[2*i] = 0;
      xcol_sz[i] = xcol_indices[2*i+1] = 0;
      xcol_vals[i] = x_ptr;
    }

    /* add newly generated column, u_l, to Y */
    flag = 1;
    off=len=0;
    for (k=0; k<n; k++) {
      if (u[k]!=0.0) {
        len=k;
        if (flag) { off=k; flag=0; }
      }
    }

    len -= (off-1);

    if (len>0) {
      if ((yt_nnz+len)>yt_max_nnz) {
        ierr = PetscInfo(0,"increasing space for Y by 2x!\n");CHKERRQ(ierr);
        yt_max_nnz *= 2;
        y_ptr = (PetscScalar*) malloc(yt_max_nnz*sizeof(PetscScalar));
        PCTFS_rvec_copy(y_ptr,y,yt_nnz);
        free(y);
        y = y_ptr;
        y_ptr+=yt_nnz;
      }
      yt_nnz += len;
      PCTFS_rvec_copy(y_ptr,u+off,len);

      /* keep track of number of zeros */
      if (dim) {
        for (k=0; k<len; k++) {
          if (y_ptr[k]==0.0) { yt_zero_nnz++; }
        }
      } else {
        for (k=0; k<len; k++) {
          if (y_ptr[k]==0.0) { yt_zero_nnz_0++; }
        }
      }
      ycol_indices[2*i] = off;
      ycol_sz[i] = ycol_indices[2*i+1] = len;
      ycol_vals[i] = y_ptr;
    } else {
      ycol_indices[2*i] = 0;
      ycol_sz[i] = ycol_indices[2*i+1] = 0;
      ycol_vals[i] = y_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim!=level) {
    stages[dim++]=i;
    ierr = PetscInfo2(0,"disconnected!!! dim(%D)!=level(%D)\n",dim,level);CHKERRQ(ierr);
  }
  stages[dim]=i;

  xyt_handle->info->n=xyt_handle->mvi->n;
  xyt_handle->info->m=m;
  xyt_handle->info->nnz=xt_nnz + yt_nnz;
  xyt_handle->info->max_nnz=xt_max_nnz + yt_max_nnz;
  xyt_handle->info->msg_buf_sz=stages[level]-stages[0];
  xyt_handle->info->solve_uu = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  xyt_handle->info->solve_w  = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  xyt_handle->info->x=x;
  xyt_handle->info->xcol_vals=xcol_vals;
  xyt_handle->info->xcol_sz=xcol_sz;
  xyt_handle->info->xcol_indices=xcol_indices;
  xyt_handle->info->stages=stages;
  xyt_handle->info->y=y;
  xyt_handle->info->ycol_vals=ycol_vals;
  xyt_handle->info->ycol_sz=ycol_sz;
  xyt_handle->info->ycol_indices=ycol_indices;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return(0);
}

/**************************************xyt.c***********************************/
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *uc)
{
  PetscInt off, len, *iptr;
  PetscInt level        =xyt_handle->level;
  PetscInt n            =xyt_handle->info->n;
  PetscInt m            =xyt_handle->info->m;
  PetscInt *stages      =xyt_handle->info->stages;
  PetscInt *xcol_indices=xyt_handle->info->xcol_indices;
  PetscInt *ycol_indices=xyt_handle->info->ycol_indices;
  PetscScalar *x_ptr, *y_ptr, *uu_ptr;
  PetscScalar *solve_uu=xyt_handle->info->solve_uu;
  PetscScalar *solve_w =xyt_handle->info->solve_w;
  PetscScalar *x       =xyt_handle->info->x;
  PetscScalar *y       =xyt_handle->info->y;
  PetscBLASInt i1 = 1,dlen;

  PetscFunctionBegin;
  uu_ptr=solve_uu;
  PCTFS_rvec_zero(uu_ptr,m);

  /* x  = X.Y^T.b */
  /* uu = Y^T.b */
  for (y_ptr=y,iptr=ycol_indices; *iptr!=-1; y_ptr+=len) {
    off=*iptr++; len=*iptr++;
    dlen = PetscBLASIntCast(len);
    *uu_ptr++ = BLASdot_(&dlen,uc+off,&i1,y_ptr,&i1);
  }

  /* communication of beta */
  uu_ptr=solve_uu;
  if (level) { PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages); }

  PCTFS_rvec_zero(uc,n);

  /* x = X.uu */
  for (x_ptr=x,iptr=xcol_indices; *iptr!=-1; x_ptr+=len) {
    off=*iptr++; len=*iptr++;
    dlen = PetscBLASIntCast(len);
    BLASaxpy_(&dlen,uu_ptr++,x_ptr,&i1,uc+off,&i1);
  }
  PetscFunctionReturn(0);
}
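
/* in matrix terms the solve above computes, in place,

     uc <- X.(Y^T.uc)

   i.e. the factored approximate inverse A^-1 = X.Y^T is applied in two
   sparse passes: a local dot against each stored column of Y (plus the
   PCTFS_ssgl_radd() communication that assembles the global uu), followed
   by an axpy accumulation of the columns of X */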

/**************************************xyt.c***********************************/
static PetscErrorCode check_handle(xyt_ADT xyt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM,GL_MIN,GL_MAX};

  PetscFunctionBegin;
  if (!xyt_handle) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: NULL %D\n",xyt_handle);

  vals[0]=vals[1]=xyt_handle->id;
  PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);
  if ((vals[0]!=vals[1])||(xyt_handle->id<=0)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: id mismatch min/max %D/%D %D\n",vals[0],vals[1],xyt_handle->id);
  PetscFunctionReturn(0);
}
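
/* det_separators() below walks the hypercube from the top edge down; a
   worked example for PCTFS_num_nodes=8 (level=3): the loop visits
   (edge,mask) = (3,4), (2,2), (1,1), and at each edge the upper half-cube
   (id>=mask) folds into the lower via id-=mask, so every processor ends
   at id=0 for the on-processor (edge=0) remainder */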

/**************************************xyt.c***********************************/
static PetscErrorCode det_separators(xyt_ADT xyt_handle)
{
  PetscInt i, ct, id;
  PetscInt mask, edge, *iptr;
  PetscInt *dir, *used;
  PetscInt sum[4], w[4];
  PetscScalar rsum[4], rw[4];
  PetscInt op[] = {GL_ADD,0};
  PetscScalar *lhs, *rhs;
  PetscInt *nsep, *lnsep, *fo, nfo=0;
  PCTFS_gs_ADT PCTFS_gs_handle=xyt_handle->mvi->PCTFS_gs_handle;
  PetscInt *local2global=xyt_handle->mvi->local2global;
  PetscInt n=xyt_handle->mvi->n;
  PetscInt m=xyt_handle->mvi->m;
  PetscInt level=xyt_handle->level;
  PetscInt shared=0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  dir  = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  nsep = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  lnsep= (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  fo   = (PetscInt*)malloc(sizeof(PetscInt)*(n+1));
  used = (PetscInt*)malloc(sizeof(PetscInt)*n);

  PCTFS_ivec_zero(dir  ,level+1);
  PCTFS_ivec_zero(nsep ,level+1);
  PCTFS_ivec_zero(lnsep,level+1);
  PCTFS_ivec_set (fo   ,-1,n+1);
  PCTFS_ivec_zero(used,n);

  lhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);
  rhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);

  /* determine the # of unique dof */
  PCTFS_rvec_zero(lhs,m);
  PCTFS_rvec_set(lhs,1.0,n);
  PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",level);
  ierr = PetscInfo(0,"done first PCTFS_gs_gop_hc\n");CHKERRQ(ierr);
  PCTFS_rvec_zero(rsum,2);
  for (ct=i=0;i<n;i++) {
    if (lhs[i]!=0.0) { rsum[0]+=1.0/lhs[i]; rsum[1]+=lhs[i]; }
    if (lhs[i]!=1.0) { shared=1; }
  }
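
  /* counting trick: each locally owned dof contributed 1.0, so after the
     gs sum lhs[i] holds the number of processors sharing dof i; a dof
     shared by two ranks adds 1/2 from each side, hence the global sum of
     rsum[0] is the number of unique dofs (the +0.1 below guards the
     truncating cast to PetscInt) */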

  PCTFS_grop_hc(rsum,rw,2,op,level);
  rsum[0]+=0.1;
  rsum[1]+=0.1;

  xyt_handle->info->n_global=xyt_handle->info->m_global=(PetscInt) rsum[0];
  xyt_handle->mvi->n_global =xyt_handle->mvi->m_global =(PetscInt) rsum[0];

  /* determine separator sets top down */
  if (shared) {
    /* solution is to do as in the symmetric shared case but then */
    /* pick the sub-hc with the most free dofs and do a mat-vec   */
    /* and pick up the responses on the other sub-hc from the     */
    /* initial separator set obtained from the symm. shared case  */
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"shared dof separator determination not ready ... see hmt!!!\n");
    for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level;edge>0;edge--,mask>>=1) {
      /* set rsh of hc, fire, and collect lhs responses */
      (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge);

      for (i=0;i<n;i++) {
        if (id< mask) {
          if (lhs[i]!=0.0) { lhs[i]=1.0; }
        }
        if (id>=mask) {
          if (rhs[i]!=0.0) { rhs[i]=1.0; }
        }
      }

      if (id< mask) { PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge-1); }
      else          { PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge-1); }

      /* count number of dofs I own that have signal and not in sep set */
      PCTFS_rvec_zero(rsum,4);
      for (PCTFS_ivec_zero(sum,4),ct=i=0;i<n;i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if (id< mask) {
            if (lhs[i]!=0.0) { sum[0]++; rsum[0]+=1.0/lhs[i]; }
          }
          /* number of dofs to be marked on rhs hc */
          if (id>=mask) {
            if (rhs[i]!=0.0) { sum[1]++; rsum[1]+=1.0/rhs[i]; }
          }
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
      (id<mask) ? (rsum[2]=ct) : (rsum[3]=ct);
      PCTFS_giop_hc(sum,w,4,op,edge);
      PCTFS_grop_hc(rsum,rw,4,op,edge);
      rsum[0]+=0.1; rsum[1]+=0.1; rsum[2]+=0.1; rsum[3]+=0.1;

      if (id<mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct=i=0;i<n;i++) {
          if ((!used[i])&&(lhs[i]!=0.0)) {
            ct++; nfo++;

            if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

            *--iptr = local2global[i];
            used[i]=edge;
          }
        }
        if (ct>1) { PCTFS_ivec_sort(iptr,ct); }

        lnsep[edge]=ct;
        nsep[edge]=(PetscInt) rsum[0];
        dir [edge]=LEFT;
      }

      if (id>=mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct=i=0;i<n;i++) {
          if ((!used[i])&&(rhs[i]!=0.0)) {
            ct++; nfo++;

            if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

            *--iptr = local2global[i];
            used[i]=edge;
          }
        }
        if (ct>1) { PCTFS_ivec_sort(iptr,ct); }

        lnsep[edge]=ct;
        nsep[edge]= (PetscInt) rsum[1];
        dir [edge]=RIGHT;
      }

      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this?                */

      /* fold rhs hc into lower */
      if (id>=mask) { id-=mask; }
    }
  } else {
    for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level;edge>0;edge--,mask>>=1) {
      /* set rsh of hc, fire, and collect lhs responses */
      (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge);

      /* count number of dofs I own that have signal and not in sep set */
      for (PCTFS_ivec_zero(sum,4),ct=i=0;i<n;i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if ((id< mask)&&(lhs[i]!=0.0)) { sum[0]++; }
          /* number of dofs to be marked on rhs hc */
          if ((id>=mask)&&(rhs[i]!=0.0)) { sum[1]++; }
        }
      }

      /* for the non-symmetric case we need separators of width 2 */
      /* so take both sides */
      (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
      PCTFS_giop_hc(sum,w,4,op,edge);

      ct=0;
      if (id<mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (i=0;i<n;i++) {
          if ((!used[i])&&(lhs[i]!=0.0)) {
            ct++; nfo++;
            *--iptr = local2global[i];
            used[i]=edge;
          }
        }
        /* LSH hc summation of ct should be sum[0] */
      } else {
        /* mark dofs I own that have signal and not in sep set */
        for (i=0;i<n;i++) {
          if ((!used[i])&&(rhs[i]!=0.0)) {
            ct++; nfo++;
            *--iptr = local2global[i];
            used[i]=edge;
          }
        }
        /* RSH hc summation of ct should be sum[1] */
      }

      if (ct>1) { PCTFS_ivec_sort(iptr,ct); }
      lnsep[edge]=ct;
      nsep[edge]=sum[0]+sum[1];
      dir [edge]=BOTH;

      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this?                */

      /* fold rhs hc into lower */
      if (id>=mask) { id-=mask; }
    }
  }

  /* level 0 is the on-processor case - so mark the remainder */
  for (ct=i=0;i<n;i++) {
    if (!used[i]) {
      ct++; nfo++;
      *--iptr = local2global[i];
      used[i]=edge;
    }
  }
  if (ct>1) { PCTFS_ivec_sort(iptr,ct); }
  lnsep[edge]=ct;
  nsep [edge]=ct;
  dir  [edge]=BOTH;

  xyt_handle->info->nsep=nsep;
  xyt_handle->info->lnsep=lnsep;
  xyt_handle->info->fo=fo;
  xyt_handle->info->nfo=nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(0);
}

/**************************************xyt.c***********************************/
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data)
{
  mv_info *mvi;

  mvi = (mv_info*)malloc(sizeof(mv_info));
  mvi->n=n;
  mvi->m=m;
  mvi->n_global=-1;
  mvi->m_global=-1;
  mvi->local2global=(PetscInt*)malloc((m+1)*sizeof(PetscInt));
  PCTFS_ivec_copy(mvi->local2global,local2global,m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec=(PetscErrorCode (*)(mv_info*,PetscScalar*,PetscScalar*))matvec;
  mvi->grid_data=grid_data;

  /* set xyt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return(mvi);
}

/**************************************xyt.c***********************************/
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
  /* note: the user callback is handed grid_data (cast to mv_info* only to
     satisfy the stored prototype), not the mv_info itself */
  A->matvec((mv_info*)A->grid_data,v,u);
  PetscFunctionReturn(0);
}
884