xref: /petsc/src/ksp/pc/impls/tfs/xyt.c (revision d736bfeb4d37a01fcbdf00fe73fb60d6f0ba2142)
1 #define PETSCKSP_DLL
2 
3 /*************************************xyt.c************************************
4 Module Name: xyt
5 Module Info:
6 
7 author:  Henry M. Tufo III
8 e-mail:  hmt@asci.uchicago.edu
9 contact:
10 +--------------------------------+--------------------------------+
11 |MCS Division - Building 221     |Department of Computer Science  |
12 |Argonne National Laboratory     |Ryerson 152                     |
13 |9700 S. Cass Avenue             |The University of Chicago       |
14 |Argonne, IL  60439              |Chicago, IL  60637              |
15 |(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
16 +--------------------------------+--------------------------------+
17 
18 Last Modification: 3.20.01
19 **************************************xyt.c***********************************/
20 #include "../src/ksp/pc/impls/tfs/tfs.h"
21 
22 #define LEFT  -1
23 #define RIGHT  1
24 #define BOTH   0
25 
/* Per-solver factorization data: the sparse X and Y factors (stored
   column-wise in packed form) plus solve-time work space and statistics. */
typedef struct xyt_solver_info {
  PetscInt n, m, n_global, m_global;         /* local rows/cols and global rows/cols of A_local */
  PetscInt nnz, max_nnz, msg_buf_sz;         /* nnz in X+Y, allocated capacity, stage-sum buffer size */
  PetscInt *nsep, *lnsep, *fo, nfo, *stages; /* separator sizes per level, local separator sizes,
                                                firing order (global ids), #fired, stage offsets for ssgl_radd */
  PetscInt *xcol_sz, *xcol_indices;          /* X: per-column length; flat (offset,length) pairs, -1 terminated */
  PetscScalar **xcol_vals, *x, *solve_uu, *solve_w; /* X: per-column value pointers, packed values; solve scratch */
  PetscInt *ycol_sz, *ycol_indices;          /* Y: per-column length; flat (offset,length) pairs, -1 terminated */
  PetscScalar **ycol_vals, *y;               /* Y: per-column value pointers, packed values */
  PetscInt nsolves;                          /* number of solves performed (used by XYT_stats) */
  PetscScalar tot_solve_time;                /* accumulated solve time (used by XYT_stats) */
} xyt_info;
37 
38 
/* Wrapper around the user's local matrix-vector product: dimensions,
   column map, gather/scatter handle and the callback + its context. */
typedef struct matvec_info {
  PetscInt n, m, n_global, m_global; /* local rows/cols; globals filled in by det_separators() */
  PetscInt *local2global;            /* global id of local column i, i=0..m-1; owned copy, INT_MAX terminated */
  gs_ADT gs_handle;                  /* gather/scatter handle for shared-dof communication */
  PetscErrorCode (*matvec)(struct matvec_info*,PetscScalar*,PetscScalar*); /* b_loc = A_local . x_loc */
  void *grid_data;                   /* opaque user context passed to matvec (see do_matvec) */
} mv_info;
46 
/* Opaque XYT handle: identity plus the two substructures created lazily
   by XYT_factor() (info = factorization, mvi = matvec wrapper). */
struct xyt_CDT{
  PetscInt id;    /* unique positive id, checked for consistency across ranks */
  PetscInt ns;    /* null-space dimension; 0 = full rank (set in XYT_factor) */
  PetscInt level; /* log2(num_nodes) hypercube depth */
  xyt_info *info; /* factorization data, NULL until XYT_factor() */
  mv_info  *mvi;  /* matvec data, NULL until XYT_factor() */
};
54 
static PetscInt n_xyt=0;         /* total handles ever created; source of unique ids */
static PetscInt n_xyt_handles=0; /* currently live handles (incremented in XYT_new, decremented in XYT_free) */
57 
58 /* prototypes */
59 static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *rhs);
60 static PetscErrorCode check_handle(xyt_ADT xyt_handle);
61 static PetscErrorCode det_separators(xyt_ADT xyt_handle);
62 static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
63 static PetscInt xyt_generate(xyt_ADT xyt_handle);
64 static PetscInt do_xyt_factor(xyt_ADT xyt_handle);
65 static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data);
66 
67 /**************************************xyt.c***********************************/
68 xyt_ADT XYT_new(void)
69 {
70   xyt_ADT xyt_handle;
71 
72   /* rolling count on n_xyt ... pot. problem here */
73   n_xyt_handles++;
74   xyt_handle       = (xyt_ADT)malloc(sizeof(struct xyt_CDT));
75   xyt_handle->id   = ++n_xyt;
76   xyt_handle->info = NULL;
77   xyt_handle->mvi  = NULL;
78 
79   return(xyt_handle);
80 }
81 
82 /**************************************xyt.c***********************************/
/**************************************xyt.c***********************************/
/* Build the XYT factorization for the local matrix described by
   (local2global, n, m, matvec, grid_data).  Requires num_nodes to be a
   power of two.  Returns the result of the factor stage (0 on success). */
PetscInt XYT_factor(xyt_ADT xyt_handle, /* prev. allocated xyt  handle */
	   PetscInt *local2global,  /* global column mapping       */
	   PetscInt n,              /* local num rows              */
	   PetscInt m,              /* local num cols              */
	   void *matvec,       /* b_loc=A_local.x_loc         */
	   void *grid_data     /* grid data for matvec        */
	   )
{

  comm_init();
  check_handle(xyt_handle);

  /* only 2^k for now and all nodes participating */
  if ((1<<(xyt_handle->level=i_log2_num_nodes))!=num_nodes) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"only 2^k for now and MPI_COMM_WORLD!!! %D != %D\n",1<<i_log2_num_nodes,num_nodes);

  /* space for X info */
  /* NOTE(review): malloc result is not checked; a NULL here is dereferenced
     by det_separators() below -- confirm OOM policy with maintainers */
  xyt_handle->info = (xyt_info*)malloc(sizeof(xyt_info));

  /* set up matvec handles */
  xyt_handle->mvi  = set_mvi(local2global, n, m, matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xyt_handle->ns=0;

  /* determine separators and generate firing order - NB xyt info set here */
  det_separators(xyt_handle);

  return(do_xyt_factor(xyt_handle));
}
113 
114 /**************************************xyt.c***********************************/
115 PetscInt XYT_solve(xyt_ADT xyt_handle, PetscScalar *x, PetscScalar *b)
116 {
117   comm_init();
118   check_handle(xyt_handle);
119 
120   /* need to copy b into x? */
121   if (b)
122     {rvec_copy(x,b,xyt_handle->mvi->n);}
123   do_xyt_solve(xyt_handle,x);
124 
125   return(0);
126 }
127 
128 /**************************************xyt.c***********************************/
/**************************************xyt.c***********************************/
/* Release everything owned by the handle: factorization arrays, the matvec
   wrapper (including its gs handle and column map), and the handle itself.
   Inner structures are freed before the structs that point to them.         */
PetscInt XYT_free(xyt_ADT xyt_handle)
{
  comm_init();
  check_handle(xyt_handle);
  n_xyt_handles--;

  free(xyt_handle->info->nsep);
  free(xyt_handle->info->lnsep);
  free(xyt_handle->info->fo);
  free(xyt_handle->info->stages);
  free(xyt_handle->info->solve_uu);
  free(xyt_handle->info->solve_w);
  free(xyt_handle->info->x);
  free(xyt_handle->info->xcol_vals);
  free(xyt_handle->info->xcol_sz);
  free(xyt_handle->info->xcol_indices);
  free(xyt_handle->info->y);
  free(xyt_handle->info->ycol_vals);
  free(xyt_handle->info->ycol_sz);
  free(xyt_handle->info->ycol_indices);
  free(xyt_handle->info);
  free(xyt_handle->mvi->local2global);
  gs_free(xyt_handle->mvi->gs_handle);
  free(xyt_handle->mvi);
  free(xyt_handle);


  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  return(0);
}
161 
162 /**************************************xyt.c***********************************/
/**************************************xyt.c***********************************/
/* Gather global min/max/sum of nnz, local size and message-buffer size,
   plus solve-time statistics, and print them on rank 0.  Returns 1 when
   no factorization exists yet, 0 otherwise.                                 */
PetscInt XYT_stats(xyt_ADT xyt_handle)
{
  PetscInt  op[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD};
  PetscInt fop[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD};
  PetscInt   vals[9],  work[9];
  PetscScalar fvals[3], fwork[3];
  PetscErrorCode ierr;  /* NOTE(review): assigned below but never checked with CHKERRQ */

  comm_init();
  check_handle(xyt_handle);

  /* if factorization not done there are no stats */
  if (!xyt_handle->info||!xyt_handle->mvi)
    {
      if (!my_id)
	{ierr = PetscPrintf(PETSC_COMM_WORLD,"XYT_stats() :: no stats available!\n");}
      return 1;
    }

  /* replicate each quantity three times so one giop computes min/max/sum */
  vals[0]=vals[1]=vals[2]=xyt_handle->info->nnz;
  vals[3]=vals[4]=vals[5]=xyt_handle->mvi->n;
  vals[6]=vals[7]=vals[8]=xyt_handle->info->msg_buf_sz;
  giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);

  /* NOTE(review): divides by nsolves before incrementing it; if no solve has
     been done (nsolves==0) this is a division by zero -- confirm callers */
  fvals[0]=fvals[1]=fvals[2]
    =xyt_handle->info->tot_solve_time/xyt_handle->info->nsolves++;
  grop(fvals,fwork,sizeof(fop)/sizeof(fop[0])-1,fop);

  if (!my_id)
    {
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xyt_nnz=%D\n",my_id,vals[0]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xyt_nnz=%D\n",my_id,vals[1]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xyt_nnz=%g\n",my_id,1.0*vals[2]/num_nodes);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xyt_nnz=%D\n",my_id,vals[2]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: xyt   C(2d)  =%g\n",my_id,vals[2]/(pow(1.0*vals[5],1.5)));
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: xyt   C(3d)  =%g\n",my_id,vals[2]/(pow(1.0*vals[5],1.6667)));
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xyt_n  =%D\n",my_id,vals[3]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xyt_n  =%D\n",my_id,vals[4]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xyt_n  =%g\n",my_id,1.0*vals[5]/num_nodes);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xyt_n  =%D\n",my_id,vals[5]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xyt_buf=%D\n",my_id,vals[6]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xyt_buf=%D\n",my_id,vals[7]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xyt_buf=%g\n",my_id,1.0*vals[8]/num_nodes);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xyt_slv=%g\n",my_id,fvals[0]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xyt_slv=%g\n",my_id,fvals[1]);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xyt_slv=%g\n",my_id,fvals[2]/num_nodes);
    }

  return(0);
}
213 
214 
215 /*************************************xyt.c************************************
216 
217 Description: get A_local, local portion of global coarse matrix which
218 is a row dist. nxm matrix w/ n<m.
219    o my_ml holds address of ML struct associated w/A_local and coarse grid
220    o local2global holds global number of column i (i=0,...,m-1)
221    o local2global holds global number of row    i (i=0,...,n-1)
222    o mylocmatvec performs A_local . vec_local (note that gs is performed using
223    gs_init/gop).
224 
225 mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
226 mylocmatvec (void :: void *data, double *in, double *out)
227 **************************************xyt.c***********************************/
228 static PetscInt do_xyt_factor(xyt_ADT xyt_handle)
229 {
230   return xyt_generate(xyt_handle);
231 }
232 
233 /**************************************xyt.c***********************************/
/**************************************xyt.c***********************************/
/* Generate the X and Y factors column by column, firing separator dofs in
   the order computed by det_separators().  Each fired column col yields
   v_l (a column of X) and u_l = A.v_l (a column of Y), orthogonalized
   against previously generated columns and normalized by sqrt(u^T.u).
   Results are stashed in xyt_handle->info.  Returns 0 on success.           */
static PetscInt xyt_generate(xyt_ADT xyt_handle)
{
  PetscInt i,j,k,idx;
  PetscInt dim, col;
  PetscScalar *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt *segs;
  PetscInt op[] = {GL_ADD,0};
  PetscInt off, len;
  PetscScalar *x_ptr, *y_ptr;
  PetscInt *iptr, flag;
  PetscInt start=0, end, work;
  PetscInt op2[] = {GL_MIN,0};
  gs_ADT gs_handle;
  PetscInt *nsep, *lnsep, *fo;
  PetscInt a_n=xyt_handle->mvi->n;
  PetscInt a_m=xyt_handle->mvi->m;
  PetscInt *a_local2global=xyt_handle->mvi->local2global;
  PetscInt level;
  PetscInt n, m;
  PetscInt *xcol_sz, *xcol_indices, *stages;
  PetscScalar **xcol_vals, *x;
  PetscInt *ycol_sz, *ycol_indices;
  PetscScalar **ycol_vals, *y;
  PetscInt n_global;
  PetscInt xt_nnz=0, xt_max_nnz=0;
  PetscInt yt_nnz=0, yt_max_nnz=0;
  PetscInt xt_zero_nnz  =0;     /* explicit zeros stored in X above level 0 */
  PetscInt xt_zero_nnz_0=0;     /* explicit zeros stored in X at level 0 */
  PetscInt yt_zero_nnz  =0;     /* explicit zeros stored in Y above level 0 */
  PetscInt yt_zero_nnz_0=0;     /* explicit zeros stored in Y at level 0 */
  PetscBLASInt i1 = 1,dlen;
  PetscScalar dm1 = -1.0;
  PetscErrorCode ierr;

  n=xyt_handle->mvi->n;
  nsep=xyt_handle->info->nsep;
  lnsep=xyt_handle->info->lnsep;
  fo=xyt_handle->info->fo;
  end=lnsep[0];                 /* end of the locally owned level-0 firing range */
  level=xyt_handle->level;
  gs_handle=xyt_handle->mvi->gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i=0, j=0; i<=level; i++)
    {j+=nsep[i];}

  m = j-xyt_handle->ns;         /* number of columns to generate = total seps - null space dim */
  if (m!=j)
    {ierr = PetscPrintf(PETSC_COMM_WORLD,"xyt_generate() :: null space exists %D %D %D\n",m,j,xyt_handle->ns);}

  ierr = PetscInfo2(0,"xyt_generate() :: X(%D,%D)\n",n,m);CHKERRQ(ierr);

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  /* xcol_indices holds (offset,length) pairs, terminated by a trailing -1 */
  xcol_sz = (PetscInt*) malloc(m*sizeof(PetscInt));
  xcol_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  xcol_vals = (PetscScalar **) malloc(m*sizeof(PetscScalar *));
  for (i=j=0; i<m; i++, j+=2)
    {
      xcol_indices[j]=xcol_indices[j+1]=xcol_sz[i]=-1;
      xcol_vals[i] = NULL;
    }
  xcol_indices[j]=-1;

  /* get and initialize storage for y local         */
  /* note that y local is nxm and stored by columns */
  ycol_sz = (PetscInt*) malloc(m*sizeof(PetscInt));
  ycol_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  ycol_vals = (PetscScalar **) malloc(m*sizeof(PetscScalar *));
  for (i=j=0; i<m; i++, j+=2)
    {
      ycol_indices[j]=ycol_indices[j+1]=ycol_sz[i]=-1;
      ycol_vals[i] = NULL;
    }
  ycol_indices[j]=-1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  segs   = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  ivec_zero(stages,level+1);
  ivec_copy(segs,nsep,level+1);
  for (i=0; i<level; i++)
    {segs[i+1] += segs[i];}     /* prefix sums: segs[d] = first column index of level d+1 */
  stages[0] = segs[0];

  /* temporary vectors  */
  u  = (PetscScalar *) malloc(n*sizeof(PetscScalar));
  z  = (PetscScalar *) malloc(n*sizeof(PetscScalar));
  v  = (PetscScalar *) malloc(a_m*sizeof(PetscScalar));
  uu = (PetscScalar *) malloc(m*sizeof(PetscScalar));
  w  = (PetscScalar *) malloc(m*sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i=1, j=0; i<=level; i++)
    {j+=nsep[i];}

  /* storage for sparse x values */
  /* heuristic initial capacity; grown by doubling below when exceeded */
  n_global = xyt_handle->info->n_global;
  xt_max_nnz = yt_max_nnz = (PetscInt)(2.5*pow(1.0*n_global,1.6667) + j*n/2)/num_nodes;
  x = (PetscScalar *) malloc(xt_max_nnz*sizeof(PetscScalar));
  y = (PetscScalar *) malloc(yt_max_nnz*sizeof(PetscScalar));

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim=i=j=0;i<m;i++)
    {
      /* time to move to the next level? */
      while (i==segs[dim]){
        if (dim==level) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"dim about to exceed level\n");
	stages[dim++]=i;
	end+=lnsep[dim];
      }
      stages[dim]=i;

      /* which column are we firing? */
      /* i.e. set v_l */
      /* use new seps and do global min across hc to determine which one to fire */
      (start<end) ? (col=fo[start]) : (col=INT_MAX);
      giop_hc(&col,&work,1,op2,dim);

      /* shouldn't need this */
      if (col==INT_MAX)
	{
	  ierr = PetscInfo(0,"hey ... col==INT_MAX??\n");CHKERRQ(ierr);
	  continue;
	}

      /* do I own it? I should */
      rvec_zero(v ,a_m);
      if (col==fo[start])
	{
	  start++;
	  /* owner: search only the n locally owned entries */
	  idx=ivec_linear_search(col, a_local2global, a_n);
	  if (idx!=-1)
	    {v[idx] = 1.0; j++;}
	  else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"NOT FOUND!\n");
	}
      else
	{
	  /* non-owner: the dof may still appear among the m shared entries */
	  idx=ivec_linear_search(col, a_local2global, a_m);
	  if (idx!=-1)
	    {v[idx] = 1.0;}
	}

      /* perform u = A.v_l */
      rvec_zero(u,n);
      do_matvec(xyt_handle->mvi,v,u);

      /* uu =  X^T.u_l (local portion) */
      /* technically only need to zero out first i entries */
      /* later turn this into an XYT_solve call ? */
      rvec_zero(uu,m);
      y_ptr=y;
      iptr = ycol_indices;
      for (k=0; k<i; k++)
	{
	  off = *iptr++;
	  len = *iptr++;
          dlen = PetscBLASIntCast(len);
	  uu[k] = BLASdot_(&dlen,u+off,&i1,y_ptr,&i1);
	  y_ptr+=len;
	}
      /* y_ptr now points at y+yt_nnz, the append position used below */

      /* uu = X^T.u_l (comm portion) */
      ssgl_radd  (uu, w, dim, stages);

      /* z = X.uu */
      rvec_zero(z,n);
      x_ptr=x;
      iptr = xcol_indices;
      for (k=0; k<i; k++)
	{
	  off = *iptr++;
	  len = *iptr++;
          dlen = PetscBLASIntCast(len);
	  BLASaxpy_(&dlen,&uu[k],x_ptr,&i1,z+off,&i1);
	  x_ptr+=len;
	}
      /* x_ptr now points at x+xt_nnz, the append position used below */

      /* compute v_l = v_l - z */
      rvec_zero(v+a_n,a_m-a_n);
      dlen = PetscBLASIntCast(n);
      BLASaxpy_(&dlen,&dm1,z,&i1,v,&i1);

      /* compute u_l = A.v_l */
      if (a_n!=a_m)
	{gs_gop_hc(gs_handle,v,"+\0",dim);}
      rvec_zero(u,n);
     do_matvec(xyt_handle->mvi,v,u);

      /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - local portion */
     dlen = PetscBLASIntCast(n);
      alpha = BLASdot_(&dlen,u,&i1,u,&i1);
      /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - comm portion */
      grop_hc(&alpha, &alpha_w, 1, op, dim);

      alpha = (PetscScalar) sqrt((double)alpha);

      /* check for small alpha                             */
      /* LATER use this to detect and determine null space */
      if (fabs(alpha)<1.0e-14) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"bad alpha! %g\n",alpha);

      /* compute v_l = v_l/sqrt(alpha) */
      rvec_scale(v,1.0/alpha,n);
      rvec_scale(u,1.0/alpha,n);

      /* add newly generated column, v_l, to X */
      /* find the first (off) and last (len) nonzero entries of v */
      flag = 1;
      off=len=0;
      for (k=0; k<n; k++)
	{
	  if (v[k]!=0.0)
	    {
	      len=k;
	      if (flag)
		{off=k; flag=0;}
	    }
	}

      len -= (off-1);           /* length of the nonzero run [off, off+len) */

      if (len>0)
	{
	  if ((xt_nnz+len)>xt_max_nnz)
	    {
	      /* grow x by doubling: copy into a fresh buffer, then point
	         x_ptr back at the append position in the new buffer */
	      ierr = PetscInfo(0,"increasing space for X by 2x!\n");CHKERRQ(ierr);
	      xt_max_nnz *= 2;
	      x_ptr = (PetscScalar *) malloc(xt_max_nnz*sizeof(PetscScalar));
	      rvec_copy(x_ptr,x,xt_nnz);
	      free(x);
	      x = x_ptr;
	      x_ptr+=xt_nnz;
	      /* NOTE(review): earlier xcol_vals[*] still point into the freed
	         buffer after this copy; do_xyt_solve() walks x via
	         xcol_indices so they appear unused at solve time -- confirm */
	    }
	  xt_nnz += len;
	  rvec_copy(x_ptr,v+off,len);

          /* keep track of number of zeros */
	  if (dim)
	    {
	      for (k=0; k<len; k++)
		{
		  if (x_ptr[k]==0.0)
		    {xt_zero_nnz++;}
		}
	    }
	  else
	    {
	      for (k=0; k<len; k++)
		{
		  if (x_ptr[k]==0.0)
		    {xt_zero_nnz_0++;}
		}
	    }
	  xcol_indices[2*i] = off;
	  xcol_sz[i] = xcol_indices[2*i+1] = len;
	  xcol_vals[i] = x_ptr;
	}
      else
	{
	  xcol_indices[2*i] = 0;
	  xcol_sz[i] = xcol_indices[2*i+1] = 0;
	  xcol_vals[i] = x_ptr;
	}


      /* add newly generated column, u_l, to Y */
      /* same first/last-nonzero scan as for X above */
      flag = 1;
      off=len=0;
      for (k=0; k<n; k++)
	{
	  if (u[k]!=0.0)
	    {
	      len=k;
	      if (flag)
		{off=k; flag=0;}
	    }
	}

      len -= (off-1);

      if (len>0)
	{
	  if ((yt_nnz+len)>yt_max_nnz)
	    {
	      /* grow y by doubling (same scheme and caveat as for x) */
	      ierr = PetscInfo(0,"increasing space for Y by 2x!\n");CHKERRQ(ierr);
	      yt_max_nnz *= 2;
	      y_ptr = (PetscScalar *) malloc(yt_max_nnz*sizeof(PetscScalar));
	      rvec_copy(y_ptr,y,yt_nnz);
	      free(y);
	      y = y_ptr;
	      y_ptr+=yt_nnz;
	    }
	  yt_nnz += len;
	  rvec_copy(y_ptr,u+off,len);

          /* keep track of number of zeros */
	  if (dim)
	    {
	      for (k=0; k<len; k++)
		{
		  if (y_ptr[k]==0.0)
		    {yt_zero_nnz++;}
		}
	    }
	  else
	    {
	      for (k=0; k<len; k++)
		{
		  if (y_ptr[k]==0.0)
		    {yt_zero_nnz_0++;}
		}
	    }
	  ycol_indices[2*i] = off;
	  ycol_sz[i] = ycol_indices[2*i+1] = len;
	  ycol_vals[i] = y_ptr;
	}
      else
	{
	  ycol_indices[2*i] = 0;
	  ycol_sz[i] = ycol_indices[2*i+1] = 0;
	  ycol_vals[i] = y_ptr;
	}
    }

  /* close off stages for execution phase */
  while (dim!=level)
    {
      stages[dim++]=i;
      ierr = PetscInfo2(0,"disconnected!!! dim(%D)!=level(%D)\n",dim,level);CHKERRQ(ierr);
    }
  stages[dim]=i;

  /* stash everything the solve phase needs in the handle */
  xyt_handle->info->n=xyt_handle->mvi->n;
  xyt_handle->info->m=m;
  xyt_handle->info->nnz=xt_nnz + yt_nnz;
  xyt_handle->info->max_nnz=xt_max_nnz + yt_max_nnz;
  xyt_handle->info->msg_buf_sz=stages[level]-stages[0];
  xyt_handle->info->solve_uu = (PetscScalar *) malloc(m*sizeof(PetscScalar));
  xyt_handle->info->solve_w  = (PetscScalar *) malloc(m*sizeof(PetscScalar));
  xyt_handle->info->x=x;
  xyt_handle->info->xcol_vals=xcol_vals;
  xyt_handle->info->xcol_sz=xcol_sz;
  xyt_handle->info->xcol_indices=xcol_indices;
  xyt_handle->info->stages=stages;
  xyt_handle->info->y=y;
  xyt_handle->info->ycol_vals=ycol_vals;
  xyt_handle->info->ycol_sz=ycol_sz;
  xyt_handle->info->ycol_indices=ycol_indices;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return(0);
}
594 
595 /**************************************xyt.c***********************************/
/**************************************xyt.c***********************************/
/* Apply the factorization in place: uc := X.(Y^T.uc).  Walks the packed
   y and x arrays via the -1-terminated (offset,length) index lists built
   by xyt_generate(), with one staged global sum in between.                 */
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle,  PetscScalar *uc)
{
  PetscInt off, len, *iptr;
  PetscInt level       =xyt_handle->level;
  PetscInt n           =xyt_handle->info->n;
  PetscInt m           =xyt_handle->info->m;
  PetscInt *stages     =xyt_handle->info->stages;
  PetscInt *xcol_indices=xyt_handle->info->xcol_indices;
  PetscInt *ycol_indices=xyt_handle->info->ycol_indices;
   PetscScalar *x_ptr, *y_ptr, *uu_ptr;
  PetscScalar *solve_uu=xyt_handle->info->solve_uu;
  PetscScalar *solve_w =xyt_handle->info->solve_w;
  PetscScalar *x       =xyt_handle->info->x;
  PetscScalar *y       =xyt_handle->info->y;
  PetscBLASInt i1 = 1,dlen;

  PetscFunctionBegin;
  uu_ptr=solve_uu;
  rvec_zero(uu_ptr,m);

  /* x  = X.Y^T.b */
  /* uu = Y^T.b */
  /* per column k: uu[k] = dot(uc[off..off+len), y-column k) */
  for (y_ptr=y,iptr=ycol_indices; *iptr!=-1; y_ptr+=len)
    {
      off=*iptr++; len=*iptr++;
      dlen = PetscBLASIntCast(len);
      *uu_ptr++ = BLASdot_(&dlen,uc+off,&i1,y_ptr,&i1);
    }

  /* comunication of beta */
  /* staged global sum of the coefficients across the hypercube levels */
  uu_ptr=solve_uu;
  if (level) {ssgl_radd(uu_ptr, solve_w, level, stages);}

  rvec_zero(uc,n);

  /* x = X.uu */
  /* per column k: uc[off..off+len) += uu[k] * x-column k */
  for (x_ptr=x,iptr=xcol_indices; *iptr!=-1; x_ptr+=len)
    {
      off=*iptr++; len=*iptr++;
      dlen = PetscBLASIntCast(len);
      BLASaxpy_(&dlen,uu_ptr++,x_ptr,&i1,uc+off,&i1);
    }
  PetscFunctionReturn(0);
}
640 
641 /**************************************xyt.c***********************************/
642 static PetscErrorCode check_handle(xyt_ADT xyt_handle)
643 {
644   PetscInt vals[2], work[2], op[] = {NON_UNIFORM,GL_MIN,GL_MAX};
645 
646   PetscFunctionBegin;
647   if (!xyt_handle) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: NULL %D\n",xyt_handle);
648 
649   vals[0]=vals[1]=xyt_handle->id;
650   giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);
651   if ((vals[0]!=vals[1])||(xyt_handle->id<=0)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: id mismatch min/max %D/%D %D\n", vals[0],vals[1], xyt_handle->id);
652   PetscFunctionReturn(0);
653 }
654 
655 /**************************************xyt.c***********************************/
/**************************************xyt.c***********************************/
/* Determine separator sets top-down over the processor hypercube and build
   the firing order fo[] (filled from the tail of the array downward).
   Also computes the global problem size from the shared-dof weights.
   Results (nsep, lnsep, fo, nfo) are stashed in xyt_handle->info.           */
static PetscErrorCode det_separators(xyt_ADT xyt_handle)
{
  PetscInt i, ct, id;
  PetscInt mask, edge, *iptr;
  PetscInt *dir, *used;
  PetscInt sum[4], w[4];
  PetscScalar rsum[4], rw[4];
  PetscInt op[] = {GL_ADD,0};
  PetscScalar *lhs, *rhs;
  PetscInt *nsep, *lnsep, *fo, nfo=0;
  gs_ADT gs_handle=xyt_handle->mvi->gs_handle;
  PetscInt *local2global=xyt_handle->mvi->local2global;
  PetscInt  n=xyt_handle->mvi->n;
  PetscInt  m=xyt_handle->mvi->m;
  PetscInt level=xyt_handle->level;
  PetscInt shared=FALSE;        /* set TRUE when a dof is replicated across ranks */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  dir  = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  nsep = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  lnsep= (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  fo   = (PetscInt*)malloc(sizeof(PetscInt)*(n+1));
  used = (PetscInt*)malloc(sizeof(PetscInt)*n);

  ivec_zero(dir  ,level+1);
  ivec_zero(nsep ,level+1);
  ivec_zero(lnsep,level+1);
  ivec_set (fo   ,-1,n+1);
  ivec_zero(used,n);

  lhs  = (PetscScalar*)malloc(sizeof(PetscScalar)*m);
  rhs  = (PetscScalar*)malloc(sizeof(PetscScalar)*m);

  /* determine the # of unique dof */
  /* after the gop, lhs[i] = number of ranks sharing dof i; summing 1/lhs[i]
     over owned dofs therefore counts each global dof exactly once */
  rvec_zero(lhs,m);
  rvec_set(lhs,1.0,n);
  gs_gop_hc(gs_handle,lhs,"+\0",level);
  ierr = PetscInfo(0,"done first gs_gop_hc\n");CHKERRQ(ierr);
  rvec_zero(rsum,2);
  for (ct=i=0;i<n;i++)
    {
      if (lhs[i]!=0.0)
	{rsum[0]+=1.0/lhs[i]; rsum[1]+=lhs[i];}

      if (lhs[i]!=1.0)
	{
          shared=TRUE;
        }
    }

  grop_hc(rsum,rw,2,op,level);
  /* +0.1 guards the float->int truncation below against rounding down */
  rsum[0]+=0.1;
  rsum[1]+=0.1;

  xyt_handle->info->n_global=xyt_handle->info->m_global=(PetscInt) rsum[0];
  xyt_handle->mvi->n_global =xyt_handle->mvi->m_global =(PetscInt) rsum[0];

  /* determine separator sets top down */
  if (shared)
    {
      /* solution is to do as in the symmetric shared case but then */
      /* pick the sub-hc with the most free dofs and do a mat-vec   */
      /* and pick up the responses on the other sub-hc from the     */
      /* initial separator set obtained from the symm. shared case  */
      /* NOTE: unimplemented -- SETERRQ fires, code below is unreachable */
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"shared dof separator determination not ready ... see hmt!!!\n");
      for (iptr=fo+n,id=my_id,mask=num_nodes>>1,edge=level;edge>0;edge--,mask>>=1)
	{
	  /* set rsh of hc, fire, and collect lhs responses */
	  (id<mask) ? rvec_zero(lhs,m) : rvec_set(lhs,1.0,m);
	  gs_gop_hc(gs_handle,lhs,"+\0",edge);

	  /* set lsh of hc, fire, and collect rhs responses */
	  (id<mask) ? rvec_set(rhs,1.0,m) : rvec_zero(rhs,m);
	  gs_gop_hc(gs_handle,rhs,"+\0",edge);

	  for (i=0;i<n;i++)
	    {
	      if (id< mask)
		{
		  if (lhs[i]!=0.0)
		    {lhs[i]=1.0;}
		}
	      if (id>=mask)
		{
		  if (rhs[i]!=0.0)
		    {rhs[i]=1.0;}
		}
	    }

	  if (id< mask)
	    {gs_gop_hc(gs_handle,lhs,"+\0",edge-1);}
	  else
	    {gs_gop_hc(gs_handle,rhs,"+\0",edge-1);}

	  /* count number of dofs I own that have signal and not in sep set */
	  rvec_zero(rsum,4);
	  for (ivec_zero(sum,4),ct=i=0;i<n;i++)
	    {
	      if (!used[i])
		{
		  /* number of unmarked dofs on node */
		  ct++;
		  /* number of dofs to be marked on lhs hc */
		  if (id< mask)
		    {
		      if (lhs[i]!=0.0)
			{sum[0]++; rsum[0]+=1.0/lhs[i];}
		    }
		  /* number of dofs to be marked on rhs hc */
		  if (id>=mask)
		    {
		      if (rhs[i]!=0.0)
			{sum[1]++; rsum[1]+=1.0/rhs[i];}
		    }
		}
	    }

	  /* go for load balance - choose half with most unmarked dofs, bias LHS */
	  (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
	  (id<mask) ? (rsum[2]=ct) : (rsum[3]=ct);
	  giop_hc(sum,w,4,op,edge);
	  grop_hc(rsum,rw,4,op,edge);
	  rsum[0]+=0.1; rsum[1]+=0.1; rsum[2]+=0.1; rsum[3]+=0.1;

	  if (id<mask)
	    {
	      /* mark dofs I own that have signal and not in sep set */
	      for (ct=i=0;i<n;i++)
		{
		  if ((!used[i])&&(lhs[i]!=0.0))
		    {
		      ct++; nfo++;

		      if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

		      *--iptr = local2global[i];
		      used[i]=edge;
		    }
		}
	      if (ct>1) {ivec_sort(iptr,ct);}

	      lnsep[edge]=ct;
	      nsep[edge]=(PetscInt) rsum[0];
	      dir [edge]=LEFT;
	    }

	  if (id>=mask)
	    {
	      /* mark dofs I own that have signal and not in sep set */
	      for (ct=i=0;i<n;i++)
		{
		  if ((!used[i])&&(rhs[i]!=0.0))
		    {
		      ct++; nfo++;

		      if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

		      *--iptr = local2global[i];
		      used[i]=edge;
		    }
		}
	      if (ct>1) {ivec_sort(iptr,ct);}

	      lnsep[edge]=ct;
	      nsep[edge]= (PetscInt) rsum[1];
	      dir [edge]=RIGHT;
	    }

	  /* LATER or we can recur on these to order seps at this level */
	  /* do we need full set of separators for this?                */

	  /* fold rhs hc into lower */
	  if (id>=mask)
	    {id-=mask;}
	}
    }
  else
    {
      /* unshared case: fold the hypercube one edge at a time, from the
         full cube (edge=level) down to single nodes (edge=1) */
      for (iptr=fo+n,id=my_id,mask=num_nodes>>1,edge=level;edge>0;edge--,mask>>=1)
	{
	  /* set rsh of hc, fire, and collect lhs responses */
	  (id<mask) ? rvec_zero(lhs,m) : rvec_set(lhs,1.0,m);
	  gs_gop_hc(gs_handle,lhs,"+\0",edge);

	  /* set lsh of hc, fire, and collect rhs responses */
	  (id<mask) ? rvec_set(rhs,1.0,m) : rvec_zero(rhs,m);
	  gs_gop_hc(gs_handle,rhs,"+\0",edge);

	  /* count number of dofs I own that have signal and not in sep set */
	  for (ivec_zero(sum,4),ct=i=0;i<n;i++)
	    {
	      if (!used[i])
		{
		  /* number of unmarked dofs on node */
		  ct++;
		  /* number of dofs to be marked on lhs hc */
		  if ((id< mask)&&(lhs[i]!=0.0)) {sum[0]++;}
		  /* number of dofs to be marked on rhs hc */
		  if ((id>=mask)&&(rhs[i]!=0.0)) {sum[1]++;}
		}
	    }

	  /* for the non-symmetric case we need separators of width 2 */
	  /* so take both sides */
	  (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
	  giop_hc(sum,w,4,op,edge);

	  ct=0;
	  if (id<mask)
	    {
	      /* mark dofs I own that have signal and not in sep set */
	      /* NOTE(review): no nfo>n overflow guard here, unlike the
	         shared branch above -- confirm fo cannot overflow */
	      for (i=0;i<n;i++)
		{
		  if ((!used[i])&&(lhs[i]!=0.0))
		    {
		      ct++; nfo++;
		      *--iptr = local2global[i];
		      used[i]=edge;
		    }
		}
	      /* LSH hc summation of ct should be sum[0] */
	    }
	  else
	    {
	      /* mark dofs I own that have signal and not in sep set */
	      for (i=0;i<n;i++)
		{
		  if ((!used[i])&&(rhs[i]!=0.0))
		    {
		      ct++; nfo++;
		      *--iptr = local2global[i];
		      used[i]=edge;
		    }
		}
	      /* RSH hc summation of ct should be sum[1] */
	    }

	  if (ct>1) {ivec_sort(iptr,ct);}
	  lnsep[edge]=ct;
	  nsep[edge]=sum[0]+sum[1];
	  dir [edge]=BOTH;

	  /* LATER or we can recur on these to order seps at this level */
	  /* do we need full set of separators for this?                */

	  /* fold rhs hc into lower */
	  if (id>=mask)
	    {id-=mask;}
	}
    }

  /* level 0 is on processor case - so mark the remainder */
  /* (edge==0 here after either loop above) */
  for (ct=i=0;i<n;i++)
    {
      if (!used[i])
	{
	  ct++; nfo++;
	  *--iptr = local2global[i];
	  used[i]=edge;
	}
    }
  if (ct>1) {ivec_sort(iptr,ct);}
  lnsep[edge]=ct;
  nsep [edge]=ct;
  dir  [edge]=BOTH;

  /* nsep, lnsep and fo are now owned by the handle (freed in XYT_free) */
  xyt_handle->info->nsep=nsep;
  xyt_handle->info->lnsep=lnsep;
  xyt_handle->info->fo=fo;
  xyt_handle->info->nfo=nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(0);
}
934 
935 /**************************************xyt.c***********************************/
936 static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data)
937 {
938   mv_info *mvi;
939 
940 
941   mvi = (mv_info*)malloc(sizeof(mv_info));
942   mvi->n=n;
943   mvi->m=m;
944   mvi->n_global=-1;
945   mvi->m_global=-1;
946   mvi->local2global=(PetscInt*)malloc((m+1)*sizeof(PetscInt));
947   ivec_copy(mvi->local2global,local2global,m);
948   mvi->local2global[m] = INT_MAX;
949   mvi->matvec=(PetscErrorCode (*)(mv_info*,PetscScalar*,PetscScalar*))matvec;
950   mvi->grid_data=grid_data;
951 
952   /* set xyt communication handle to perform restricted matvec */
953   mvi->gs_handle = gs_init(local2global, m, num_nodes);
954 
955   return(mvi);
956 }
957 
958 /**************************************xyt.c***********************************/
959 static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
960 {
961   PetscFunctionBegin;
962   A->matvec((mv_info*)A->grid_data,v,u);
963   PetscFunctionReturn(0);
964 }
965 
966 
967 
968