#define PETSCKSP_DLL

/*************************************xxt.c************************************
Module Name: xxt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL 60439               |Chicago, IL 60637               |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
**************************************xxt.c***********************************/


/*************************************xxt.c************************************
NOTES ON USAGE: see the illustrative usage sketch following XXT_new() below.

**************************************xxt.c***********************************/
#include "src/ksp/pc/impls/tfs/tfs.h"

#define LEFT   -1
#define RIGHT   1
#define BOTH    0
#define MAX_FORTRAN_HANDLES  10

typedef struct xxt_solver_info {
  int n, m, n_global, m_global;
  int nnz, max_nnz, msg_buf_sz;
  int *nsep, *lnsep, *fo, nfo, *stages;
  int *col_sz, *col_indices;
  PetscScalar **col_vals, *x, *solve_uu, *solve_w;
  int nsolves;
  PetscScalar tot_solve_time;
} xxt_info;

typedef struct matvec_info {
  int n, m, n_global, m_global;
  int *local2global;
  gs_ADT gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info*,PetscScalar*,PetscScalar*);
  void *grid_data;
} mv_info;

struct xxt_CDT {
  int id;
  int ns;
  int level;
  xxt_info *info;
  mv_info  *mvi;
};

static int n_xxt=0;
static int n_xxt_handles=0;

/* prototypes */
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xxt_ADT xxt_handle);
static PetscErrorCode det_separators(xxt_ADT xxt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static int xxt_generate(xxt_ADT xxt_handle);
static int do_xxt_factor(xxt_ADT xxt_handle);
static mv_info *set_mvi(int *local2global, int n, int m, void *matvec, void *grid_data);


/*************************************xxt.c************************************
Function: XXT_new()

Input :
Output:
Return: newly allocated xxt handle
Description: allocates an xxt handle and assigns it a unique id; the handle
             must be passed to XXT_factor() before it can be used in a solve
**************************************xxt.c***********************************/
xxt_ADT
XXT_new(void)
{
  xxt_ADT xxt_handle;

  /* rolling count on n_xxt ... pot. problem here */
  n_xxt_handles++;
  xxt_handle       = (xxt_ADT)malloc(sizeof(struct xxt_CDT));
  xxt_handle->id   = ++n_xxt;
  xxt_handle->info = NULL;
  xxt_handle->mvi  = NULL;

  return(xxt_handle);
}
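
/*************************************xxt.c************************************
NOTE: a minimal, illustrative sketch of how the public interface in this file
(XXT_new/XXT_factor/XXT_solve/XXT_free) is typically driven.  The names
my_local2global, my_matvec, my_grid, x, and b are hypothetical caller-side
objects, not defined here; see XXT_factor() below for the meaning of each
argument.
**************************************xxt.c***********************************/
#if 0
{
  xxt_ADT handle;

  handle = XXT_new();                        /* allocate a handle            */
  XXT_factor(handle,                         /* factor the coarse operator   */
             my_local2global,                /* local -> global dof map      */
             n_local_rows, m_local_cols,
             (void*)my_matvec,               /* b_loc = A_local . x_loc      */
             (void*)my_grid);                /* data handed back to matvec   */
  XXT_solve(handle, x, b);                   /* x ~ A^{-1} b, repeatable     */
  XXT_free(handle);                          /* release all storage          */
}
#endif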

/*************************************xxt.c************************************
Function: XXT_factor()

Input :
Output:
Return:
Description: factors the distributed coarse-grid operator defined by matvec
             and grid_data; must be called once, after XXT_new() and before
             any XXT_solve(); requires num_nodes to be a power of two
**************************************xxt.c***********************************/
int
XXT_factor(xxt_ADT xxt_handle,   /* prev. allocated xxt handle */
           int *local2global,    /* global column mapping      */
           int n,                /* local num rows             */
           int m,                /* local num cols             */
           void *matvec,         /* b_loc=A_local.x_loc        */
           void *grid_data       /* grid data for matvec       */
           )
{
  comm_init();
  check_handle(xxt_handle);

  /* only 2^k for now and all nodes participating */
  if ((1<<(xxt_handle->level=i_log2_num_nodes))!=num_nodes)
    {error_msg_fatal("only 2^k for now and MPI_COMM_WORLD!!! %d != %d\n",1<<i_log2_num_nodes,num_nodes);}

  /* space for X info */
  xxt_handle->info = (xxt_info*)malloc(sizeof(xxt_info));

  /* set up matvec handles */
  xxt_handle->mvi = set_mvi(local2global, n, m, matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xxt_handle->ns=0;

  /* determine separators and generate firing order - NB xxt info set here */
  det_separators(xxt_handle);

  return(do_xxt_factor(xxt_handle));
}

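/*************************************xxt.c************************************
NOTE: an illustrative sketch of the matvec callback that XXT_factor() expects.
The callback is stored untyped (void*) and invoked through do_matvec() at the
bottom of this file, which passes the caller's grid_data pointer as the first
argument; see also the signature quoted in the do_xxt_factor() header comment.
The names my_grid_data and my_matvec are hypothetical - only the shape of the
interface is shown.
**************************************xxt.c***********************************/
#if 0
typedef struct my_grid_data {      /* whatever the local matvec needs */
  int n, m;
  /* ... */
} my_grid_data;

static PetscErrorCode my_matvec(void *data, PetscScalar *in, PetscScalar *out)
{
  my_grid_data *grid = (my_grid_data*)data;   /* grid_data passed to XXT_factor */

  /* out = A_local . in  (grid->n local rows, grid->m local cols) */
  return(0);
}
#endif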

/*************************************xxt.c************************************
Function: XXT_solve()

Input :
Output:
Return:
Description: applies the previously computed factor; if b is non-NULL it is
             first copied into x and the solve is then done in place on x
**************************************xxt.c***********************************/
int
XXT_solve(xxt_ADT xxt_handle, double *x, double *b)
{
  comm_init();
  check_handle(xxt_handle);

  /* need to copy b into x? */
  if (b)
    {rvec_copy(x,b,xxt_handle->mvi->n);}
  do_xxt_solve(xxt_handle,x);

  return(0);
}


/*************************************xxt.c************************************
Function: XXT_free()

Input :
Output:
Return:
Description: releases all storage associated with the handle
**************************************xxt.c***********************************/
int
XXT_free(xxt_ADT xxt_handle)
{
  comm_init();
  check_handle(xxt_handle);
  n_xxt_handles--;

  free(xxt_handle->info->nsep);
  free(xxt_handle->info->lnsep);
  free(xxt_handle->info->fo);
  free(xxt_handle->info->stages);
  free(xxt_handle->info->solve_uu);
  free(xxt_handle->info->solve_w);
  free(xxt_handle->info->x);
  free(xxt_handle->info->col_vals);
  free(xxt_handle->info->col_sz);
  free(xxt_handle->info->col_indices);
  free(xxt_handle->info);
  free(xxt_handle->mvi->local2global);
  gs_free(xxt_handle->mvi->gs_handle);
  free(xxt_handle->mvi);
  free(xxt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  return(0);
}


/*************************************xxt.c************************************
Function: XXT_stats()

Input :
Output:
Return:
Description: prints global min/max/avg statistics on the factor (nnz, local n,
             message buffer size, solve time) from processor 0
**************************************xxt.c***********************************/
int
XXT_stats(xxt_ADT xxt_handle)
{
  int op[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD};
  int fop[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD};
  int vals[9], work[9];
  PetscScalar fvals[3], fwork[3];

  comm_init();
  check_handle(xxt_handle);

  /* if factorization not done there are no stats */
  if (!xxt_handle->info||!xxt_handle->mvi)
    {
      if (!my_id)
        {printf("XXT_stats() :: no stats available!\n");}
      return 1;
    }

  vals[0]=vals[1]=vals[2]=xxt_handle->info->nnz;
  vals[3]=vals[4]=vals[5]=xxt_handle->mvi->n;
  vals[6]=vals[7]=vals[8]=xxt_handle->info->msg_buf_sz;
  giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);

  fvals[0]=fvals[1]=fvals[2]=xxt_handle->info->tot_solve_time/xxt_handle->info->nsolves++;
  grop(fvals,fwork,sizeof(fop)/sizeof(fop[0])-1,fop);

  if (!my_id)
    {
      printf("%d :: min xxt_nnz=%d\n",my_id,vals[0]);
      printf("%d :: max xxt_nnz=%d\n",my_id,vals[1]);
      printf("%d :: avg xxt_nnz=%g\n",my_id,1.0*vals[2]/num_nodes);
      printf("%d :: tot xxt_nnz=%d\n",my_id,vals[2]);
      printf("%d :: xxt C(2d)  =%g\n",my_id,vals[2]/(pow(1.0*vals[5],1.5)));
      printf("%d :: xxt C(3d)  =%g\n",my_id,vals[2]/(pow(1.0*vals[5],1.6667)));
      printf("%d :: min xxt_n  =%d\n",my_id,vals[3]);
      printf("%d :: max xxt_n  =%d\n",my_id,vals[4]);
      printf("%d :: avg xxt_n  =%g\n",my_id,1.0*vals[5]/num_nodes);
      printf("%d :: tot xxt_n  =%d\n",my_id,vals[5]);
      printf("%d :: min xxt_buf=%d\n",my_id,vals[6]);
      printf("%d :: max xxt_buf=%d\n",my_id,vals[7]);
      printf("%d :: avg xxt_buf=%g\n",my_id,1.0*vals[8]/num_nodes);
      printf("%d :: min xxt_slv=%g\n",my_id,fvals[0]);
      printf("%d :: max xxt_slv=%g\n",my_id,fvals[1]);
      printf("%d :: avg xxt_slv=%g\n",my_id,fvals[2]/num_nodes);
    }

  return(0);
}


/*************************************xxt.c************************************
Function: do_xxt_factor

Input :
Output:
Return:
Description: get A_local, local portion of global coarse matrix which
is a row dist. nxm matrix w/ n<m.
 o my_ml holds address of ML struct associated w/A_local and coarse grid
 o local2global holds global number of column i (i=0,...,m-1)
 o local2global holds global number of row    i (i=0,...,n-1)
 o mylocmatvec performs A_local . vec_local (note that gs is performed using
   gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
**************************************xxt.c***********************************/
static int do_xxt_factor(xxt_ADT xxt_handle)
{
  int flag;

  flag=xxt_generate(xxt_handle);

  return(flag);
}

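/*************************************xxt.c************************************
NOTE: a brief sketch (in exact arithmetic, ignoring the parallel details) of
what xxt_generate() below computes.  Columns x_i of X are produced one at a
time from unit vectors e_i taken in the firing order fo[]:

    v_i = e_i - X_{i-1} (X_{i-1}^T A e_i)      A-orthogonalize against X
    x_i = v_i / sqrt(v_i^T A v_i)              A-normalize

so that X^T A X = I and, for a full-rank A, A^{-1} = X X^T.  The solve phase
(do_xxt_solve) then applies x = X (X^T b) in two sparse passes over the
stored columns.
**************************************xxt.c***********************************/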

/*************************************xxt.c************************************
Function: xxt_generate

Input :
Output:
Return:
Description:
**************************************xxt.c***********************************/
static int xxt_generate(xxt_ADT xxt_handle)
{
  int i,j,k,idex;
  int dim, col;
  PetscScalar *u, *uu, *v, *z, *w, alpha, alpha_w;
  int *segs;
  int op[] = {GL_ADD,0};
  int off, len;
  PetscScalar *x_ptr;
  int *iptr, flag;
  int start=0, end, work;
  int op2[] = {GL_MIN,0};
  gs_ADT gs_handle;
  int *nsep, *lnsep, *fo;
  int a_n=xxt_handle->mvi->n;
  int a_m=xxt_handle->mvi->m;
  int *a_local2global=xxt_handle->mvi->local2global;
  int level;
  int xxt_nnz=0, xxt_max_nnz=0;
  int n, m;
  int *col_sz, *col_indices, *stages;
  PetscScalar **col_vals, *x;
  int n_global;
  int xxt_zero_nnz=0;
  int xxt_zero_nnz_0=0;
  PetscBLASInt i1 = 1;
  PetscScalar dm1 = -1.0;

  n=xxt_handle->mvi->n;
  nsep=xxt_handle->info->nsep;
  lnsep=xxt_handle->info->lnsep;
  fo=xxt_handle->info->fo;
  end=lnsep[0];
  level=xxt_handle->level;
  gs_handle=xxt_handle->mvi->gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i=0, j=0; i<=level; i++)
    {j+=nsep[i];}

  m = j-xxt_handle->ns;
  if (m!=j)
    {printf("xxt_generate() :: null space exists %d %d %d\n",m,j,xxt_handle->ns);}

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  col_sz      = (int*) malloc(m*sizeof(PetscInt));
  col_indices = (int*) malloc((2*m+1)*sizeof(int));
  col_vals    = (PetscScalar **) malloc(m*sizeof(PetscScalar *));
  for (i=j=0; i<m; i++, j+=2)
    {
      col_indices[j]=col_indices[j+1]=col_sz[i]=-1;
      col_vals[i] = NULL;
    }
  col_indices[j]=-1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (int*) malloc((level+1)*sizeof(PetscInt));
  segs   = (int*) malloc((level+1)*sizeof(PetscInt));
  ivec_zero(stages,level+1);
  ivec_copy(segs,nsep,level+1);
  for (i=0; i<level; i++)
    {segs[i+1] += segs[i];}
  stages[0] = segs[0];

  /* temporary vectors */
  u  = (PetscScalar *) malloc(n*sizeof(PetscScalar));
  z  = (PetscScalar *) malloc(n*sizeof(PetscScalar));
  v  = (PetscScalar *) malloc(a_m*sizeof(PetscScalar));
  uu = (PetscScalar *) malloc(m*sizeof(PetscScalar));
  w  = (PetscScalar *) malloc(m*sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i=1, j=0; i<=level; i++)
    {j+=nsep[i];}

  /* storage for sparse x values */
  n_global = xxt_handle->info->n_global;
  xxt_max_nnz = (int)(2.5*pow(1.0*n_global,1.6667) + j*n/2)/num_nodes;
  x = (PetscScalar *) malloc(xxt_max_nnz*sizeof(PetscScalar));
  xxt_nnz = 0;

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim=i=j=0;i<m;i++)
    {
      /* time to move to the next level? */
      while (i==segs[dim])
        {
          if (dim==level)
            {error_msg_fatal("dim about to exceed level\n"); break;}

          stages[dim++]=i;
          end+=lnsep[dim];
        }
      stages[dim]=i;

      /* which column are we firing? */
      /* i.e. set v_l */
      /* use new seps and do global min across hc to determine which one to fire */
      (start<end) ? (col=fo[start]) : (col=INT_MAX);
      giop_hc(&col,&work,1,op2,dim);

      /* shouldn't need this */
      if (col==INT_MAX)
        {
          error_msg_warning("hey ... col==INT_MAX??\n");
          continue;
        }

      /* do I own it? I should */
      rvec_zero(v ,a_m);
      if (col==fo[start])
        {
          start++;
          idex=ivec_linear_search(col, a_local2global, a_n);
          if (idex!=-1)
            {v[idex] = 1.0; j++;}
          else
            {error_msg_fatal("NOT FOUND!\n");}
        }
      else
        {
          idex=ivec_linear_search(col, a_local2global, a_m);
          if (idex!=-1)
            {v[idex] = 1.0;}
        }

      /* perform u = A.v_l */
      rvec_zero(u,n);
      do_matvec(xxt_handle->mvi,v,u);

      /* uu = X^T.u_l (local portion) */
      /* technically only need to zero out first i entries */
      /* later turn this into an XXT_solve call ? */
      rvec_zero(uu,m);
      x_ptr=x;
      iptr = col_indices;
      for (k=0; k<i; k++)
        {
          off = *iptr++;
          len = *iptr++;

          uu[k] = BLASdot_(&len,u+off,&i1,x_ptr,&i1);
          x_ptr+=len;
        }

      /* uu = X^T.u_l (comm portion) */
      ssgl_radd(uu, w, dim, stages);

      /* z = X.uu */
      rvec_zero(z,n);
      x_ptr=x;
      iptr = col_indices;
      for (k=0; k<i; k++)
        {
          off = *iptr++;
          len = *iptr++;

          BLASaxpy_(&len,&uu[k],x_ptr,&i1,z+off,&i1);
          x_ptr+=len;
        }

      /* compute v_l = v_l - z */
      rvec_zero(v+a_n,a_m-a_n);
      BLASaxpy_(&n,&dm1,z,&i1,v,&i1);

      /* compute u_l = A.v_l */
      if (a_n!=a_m)
        {gs_gop_hc(gs_handle,v,"+\0",dim);}
      rvec_zero(u,n);
      do_matvec(xxt_handle->mvi,v,u);

      /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - local portion */
      alpha = BLASdot_(&n,u,&i1,v,&i1);
      /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - comm portion */
      grop_hc(&alpha, &alpha_w, 1, op, dim);

      alpha = (PetscScalar) sqrt((double)alpha);

      /* check for small alpha                             */
      /* LATER use this to detect and determine null space */
      if (fabs(alpha)<1.0e-14)
        {error_msg_fatal("bad alpha! %g\n",alpha);}

      /* compute v_l = v_l/sqrt(alpha) */
      rvec_scale(v,1.0/alpha,n);

      /* add newly generated column, v_l, to X */
      flag = 1;
      off=len=0;
      for (k=0; k<n; k++)
        {
          if (v[k]!=0.0)
            {
              len=k;
              if (flag)
                {off=k; flag=0;}
            }
        }

      len -= (off-1);

      if (len>0)
        {
          if ((xxt_nnz+len)>xxt_max_nnz)
            {
              error_msg_warning("increasing space for X by 2x!\n");
              xxt_max_nnz *= 2;
              x_ptr = (PetscScalar *) malloc(xxt_max_nnz*sizeof(PetscScalar));
              rvec_copy(x_ptr,x,xxt_nnz);
              free(x);
              x = x_ptr;
              x_ptr+=xxt_nnz;
            }
          xxt_nnz += len;
          rvec_copy(x_ptr,v+off,len);

          /* keep track of number of zeros */
          if (dim)
            {
              for (k=0; k<len; k++)
                {
                  if (x_ptr[k]==0.0)
                    {xxt_zero_nnz++;}
                }
            }
          else
            {
              for (k=0; k<len; k++)
                {
                  if (x_ptr[k]==0.0)
                    {xxt_zero_nnz_0++;}
                }
            }
          col_indices[2*i] = off;
          col_sz[i] = col_indices[2*i+1] = len;
          col_vals[i] = x_ptr;
        }
      else
        {
          col_indices[2*i] = 0;
          col_sz[i] = col_indices[2*i+1] = 0;
          col_vals[i] = x_ptr;
        }
    }

  /* close off stages for execution phase */
  while (dim!=level)
    {
      stages[dim++]=i;
      error_msg_warning("disconnected!!! dim(%d)!=level(%d)\n",dim,level);
    }
  stages[dim]=i;

  xxt_handle->info->n=xxt_handle->mvi->n;
  xxt_handle->info->m=m;
  xxt_handle->info->nnz=xxt_nnz;
  xxt_handle->info->max_nnz=xxt_max_nnz;
  xxt_handle->info->msg_buf_sz=stages[level]-stages[0];
  xxt_handle->info->solve_uu = (PetscScalar *) malloc(m*sizeof(PetscScalar));
  xxt_handle->info->solve_w  = (PetscScalar *) malloc(m*sizeof(PetscScalar));
  xxt_handle->info->x=x;
  xxt_handle->info->col_vals=col_vals;
  xxt_handle->info->col_sz=col_sz;
  xxt_handle->info->col_indices=col_indices;
  xxt_handle->info->stages=stages;
  xxt_handle->info->nsolves=0;
  xxt_handle->info->tot_solve_time=0.0;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return(0);
}


/*************************************xxt.c************************************
Function: do_xxt_solve

Input :
Output:
Return:
Description: applies uc <- X.X^T.uc using the sparse columns of X
**************************************xxt.c***********************************/
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *uc)
{
  int off, len, *iptr;
  int level       =xxt_handle->level;
  int n           =xxt_handle->info->n;
  int m           =xxt_handle->info->m;
  int *stages     =xxt_handle->info->stages;
  int *col_indices=xxt_handle->info->col_indices;
  PetscScalar *x_ptr, *uu_ptr;
  PetscScalar *solve_uu=xxt_handle->info->solve_uu;
  PetscScalar *solve_w =xxt_handle->info->solve_w;
  PetscScalar *x       =xxt_handle->info->x;
  PetscBLASInt i1 = 1;

  PetscFunctionBegin;
  uu_ptr=solve_uu;
  rvec_zero(uu_ptr,m);

  /* x  = X.Y^T.b */
  /* uu = Y^T.b   */
  for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len)
    {
      off=*iptr++; len=*iptr++;
      *uu_ptr++ = BLASdot_(&len,uc+off,&i1,x_ptr,&i1);
    }

  /* communication of beta */
  uu_ptr=solve_uu;
  if (level) {ssgl_radd(uu_ptr, solve_w, level, stages);}

  rvec_zero(uc,n);

  /* x = X.uu */
  for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len)
    {
      off=*iptr++; len=*iptr++;
      BLASaxpy_(&len,uu_ptr++,x_ptr,&i1,uc+off,&i1);
    }
  PetscFunctionReturn(0);
}


/*************************************xxt.c************************************
Function: check_handle()

Input :
Output:
Return:
Description: sanity check that all processors entered with the same,
             previously allocated handle
**************************************xxt.c***********************************/
static PetscErrorCode check_handle(xxt_ADT xxt_handle)
{
  int vals[2], work[2], op[] = {NON_UNIFORM,GL_MIN,GL_MAX};

  PetscFunctionBegin;
  if (xxt_handle==NULL)
    {error_msg_fatal("check_handle() :: bad handle :: NULL %p\n",(void*)xxt_handle);}

  vals[0]=vals[1]=xxt_handle->id;
  giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);
  if ((vals[0]!=vals[1])||(xxt_handle->id<=0))
    {error_msg_fatal("check_handle() :: bad handle :: id mismatch min/max %d/%d %d\n",
                     vals[0],vals[1], xxt_handle->id);}
  PetscFunctionReturn(0);
}

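/*************************************xxt.c************************************
NOTE: a brief sketch of what det_separators() below produces, inferred from
the code.  Working top down over the level = log2(P) hypercube bisections, the
dofs that carry signal from both halves (or, when no dofs are shared, the dofs
of the "winning" half) form the separator for that level.  For each level edge:

    nsep[edge]  - global size of that separator
    lnsep[edge] - number of separator dofs owned by this processor
    fo[]        - firing order: global dof numbers, filled from fo+n downward
                  so that, in increasing index order, the level-0 (purely
                  local) dofs come first and the top-level separator last
    nfo         - number of entries this processor contributed to fo[]

xxt_generate() consumes these arrays to decide which column to fire next.
**************************************xxt.c***********************************/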

/*************************************xxt.c************************************
Function: det_separators

Input :
Output:
Return:
Description:
  det_separators(xxt_handle, local2global, n, m, mylocmatvec, grid_data);
**************************************xxt.c***********************************/
static PetscErrorCode det_separators(xxt_ADT xxt_handle)
{
  int i, ct, id;
  int mask, edge, *iptr;
  int *dir, *used;
  int sum[4], w[4];
  PetscScalar rsum[4], rw[4];
  int op[] = {GL_ADD,0};
  PetscScalar *lhs, *rhs;
  int *nsep, *lnsep, *fo, nfo=0;
  gs_ADT gs_handle=xxt_handle->mvi->gs_handle;
  int *local2global=xxt_handle->mvi->local2global;
  int n=xxt_handle->mvi->n;
  int m=xxt_handle->mvi->m;
  int level=xxt_handle->level;
  int shared=FALSE;

  PetscFunctionBegin;
  dir  = (int*)malloc(sizeof(PetscInt)*(level+1));
  nsep = (int*)malloc(sizeof(PetscInt)*(level+1));
  lnsep= (int*)malloc(sizeof(PetscInt)*(level+1));
  fo   = (int*)malloc(sizeof(PetscInt)*(n+1));
  used = (int*)malloc(sizeof(PetscInt)*n);

  ivec_zero(dir  ,level+1);
  ivec_zero(nsep ,level+1);
  ivec_zero(lnsep,level+1);
  ivec_set (fo   ,-1,n+1);
  ivec_zero(used,n);

  lhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);
  rhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);

  /* determine the # of unique dof */
  rvec_zero(lhs,m);
  rvec_set(lhs,1.0,n);
  gs_gop_hc(gs_handle,lhs,"+\0",level);
  rvec_zero(rsum,2);
  for (ct=i=0;i<n;i++)
    {
      if (lhs[i]!=0.0)
        {rsum[0]+=1.0/lhs[i]; rsum[1]+=lhs[i];}
    }
  grop_hc(rsum,rw,2,op,level);
  rsum[0]+=0.1;
  rsum[1]+=0.1;
  /*
  if (!my_id)
    {
      printf("xxt n unique = %d (%g)\n",(int) rsum[0], rsum[0]);
      printf("xxt n shared = %d (%g)\n",(int) rsum[1], rsum[1]);
    }
  */

  if (fabs(rsum[0]-rsum[1])>EPS)
    {shared=TRUE;}

  xxt_handle->info->n_global=xxt_handle->info->m_global=(int) rsum[0];
  xxt_handle->mvi->n_global =xxt_handle->mvi->m_global =(int) rsum[0];

  /* determine separator sets top down */
  if (shared)
    {
      for (iptr=fo+n,id=my_id,mask=num_nodes>>1,edge=level;edge>0;edge--,mask>>=1)
        {
          /* set rsh of hc, fire, and collect lhs responses */
          (id<mask) ? rvec_zero(lhs,m) : rvec_set(lhs,1.0,m);
          gs_gop_hc(gs_handle,lhs,"+\0",edge);

          /* set lsh of hc, fire, and collect rhs responses */
          (id<mask) ? rvec_set(rhs,1.0,m) : rvec_zero(rhs,m);
          gs_gop_hc(gs_handle,rhs,"+\0",edge);

          for (i=0;i<n;i++)
            {
              if (id< mask)
                {
                  if (lhs[i]!=0.0)
                    {lhs[i]=1.0;}
                }
              if (id>=mask)
                {
                  if (rhs[i]!=0.0)
                    {rhs[i]=1.0;}
                }
            }

          if (id< mask)
            {gs_gop_hc(gs_handle,lhs,"+\0",edge-1);}
          else
            {gs_gop_hc(gs_handle,rhs,"+\0",edge-1);}

          /* count number of dofs I own that have signal and not in sep set */
          rvec_zero(rsum,4);
          for (ivec_zero(sum,4),ct=i=0;i<n;i++)
            {
              if (!used[i])
                {
                  /* number of unmarked dofs on node */
                  ct++;
                  /* number of dofs to be marked on lhs hc */
                  if (id< mask)
                    {
                      if (lhs[i]!=0.0)
                        {sum[0]++; rsum[0]+=1.0/lhs[i];}
                    }
                  /* number of dofs to be marked on rhs hc */
                  if (id>=mask)
                    {
                      if (rhs[i]!=0.0)
                        {sum[1]++; rsum[1]+=1.0/rhs[i];}
                    }
                }
            }

          /* go for load balance - choose half with most unmarked dofs, bias LHS */
          (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
          (id<mask) ? (rsum[2]=ct) : (rsum[3]=ct);
          giop_hc(sum,w,4,op,edge);
          grop_hc(rsum,rw,4,op,edge);
          rsum[0]+=0.1; rsum[1]+=0.1; rsum[2]+=0.1; rsum[3]+=0.1;

          if (id<mask)
            {
              /* mark dofs I own that have signal and not in sep set */
              for (ct=i=0;i<n;i++)
                {
                  if ((!used[i])&&(lhs[i]!=0.0))
                    {
                      ct++; nfo++;

                      if (nfo>n)
                        {error_msg_fatal("nfo about to exceed n\n");}

                      *--iptr = local2global[i];
                      used[i]=edge;
                    }
                }
              if (ct>1) {ivec_sort(iptr,ct);}

              lnsep[edge]=ct;
              nsep[edge]=(int) rsum[0];
              dir [edge]=LEFT;
            }

          if (id>=mask)
            {
              /* mark dofs I own that have signal and not in sep set */
              for (ct=i=0;i<n;i++)
                {
                  if ((!used[i])&&(rhs[i]!=0.0))
                    {
                      ct++; nfo++;

                      if (nfo>n)
                        {error_msg_fatal("nfo about to exceed n\n");}

                      *--iptr = local2global[i];
                      used[i]=edge;
                    }
                }
              if (ct>1) {ivec_sort(iptr,ct);}

              lnsep[edge]=ct;
              nsep[edge]= (int) rsum[1];
              dir [edge]=RIGHT;
            }

          /* LATER or we can recur on these to order seps at this level */
          /* do we need full set of separators for this? */

          /* fold rhs hc into lower */
          if (id>=mask)
            {id-=mask;}
        }
    }
  else
    {
      for (iptr=fo+n,id=my_id,mask=num_nodes>>1,edge=level;edge>0;edge--,mask>>=1)
        {
          /* set rsh of hc, fire, and collect lhs responses */
          (id<mask) ? rvec_zero(lhs,m) : rvec_set(lhs,1.0,m);
          gs_gop_hc(gs_handle,lhs,"+\0",edge);

          /* set lsh of hc, fire, and collect rhs responses */
          (id<mask) ? rvec_set(rhs,1.0,m) : rvec_zero(rhs,m);
          gs_gop_hc(gs_handle,rhs,"+\0",edge);

          /* count number of dofs I own that have signal and not in sep set */
          for (ivec_zero(sum,4),ct=i=0;i<n;i++)
            {
              if (!used[i])
                {
                  /* number of unmarked dofs on node */
                  ct++;
                  /* number of dofs to be marked on lhs hc */
                  if ((id< mask)&&(lhs[i]!=0.0)) {sum[0]++;}
                  /* number of dofs to be marked on rhs hc */
                  if ((id>=mask)&&(rhs[i]!=0.0)) {sum[1]++;}
                }
            }

          /* go for load balance - choose half with most unmarked dofs, bias LHS */
          (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
          giop_hc(sum,w,4,op,edge);

          /* lhs hc wins */
          if (sum[2]>=sum[3])
            {
              if (id<mask)
                {
                  /* mark dofs I own that have signal and not in sep set */
                  for (ct=i=0;i<n;i++)
                    {
                      if ((!used[i])&&(lhs[i]!=0.0))
                        {
                          ct++; nfo++;
                          *--iptr = local2global[i];
                          used[i]=edge;
                        }
                    }
                  if (ct>1) {ivec_sort(iptr,ct);}
                  lnsep[edge]=ct;
                }
              nsep[edge]=sum[0];
              dir [edge]=LEFT;
            }
          /* rhs hc wins */
          else
            {
              if (id>=mask)
                {
                  /* mark dofs I own that have signal and not in sep set */
                  for (ct=i=0;i<n;i++)
                    {
                      if ((!used[i])&&(rhs[i]!=0.0))
                        {
                          ct++; nfo++;
                          *--iptr = local2global[i];
                          used[i]=edge;
                        }
                    }
                  if (ct>1) {ivec_sort(iptr,ct);}
                  lnsep[edge]=ct;
                }
              nsep[edge]=sum[1];
              dir [edge]=RIGHT;
            }
          /* LATER or we can recur on these to order seps at this level */
          /* do we need full set of separators for this? */

          /* fold rhs hc into lower */
          if (id>=mask)
            {id-=mask;}
        }
    }

  /* level 0 is on processor case - so mark the remainder */
  for (ct=i=0;i<n;i++)
    {
      if (!used[i])
        {
          ct++; nfo++;
          *--iptr = local2global[i];
          used[i]=edge;
        }
    }
  if (ct>1) {ivec_sort(iptr,ct);}
  lnsep[edge]=ct;
  nsep [edge]=ct;
  dir  [edge]=LEFT;

  xxt_handle->info->nsep=nsep;
  xxt_handle->info->lnsep=lnsep;
  xxt_handle->info->fo=fo;
  xxt_handle->info->nfo=nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(0);
}


/*************************************xxt.c************************************
Function: set_mvi

Input :
Output:
Return:
Description:
**************************************xxt.c***********************************/
static
mv_info *set_mvi(int *local2global, int n, int m, void *matvec, void *grid_data)
{
  mv_info *mvi;

  mvi = (mv_info*)malloc(sizeof(mv_info));
  mvi->n=n;
  mvi->m=m;
  mvi->n_global=-1;
  mvi->m_global=-1;
  mvi->local2global=(int*)malloc((m+1)*sizeof(PetscInt));
  ivec_copy(mvi->local2global,local2global,m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec=(PetscErrorCode (*)(mv_info*,PetscScalar*,PetscScalar*))matvec;
  mvi->grid_data=grid_data;

  /* set xxt communication handle to perform restricted matvec */
  mvi->gs_handle = gs_init(local2global, m, num_nodes);

  return(mvi);
}


/*************************************xxt.c************************************
Function: do_matvec

Input :
Output:
Return:
Description:

computes u = A.v
do_matvec(xxt_handle->mvi,v,u);
**************************************xxt.c***********************************/
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
  A->matvec((mv_info*)A->grid_data,v,u);
  PetscFunctionReturn(0);
}