/*************************************xyt.c************************************
Module Name: xyt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL  60439              |Chicago, IL  60637              |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
**************************************xyt.c***********************************/
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT  1
#define BOTH   0

typedef struct xyt_solver_info {
  PetscInt    n, m, n_global, m_global;
  PetscInt    nnz, max_nnz, msg_buf_sz;
  PetscInt    *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt    *xcol_sz, *xcol_indices;
  PetscScalar **xcol_vals, *x, *solve_uu, *solve_w;
  PetscInt    *ycol_sz, *ycol_indices;
  PetscScalar **ycol_vals, *y;
  PetscInt    nsolves;
  PetscScalar tot_solve_time;
} xyt_info;

typedef struct matvec_info {
  PetscInt       n, m, n_global, m_global;
  PetscInt       *local2global;
  gs_ADT         gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info*,PetscScalar*,PetscScalar*);
  void           *grid_data;
} mv_info;

struct xyt_CDT {
  PetscInt id;
  PetscInt ns;
  PetscInt level;
  xyt_info *info;
  mv_info  *mvi;
};

static PetscInt n_xyt         = 0;
static PetscInt n_xyt_handles = 0;

/* prototypes */
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xyt_ADT xyt_handle);
static PetscErrorCode det_separators(xyt_ADT xyt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscInt xyt_generate(xyt_ADT xyt_handle);
static PetscInt do_xyt_factor(xyt_ADT xyt_handle);
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data);

/**************************************xyt.c***********************************/
xyt_ADT XYT_new(void)
{
  xyt_ADT xyt_handle;

  /* rolling count on n_xyt ... pot. problem here */
  n_xyt_handles++;
  xyt_handle       = (xyt_ADT)malloc(sizeof(struct xyt_CDT));
  xyt_handle->id   = ++n_xyt;
  xyt_handle->info = NULL;
  xyt_handle->mvi  = NULL;

  return(xyt_handle);
}

/**************************************xyt.c***********************************/
PetscInt XYT_factor(xyt_ADT xyt_handle,     /* prev. allocated xyt handle */
                    PetscInt *local2global, /* global column mapping      */
                    PetscInt n,             /* local num rows             */
                    PetscInt m,             /* local num cols             */
                    void *matvec,           /* b_loc=A_local.x_loc        */
                    void *grid_data         /* grid data for matvec       */
                    )
{
  comm_init();
  check_handle(xyt_handle);

  /* only 2^k for now and all nodes participating */
  if ((1<<(xyt_handle->level=i_log2_num_nodes))!=num_nodes) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"only 2^k for now and MPI_COMM_WORLD!!! %D != %D\n",1<<i_log2_num_nodes,num_nodes);

  /* space for X info */
  xyt_handle->info = (xyt_info*)malloc(sizeof(xyt_info));

  /* set up matvec handles */
  xyt_handle->mvi = set_mvi(local2global, n, m, matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xyt_handle->ns=0;

  /* determine separators and generate firing order - NB xyt info set here */
  det_separators(xyt_handle);

  return(do_xyt_factor(xyt_handle));
}
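
/**************************************xyt.c***********************************/
/* Illustrative only: a minimal sketch of the intended call sequence for this
   module's public API.  The matvec callback, its grid_data argument, and the
   names my_matvec/my_grid/sketch_xyt below are hypothetical caller-side
   pieces, not part of this file; the block is guarded out of the build. */
#if 0
static PetscErrorCode my_matvec(mv_info *grid, PetscScalar *in, PetscScalar *out);

static void sketch_xyt(PetscInt *local2global, PetscInt n, PetscInt m,
                       void *my_grid, PetscScalar *x, PetscScalar *b)
{
  xyt_ADT handle = XYT_new();                                    /* allocate handle       */
  XYT_factor(handle,local2global,n,m,(void*)my_matvec,my_grid);  /* build X and Y factors */
  XYT_solve(handle,x,b);                                         /* x = X.Y^T.b           */
  XYT_stats(handle);                                             /* optional diagnostics  */
  XYT_free(handle);                                              /* release all storage   */
}
#endif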

/**************************************xyt.c***********************************/
PetscInt XYT_solve(xyt_ADT xyt_handle, PetscScalar *x, PetscScalar *b)
{
  comm_init();
  check_handle(xyt_handle);

  /* need to copy b into x? */
  if (b) {rvec_copy(x,b,xyt_handle->mvi->n);}
  do_xyt_solve(xyt_handle,x);

  return(0);
}

/**************************************xyt.c***********************************/
PetscInt XYT_free(xyt_ADT xyt_handle)
{
  comm_init();
  check_handle(xyt_handle);
  n_xyt_handles--;

  free(xyt_handle->info->nsep);
  free(xyt_handle->info->lnsep);
  free(xyt_handle->info->fo);
  free(xyt_handle->info->stages);
  free(xyt_handle->info->solve_uu);
  free(xyt_handle->info->solve_w);
  free(xyt_handle->info->x);
  free(xyt_handle->info->xcol_vals);
  free(xyt_handle->info->xcol_sz);
  free(xyt_handle->info->xcol_indices);
  free(xyt_handle->info->y);
  free(xyt_handle->info->ycol_vals);
  free(xyt_handle->info->ycol_sz);
  free(xyt_handle->info->ycol_indices);
  free(xyt_handle->info);
  free(xyt_handle->mvi->local2global);
  gs_free(xyt_handle->mvi->gs_handle);
  free(xyt_handle->mvi);
  free(xyt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  return(0);
}

/**************************************xyt.c***********************************/
PetscInt XYT_stats(xyt_ADT xyt_handle)
{
  PetscInt    op[]  = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD};
  PetscInt    fop[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD};
  PetscInt    vals[9], work[9];
  PetscScalar fvals[3], fwork[3];

  comm_init();
  check_handle(xyt_handle);

  /* if factorization not done there are no stats */
  if (!xyt_handle->info||!xyt_handle->mvi) {
    if (!my_id) PetscPrintf(PETSC_COMM_WORLD,"XYT_stats() :: no stats available!\n");
    return 1;
  }

  vals[0]=vals[1]=vals[2]=xyt_handle->info->nnz;
  vals[3]=vals[4]=vals[5]=xyt_handle->mvi->n;
  vals[6]=vals[7]=vals[8]=xyt_handle->info->msg_buf_sz;
  giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);

  fvals[0]=fvals[1]=fvals[2]=xyt_handle->info->tot_solve_time/xyt_handle->info->nsolves++;
  grop(fvals,fwork,sizeof(fop)/sizeof(fop[0])-1,fop);

  if (!my_id) {
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min xyt_nnz=%D\n",my_id,vals[0]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max xyt_nnz=%D\n",my_id,vals[1]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xyt_nnz=%g\n",my_id,1.0*vals[2]/num_nodes);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: tot xyt_nnz=%D\n",my_id,vals[2]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: xyt C(2d)  =%g\n",my_id,vals[2]/(pow(1.0*vals[5],1.5)));
    PetscPrintf(PETSC_COMM_WORLD,"%D :: xyt C(3d)  =%g\n",my_id,vals[2]/(pow(1.0*vals[5],1.6667)));
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min xyt_n  =%D\n",my_id,vals[3]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max xyt_n  =%D\n",my_id,vals[4]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xyt_n  =%g\n",my_id,1.0*vals[5]/num_nodes);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: tot xyt_n  =%D\n",my_id,vals[5]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min xyt_buf=%D\n",my_id,vals[6]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max xyt_buf=%D\n",my_id,vals[7]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xyt_buf=%g\n",my_id,1.0*vals[8]/num_nodes);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min xyt_slv=%g\n",my_id,fvals[0]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max xyt_slv=%g\n",my_id,fvals[1]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xyt_slv=%g\n",my_id,fvals[2]/num_nodes);
  }

  return(0);
}
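
/* Note (added for clarity): each quantity above is replicated three times in
   vals[]/fvals[] so that a single giop()/grop() pass over the op table
   {GL_MIN,GL_MAX,GL_ADD} returns the global min, max, and sum at once; the
   averages are then sum/num_nodes.  The C(2d)/C(3d) lines estimate the
   constant C in the expected factor size, nnz ~ C*n^(3/2) in 2D and
   nnz ~ C*n^(5/3) in 3D, the same growth rates the storage guess in
   xyt_generate() below is based on. */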

/*************************************xyt.c************************************

Description: get A_local, local portion of global coarse matrix which
is a row dist. nxm matrix w/ n<m.
   o my_ml holds address of ML struct associated w/A_local and coarse grid
   o local2global holds global number of column i (i=0,...,m-1)
   o local2global holds global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed using
     gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
**************************************xyt.c***********************************/
static PetscInt do_xyt_factor(xyt_ADT xyt_handle)
{
  return xyt_generate(xyt_handle);
}

/**************************************xyt.c***********************************/
static PetscInt xyt_generate(xyt_ADT xyt_handle)
{
  PetscInt       i,j,k,idx;
  PetscInt       dim, col;
  PetscScalar    *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt       *segs;
  PetscInt       op[] = {GL_ADD,0};
  PetscInt       off, len;
  PetscScalar    *x_ptr, *y_ptr;
  PetscInt       *iptr, flag;
  PetscInt       start=0, end, work;
  PetscInt       op2[] = {GL_MIN,0};
  gs_ADT         gs_handle;
  PetscInt       *nsep, *lnsep, *fo;
  PetscInt       a_n=xyt_handle->mvi->n;
  PetscInt       a_m=xyt_handle->mvi->m;
  PetscInt       *a_local2global=xyt_handle->mvi->local2global;
  PetscInt       level;
  PetscInt       n, m;
  PetscInt       *xcol_sz, *xcol_indices, *stages;
  PetscScalar    **xcol_vals, *x;
  PetscInt       *ycol_sz, *ycol_indices;
  PetscScalar    **ycol_vals, *y;
  PetscInt       n_global;
  PetscInt       xt_nnz=0, xt_max_nnz=0;
  PetscInt       yt_nnz=0, yt_max_nnz=0;
  PetscInt       xt_zero_nnz  =0;
  PetscInt       xt_zero_nnz_0=0;
  PetscInt       yt_zero_nnz  =0;
  PetscInt       yt_zero_nnz_0=0;
  PetscBLASInt   i1 = 1,dlen;
  PetscScalar    dm1 = -1.0;
  PetscErrorCode ierr;

  n        =xyt_handle->mvi->n;
  nsep     =xyt_handle->info->nsep;
  lnsep    =xyt_handle->info->lnsep;
  fo       =xyt_handle->info->fo;
  end      =lnsep[0];
  level    =xyt_handle->level;
  gs_handle=xyt_handle->mvi->gs_handle;
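
  /* Background note (added for clarity): xyt_generate() builds the factors of
     A^{-1} = X.Y^T one column per fired separator dof.  For each fired unit
     vector v it orthogonalizes u=A.v against the previously stored columns
     of Y (uu=Y^T.u, z=X.uu, v=v-z), then scales v and u=A.v by 1/sqrt(u^T.u)
     so that the columns of Y=A.X come out orthonormal; do_xyt_solve() below
     then applies x = X.(Y^T.b). */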

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i=0, j=0; i<=level; i++) {j+=nsep[i];}

  m = j-xyt_handle->ns;
  if (m!=j) {ierr = PetscPrintf(PETSC_COMM_WORLD,"xyt_generate() :: null space exists %D %D %D\n",m,j,xyt_handle->ns);}

  ierr = PetscInfo2(0,"xyt_generate() :: X(%D,%D)\n",n,m);CHKERRQ(ierr);

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  xcol_sz      = (PetscInt*) malloc(m*sizeof(PetscInt));
  xcol_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  xcol_vals    = (PetscScalar**) malloc(m*sizeof(PetscScalar*));
  for (i=j=0; i<m; i++, j+=2)
    {
      xcol_indices[j]=xcol_indices[j+1]=xcol_sz[i]=-1;
      xcol_vals[i] = NULL;
    }
  xcol_indices[j]=-1;

  /* get and initialize storage for y local         */
  /* note that y local is nxm and stored by columns */
  ycol_sz      = (PetscInt*) malloc(m*sizeof(PetscInt));
  ycol_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  ycol_vals    = (PetscScalar**) malloc(m*sizeof(PetscScalar*));
  for (i=j=0; i<m; i++, j+=2)
    {
      ycol_indices[j]=ycol_indices[j+1]=ycol_sz[i]=-1;
      ycol_vals[i] = NULL;
    }
  ycol_indices[j]=-1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  segs   = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  ivec_zero(stages,level+1);
  ivec_copy(segs,nsep,level+1);
  for (i=0; i<level; i++) {segs[i+1] += segs[i];}
  stages[0] = segs[0];

  /* temporary vectors */
  u  = (PetscScalar*) malloc(n*sizeof(PetscScalar));
  z  = (PetscScalar*) malloc(n*sizeof(PetscScalar));
  v  = (PetscScalar*) malloc(a_m*sizeof(PetscScalar));
  uu = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  w  = (PetscScalar*) malloc(m*sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i=1, j=0; i<=level; i++) {j+=nsep[i];}

  /* storage for sparse x values */
  n_global   = xyt_handle->info->n_global;
  xt_max_nnz = yt_max_nnz = (PetscInt)(2.5*pow(1.0*n_global,1.6667) + j*n/2)/num_nodes;
  x          = (PetscScalar*) malloc(xt_max_nnz*sizeof(PetscScalar));
  y          = (PetscScalar*) malloc(yt_max_nnz*sizeof(PetscScalar));
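
  /* Layout note (added; illustrative only): each column of X is stored as one
     contiguous band of entries; xcol_indices holds interleaved (off,len)
     pairs, one per column, closed by a -1 sentinel, and xcol_vals[i] points
     at column i's band inside x.  Y mirrors the same scheme.  A sketch of a
     walk over the populated structure, guarded out of the build: */
#if 0
  {
    PetscInt    *ip = xcol_indices;
    PetscScalar *vp = x;
    for (k=0; *ip!=-1; k++)
      {
        PetscInt c_off = *ip++;  /* first local row of column k's band */
        PetscInt c_len = *ip++;  /* number of stored entries           */
        /* column k's values are vp[0..c_len-1], i.e. xcol_vals[k] */
        vp += c_len;
      }
  }
#endif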

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim=i=j=0;i<m;i++)
    {
      /* time to move to the next level? */
      while (i==segs[dim])
        {
          if (dim==level) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"dim about to exceed level\n");
          stages[dim++]=i;
          end         +=lnsep[dim];
        }
      stages[dim]=i;

      /* which column are we firing? */
      /* i.e. set v_l */
      /* use new seps and do global min across hc to determine which one to fire */
      (start<end) ? (col=fo[start]) : (col=INT_MAX);
      giop_hc(&col,&work,1,op2,dim);

      /* shouldn't need this */
      if (col==INT_MAX)
        {
          ierr = PetscInfo(0,"hey ... col==INT_MAX??\n");CHKERRQ(ierr);
          continue;
        }

      /* do I own it? I should */
      rvec_zero(v ,a_m);
      if (col==fo[start])
        {
          start++;
          idx=ivec_linear_search(col, a_local2global, a_n);
          if (idx!=-1) {v[idx] = 1.0; j++;}
          else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"NOT FOUND!\n");
        }
      else
        {
          idx=ivec_linear_search(col, a_local2global, a_m);
          if (idx!=-1) {v[idx] = 1.0;}
        }

      /* perform u = A.v_l */
      rvec_zero(u,n);
      do_matvec(xyt_handle->mvi,v,u);

      /* uu = Y^T.u_l (local portion) */
      /* technically only need to zero out first i entries */
      /* later turn this into an XYT_solve call ? */
      rvec_zero(uu,m);
      y_ptr=y;
      iptr =ycol_indices;
      for (k=0; k<i; k++)
        {
          off  = *iptr++;
          len  = *iptr++;
          dlen = PetscBLASIntCast(len);
          uu[k]= BLASdot_(&dlen,u+off,&i1,y_ptr,&i1);
          y_ptr+=len;
        }

      /* uu = Y^T.u_l (comm portion) */
      ssgl_radd(uu, w, dim, stages);

      /* z = X.uu */
      rvec_zero(z,n);
      x_ptr=x;
      iptr =xcol_indices;
      for (k=0; k<i; k++)
        {
          off  = *iptr++;
          len  = *iptr++;
          dlen = PetscBLASIntCast(len);
          BLASaxpy_(&dlen,&uu[k],x_ptr,&i1,z+off,&i1);
          x_ptr+=len;
        }

      /* compute v_l = v_l - z */
      rvec_zero(v+a_n,a_m-a_n);
      dlen = PetscBLASIntCast(n);
      BLASaxpy_(&dlen,&dm1,z,&i1,v,&i1);

      /* compute u_l = A.v_l */
      if (a_n!=a_m) {gs_gop_hc(gs_handle,v,"+\0",dim);}
      rvec_zero(u,n);
      do_matvec(xyt_handle->mvi,v,u);

      /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - local portion */
      dlen  = PetscBLASIntCast(n);
      alpha = BLASdot_(&dlen,u,&i1,u,&i1);
      /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - comm portion */
      grop_hc(&alpha, &alpha_w, 1, op, dim);

      alpha = (PetscScalar) PetscSqrtReal((PetscReal)alpha);

      /* check for small alpha */
      /* LATER use this to detect and determine null space */
      if (fabs(alpha)<1.0e-14) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"bad alpha! %g\n",alpha);

      /* compute v_l = v_l/sqrt(alpha) */
      rvec_scale(v,1.0/alpha,n);
      rvec_scale(u,1.0/alpha,n);
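
      /* Note (added for clarity): at this point x_ptr and y_ptr sit exactly
         xt_nnz and yt_nnz entries into x and y (the dot/axpy loops above
         walked them past every previously stored column), so the append
         logic below can write the new column at *_ptr; the doubling branches
         re-establish the same position after a realloc-by-copy. */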
%g\n",alpha); 432 433 /* compute v_l = v_l/sqrt(alpha) */ 434 rvec_scale(v,1.0/alpha,n); 435 rvec_scale(u,1.0/alpha,n); 436 437 /* add newly generated column, v_l, to X */ 438 flag = 1; 439 off=len=0; 440 for (k=0; k<n; k++) 441 { 442 if (v[k]!=0.0) 443 { 444 len=k; 445 if (flag) 446 {off=k; flag=0;} 447 } 448 } 449 450 len -= (off-1); 451 452 if (len>0) 453 { 454 if ((xt_nnz+len)>xt_max_nnz) 455 { 456 ierr = PetscInfo(0,"increasing space for X by 2x!\n");CHKERRQ(ierr); 457 xt_max_nnz *= 2; 458 x_ptr = (PetscScalar *) malloc(xt_max_nnz*sizeof(PetscScalar)); 459 rvec_copy(x_ptr,x,xt_nnz); 460 free(x); 461 x = x_ptr; 462 x_ptr+=xt_nnz; 463 } 464 xt_nnz += len; 465 rvec_copy(x_ptr,v+off,len); 466 467 /* keep track of number of zeros */ 468 if (dim) 469 { 470 for (k=0; k<len; k++) 471 { 472 if (x_ptr[k]==0.0) 473 {xt_zero_nnz++;} 474 } 475 } 476 else 477 { 478 for (k=0; k<len; k++) 479 { 480 if (x_ptr[k]==0.0) 481 {xt_zero_nnz_0++;} 482 } 483 } 484 xcol_indices[2*i] = off; 485 xcol_sz[i] = xcol_indices[2*i+1] = len; 486 xcol_vals[i] = x_ptr; 487 } 488 else 489 { 490 xcol_indices[2*i] = 0; 491 xcol_sz[i] = xcol_indices[2*i+1] = 0; 492 xcol_vals[i] = x_ptr; 493 } 494 495 496 /* add newly generated column, u_l, to Y */ 497 flag = 1; 498 off=len=0; 499 for (k=0; k<n; k++) 500 { 501 if (u[k]!=0.0) 502 { 503 len=k; 504 if (flag) 505 {off=k; flag=0;} 506 } 507 } 508 509 len -= (off-1); 510 511 if (len>0) 512 { 513 if ((yt_nnz+len)>yt_max_nnz) 514 { 515 ierr = PetscInfo(0,"increasing space for Y by 2x!\n");CHKERRQ(ierr); 516 yt_max_nnz *= 2; 517 y_ptr = (PetscScalar *) malloc(yt_max_nnz*sizeof(PetscScalar)); 518 rvec_copy(y_ptr,y,yt_nnz); 519 free(y); 520 y = y_ptr; 521 y_ptr+=yt_nnz; 522 } 523 yt_nnz += len; 524 rvec_copy(y_ptr,u+off,len); 525 526 /* keep track of number of zeros */ 527 if (dim) 528 { 529 for (k=0; k<len; k++) 530 { 531 if (y_ptr[k]==0.0) 532 {yt_zero_nnz++;} 533 } 534 } 535 else 536 { 537 for (k=0; k<len; k++) 538 { 539 if (y_ptr[k]==0.0) 540 {yt_zero_nnz_0++;} 541 } 542 } 543 ycol_indices[2*i] = off; 544 ycol_sz[i] = ycol_indices[2*i+1] = len; 545 ycol_vals[i] = y_ptr; 546 } 547 else 548 { 549 ycol_indices[2*i] = 0; 550 ycol_sz[i] = ycol_indices[2*i+1] = 0; 551 ycol_vals[i] = y_ptr; 552 } 553 } 554 555 /* close off stages for execution phase */ 556 while (dim!=level) 557 { 558 stages[dim++]=i; 559 ierr = PetscInfo2(0,"disconnected!!! 

  /* close off stages for execution phase */
  while (dim!=level)
    {
      stages[dim++]=i;
      ierr = PetscInfo2(0,"disconnected!!! dim(%D)!=level(%D)\n",dim,level);CHKERRQ(ierr);
    }
  stages[dim]=i;

  xyt_handle->info->n=xyt_handle->mvi->n;
  xyt_handle->info->m=m;
  xyt_handle->info->nnz=xt_nnz + yt_nnz;
  xyt_handle->info->max_nnz=xt_max_nnz + yt_max_nnz;
  xyt_handle->info->msg_buf_sz=stages[level]-stages[0];
  xyt_handle->info->solve_uu = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  xyt_handle->info->solve_w  = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  xyt_handle->info->x=x;
  xyt_handle->info->xcol_vals=xcol_vals;
  xyt_handle->info->xcol_sz=xcol_sz;
  xyt_handle->info->xcol_indices=xcol_indices;
  xyt_handle->info->stages=stages;
  xyt_handle->info->y=y;
  xyt_handle->info->ycol_vals=ycol_vals;
  xyt_handle->info->ycol_sz=ycol_sz;
  xyt_handle->info->ycol_indices=ycol_indices;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return(0);
}

/**************************************xyt.c***********************************/
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *uc)
{
  PetscInt     off, len, *iptr;
  PetscInt     level        =xyt_handle->level;
  PetscInt     n            =xyt_handle->info->n;
  PetscInt     m            =xyt_handle->info->m;
  PetscInt     *stages      =xyt_handle->info->stages;
  PetscInt     *xcol_indices=xyt_handle->info->xcol_indices;
  PetscInt     *ycol_indices=xyt_handle->info->ycol_indices;
  PetscScalar  *x_ptr, *y_ptr, *uu_ptr;
  PetscScalar  *solve_uu    =xyt_handle->info->solve_uu;
  PetscScalar  *solve_w     =xyt_handle->info->solve_w;
  PetscScalar  *x           =xyt_handle->info->x;
  PetscScalar  *y           =xyt_handle->info->y;
  PetscBLASInt i1 = 1,dlen;

  PetscFunctionBegin;
  uu_ptr=solve_uu;
  rvec_zero(uu_ptr,m);

  /* x = X.Y^T.b */
  /* uu = Y^T.b */
  for (y_ptr=y,iptr=ycol_indices; *iptr!=-1; y_ptr+=len)
    {
      off=*iptr++; len=*iptr++;
      dlen = PetscBLASIntCast(len);
      *uu_ptr++ = BLASdot_(&dlen,uc+off,&i1,y_ptr,&i1);
    }

  /* communication of beta */
  uu_ptr=solve_uu;
  if (level) {ssgl_radd(uu_ptr, solve_w, level, stages);}

  rvec_zero(uc,n);

  /* x = X.uu */
  for (x_ptr=x,iptr=xcol_indices; *iptr!=-1; x_ptr+=len)
    {
      off=*iptr++; len=*iptr++;
      dlen = PetscBLASIntCast(len);
      BLASaxpy_(&dlen,uu_ptr++,x_ptr,&i1,uc+off,&i1);
    }
  PetscFunctionReturn(0);
}
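
/* Note on do_xyt_solve() (added for clarity): with Y=A.X built so that
   Y^T.Y=I, applying x = X.(Y^T.b) gives A.x = Y.(Y^T.b), i.e. the exact
   solution whenever the columns of Y span the space; the single ssgl_radd()
   call supplies the off-processor portion of the inner products Y^T.b. */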

/**************************************xyt.c***********************************/
static PetscErrorCode check_handle(xyt_ADT xyt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM,GL_MIN,GL_MAX};

  PetscFunctionBegin;
  if (!xyt_handle) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: NULL %D\n",xyt_handle);

  vals[0]=vals[1]=xyt_handle->id;
  giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);
  if ((vals[0]!=vals[1])||(xyt_handle->id<=0)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: id mismatch min/max %D/%D %D\n",vals[0],vals[1],xyt_handle->id);
  PetscFunctionReturn(0);
}

/**************************************xyt.c***********************************/
static PetscErrorCode det_separators(xyt_ADT xyt_handle)
{
  PetscInt       i, ct, id;
  PetscInt       mask, edge, *iptr;
  PetscInt       *dir, *used;
  PetscInt       sum[4], w[4];
  PetscScalar    rsum[4], rw[4];
  PetscInt       op[] = {GL_ADD,0};
  PetscScalar    *lhs, *rhs;
  PetscInt       *nsep, *lnsep, *fo, nfo=0;
  gs_ADT         gs_handle    =xyt_handle->mvi->gs_handle;
  PetscInt       *local2global=xyt_handle->mvi->local2global;
  PetscInt       n            =xyt_handle->mvi->n;
  PetscInt       m            =xyt_handle->mvi->m;
  PetscInt       level        =xyt_handle->level;
  PetscInt       shared       =0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  dir  = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  nsep = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  lnsep= (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  fo   = (PetscInt*)malloc(sizeof(PetscInt)*(n+1));
  used = (PetscInt*)malloc(sizeof(PetscInt)*n);

  ivec_zero(dir  ,level+1);
  ivec_zero(nsep ,level+1);
  ivec_zero(lnsep,level+1);
  ivec_set (fo   ,-1,n+1);
  ivec_zero(used,n);

  lhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);
  rhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);

  /* determine the # of unique dof */
  rvec_zero(lhs,m);
  rvec_set(lhs,1.0,n);
  gs_gop_hc(gs_handle,lhs,"+\0",level);
  ierr = PetscInfo(0,"done first gs_gop_hc\n");CHKERRQ(ierr);
  rvec_zero(rsum,2);
  for (ct=i=0;i<n;i++)
    {
      if (lhs[i]!=0.0) {rsum[0]+=1.0/lhs[i]; rsum[1]+=lhs[i];}
      if (lhs[i]!=1.0) {shared=1;}
    }

  grop_hc(rsum,rw,2,op,level);
  rsum[0]+=0.1;
  rsum[1]+=0.1;

  xyt_handle->info->n_global=xyt_handle->info->m_global=(PetscInt) rsum[0];
  xyt_handle->mvi->n_global =xyt_handle->mvi->m_global =(PetscInt) rsum[0];
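
  /* Note (added for clarity): the loops below locate separators top down by
     hypercube bisection.  At each edge one half of the hypercube fires a
     vector of ones while the other half fires zeros; after the gs_gop_hc()
     sum, any locally owned dof that sees a nonzero signal from the far half
     lies on the interface and is appended to the firing order fo. */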

  /* determine separator sets top down */
  if (shared)
    {
      /* solution is to do as in the symmetric shared case but then */
      /* pick the sub-hc with the most free dofs and do a mat-vec   */
      /* and pick up the responses on the other sub-hc from the     */
      /* initial separator set obtained from the symm. shared case  */
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"shared dof separator determination not ready ... see hmt!!!\n");
      for (iptr=fo+n,id=my_id,mask=num_nodes>>1,edge=level;edge>0;edge--,mask>>=1)
        {
          /* set rsh of hc, fire, and collect lhs responses */
          (id<mask) ? rvec_zero(lhs,m) : rvec_set(lhs,1.0,m);
          gs_gop_hc(gs_handle,lhs,"+\0",edge);

          /* set lsh of hc, fire, and collect rhs responses */
          (id<mask) ? rvec_set(rhs,1.0,m) : rvec_zero(rhs,m);
          gs_gop_hc(gs_handle,rhs,"+\0",edge);

          for (i=0;i<n;i++)
            {
              if (id< mask)
                {
                  if (lhs[i]!=0.0) {lhs[i]=1.0;}
                }
              if (id>=mask)
                {
                  if (rhs[i]!=0.0) {rhs[i]=1.0;}
                }
            }

          if (id< mask) {gs_gop_hc(gs_handle,lhs,"+\0",edge-1);}
          else          {gs_gop_hc(gs_handle,rhs,"+\0",edge-1);}

          /* count number of dofs I own that have signal and not in sep set */
          rvec_zero(rsum,4);
          for (ivec_zero(sum,4),ct=i=0;i<n;i++)
            {
              if (!used[i])
                {
                  /* number of unmarked dofs on node */
                  ct++;
                  /* number of dofs to be marked on lhs hc */
                  if (id< mask)
                    {
                      if (lhs[i]!=0.0) {sum[0]++; rsum[0]+=1.0/lhs[i];}
                    }
                  /* number of dofs to be marked on rhs hc */
                  if (id>=mask)
                    {
                      if (rhs[i]!=0.0) {sum[1]++; rsum[1]+=1.0/rhs[i];}
                    }
                }
            }

          /* go for load balance - choose half with most unmarked dofs, bias LHS */
          (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
          (id<mask) ? (rsum[2]=ct) : (rsum[3]=ct);
          giop_hc(sum,w,4,op,edge);
          grop_hc(rsum,rw,4,op,edge);
          rsum[0]+=0.1; rsum[1]+=0.1; rsum[2]+=0.1; rsum[3]+=0.1;

          if (id<mask)
            {
              /* mark dofs I own that have signal and not in sep set */
              for (ct=i=0;i<n;i++)
                {
                  if ((!used[i])&&(lhs[i]!=0.0))
                    {
                      ct++; nfo++;

                      if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

                      *--iptr = local2global[i];
                      used[i] = edge;
                    }
                }
              if (ct>1) {ivec_sort(iptr,ct);}

              lnsep[edge]=ct;
              nsep[edge]=(PetscInt) rsum[0];
              dir [edge]=LEFT;
            }

          if (id>=mask)
            {
              /* mark dofs I own that have signal and not in sep set */
              for (ct=i=0;i<n;i++)
                {
                  if ((!used[i])&&(rhs[i]!=0.0))
                    {
                      ct++; nfo++;

                      if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

                      *--iptr = local2global[i];
                      used[i] = edge;
                    }
                }
              if (ct>1) {ivec_sort(iptr,ct);}

              lnsep[edge]=ct;
              nsep[edge]= (PetscInt) rsum[1];
              dir [edge]=RIGHT;
            }

          /* LATER or we can recur on these to order seps at this level */
          /* do we need full set of separators for this? */

          /* fold rhs hc into lower */
          if (id>=mask) {id-=mask;}
        }
    }
  else
    {
      for (iptr=fo+n,id=my_id,mask=num_nodes>>1,edge=level;edge>0;edge--,mask>>=1)
        {
          /* set rsh of hc, fire, and collect lhs responses */
          (id<mask) ? rvec_zero(lhs,m) : rvec_set(lhs,1.0,m);
          gs_gop_hc(gs_handle,lhs,"+\0",edge);

          /* set lsh of hc, fire, and collect rhs responses */
          (id<mask) ? rvec_set(rhs,1.0,m) : rvec_zero(rhs,m);
          gs_gop_hc(gs_handle,rhs,"+\0",edge);

          /* count number of dofs I own that have signal and not in sep set */
          for (ivec_zero(sum,4),ct=i=0;i<n;i++)
            {
              if (!used[i])
                {
                  /* number of unmarked dofs on node */
                  ct++;
                  /* number of dofs to be marked on lhs hc */
                  if ((id< mask)&&(lhs[i]!=0.0)) {sum[0]++;}
                  /* number of dofs to be marked on rhs hc */
                  if ((id>=mask)&&(rhs[i]!=0.0)) {sum[1]++;}
                }
            }

          /* for the non-symmetric case we need separators of width 2 */
          /* so take both sides */
          (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
          giop_hc(sum,w,4,op,edge);

          ct=0;
          if (id<mask)
            {
              /* mark dofs I own that have signal and not in sep set */
              for (i=0;i<n;i++)
                {
                  if ((!used[i])&&(lhs[i]!=0.0))
                    {
                      ct++; nfo++;
                      *--iptr = local2global[i];
                      used[i] = edge;
                    }
                }
              /* LSH hc summation of ct should be sum[0] */
            }
          else
            {
              /* mark dofs I own that have signal and not in sep set */
              for (i=0;i<n;i++)
                {
                  if ((!used[i])&&(rhs[i]!=0.0))
                    {
                      ct++; nfo++;
                      *--iptr = local2global[i];
                      used[i] = edge;
                    }
                }
              /* RSH hc summation of ct should be sum[1] */
            }

          if (ct>1) {ivec_sort(iptr,ct);}
          lnsep[edge]=ct;
          nsep[edge]=sum[0]+sum[1];
          dir [edge]=BOTH;

          /* LATER or we can recur on these to order seps at this level */
          /* do we need full set of separators for this? */
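
          /* Note (added for clarity): fo is filled from the back (*--iptr,
             starting at fo+n), so the coarsest separator lands at the highest
             indices and the level-0 interior dofs, marked after these loops,
             at the lowest; xyt_generate() then fires fo[] front to back,
             interior dofs first and the top-level separator last. */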

          /* fold rhs hc into lower */
          if (id>=mask) {id-=mask;}
        }
    }

  /* level 0 is on processor case - so mark the remainder */
  for (ct=i=0;i<n;i++)
    {
      if (!used[i])
        {
          ct++; nfo++;
          *--iptr = local2global[i];
          used[i] = edge;
        }
    }
  if (ct>1) {ivec_sort(iptr,ct);}
  lnsep[edge]=ct;
  nsep [edge]=ct;
  dir  [edge]=BOTH;

  xyt_handle->info->nsep =nsep;
  xyt_handle->info->lnsep=lnsep;
  xyt_handle->info->fo   =fo;
  xyt_handle->info->nfo  =nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(0);
}

/**************************************xyt.c***********************************/
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data)
{
  mv_info *mvi;

  mvi = (mv_info*)malloc(sizeof(mv_info));
  mvi->n=n;
  mvi->m=m;
  mvi->n_global=-1;
  mvi->m_global=-1;
  mvi->local2global=(PetscInt*)malloc((m+1)*sizeof(PetscInt));
  ivec_copy(mvi->local2global,local2global,m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec=(PetscErrorCode (*)(mv_info*,PetscScalar*,PetscScalar*))matvec;
  mvi->grid_data=grid_data;

  /* set xyt communication handle to perform restricted matvec */
  mvi->gs_handle = gs_init(local2global, m, num_nodes);

  return(mvi);
}

/**************************************xyt.c***********************************/
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
  A->matvec((mv_info*)A->grid_data,v,u);
  PetscFunctionReturn(0);
}